/* gcc/fold-const.c -- from the GCC repository.
   This revision's change: fold_comparison folds comparisons like
   (x * 1000 < 0) to (x < 0).  */
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
3 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30 /* The entry points in this file are fold, size_int_wide, size_binop
31 and force_fit_type.
32
33 fold takes a tree as argument and returns a simplified tree.
34
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
38
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'.
41
42 force_fit_type takes a constant, an overflowable flag and prior
43 overflow indicators. It forces the value to fit the type and sets
44 TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.
45
46 Note: Since the folders get called on non-gimple code as well as
47 gimple code, we need to handle GIMPLE tuples as well as their
48 corresponding tree equivalents. */
49
50 #include "config.h"
51 #include "system.h"
52 #include "coretypes.h"
53 #include "tm.h"
54 #include "flags.h"
55 #include "tree.h"
56 #include "real.h"
57 #include "rtl.h"
58 #include "expr.h"
59 #include "tm_p.h"
60 #include "toplev.h"
61 #include "ggc.h"
62 #include "hashtab.h"
63 #include "langhooks.h"
64 #include "md5.h"
65
66 /* Non-zero if we are folding constants inside an initializer; zero
67 otherwise. */
68 int folding_initializer = 0;
69
70 /* The following constants represent a bit based encoding of GCC's
71 comparison operators. This encoding simplifies transformations
72 on relational comparison operators, such as AND and OR. */
enum comparison_code {
  COMPCODE_FALSE = 0,	/* No bit set: comparison never holds.  */
  COMPCODE_LT = 1,	/* Bit 0: "less than" holds.  */
  COMPCODE_EQ = 2,	/* Bit 1: "equal" holds.  */
  COMPCODE_LE = 3,	/* LT | EQ.  */
  COMPCODE_GT = 4,	/* Bit 2: "greater than" holds.  */
  COMPCODE_LTGT = 5,	/* LT | GT: ordered and unequal.  */
  COMPCODE_GE = 6,	/* GT | EQ.  */
  COMPCODE_ORD = 7,	/* LT | EQ | GT: any ordered relation.  */
  COMPCODE_UNORD = 8,	/* Bit 3: operands are unordered (NaN).  */
  COMPCODE_UNLT = 9,	/* UNORD | LT.  */
  COMPCODE_UNEQ = 10,	/* UNORD | EQ.  */
  COMPCODE_UNLE = 11,	/* UNORD | LT | EQ.  */
  COMPCODE_UNGT = 12,	/* UNORD | GT.  */
  COMPCODE_NE = 13,	/* UNORD | LT | GT: not equal.  */
  COMPCODE_UNGE = 14,	/* UNORD | GT | EQ.  */
  COMPCODE_TRUE = 15	/* All bits set: comparison always holds.  */
};
91
92 static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
93 static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
94 static bool negate_mathfn_p (enum built_in_function);
95 static bool negate_expr_p (tree);
96 static tree negate_expr (tree);
97 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
98 static tree associate_trees (tree, tree, enum tree_code, tree);
99 static tree const_binop (enum tree_code, tree, tree, int);
100 static enum comparison_code comparison_to_compcode (enum tree_code);
101 static enum tree_code compcode_to_comparison (enum comparison_code);
102 static tree combine_comparisons (enum tree_code, enum tree_code,
103 enum tree_code, tree, tree, tree);
104 static int truth_value_p (enum tree_code);
105 static int operand_equal_for_comparison_p (tree, tree, tree);
106 static int twoval_comparison_p (tree, tree *, tree *, int *);
107 static tree eval_subst (tree, tree, tree, tree, tree);
108 static tree pedantic_omit_one_operand (tree, tree, tree);
109 static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
110 static tree make_bit_field_ref (tree, tree, int, int, int);
111 static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
112 static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
113 enum machine_mode *, int *, int *,
114 tree *, tree *);
115 static int all_ones_mask_p (tree, int);
116 static tree sign_bit_p (tree, tree);
117 static int simple_operand_p (tree);
118 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
119 static tree range_predecessor (tree);
120 static tree range_successor (tree);
121 static tree make_range (tree, int *, tree *, tree *);
122 static tree build_range_check (tree, tree, int, tree, tree);
123 static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
124 tree);
125 static tree fold_range_test (enum tree_code, tree, tree, tree);
126 static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
127 static tree unextend (tree, int, int, tree);
128 static tree fold_truthop (enum tree_code, tree, tree, tree);
129 static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
130 static tree extract_muldiv (tree, tree, enum tree_code, tree);
131 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
132 static int multiple_of_p (tree, tree, tree);
133 static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
134 tree, tree,
135 tree, tree, int);
136 static bool fold_real_zero_addition_p (tree, tree, int);
137 static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
138 tree, tree, tree);
139 static tree fold_inf_compare (enum tree_code, tree, tree, tree);
140 static tree fold_div_compare (enum tree_code, tree, tree, tree);
141 static bool reorder_operands_p (tree, tree);
142 static tree fold_negate_const (tree, tree);
143 static tree fold_not_const (tree, tree);
144 static tree fold_relational_const (enum tree_code, tree, tree, tree);
145 static int native_encode_expr (tree, unsigned char *, int);
146 static tree native_interpret_expr (tree, unsigned char *, int);
147
148
/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  Note each argument may be evaluated more than once.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
158 \f
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

/* The least significant half of X.  */
#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
/* The most significant half of X, shifted down to the low bits.  */
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
/* The weight of one HIGHPART digit: 2**(HOST_BITS_PER_WIDE_INT / 2).  */
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)
169
170 /* Unpack a two-word integer into 4 words.
171 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
172 WORDS points to the array of HOST_WIDE_INTs. */
173
174 static void
175 encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
176 {
177 words[0] = LOWPART (low);
178 words[1] = HIGHPART (low);
179 words[2] = LOWPART (hi);
180 words[3] = HIGHPART (hi);
181 }
182
183 /* Pack an array of 4 words into a two-word integer.
184 WORDS points to the array of words.
185 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
186
187 static void
188 decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
189 HOST_WIDE_INT *hi)
190 {
191 *low = words[0] + words[1] * BASE;
192 *hi = words[2] + words[3] * BASE;
193 }
194 \f
195 /* Force the double-word integer L1, H1 to be within the range of the
196 integer type TYPE. Stores the properly truncated and sign-extended
197 double-word integer in *LV, *HV. Returns true if the operation
198 overflows, that is, argument and result are different. */
199
int
fit_double_type (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, tree type)
{
  /* Keep the incoming value so we can tell at the end whether
     truncation/extension changed it, i.e. whether it overflowed TYPE.  */
  unsigned HOST_WIDE_INT low0 = l1;
  HOST_WIDE_INT high0 = h1;
  unsigned int prec;
  int sign_extended_type;

  /* Pointer and offset types use the pointer width, not TYPE_PRECISION.  */
  if (POINTER_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (type);

  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (type)
			|| (TREE_CODE (type) == INTEGER_TYPE
			    && TYPE_IS_SIZETYPE (type)));

  /* First clear all bits that are beyond the type's precision.  */
  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    h1 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      /* Value fits entirely in the low word.  */
      h1 = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
	l1 &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  /* Then do sign extension if necessary.  */
  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (h1 & ((unsigned HOST_WIDE_INT)1
		<< (prec - HOST_BITS_PER_WIDE_INT - 1)))
	h1 |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      /* Value exactly fills the low word: propagate its sign bit
	 into the whole high word.  */
      if ((HOST_WIDE_INT)l1 < 0)
	h1 = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (l1 & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
	{
	  h1 = -1;
	  l1 |= (HOST_WIDE_INT)(-1) << prec;
	}
    }

  *lv = l1;
  *hv = h1;

  /* If the value didn't fit, signal overflow.  */
  return l1 != low0 || h1 != high0;
}
265
266 /* We force the double-int HIGH:LOW to the range of the type TYPE by
267 sign or zero extending it.
268 OVERFLOWABLE indicates if we are interested
269 in overflow of the value, when >0 we are only interested in signed
270 overflow, for <0 we are interested in any overflow. OVERFLOWED
271 indicates whether overflow has already occurred. CONST_OVERFLOWED
272 indicates whether constant overflow has already occurred. We force
273 T's value to be within range of T's type (by setting to 0 or 1 all
274 the bits outside the type's range). We set TREE_OVERFLOWED if,
275 OVERFLOWED is nonzero,
276 or OVERFLOWABLE is >0 and signed overflow occurs
277 or OVERFLOWABLE is <0 and any overflow occurs
278 We set TREE_CONSTANT_OVERFLOWED if,
279 CONST_OVERFLOWED is nonzero
280 or we set TREE_OVERFLOWED.
281 We return a new tree node for the extended double-int. The node
282 is shared if no overflow flags are set. */
283
284 tree
285 force_fit_type_double (tree type, unsigned HOST_WIDE_INT low,
286 HOST_WIDE_INT high, int overflowable,
287 bool overflowed, bool overflowed_const)
288 {
289 int sign_extended_type;
290 bool overflow;
291
292 /* Size types *are* sign extended. */
293 sign_extended_type = (!TYPE_UNSIGNED (type)
294 || (TREE_CODE (type) == INTEGER_TYPE
295 && TYPE_IS_SIZETYPE (type)));
296
297 overflow = fit_double_type (low, high, &low, &high, type);
298
299 /* If we need to set overflow flags, return a new unshared node. */
300 if (overflowed || overflowed_const || overflow)
301 {
302 if (overflowed
303 || overflowable < 0
304 || (overflowable > 0 && sign_extended_type))
305 {
306 tree t = make_node (INTEGER_CST);
307 TREE_INT_CST_LOW (t) = low;
308 TREE_INT_CST_HIGH (t) = high;
309 TREE_TYPE (t) = type;
310 TREE_OVERFLOW (t) = 1;
311 TREE_CONSTANT_OVERFLOW (t) = 1;
312
313 return t;
314 }
315 else if (overflowed_const)
316 {
317 tree t = make_node (INTEGER_CST);
318 TREE_INT_CST_LOW (t) = low;
319 TREE_INT_CST_HIGH (t) = high;
320 TREE_TYPE (t) = type;
321 TREE_CONSTANT_OVERFLOW (t) = 1;
322
323 return t;
324 }
325 }
326
327 /* Else build a shared node. */
328 return build_int_cst_wide (type, low, high);
329 }
330 \f
331 /* Add two doubleword integers with doubleword result.
332 Return nonzero if the operation overflows according to UNSIGNED_P.
333 Each argument is given as two `HOST_WIDE_INT' pieces.
334 One argument is L1 and H1; the other, L2 and H2.
335 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
336
337 int
338 add_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
339 unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
340 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
341 bool unsigned_p)
342 {
343 unsigned HOST_WIDE_INT l;
344 HOST_WIDE_INT h;
345
346 l = l1 + l2;
347 h = h1 + h2 + (l < l1);
348
349 *lv = l;
350 *hv = h;
351
352 if (unsigned_p)
353 return (unsigned HOST_WIDE_INT) h < (unsigned HOST_WIDE_INT) h1;
354 else
355 return OVERFLOW_SUM_SIGN (h1, h2, h);
356 }
357
358 /* Negate a doubleword integer with doubleword result.
359 Return nonzero if the operation overflows, assuming it's signed.
360 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
361 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
362
363 int
364 neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
365 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
366 {
367 if (l1 == 0)
368 {
369 *lv = 0;
370 *hv = - h1;
371 return (*hv & h1) < 0;
372 }
373 else
374 {
375 *lv = -l1;
376 *hv = ~h1;
377 return 0;
378 }
379 }
380 \f
381 /* Multiply two doubleword integers with doubleword result.
382 Return nonzero if the operation overflows according to UNSIGNED_P.
383 Each argument is given as two `HOST_WIDE_INT' pieces.
384 One argument is L1 and H1; the other, L2 and H2.
385 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
386
int
mul_double_with_sign (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		      unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
		      unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
		      bool unsigned_p)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];	/* Full quadword product.  */
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  /* Split each operand into 4 half-word digits (see encode).  */
  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  /* Schoolbook multiplication on the half-word digits, accumulating
     into PROD with explicit carry propagation.  */
  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
	{
	  k = i + j;
	  /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
	  carry += arg1[i] * arg2[j];
	  /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
	  carry += prod[k];
	  prod[k] = LOWPART (carry);
	  carry = HIGHPART (carry);
	}
      prod[i + 4] = carry;
    }

  /* Low doubleword of PROD is the result; the high doubleword is kept
     only for the overflow checks below.  */
  decode (prod, lv, hv);
  decode (prod + 4, &toplow, &tophigh);

  /* Unsigned overflow is immediate.  */
  if (unsigned_p)
    return (toplow | tophigh) != 0;

  /* Check for signed overflow by calculating the signed representation of the
     top half of the result; it should agree with the low half's sign bit.
     Each negative operand contributes a correction of minus the other
     operand to the (unsigned) top half.  */
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  /* No overflow iff the top half is all copies of the result's sign bit.  */
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
443 \f
444 /* Shift the doubleword integer in L1, H1 left by COUNT places
445 keeping only PREC bits of result.
446 Shift right if COUNT is negative.
447 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
448 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
449
void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  /* A negative count means shift right instead.  */
  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      /* The whole low word moves into the high word.  */
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      /* The double shift of L1 (by count-1 then 1) avoids an undefined
	 shift by HOST_BITS_PER_WIDE_INT when COUNT is 0.  */
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
	     | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  /* SIGNMASK is all-ones if the result's sign bit (bit PREC-1) is set,
     all-zeros otherwise.  */
  signmask = -((prec > HOST_BITS_PER_WIDE_INT
		? ((unsigned HOST_WIDE_INT) *hv
		   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
		: (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
506
507 /* Shift the doubleword integer in L1, H1 right by COUNT places
508 keeping only PREC bits of result. COUNT must be positive.
509 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
510 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
511
void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
	       HOST_WIDE_INT count, unsigned int prec,
	       unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
	       int arith)
{
  unsigned HOST_WIDE_INT signmask;

  /* SIGNMASK is all-ones for an arithmetic shift of a negative value,
     all-zeros otherwise; it supplies the bits shifted in at the top.  */
  signmask = (arith
	      ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
	      : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
	 ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      /* The whole high word moves into the low word.  */
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      /* The double shift of H1 (by complement-1 then 1) avoids an
	 undefined shift by HOST_BITS_PER_WIDE_INT when COUNT is 0.  */
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
	     | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      /* Every value bit was shifted out; only the fill remains.  */
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
567 \f
568 /* Rotate the doubleword integer in L1, H1 left by COUNT places
569 keeping only PREC bits of result.
570 Rotate right if COUNT is negative.
571 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
572
573 void
574 lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
575 HOST_WIDE_INT count, unsigned int prec,
576 unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
577 {
578 unsigned HOST_WIDE_INT s1l, s2l;
579 HOST_WIDE_INT s1h, s2h;
580
581 count %= prec;
582 if (count < 0)
583 count += prec;
584
585 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
586 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
587 *lv = s1l | s2l;
588 *hv = s1h | s2h;
589 }
590
/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  Rotate left if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
		HOST_WIDE_INT count, unsigned int prec,
		unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  /* Normalize COUNT into [0, PREC).  */
  count %= prec;
  if (count < 0)
    count += prec;

  /* The rotation is the right-shifted value OR'ed with the bits that
     fell off the bottom, recovered by the complementary left shift.  */
  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
612 \f
613 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
614 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
615 CODE is a tree code for a kind of division, one of
616 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
617 or EXACT_DIV_EXPR
618 It controls how the quotient is rounded to an integer.
619 Return nonzero if the operation overflows.
620 UNS nonzero says do unsigned division. */
621
int
div_and_round_double (enum tree_code code, int uns,
		      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
		      HOST_WIDE_INT hnum_orig,
		      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
		      HOST_WIDE_INT hden_orig,
		      unsigned HOST_WIDE_INT *lquo,
		      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
		      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  /* Division by zero: flag overflow and divide by 1 instead so the
     rest of the routine can proceed.  */
  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  /* (minimum integer) / (-1) is the only overflow case.  */
	  if (neg_double (lnum, hnum, &lnum, &hnum)
	      && ((HOST_WIDE_INT) lden & hden) == -1)
	    overflow = 1;
	}
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  Simple short division, one digit
	 at a time, carrying the remainder down.  */
      for (i = 4 - 1; i >= 0; i--)
	{
	  work = num[i] + carry * BASE;
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else
    {
      /* Full double precision division,
	 with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
	if (den[i] != 0)
	  {
	    den_hi_sig = i;
	    break;
	  }

      /* Insure that the first digit of the divisor is at least BASE/2.
	 This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
	{			/* scale divisor and dividend */
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (num[i] * scale) + carry;
	      num[i] = LOWPART (work);
	      carry = HIGHPART (work);
	    }

	  num[4] = carry;
	  carry = 0;
	  for (i = 0; i <= 4 - 1; i++)
	    {
	      work = (den[i] * scale) + carry;
	      den[i] = LOWPART (work);
	      carry = HIGHPART (work);
	      if (den[i] != 0) den_hi_sig = i;
	    }
	}

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
	{
	  /* Guess the next quotient digit, quo_est, by dividing the first
	     two remaining dividend digits by the high order quotient digit.
	     quo_est is never low and is at most 2 high.  */
	  unsigned HOST_WIDE_INT tmp;

	  num_hi_sig = i + den_hi_sig + 1;
	  work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
	  if (num[num_hi_sig] != den[den_hi_sig])
	    quo_est = work / den[den_hi_sig];
	  else
	    quo_est = BASE - 1;

	  /* Refine quo_est so it's usually correct, and at most one high.  */
	  tmp = work - quo_est * den[den_hi_sig];
	  if (tmp < BASE
	      && (den[den_hi_sig - 1] * quo_est
		  > (tmp * BASE + num[num_hi_sig - 2])))
	    quo_est--;

	  /* Try QUO_EST as the quotient digit, by multiplying the
	     divisor by QUO_EST and subtracting from the remaining dividend.
	     Keep in mind that QUO_EST is the I - 1st digit.  */

	  carry = 0;
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = quo_est * den[j] + carry;
	      carry = HIGHPART (work);
	      work = num[i + j] - LOWPART (work);
	      num[i + j] = LOWPART (work);
	      carry += HIGHPART (work) != 0;
	    }

	  /* If quo_est was high by one, then num[i] went negative and
	     we need to correct things.  */
	  if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
	    {
	      quo_est--;
	      carry = 0;		/* add divisor back in */
	      for (j = 0; j <= den_hi_sig; j++)
		{
		  work = num[i + j] + den[j] + carry;
		  carry = HIGHPART (work);
		  num[i + j] = LOWPART (work);
		}

	      num [num_hi_sig] += carry;
	    }

	  /* Store the quotient digit.  */
	  quo[i] = quo_est;
	}
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  /* Adjust the truncating quotient computed above to the requested
     rounding mode, then recompute the remainder to match.  */
  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1;  */
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT)  -1,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
		      lquo, hquo);
	}
      else
	return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	unsigned HOST_WIDE_INT labs_rem = *lrem;
	HOST_WIDE_INT habs_rem = *hrem;
	unsigned HOST_WIDE_INT labs_den = lden, ltwice;
	HOST_WIDE_INT habs_den = hden, htwice;

	/* Get absolute values.  */
	if (*hrem < 0)
	  neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0)
	  neg_double (lden, hden, &labs_den, &habs_den);

	/* If (2 * abs (lrem) >= abs (lden)) */
	mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
		    labs_rem, habs_rem, &ltwice, &htwice);

	if (((unsigned HOST_WIDE_INT) habs_den
	     < (unsigned HOST_WIDE_INT) htwice)
	    || (((unsigned HOST_WIDE_INT) habs_den
		 == (unsigned HOST_WIDE_INT) htwice)
		&& (labs_den < ltwice)))
	  {
	    if (*hquo < 0)
	      /* quo = quo - 1; */
	      add_double (*lquo, *hquo,
			  (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
	    else
	      /* quo = quo + 1; */
	      add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
			  lquo, hquo);
	  }
	else
	  return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
890
891 /* If ARG2 divides ARG1 with zero remainder, carries out the division
892 of type CODE and returns the quotient.
893 Otherwise returns NULL_TREE. */
894
895 static tree
896 div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
897 {
898 unsigned HOST_WIDE_INT int1l, int2l;
899 HOST_WIDE_INT int1h, int2h;
900 unsigned HOST_WIDE_INT quol, reml;
901 HOST_WIDE_INT quoh, remh;
902 tree type = TREE_TYPE (arg1);
903 int uns = TYPE_UNSIGNED (type);
904
905 int1l = TREE_INT_CST_LOW (arg1);
906 int1h = TREE_INT_CST_HIGH (arg1);
907 int2l = TREE_INT_CST_LOW (arg2);
908 int2h = TREE_INT_CST_HIGH (arg2);
909
910 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
911 &quol, &quoh, &reml, &remh);
912 if (remh != 0 || reml != 0)
913 return NULL_TREE;
914
915 return build_int_cst_wide (type, quol, quoh);
916 }
917 \f
918 /* Return true if the built-in mathematical function specified by CODE
919 is odd, i.e. -f(x) == f(-x). */
920
static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    /* These are odd functions: f(-x) == -f(x) always.  */
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    /* The rint family is treated as odd only when the dynamic rounding
       mode may be ignored (-fno-rounding-math).  */
    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}
953
954 /* Check whether we may negate an integer constant T without causing
955 overflow. */
956
bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  /* Negating an unsigned constant is never safe here.  */
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* If any low-word bit is set, the value cannot be the most
	 negative number (whose low word is all zero), so negation
	 is safe; otherwise continue the check on the high word.  */
      if (TREE_INT_CST_LOW (t) != 0)
	return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  /* Mask off bits beyond the type's precision.  */
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  /* Only the most negative value (sign bit alone set) overflows when
     negated.  */
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
984
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      /* Unsigned negation always wraps; with -fwrapv (and no -ftrapv)
	 signed wraparound is defined, so negation is always safe.  */
      if (TYPE_UNSIGNED (type)
	  || (flag_wrapv && ! flag_trapv))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      /* -~A is A + 1, which is only safe when overflow is defined
	 (unsigned, or wrapping signed without trapping).  */
      return INTEGRAL_TYPE_P (type)
	     && (TYPE_UNSIGNED (type)
		 || (flag_wrapv && !flag_trapv));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      /* A complex constant negates part-wise.  */
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      /* Rewriting a sum changes rounding/zero-sign behavior, so bail
	 when those must be honored for this FP mode.  */
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	     && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      /* A product/quotient negates by negating either factor.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* Integer division: only when signed and not wrapping, mirroring
	 the transformation done in fold_negate_expr.  */
      if (TYPE_UNSIGNED (TREE_TYPE (t)) || flag_wrapv)
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
	     || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x) for odd math functions.  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  /* Only when the shift count is exactly precision - 1, i.e.
	     the shift extracts the sign bit.  */
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
1095
/* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
   simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (t, 0),
                            build_int_cst (type, 1));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      /* Keep the negated constant unless it overflowed and -ftrapv
	 requires the overflow to trap at run time.  */
      if (! TREE_OVERFLOW (tem)
	  || TYPE_UNSIGNED (type)
	  || ! flag_trapv)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
	return tem;
      break;

    case COMPLEX_CST:
      {
	/* Negate a complex constant part-wise; only fold if both
	   parts folded back to constants.  */
	tree rpart = negate_expr (TREE_REALPART (t));
	tree ipart = negate_expr (TREE_IMAGPART (t));

	if ((TREE_CODE (rpart) == REAL_CST
	     && TREE_CODE (ipart) == REAL_CST)
	    || (TREE_CODE (rpart) == INTEGER_CST
		&& TREE_CODE (ipart) == INTEGER_CST))
	  return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      /* --A is just A.  */
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2 (MINUS_EXPR, type,
				  tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
	  && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2 (MINUS_EXPR, type,
			    TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      /* Negate a product/quotient by negating whichever operand can
	 itself be negated cheaply (second operand preferred).  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2 (TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* Same as above for signed non-wrapping integer division;
	 guarded to match negate_expr_p.  */
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
				TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2 (TREE_CODE (t), type,
				negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return negate_expr (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
	{
	  tree fndecl, arg, arglist;

	  /* Rebuild the call with the single argument negated.  */
	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
	  arglist = build_tree_list (NULL_TREE, arg);
	  return build_function_call_expr (fndecl, arglist);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  /* Only when the count is precision - 1, i.e. a sign-bit
	     extraction; flip the signedness of the shifted operand.  */
	  if (TREE_INT_CST_HIGH (op1) == 0
	      && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
		 == TREE_INT_CST_LOW (op1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? lang_hooks.types.signed_type (type)
			   : lang_hooks.types.unsigned_type (type);
	      tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert (type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}
1267
1268 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
1269 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
1270 return NULL_TREE. */
1271
1272 static tree
1273 negate_expr (tree t)
1274 {
1275 tree type, tem;
1276
1277 if (t == NULL_TREE)
1278 return NULL_TREE;
1279
1280 type = TREE_TYPE (t);
1281 STRIP_SIGN_NOPS (t);
1282
1283 tem = fold_negate_expr (t);
1284 if (!tem)
1285 tem = build1 (NEGATE_EXPR, TREE_TYPE (t), t);
1286 return fold_convert (type, tem);
1287 }
1288 \f
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || (! FLOAT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      /* For a MINUS_EXPR, the second operand is implicitly negated;
	 track that so the part it lands in gets negated below.  */
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      /* Negating the whole of IN: a literal just swaps between *LITP
	 and *MINUS_LITP; the other parts are negated explicitly.  */
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
1383
1384 /* Re-associate trees split by the above function. T1 and T2 are either
1385 expressions to associate or null. Return the new expression, if any. If
1386 we build an operation, do it in TYPE and with CODE. */
1387
1388 static tree
1389 associate_trees (tree t1, tree t2, enum tree_code code, tree type)
1390 {
1391 if (t1 == 0)
1392 return t2;
1393 else if (t2 == 0)
1394 return t1;
1395
1396 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
1397 try to fold this since we will have infinite recursion. But do
1398 deal with any NEGATE_EXPRs. */
1399 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
1400 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
1401 {
1402 if (code == PLUS_EXPR)
1403 {
1404 if (TREE_CODE (t1) == NEGATE_EXPR)
1405 return build2 (MINUS_EXPR, type, fold_convert (type, t2),
1406 fold_convert (type, TREE_OPERAND (t1, 0)));
1407 else if (TREE_CODE (t2) == NEGATE_EXPR)
1408 return build2 (MINUS_EXPR, type, fold_convert (type, t1),
1409 fold_convert (type, TREE_OPERAND (t2, 0)));
1410 else if (integer_zerop (t2))
1411 return fold_convert (type, t1);
1412 }
1413 else if (code == MINUS_EXPR)
1414 {
1415 if (integer_zerop (t2))
1416 return fold_convert (type, t1);
1417 }
1418
1419 return build2 (code, type, fold_convert (type, t1),
1420 fold_convert (type, t2));
1421 }
1422
1423 return fold_build2 (code, type, fold_convert (type, t1),
1424 fold_convert (type, t2));
1425 }
1426 \f
1427 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
1428 for use in int_const_binop, size_binop and size_diffop. */
1429
1430 static bool
1431 int_binop_types_match_p (enum tree_code code, tree type1, tree type2)
1432 {
1433 if (TREE_CODE (type1) != INTEGER_TYPE && !POINTER_TYPE_P (type1))
1434 return false;
1435 if (TREE_CODE (type2) != INTEGER_TYPE && !POINTER_TYPE_P (type2))
1436 return false;
1437
1438 switch (code)
1439 {
1440 case LSHIFT_EXPR:
1441 case RSHIFT_EXPR:
1442 case LROTATE_EXPR:
1443 case RROTATE_EXPR:
1444 return true;
1445
1446 default:
1447 break;
1448 }
1449
1450 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
1451 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
1452 && TYPE_MODE (type1) == TYPE_MODE (type2);
1453 }
1454
1455
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Each constant is represented as a (high, low) pair of
     HOST_WIDE_INTs; the double-int helpers below operate on them.  */
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      /* A right shift is a left shift by a negated count.  */
      int2l = -int2l;
      /* Fall through.  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
	 The following code ignores overflow; perhaps a C standard
	 interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      /* Likewise: rotate right is rotate left by a negated count.  */
      int2l = - int2l;
      /* Fall through.  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
		      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      /* Subtraction is addition of the negated second operand.  */
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    int1l += int2l - 1;

	  low = int1l / int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      /* Division by zero cannot be folded.  */
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      if (int2h == 0 && int2l == 1)
	{
	  low = int1l, hi = int1h;
	  break;
	}
      if (int1l == int2l && int1h == int2h
	  && ! (int1l == 0 && int1h == 0))
	{
	  /* X / X is 1 for nonzero X.  */
	  low = 1, hi = 0;
	  break;
	}
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
	  && ! TREE_CONSTANT_OVERFLOW (arg1)
	  && ! TREE_CONSTANT_OVERFLOW (arg2)
	  && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
	{
	  if (code == CEIL_MOD_EXPR)
	    int1l += int2l - 1;
	  low = int1l % int2l, hi = 0;
	  break;
	}

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      /* Modulo by zero cannot be folded.  */
      if (int2h == 0 && int2l == 0)
	return NULL_TREE;
      /* The quotient goes into the garbage outputs; we only keep the
	 remainder.  */
      overflow = div_and_round_double (code, uns,
				       int1l, int1h, int2l, int2h,
				       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      /* LOW temporarily holds the result of "ARG1 < ARG2" under the
	 appropriate signedness; then it selects which operand wins.  */
      if (uns)
	low = (((unsigned HOST_WIDE_INT) int1h
		< (unsigned HOST_WIDE_INT) int2h)
	       || (((unsigned HOST_WIDE_INT) int1h
		    == (unsigned HOST_WIDE_INT) int2h)
		   && int1l < int2l));
      else
	low = (int1h < int2h
	       || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
	low = int1l, hi = int1h;
      else
	low = int2l, hi = int2h;
      break;

    default:
      return NULL_TREE;
    }

  if (notrunc)
    {
      t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
	  | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_OVERFLOW (t) = 1;
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
	{
	  t = copy_node (t);
	  TREE_CONSTANT_OVERFLOW (t) = 1;
	}
    }
  else
    t = force_fit_type_double (TREE_TYPE (arg1), low, hi, 1,
			       ((!uns || is_sizetype) && overflow)
			       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
			       TREE_CONSTANT_OVERFLOW (arg1)
			       | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
1637
/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* Integer constants are handled by the dedicated routine.  */
  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      /* Compute in full internal precision, then round to MODE.  */
      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may dependent upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
	       && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      /* Propagate the operands' overflow flags onto the result.  */
      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
	= TREE_OVERFLOW (t)
	  | TREE_CONSTANT_OVERFLOW (arg1)
	  | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  /* Addition and subtraction are component-wise.  */
	  real = const_binop (code, r1, r2, notrunc);
	  imag = const_binop (code, i1, i2, notrunc);
	  break;

	case MULT_EXPR:
	  /* (r1 + i1 i)(r2 + i2 i)
	     = (r1 r2 - i1 i2) + (r1 i2 + i1 r2) i.  */
	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2, notrunc),
			      const_binop (MULT_EXPR, i1, i2, notrunc),
			      notrunc);
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2, notrunc),
			      const_binop (MULT_EXPR, i1, r2, notrunc),
			      notrunc);
	  break;

	case RDIV_EXPR:
	  {
	    /* Divide by multiplying with the conjugate and scaling by
	       |r2 + i2 i|^2.  */
	    tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2, notrunc),
			     const_binop (MULT_EXPR, i2, i2, notrunc),
			     notrunc);
	    tree t1
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r1, r2, notrunc),
			     const_binop (MULT_EXPR, i1, i2, notrunc),
			     notrunc);
	    tree t2
	      = const_binop (MINUS_EXPR,
			     const_binop (MULT_EXPR, i1, r2, notrunc),
			     const_binop (MULT_EXPR, r1, i2, notrunc),
			     notrunc);

	    if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
	      code = TRUNC_DIV_EXPR;

	    real = const_binop (code, t1, magsquared, notrunc);
	    imag = const_binop (code, t2, magsquared, notrunc);
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      /* Only fold if every recursive combination succeeded.  */
      if (real && imag)
	return build_complex (type, real, imag);
    }

  return NULL_TREE;
}
1805
1806 /* Create a size type INT_CST node with NUMBER sign extended. KIND
1807 indicates which particular sizetype to create. */
1808
1809 tree
1810 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1811 {
1812 return build_int_cst (sizetype_tab[(int) kind], number);
1813 }
1814 \f
1815 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1816 is a tree code. The type of the result is taken from the operands.
1817 Both must be equivalent integer types, ala int_binop_types_match_p.
1818 If the operands are constant, so is the result. */
1819
1820 tree
1821 size_binop (enum tree_code code, tree arg0, tree arg1)
1822 {
1823 tree type = TREE_TYPE (arg0);
1824
1825 if (arg0 == error_mark_node || arg1 == error_mark_node)
1826 return error_mark_node;
1827
1828 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1829 TREE_TYPE (arg1)));
1830
1831 /* Handle the special case of two integer constants faster. */
1832 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1833 {
1834 /* And some specific cases even faster than that. */
1835 if (code == PLUS_EXPR && integer_zerop (arg0))
1836 return arg1;
1837 else if ((code == MINUS_EXPR || code == PLUS_EXPR)
1838 && integer_zerop (arg1))
1839 return arg0;
1840 else if (code == MULT_EXPR && integer_onep (arg0))
1841 return arg1;
1842
1843 /* Handle general case of two integer constants. */
1844 return int_const_binop (code, arg0, arg1, 0);
1845 }
1846
1847 return fold_build2 (code, type, arg0, arg1);
1848 }
1849
1850 /* Given two values, either both of sizetype or both of bitsizetype,
1851 compute the difference between the two values. Return the value
1852 in signed type corresponding to the type of the operands. */
1853
1854 tree
1855 size_diffop (tree arg0, tree arg1)
1856 {
1857 tree type = TREE_TYPE (arg0);
1858 tree ctype;
1859
1860 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1861 TREE_TYPE (arg1)));
1862
1863 /* If the type is already signed, just do the simple thing. */
1864 if (!TYPE_UNSIGNED (type))
1865 return size_binop (MINUS_EXPR, arg0, arg1);
1866
1867 if (type == sizetype)
1868 ctype = ssizetype;
1869 else if (type == bitsizetype)
1870 ctype = sbitsizetype;
1871 else
1872 ctype = lang_hooks.types.signed_type (type);
1873
1874 /* If either operand is not a constant, do the conversions to the signed
1875 type and subtract. The hardware will do the right thing with any
1876 overflow in the subtraction. */
1877 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1878 return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
1879 fold_convert (ctype, arg1));
1880
1881 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1882 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1883 overflow) and negate (which can't either). Special-case a result
1884 of zero while we're here. */
1885 if (tree_int_cst_equal (arg0, arg1))
1886 return build_int_cst (ctype, 0);
1887 else if (tree_int_cst_lt (arg1, arg0))
1888 return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
1889 else
1890 return size_binop (MINUS_EXPR, build_int_cst (ctype, 0),
1891 fold_convert (ctype, size_binop (MINUS_EXPR,
1892 arg1, arg0)));
1893 }
1894 \f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, TREE_INT_CST_LOW (arg1),
			     TREE_INT_CST_HIGH (arg1),
			     /* Don't set the overflow when
				converting a pointer  */
			     !POINTER_TYPE_P (TREE_TYPE (arg1)),
			     /* Overflow when a negative value is
				converted from a signed type to an
				unsigned one, plus any overflow
				already carried by ARG1.  */
			     (TREE_INT_CST_HIGH (arg1) < 0
			      && (TYPE_UNSIGNED (type)
				  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
			     | TREE_OVERFLOW (arg1),
			     TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}
1918
/* A subroutine of fold_convert_const handling conversions a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      /* Truncate toward zero before the integer conversion.  */
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      /* Below TYPE's minimum: saturate to the minimum.  */
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
	{
	  overflow = 1;
	  high = TREE_INT_CST_HIGH (lt);
	  low = TREE_INT_CST_LOW (lt);
	}
    }

  if (! overflow)
    {
      /* Above TYPE's maximum (when one exists): saturate to it.  */
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
	{
	  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
	  if (REAL_VALUES_LESS (u, r))
	    {
	      overflow = 1;
	      high = TREE_INT_CST_HIGH (ut);
	      low = TREE_INT_CST_LOW (ut);
	    }
	}
    }

  /* In range: the conversion itself is exact.  */
  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = force_fit_type_double (type, low, high, -1,
			     overflow | TREE_OVERFLOW (arg1),
			     TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
1997
1998 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1999 to another floating point type. */
2000
2001 static tree
2002 fold_convert_const_real_from_real (tree type, tree arg1)
2003 {
2004 REAL_VALUE_TYPE value;
2005 tree t;
2006
2007 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
2008 t = build_real (type, value);
2009
2010 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2011 TREE_CONSTANT_OVERFLOW (t)
2012 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
2013 return t;
2014 }
2015
2016 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2017 type TYPE. If no simplification can be done return NULL_TREE. */
2018
2019 static tree
2020 fold_convert_const (enum tree_code code, tree type, tree arg1)
2021 {
2022 if (TREE_TYPE (arg1) == type)
2023 return arg1;
2024
2025 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
2026 {
2027 if (TREE_CODE (arg1) == INTEGER_CST)
2028 return fold_convert_const_int_from_int (type, arg1);
2029 else if (TREE_CODE (arg1) == REAL_CST)
2030 return fold_convert_const_int_from_real (code, type, arg1);
2031 }
2032 else if (TREE_CODE (type) == REAL_TYPE)
2033 {
2034 if (TREE_CODE (arg1) == INTEGER_CST)
2035 return build_real_from_int_cst (type, arg1);
2036 if (TREE_CODE (arg1) == REAL_CST)
2037 return fold_convert_const_real_from_real (type, arg1);
2038 }
2039 return NULL_TREE;
2040 }
2041
2042 /* Construct a vector of zero elements of vector type TYPE. */
2043
2044 static tree
2045 build_zero_vector (tree type)
2046 {
2047 tree elem, list;
2048 int i, units;
2049
2050 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2051 units = TYPE_VECTOR_SUBPARTS (type);
2052
2053 list = NULL_TREE;
2054 for (i = 0; i < units; i++)
2055 list = tree_cons (NULL_TREE, elem, list);
2056 return build_vector (type, list);
2057 }
2058
2059 /* Convert expression ARG to type TYPE. Used by the middle-end for
2060 simple conversions in preference to calling the front-end's convert. */
2061
tree
fold_convert (tree type, tree arg)
{
  tree orig = TREE_TYPE (arg);
  tree tem;

  if (type == orig)
    return arg;

  /* Propagate errors rather than folding around them.  */
  if (TREE_CODE (arg) == ERROR_MARK
      || TREE_CODE (type) == ERROR_MARK
      || TREE_CODE (orig) == ERROR_MARK)
    return error_mark_node;

  /* Types that differ only in qualifiers (or that the front end says
     are compatible) need only a NOP_EXPR wrapper.  */
  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
      || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
					TYPE_MAIN_VARIANT (orig)))
    return fold_build1 (NOP_EXPR, type, arg);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
    case OFFSET_TYPE:
      /* Constant operands fold to a constant when possible.  */
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
	  || TREE_CODE (orig) == OFFSET_TYPE)
	return fold_build1 (NOP_EXPR, type, arg);
      /* complex -> scalar conversion takes the real part, then recurses
	 to convert that part to the target type.  */
      if (TREE_CODE (orig) == COMPLEX_TYPE)
	{
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);
	}
      /* The only remaining legal source is a same-sized vector.  */
      gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
		  && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      return fold_build1 (NOP_EXPR, type, arg);

    case REAL_TYPE:
      /* Try constant folding first; FLOAT_EXPR for int->real,
	 NOP_EXPR for real->real.  */
      if (TREE_CODE (arg) == INTEGER_CST)
	{
	  tem = fold_convert_const (FLOAT_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}
      else if (TREE_CODE (arg) == REAL_CST)
	{
	  tem = fold_convert_const (NOP_EXPR, type, arg);
	  if (tem != NULL_TREE)
	    return tem;
	}

      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	  return fold_build1 (FLOAT_EXPR, type, arg);

	case REAL_TYPE:
	  return fold_build1 (NOP_EXPR, type, arg);

	case COMPLEX_TYPE:
	  /* complex -> real: convert the real part.  */
	  tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	  return fold_convert (type, tem);

	default:
	  gcc_unreachable ();
	}

    case COMPLEX_TYPE:
      switch (TREE_CODE (orig))
	{
	case INTEGER_TYPE:
	case BOOLEAN_TYPE: case ENUMERAL_TYPE:
	case POINTER_TYPE: case REFERENCE_TYPE:
	case REAL_TYPE:
	  /* scalar -> complex: the scalar becomes the real part and the
	     imaginary part is zero.  */
	  return build2 (COMPLEX_EXPR, type,
			 fold_convert (TREE_TYPE (type), arg),
			 fold_convert (TREE_TYPE (type), integer_zero_node));
	case COMPLEX_TYPE:
	  {
	    tree rpart, ipart;

	    /* If ARG is already a COMPLEX_EXPR we can convert the two
	       parts directly without introducing a SAVE_EXPR.  */
	    if (TREE_CODE (arg) == COMPLEX_EXPR)
	      {
		rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
		ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
		return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	      }

	    /* Otherwise ARG is referenced twice (real and imaginary
	       part), so protect it from double evaluation.  */
	    arg = save_expr (arg);
	    rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
	    ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
	    rpart = fold_convert (TREE_TYPE (type), rpart);
	    ipart = fold_convert (TREE_TYPE (type), ipart);
	    return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
	  }

	default:
	  gcc_unreachable ();
	}

    case VECTOR_TYPE:
      if (integer_zerop (arg))
	return build_zero_vector (type);
      /* Non-zero conversions to vectors are only valid as bit-for-bit
	 reinterpretations of a same-sized object.  */
      gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
      gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
		  || TREE_CODE (orig) == VECTOR_TYPE);
      return fold_build1 (VIEW_CONVERT_EXPR, type, arg);

    case VOID_TYPE:
      /* Keep only the side effects of ARG.  */
      tem = fold_ignored_result (arg);
      if (TREE_CODE (tem) == GIMPLE_MODIFY_STMT)
	return tem;
      return fold_build1 (NOP_EXPR, type, tem);

    default:
      gcc_unreachable ();
    }
}
2187 \f
2188 /* Return false if expr can be assumed not to be an lvalue, true
2189 otherwise. */
2190
static bool
maybe_lvalue_p (tree x)
{
  /* We only need to wrap lvalue tree codes.  Anything not listed below
     (and known to the middle end) can be assumed not to be an lvalue.  */
  switch (TREE_CODE (x))
  {
  case VAR_DECL:
  case PARM_DECL:
  case RESULT_DECL:
  case LABEL_DECL:
  case FUNCTION_DECL:
  case SSA_NAME:

  case COMPONENT_REF:
  case INDIRECT_REF:
  case ALIGN_INDIRECT_REF:
  case MISALIGNED_INDIRECT_REF:
  case ARRAY_REF:
  case ARRAY_RANGE_REF:
  case BIT_FIELD_REF:
  case OBJ_TYPE_REF:

  case REALPART_EXPR:
  case IMAGPART_EXPR:
  case PREINCREMENT_EXPR:
  case PREDECREMENT_EXPR:
  case SAVE_EXPR:
  case TRY_CATCH_EXPR:
  case WITH_CLEANUP_EXPR:
  case COMPOUND_EXPR:
  case MODIFY_EXPR:
  case GIMPLE_MODIFY_STMT:
  case TARGET_EXPR:
  case COND_EXPR:
  case BIND_EXPR:
  case MIN_EXPR:
  case MAX_EXPR:
    break;

  default:
    /* Assume the worst for front-end tree codes: codes at or beyond
       NUM_TREE_CODES are language-specific and unknown here, so treat
       them as possible lvalues.  */
    if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
      break;
    return false;
  }

  return true;
}
2239
2240 /* Return an expr equal to X but certainly not valid as an lvalue. */
2241
2242 tree
2243 non_lvalue (tree x)
2244 {
2245 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2246 us. */
2247 if (in_gimple_form)
2248 return x;
2249
2250 if (! maybe_lvalue_p (x))
2251 return x;
2252 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2253 }
2254
/* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
   Zero means allow extended lvalues.  Consulted by
   pedantic_non_lvalue below; set by the front end.  */

int pedantic_lvalues;
2259
2260 /* When pedantic, return an expr equal to X but certainly not valid as a
2261 pedantic lvalue. Otherwise, return X. */
2262
2263 static tree
2264 pedantic_non_lvalue (tree x)
2265 {
2266 if (pedantic_lvalues)
2267 return non_lvalue (x);
2268 else
2269 return x;
2270 }
2271 \f
2272 /* Given a tree comparison code, return the code that is the logical inverse
2273 of the given code. It is not safe to do this for floating-point
2274 comparisons, except for NE_EXPR and EQ_EXPR, so we receive a machine mode
2275 as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2276
enum tree_code
invert_tree_comparison (enum tree_code code, bool honor_nans)
{
  /* With NaNs and trapping math, inverting an ordered comparison into
     an unordered one (or vice versa) changes which operand pairs raise
     an FP exception, so refuse and let the caller keep the original.  */
  if (honor_nans && flag_trapping_math)
    return ERROR_MARK;

  switch (code)
    {
    case EQ_EXPR:
      return NE_EXPR;
    case NE_EXPR:
      return EQ_EXPR;
    /* The ordered inequalities invert to unordered forms when NaNs are
       possible: e.g. !(a > b) is a <= b OR unordered(a, b).  */
    case GT_EXPR:
      return honor_nans ? UNLE_EXPR : LE_EXPR;
    case GE_EXPR:
      return honor_nans ? UNLT_EXPR : LT_EXPR;
    case LT_EXPR:
      return honor_nans ? UNGE_EXPR : GE_EXPR;
    case LE_EXPR:
      return honor_nans ? UNGT_EXPR : GT_EXPR;
    case LTGT_EXPR:
      return UNEQ_EXPR;
    case UNEQ_EXPR:
      return LTGT_EXPR;
    case UNGT_EXPR:
      return LE_EXPR;
    case UNGE_EXPR:
      return LT_EXPR;
    case UNLT_EXPR:
      return GE_EXPR;
    case UNLE_EXPR:
      return GT_EXPR;
    case ORDERED_EXPR:
      return UNORDERED_EXPR;
    case UNORDERED_EXPR:
      return ORDERED_EXPR;
    default:
      gcc_unreachable ();
    }
}
2317
2318 /* Similar, but return the comparison that results if the operands are
2319 swapped. This is safe for floating-point. */
2320
enum tree_code
swap_tree_comparison (enum tree_code code)
{
  switch (code)
    {
    /* Symmetric comparisons are their own swap.  */
    case EQ_EXPR:
    case NE_EXPR:
    case ORDERED_EXPR:
    case UNORDERED_EXPR:
    case LTGT_EXPR:
    case UNEQ_EXPR:
      return code;
    case GT_EXPR:
      return LT_EXPR;
    case GE_EXPR:
      return LE_EXPR;
    case LT_EXPR:
      return GT_EXPR;
    case LE_EXPR:
      return GE_EXPR;
    case UNGT_EXPR:
      return UNLT_EXPR;
    case UNGE_EXPR:
      return UNLE_EXPR;
    case UNLT_EXPR:
      return UNGT_EXPR;
    case UNLE_EXPR:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
2353
2354
2355 /* Convert a comparison tree code from an enum tree_code representation
2356 into a compcode bit-based encoding. This function is the inverse of
2357 compcode_to_comparison. */
2358
static enum comparison_code
comparison_to_compcode (enum tree_code code)
{
  /* One-to-one mapping; any code with no COMPCODE_* equivalent is a
     caller bug.  */
  switch (code)
    {
    case LT_EXPR:
      return COMPCODE_LT;
    case EQ_EXPR:
      return COMPCODE_EQ;
    case LE_EXPR:
      return COMPCODE_LE;
    case GT_EXPR:
      return COMPCODE_GT;
    case NE_EXPR:
      return COMPCODE_NE;
    case GE_EXPR:
      return COMPCODE_GE;
    case ORDERED_EXPR:
      return COMPCODE_ORD;
    case UNORDERED_EXPR:
      return COMPCODE_UNORD;
    case UNLT_EXPR:
      return COMPCODE_UNLT;
    case UNEQ_EXPR:
      return COMPCODE_UNEQ;
    case UNLE_EXPR:
      return COMPCODE_UNLE;
    case UNGT_EXPR:
      return COMPCODE_UNGT;
    case LTGT_EXPR:
      return COMPCODE_LTGT;
    case UNGE_EXPR:
      return COMPCODE_UNGE;
    default:
      gcc_unreachable ();
    }
}
2396
2397 /* Convert a compcode bit-based encoding of a comparison operator back
2398 to GCC's enum tree_code representation. This function is the
2399 inverse of comparison_to_compcode. */
2400
static enum tree_code
compcode_to_comparison (enum comparison_code code)
{
  /* Exact inverse of comparison_to_compcode; compound codes such as
     COMPCODE_TRUE/FALSE have no tree equivalent and must be handled by
     the caller before reaching here.  */
  switch (code)
    {
    case COMPCODE_LT:
      return LT_EXPR;
    case COMPCODE_EQ:
      return EQ_EXPR;
    case COMPCODE_LE:
      return LE_EXPR;
    case COMPCODE_GT:
      return GT_EXPR;
    case COMPCODE_NE:
      return NE_EXPR;
    case COMPCODE_GE:
      return GE_EXPR;
    case COMPCODE_ORD:
      return ORDERED_EXPR;
    case COMPCODE_UNORD:
      return UNORDERED_EXPR;
    case COMPCODE_UNLT:
      return UNLT_EXPR;
    case COMPCODE_UNEQ:
      return UNEQ_EXPR;
    case COMPCODE_UNLE:
      return UNLE_EXPR;
    case COMPCODE_UNGT:
      return UNGT_EXPR;
    case COMPCODE_LTGT:
      return LTGT_EXPR;
    case COMPCODE_UNGE:
      return UNGE_EXPR;
    default:
      gcc_unreachable ();
    }
}
2438
2439 /* Return a tree for the comparison which is the combination of
2440 doing the AND or OR (depending on CODE) of the two operations LCODE
2441 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2442 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2443 if this makes the transformation invalid. */
2444
tree
combine_comparisons (enum tree_code code, enum tree_code lcode,
		     enum tree_code rcode, tree truth_type,
		     tree ll_arg, tree lr_arg)
{
  bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
  enum comparison_code lcompcode = comparison_to_compcode (lcode);
  enum comparison_code rcompcode = comparison_to_compcode (rcode);
  enum comparison_code compcode;

  /* The COMPCODE_* values are a bit encoding of the outcome sets, so
     the conjunction/disjunction of two comparisons over the same
     operands is simply the bitwise AND/OR of their codes.  */
  switch (code)
    {
    case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
      compcode = lcompcode & rcompcode;
      break;

    case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
      compcode = lcompcode | rcompcode;
      break;

    default:
      return NULL_TREE;
    }

  if (!honor_nans)
    {
      /* Eliminate unordered comparisons, as well as LTGT and ORD
	 which are not used unless the mode has NaNs.  */
      compcode &= ~COMPCODE_UNORD;
      if (compcode == COMPCODE_LTGT)
	compcode = COMPCODE_NE;
      else if (compcode == COMPCODE_ORD)
	compcode = COMPCODE_TRUE;
    }
  else if (flag_trapping_math)
    {
      /* Check that the original operation and the optimized ones will trap
	 under the same condition.  A comparison traps on unordered inputs
	 unless it is itself an unordered comparison, EQ, or ORD.  */
      bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
		   && (lcompcode != COMPCODE_EQ)
		   && (lcompcode != COMPCODE_ORD);
      bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
		   && (rcompcode != COMPCODE_EQ)
		   && (rcompcode != COMPCODE_ORD);
      bool trap = (compcode & COMPCODE_UNORD) == 0
		  && (compcode != COMPCODE_EQ)
		  && (compcode != COMPCODE_ORD);

      /* In a short-circuited boolean expression the LHS might be
	 such that the RHS, if evaluated, will never trap.  For
	 example, in ORD (x, y) && (x < y), we evaluate the RHS only
	 if neither x nor y is NaN.  (This is a mixed blessing: for
	 example, the expression above will never trap, hence
	 optimizing it to x < y would be invalid).  */
      if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
	  || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
	rtrap = false;

      /* If the comparison was short-circuited, and only the RHS
	 trapped, we may now generate a spurious trap.  */
      if (rtrap && !ltrap
	  && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
	return NULL_TREE;

      /* If we changed the conditions that cause a trap, we lose.  */
      if ((ltrap || rtrap) != trap)
	return NULL_TREE;
    }

  /* A combined code covering all (or no) outcomes degenerates to a
     constant; otherwise rebuild a single comparison.  */
  if (compcode == COMPCODE_TRUE)
    return constant_boolean_node (true, truth_type);
  else if (compcode == COMPCODE_FALSE)
    return constant_boolean_node (false, truth_type);
  else
    return fold_build2 (compcode_to_comparison (compcode),
			truth_type, ll_arg, lr_arg);
}
2522
2523 /* Return nonzero if CODE is a tree code that represents a truth value. */
2524
2525 static int
2526 truth_value_p (enum tree_code code)
2527 {
2528 return (TREE_CODE_CLASS (code) == tcc_comparison
2529 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2530 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2531 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2532 }
2533 \f
2534 /* Return nonzero if two operands (typically of the same tree node)
2535 are necessarily equal. If either argument has side-effects this
2536 function returns zero. FLAGS modifies behavior as follows:
2537
2538 If OEP_ONLY_CONST is set, only return nonzero for constants.
2539 This function tests whether the operands are indistinguishable;
2540 it does not test whether they are equal using C's == operation.
2541 The distinction is important for IEEE floating point, because
2542 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2543 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2544
2545 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2546 even though it may hold multiple values during a function.
2547 This is because a GCC tree node guarantees that nothing else is
2548 executed between the evaluation of its "operands" (which may often
2549 be evaluated in arbitrary order). Hence if the operands themselves
2550 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2551 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2552 unset means assuming isochronic (or instantaneous) tree equivalence.
2553 Unless comparing arbitrary expression trees, such as from different
2554 statements, this flag can usually be left unset.
2555
2556 If OEP_PURE_SAME is set, then pure functions with identical arguments
2557 are considered the same. It is used when the caller has other ways
2558 to ensure that global memory is unchanged in between. */
2559
int
operand_equal_p (tree arg0, tree arg1, unsigned int flags)
{
  /* If either is ERROR_MARK, they aren't equal.  */
  if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
    return 0;

  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  /* If both types don't have the same precision, then it is not safe
     to strip NOPs.  */
  if (TYPE_PRECISION (TREE_TYPE (arg0)) != TYPE_PRECISION (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* In case both args are comparisons but with different comparison
     code, try to swap the comparison operands of one arg to produce
     a match and compare that variant.  */
  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      && COMPARISON_CLASS_P (arg0)
      && COMPARISON_CLASS_P (arg1))
    {
      enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));

      if (TREE_CODE (arg0) == swap_code)
	return operand_equal_p (TREE_OPERAND (arg0, 0),
				TREE_OPERAND (arg1, 1), flags)
	       && operand_equal_p (TREE_OPERAND (arg0, 1),
				   TREE_OPERAND (arg1, 0), flags);
    }

  if (TREE_CODE (arg0) != TREE_CODE (arg1)
      /* This is needed for conversions and for COMPONENT_REF.
	 Might as well play it safe and always test this.  */
      || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
      || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
      || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  In all other cases, two expressions are
     equal if they have no side effects.  If we have two identical
     expressions with side effects that should be treated the same due
     to the only side effects being identical SAVE_EXPR's, that will
     be detected in the recursive calls below.  */
  if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
      && (TREE_CODE (arg0) == SAVE_EXPR
	  || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
    return 1;

  /* Next handle constant cases, those for which we can return 1 even
     if ONLY_CONST is set.  */
  if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
    switch (TREE_CODE (arg0))
      {
      case INTEGER_CST:
	return tree_int_cst_equal (arg0, arg1);

      case REAL_CST:
	/* Bit-identical REAL_CSTs are equal; this distinguishes -0.0
	   from 0.0, unlike the C == operator.  */
	if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
				   TREE_REAL_CST (arg1)))
	  return 1;


	if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
	  {
	    /* If we do not distinguish between signed and unsigned zero,
	       consider them equal.  */
	    if (real_zerop (arg0) && real_zerop (arg1))
	      return 1;
	  }
	return 0;

      case VECTOR_CST:
	{
	  tree v1, v2;

	  /* Walk the two element lists in lock step; equal only if all
	     pairs match and both lists end together.  */
	  v1 = TREE_VECTOR_CST_ELTS (arg0);
	  v2 = TREE_VECTOR_CST_ELTS (arg1);
	  while (v1 && v2)
	    {
	      if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
				    flags))
		return 0;
	      v1 = TREE_CHAIN (v1);
	      v2 = TREE_CHAIN (v2);
	    }

	  return v1 == v2;
	}

      case COMPLEX_CST:
	return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
				 flags)
		&& operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
				    flags));

      case STRING_CST:
	return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
		&& ! memcmp (TREE_STRING_POINTER (arg0),
			     TREE_STRING_POINTER (arg1),
			     TREE_STRING_LENGTH (arg0)));

      case ADDR_EXPR:
	/* NOTE(review): the recursion deliberately passes 0 rather than
	   FLAGS here — presumably because taking an address is
	   side-effect free regardless of the operand; confirm before
	   changing.  */
	return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
				0);
      default:
	break;
      }

  if (flags & OEP_ONLY_CONST)
    return 0;

/* Define macros to test an operand from arg0 and arg1 for equality and a
   variant that allows null and views null as being different from any
   non-null value.  In the latter case, if either is null, the both
   must be; otherwise, do the normal comparison.  */
#define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N),	\
				    TREE_OPERAND (arg1, N), flags)

#define OP_SAME_WITH_NULL(N)					\
  ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N))		\
   ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))

  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case tcc_unary:
      /* Two conversions are equal only if signedness and modes match.  */
      switch (TREE_CODE (arg0))
	{
	case NOP_EXPR:
	case CONVERT_EXPR:
	case FIX_TRUNC_EXPR:
	  if (TYPE_UNSIGNED (TREE_TYPE (arg0))
	      != TYPE_UNSIGNED (TREE_TYPE (arg1)))
	    return 0;
	  break;
	default:
	  break;
	}

      return OP_SAME (0);


    case tcc_comparison:
    case tcc_binary:
      if (OP_SAME (0) && OP_SAME (1))
	return 1;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (TREE_CODE (arg0))
	      && operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 1), flags)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 0), flags));

    case tcc_reference:
      /* If either of the pointer (or reference) expressions we are
	 dereferencing contain a side effect, these cannot be equal.  */
      if (TREE_SIDE_EFFECTS (arg0)
	  || TREE_SIDE_EFFECTS (arg1))
	return 0;

      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case MISALIGNED_INDIRECT_REF:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  return OP_SAME (0);

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  /* Operands 2 and 3 may be null.  */
	  return (OP_SAME (0)
		  && OP_SAME (1)
		  && OP_SAME_WITH_NULL (2)
		  && OP_SAME_WITH_NULL (3));

	case COMPONENT_REF:
	  /* Handle operand 2 the same as for ARRAY_REF.  Operand 0
	     may be NULL when we're called to compare MEM_EXPRs.  */
	  return OP_SAME_WITH_NULL (0)
		 && OP_SAME (1)
		 && OP_SAME_WITH_NULL (2);

	case BIT_FIELD_REF:
	  return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);

	default:
	  return 0;
	}

    case tcc_expression:
      switch (TREE_CODE (arg0))
	{
	case ADDR_EXPR:
	case TRUTH_NOT_EXPR:
	  return OP_SAME (0);

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	  /* Short-circuit forms are order-sensitive; do not try the
	     swapped operands.  */
	  return OP_SAME (0) && OP_SAME (1);

	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  if (OP_SAME (0) && OP_SAME (1))
	    return 1;

	  /* Otherwise take into account this is a commutative operation.  */
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 1), flags)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 0), flags));

	case CALL_EXPR:
	  /* If the CALL_EXPRs call different functions, then they
	     clearly can not be equal.  */
	  if (!OP_SAME (0))
	    return 0;

	  /* Only const (or, with OEP_PURE_SAME, pure) calls can be
	     considered equal — anything else may have differing
	     side effects.  */
	  {
	    unsigned int cef = call_expr_flags (arg0);
	    if (flags & OEP_PURE_SAME)
	      cef &= ECF_CONST | ECF_PURE;
	    else
	      cef &= ECF_CONST;
	    if (!cef)
	      return 0;
	  }

	  /* Now see if all the arguments are the same.  operand_equal_p
	     does not handle TREE_LIST, so we walk the operands here
	     feeding them to operand_equal_p.  */
	  arg0 = TREE_OPERAND (arg0, 1);
	  arg1 = TREE_OPERAND (arg1, 1);
	  while (arg0 && arg1)
	    {
	      if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
				     flags))
		return 0;

	      arg0 = TREE_CHAIN (arg0);
	      arg1 = TREE_CHAIN (arg1);
	    }

	  /* If we get here and both argument lists are exhausted
	     then the CALL_EXPRs are equal.  */
	  return ! (arg0 || arg1);

	default:
	  return 0;
	}

    case tcc_declaration:
      /* Consider __builtin_sqrt equal to sqrt.  */
      return (TREE_CODE (arg0) == FUNCTION_DECL
	      && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
	      && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
	      && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));

    default:
      return 0;
    }

#undef OP_SAME
#undef OP_SAME_WITH_NULL
}
2837 \f
2838 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2839 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2840
2841 When in doubt, return 0. */
2842
static int
operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
{
  int unsignedp1, unsignedpo;
  tree primarg0, primarg1, primother;
  unsigned int correct_width;

  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  /* shorten_compare only rewrites integral comparisons, so anything
     else cannot match through it.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
      || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
    return 0;

  /* Discard any conversions that don't change the modes of ARG0 and ARG1
     and see if the inner values are the same.  This removes any
     signedness comparison, which doesn't matter here.  */
  primarg0 = arg0, primarg1 = arg1;
  STRIP_NOPS (primarg0);
  STRIP_NOPS (primarg1);
  if (operand_equal_p (primarg0, primarg1, 0))
    return 1;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
			       (unsignedp1, TREE_TYPE (primarg1)), primarg1);

      if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
2893 \f
2894 /* See if ARG is an expression that is either a comparison or is performing
2895 arithmetic on comparisons. The comparisons must only be comparing
2896 two different values, which will be stored in *CVAL1 and *CVAL2; if
2897 they are nonzero it means that some operands have already been found.
2898 No variables may be used anywhere else in the expression except in the
2899 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2900 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2901
2902 If this is true, return 1. Otherwise, return zero. */
2903
static int
twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
{
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here by reclassifying
     them into the unary/binary recursion below.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    class = tcc_binary;

  else if (class == tcc_expression && code == SAVE_EXPR
	   && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
    {
      /* If we've already found a CVAL1 or CVAL2, this expression is
	 too complex to handle.  */
      if (*cval1 || *cval2)
	return 0;

      class = tcc_unary;
      *save_p = 1;
    }

  switch (class)
    {
    case tcc_unary:
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);

    case tcc_binary:
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1),
				      cval1, cval2, save_p));

    case tcc_constant:
      /* Constants never mention variables, so they are always valid.  */
      return 1;

    case tcc_expression:
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0),
				     cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1),
					cval1, cval2, save_p)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2, save_p));
      return 0;

    case tcc_comparison:
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      /* Record operand 0 as CVAL1 or CVAL2 (or accept it if it already
	 matches one of them); otherwise a third value is involved.  */
      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      /* Likewise for operand 1.  */
      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;

    default:
      return 0;
    }
}
2990 \f
2991 /* ARG is a tree that is known to contain just arithmetic operations and
2992 comparisons. Evaluate the operations in the tree substituting NEW0 for
2993 any occurrence of OLD0 as an operand of a comparison and likewise for
2994 NEW1 and OLD1. */
2995
static tree
eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  enum tree_code_class class = TREE_CODE_CLASS (code);

  /* We can handle some of the tcc_expression cases here by mapping them
     onto the unary/binary recursion; this mirrors twoval_comparison_p.  */
  if (class == tcc_expression && code == TRUTH_NOT_EXPR)
    class = tcc_unary;
  else if (class == tcc_expression
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = tcc_binary;

  switch (class)
    {
    case tcc_unary:
      return fold_build1 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1));

    case tcc_binary:
      return fold_build2 (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1));

    case tcc_expression:
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

	case COMPOUND_EXPR:
	  /* Only the value operand matters for evaluation.  */
	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

	case COND_EXPR:
	  return fold_build3 (code, type,
			      eval_subst (TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1));
	default:
	  break;
	}
      /* Fall through - ??? */

    case tcc_comparison:
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold_build2 (code, type, arg0, arg1);
      }

    default:
      /* Anything else is left untouched.  */
      return arg;
    }
}
3072 \f
3073 /* Return a tree for the case when the result of an expression is RESULT
3074 converted to TYPE and OMITTED was previously an operand of the expression
3075 but is now not needed (e.g., we folded OMITTED * 0).
3076
3077 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3078 the conversion of RESULT to TYPE. */
3079
tree
omit_one_operand (tree type, tree result, tree omitted)
{
  tree t = fold_convert (type, result);

  /* OMITTED must still be evaluated for its side effects, so chain it
     before the result with a COMPOUND_EXPR; fold_ignored_result strips
     the parts of OMITTED whose value is not needed.  */
  if (TREE_SIDE_EFFECTS (omitted))
    return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);

  return non_lvalue (t);
}
3090
3091 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3092
3093 static tree
3094 pedantic_omit_one_operand (tree type, tree result, tree omitted)
3095 {
3096 tree t = fold_convert (type, result);
3097
3098 if (TREE_SIDE_EFFECTS (omitted))
3099 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
3100
3101 return pedantic_non_lvalue (t);
3102 }
3103
3104 /* Return a tree for the case when the result of an expression is RESULT
3105 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3106 of the expression but are now not needed.
3107
3108 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3109 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3110 evaluated before OMITTED2. Otherwise, if neither has side effects,
3111 just do the conversion of RESULT to TYPE. */
3112
tree
omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
{
  tree t = fold_convert (type, result);

  /* Chain the omitted operands in front of the result so OMITTED1 is
     evaluated before OMITTED2, which is evaluated before RESULT's value
     is produced.  NOTE(review): unlike omit_one_operand, the omitted
     operands are not passed through fold_ignored_result here —
     presumably intentional, but worth confirming.  */
  if (TREE_SIDE_EFFECTS (omitted2))
    t = build2 (COMPOUND_EXPR, type, omitted2, t);
  if (TREE_SIDE_EFFECTS (omitted1))
    t = build2 (COMPOUND_EXPR, type, omitted1, t);

  /* Only wrap in NON_LVALUE_EXPR when no COMPOUND_EXPR was added.  */
  return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
}
3125
3126 \f
/* Return a simplified tree node for the truth-negation of ARG.  This
   never alters ARG itself.  We assume that ARG is an operation that
   returns a truth value (0 or 1).

   Returns NULL_TREE when no simplification is possible; the caller is
   then responsible for wrapping ARG in a TRUTH_NOT_EXPR itself.

   FIXME: one would think we would fold the result, but it causes
   problems with the dominator optimizer.  */

tree
fold_truth_not_expr (tree arg)
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == tcc_comparison)
    {
      tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
      /* With trapping math, inverting an ordered FP comparison would
	 produce an unordered one (or vice versa) and could change
	 which operand pairs trap, so give up for those codes.  */
      if (FLOAT_TYPE_P (op_type)
	  && flag_trapping_math
	  && code != ORDERED_EXPR && code != UNORDERED_EXPR
	  && code != NE_EXPR && code != EQ_EXPR)
	return NULL_TREE;
      else
	{
	  code = invert_tree_comparison (code,
					 HONOR_NANS (TYPE_MODE (op_type)));
	  if (code == ERROR_MARK)
	    return NULL_TREE;
	  else
	    return build2 (code, type,
			   TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
	}
    }

  switch (code)
    {
    case INTEGER_CST:
      return constant_boolean_node (integer_zerop (arg), type);

    case TRUTH_AND_EXPR:
      /* De Morgan: !(a && b) == !a || !b.  */
      return build2 (TRUTH_OR_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      /* De Morgan: !(a || b) == !a && !b.  */
      return build2 (TRUTH_AND_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_XOR_EXPR:
      /* Here we can invert either operand.  We invert the first operand
	 unless the second operand is a TRUTH_NOT_EXPR in which case our
	 result is the XOR of the first operand with the inside of the
	 negation of the second operand.  */

      if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
	return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
		       TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
      else
	return build2 (TRUTH_XOR_EXPR, type,
		       invert_truthvalue (TREE_OPERAND (arg, 0)),
		       TREE_OPERAND (arg, 1));

    case TRUTH_ANDIF_EXPR:
      /* De Morgan on the short-circuit forms; operand evaluation
	 order is preserved.  */
      return build2 (TRUTH_ORIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build2 (TRUTH_ANDIF_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      /* Double negation cancels.  */
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      {
	tree arg1 = TREE_OPERAND (arg, 1);
	tree arg2 = TREE_OPERAND (arg, 2);
	/* A COND_EXPR may have a throw as one operand, which
	   then has void type.  Just leave void operands
	   as they are.  */
	return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
		       VOID_TYPE_P (TREE_TYPE (arg1))
		       ? arg1 : invert_truthvalue (arg1),
		       VOID_TYPE_P (TREE_TYPE (arg2))
		       ? arg2 : invert_truthvalue (arg2));
      }

    case COMPOUND_EXPR:
      /* !(a, b) -> (a, !b): the first operand is evaluated only for
	 its side effects.  */
      return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
		     invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
      if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
	return build1 (TRUTH_NOT_EXPR, type, arg);
      /* FALLTHRU: a non-boolean NOP_EXPR is handled like any other
	 conversion below.  */

    case CONVERT_EXPR:
    case FLOAT_EXPR:
      /* Push the negation inside the conversion.  */
      return build1 (TREE_CODE (arg), type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      if (!integer_onep (TREE_OPERAND (arg, 1)))
	break;
      /* !(x & 1) -> (x & 1) == 0.  */
      return build2 (EQ_EXPR, type, arg,
		     build_int_cst (type, 0));

    case SAVE_EXPR:
      return build1 (TRUTH_NOT_EXPR, type, arg);

    case CLEANUP_POINT_EXPR:
      return build1 (CLEANUP_POINT_EXPR, type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    default:
      break;
    }

  return NULL_TREE;
}
3255
3256 /* Return a simplified tree node for the truth-negation of ARG. This
3257 never alters ARG itself. We assume that ARG is an operation that
3258 returns a truth value (0 or 1).
3259
3260 FIXME: one would think we would fold the result, but it causes
3261 problems with the dominator optimizer. */
3262
3263 tree
3264 invert_truthvalue (tree arg)
3265 {
3266 tree tem;
3267
3268 if (TREE_CODE (arg) == ERROR_MARK)
3269 return arg;
3270
3271 tem = fold_truth_not_expr (arg);
3272 if (!tem)
3273 tem = build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg), arg);
3274
3275 return tem;
3276 }
3277
3278 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3279 operands are another bit-wise operation with a common input. If so,
3280 distribute the bit operations to save an operation and possibly two if
3281 constants are involved. For example, convert
3282 (A | B) & (A | C) into A | (B & C)
3283 Further simplification will occur if B and C are constants.
3284
3285 If this optimization cannot be done, 0 will be returned. */
3286
3287 static tree
3288 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3289 {
3290 tree common;
3291 tree left, right;
3292
3293 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3294 || TREE_CODE (arg0) == code
3295 || (TREE_CODE (arg0) != BIT_AND_EXPR
3296 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3297 return 0;
3298
3299 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3300 {
3301 common = TREE_OPERAND (arg0, 0);
3302 left = TREE_OPERAND (arg0, 1);
3303 right = TREE_OPERAND (arg1, 1);
3304 }
3305 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3306 {
3307 common = TREE_OPERAND (arg0, 0);
3308 left = TREE_OPERAND (arg0, 1);
3309 right = TREE_OPERAND (arg1, 0);
3310 }
3311 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3312 {
3313 common = TREE_OPERAND (arg0, 1);
3314 left = TREE_OPERAND (arg0, 0);
3315 right = TREE_OPERAND (arg1, 1);
3316 }
3317 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3318 {
3319 common = TREE_OPERAND (arg0, 1);
3320 left = TREE_OPERAND (arg0, 0);
3321 right = TREE_OPERAND (arg1, 0);
3322 }
3323 else
3324 return 0;
3325
3326 return fold_build2 (TREE_CODE (arg0), type, common,
3327 fold_build2 (code, type, left, right));
3328 }
3329
/* ARG0 and ARG1 are each either an RDIV_EXPR or a MULT_EXPR (the code
   below distinguishes the two); simplify the binary operation CODE
   applied to them.  This optimization is unsafe for floating point
   (it can change rounding and exception behavior), so it is presumably
   only called when the relevant fast-math flags allow it — verify at
   the call sites.  Returns the simplified tree, or NULL_TREE if no
   transformation applies.  */
static tree
distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
{
  bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
  bool mul1 = TREE_CODE (arg1) == MULT_EXPR;

  /* (A / C) +- (B / C) -> (A +- B) / C, and likewise the MULT_EXPR
     form (A * C) +- (B * C) -> (A +- B) * C, when both operands use
     the same operation and share the second operand C.  */
  if (mul0 == mul1
      && operand_equal_p (TREE_OPERAND (arg0, 1),
		          TREE_OPERAND (arg1, 1), 0))
    return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
			fold_build2 (code, type,
				     TREE_OPERAND (arg0, 0),
				     TREE_OPERAND (arg1, 0)),
			TREE_OPERAND (arg0, 1));

  /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2).  Both constants
     must be REAL_CSTs so the combined factor can be computed here.  */
  if (operand_equal_p (TREE_OPERAND (arg0, 0),
		       TREE_OPERAND (arg1, 0), 0)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
      && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
    {
      REAL_VALUE_TYPE r0, r1;
      r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
      r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
      /* Turn each division into a multiplication by the reciprocal so
	 both sides have the form A * constant, then combine the two
	 constants with CODE.  */
      if (!mul0)
	real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
      if (!mul1)
	real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
      real_arithmetic (&r0, code, &r0, &r1);
      return fold_build2 (MULT_EXPR, type,
			  TREE_OPERAND (arg0, 0),
			  build_real (type, r0));
    }

  return NULL_TREE;
}
3369 \f
3370 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3371 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3372
3373 static tree
3374 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3375 int unsignedp)
3376 {
3377 tree result;
3378
3379 if (bitpos == 0)
3380 {
3381 tree size = TYPE_SIZE (TREE_TYPE (inner));
3382 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3383 || POINTER_TYPE_P (TREE_TYPE (inner)))
3384 && host_integerp (size, 0)
3385 && tree_low_cst (size, 0) == bitsize)
3386 return fold_convert (type, inner);
3387 }
3388
3389 result = build3 (BIT_FIELD_REF, type, inner,
3390 size_int (bitsize), bitsize_int (bitpos));
3391
3392 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3393
3394 return result;
3395 }
3396
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (enum tree_code code, tree compare_type,
			    tree lhs, tree rhs)
{
  HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, nmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner = NULL_TREE;
  tree mask;
  tree offset;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  We also don't want to
     do anything if the inner expression is a PLACEHOLDER_EXPR since we
     then will no longer be able to replace it.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
				&lunsignedp, &lvolatilep, false);
  if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
      || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
    return 0;

 if (!const_p)
   {
     /* If this is not a constant, we can only do something if bit positions,
	sizes, and signedness are the same.  */
     rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
				   &runsignedp, &rvolatilep, false);

     if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
	 || lunsignedp != runsignedp || offset != 0
	 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
       return 0;
   }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  nmode = get_best_mode (lbitsize, lbitpos,
			 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
			 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
				TYPE_ALIGN (TREE_TYPE (rinner))),
			 word_mode, lvolatilep || rvolatilep);
  if (nmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = lang_hooks.types.type_for_mode (nmode, 0);
  unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  nbitsize = GET_MODE_BITSIZE (nmode);
  /* Align the chunk start down to a multiple of its own size.  */
  nbitpos = lbitpos & ~ (nbitsize - 1);
  lbitpos -= nbitpos;
  if (nbitsize == lbitsize)
    return 0;

  /* Bit numbering within the chunk depends on target endianness.  */
  if (BYTES_BIG_ENDIAN)
    lbitpos = nbitsize - lbitsize - lbitpos;

  /* Make the mask to be used against the extracted field: LBITSIZE
     ones positioned at LBITPOS within the NBITSIZE-bit chunk.  */
  mask = build_int_cst_type (unsigned_type, -1);
  mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (nbitsize - lbitsize - lbitpos), 0);

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return fold_build2 (code, compare_type,
			fold_build2 (BIT_AND_EXPR, unsigned_type,
				     make_bit_field_ref (linner,
							 unsigned_type,
							 nbitsize, nbitpos,
							 1),
				     mask),
			fold_build2 (BIT_AND_EXPR, unsigned_type,
				     make_bit_field_ref (rinner,
							 unsigned_type,
							 nbitsize, nbitpos,
							 1),
				     mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					fold_convert (unsigned_type, rhs),
					size_int (lbitsize), 0)))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
			      size_int (lbitsize - 1), 0);
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning (0, "comparison is always %d due to width of bit-field",
		   code == NE_EXPR);
	  return constant_boolean_node (code == NE_EXPR, compare_type);
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = build_int_cst (type, 0);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
  if (lvolatilep)
    {
      TREE_SIDE_EFFECTS (lhs) = 1;
      TREE_THIS_VOLATILE (lhs) = 1;
    }

  rhs = const_binop (BIT_AND_EXPR,
		     const_binop (LSHIFT_EXPR,
				  fold_convert (unsigned_type, rhs),
				  size_int (lbitpos), 0),
		     mask, 0);

  return build2 (code, compare_type,
		 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
		 rhs);
}
3565 \f
/* Subroutine for fold_truthop: decode a field reference.

   If EXP is a comparison reference, we return the innermost reference.

   *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
   set to the starting bit number.

   If the innermost field can be completely contained in a mode-sized
   unit, *PMODE is set to that mode.  Otherwise, it is set to VOIDmode.

   *PVOLATILEP is set to 1 if any expression encountered is volatile;
   otherwise it is not changed.

   *PUNSIGNEDP is set to the signedness of the field.

   *PMASK is set to the mask used.  This is either contained in a
   BIT_AND_EXPR or derived from the width of the field.

   *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.

   Return 0 if this is not a component reference or is one that we can't
   do anything with.  */

static tree
decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
			HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
			int *punsignedp, int *pvolatilep,
			tree *pmask, tree *pand_mask)
{
  tree outer_type = 0;
  tree and_mask = 0;
  tree mask, inner, offset;
  tree unsigned_type;
  unsigned int precision;

  /* All the optimizations using this function assume integer fields.
     There are problems with FP fields since the type_for_size call
     below can fail for, e.g., XFmode.  */
  if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
    return 0;

  /* We are interested in the bare arrangement of bits, so strip everything
     that doesn't affect the machine mode.  However, record the type of the
     outermost expression if it may matter below.  */
  if (TREE_CODE (exp) == NOP_EXPR
      || TREE_CODE (exp) == CONVERT_EXPR
      || TREE_CODE (exp) == NON_LVALUE_EXPR)
    outer_type = TREE_TYPE (exp);
  STRIP_NOPS (exp);

  /* Peel off an explicit mask, remembering it so it can be merged
     into the field mask below.  */
  if (TREE_CODE (exp) == BIT_AND_EXPR)
    {
      and_mask = TREE_OPERAND (exp, 1);
      exp = TREE_OPERAND (exp, 0);
      STRIP_NOPS (exp); STRIP_NOPS (and_mask);
      if (TREE_CODE (and_mask) != INTEGER_CST)
	return 0;
    }

  inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
			       punsignedp, pvolatilep, false);
  /* Give up on a variable offset, a negative size, a PLACEHOLDER_EXPR,
     or an expression that isn't a reference at all (unless an explicit
     AND mask made the decomposition worthwhile anyway).  */
  if ((inner == exp && and_mask == 0)
      || *pbitsize < 0 || offset != 0
      || TREE_CODE (inner) == PLACEHOLDER_EXPR)
    return 0;

  /* If the number of bits in the reference is the same as the bitsize of
     the outer type, then the outer type gives the signedness.  Otherwise
     (in case of a small bitfield) the signedness is unchanged.  */
  if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
    *punsignedp = TYPE_UNSIGNED (outer_type);

  /* Compute the mask to access the bitfield.  */
  unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
  precision = TYPE_PRECISION (unsigned_type);

  mask = build_int_cst_type (unsigned_type, -1);

  /* Shift out all but the low *PBITSIZE bits to form the field mask.  */
  mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
  mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);

  /* Merge it with the mask we found in the BIT_AND_EXPR, if any.  */
  if (and_mask != 0)
    mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
			fold_convert (unsigned_type, and_mask), mask);

  *pmask = mask;
  *pand_mask = and_mask;
  return inner;
}
3656
3657 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3658 bit positions. */
3659
3660 static int
3661 all_ones_mask_p (tree mask, int size)
3662 {
3663 tree type = TREE_TYPE (mask);
3664 unsigned int precision = TYPE_PRECISION (type);
3665 tree tmask;
3666
3667 tmask = build_int_cst_type (lang_hooks.types.signed_type (type), -1);
3668
3669 return
3670 tree_int_cst_equal (mask,
3671 const_binop (RSHIFT_EXPR,
3672 const_binop (LSHIFT_EXPR, tmask,
3673 size_int (precision - size),
3674 0),
3675 size_int (precision - size), 0));
3676 }
3677
/* Subroutine for fold: determine if VAL is the INTEGER_CONST that
   represents the sign bit of EXP's type.  If EXP represents a sign
   or zero extension, also test VAL against the unextended type.
   The return value is the (sub)expression whose sign bit is VAL,
   or NULL_TREE otherwise.  */

static tree
sign_bit_p (tree exp, tree val)
{
  /* The candidate sign-bit value and the precision mask are each kept
     as a high/low pair of HOST_WIDE_INTs because the type's precision
     may exceed a single HOST_WIDE_INT.  */
  unsigned HOST_WIDE_INT mask_lo, lo;
  HOST_WIDE_INT mask_hi, hi;
  int width;
  tree t;

  /* Tree EXP must have an integral type.  */
  t = TREE_TYPE (exp);
  if (! INTEGRAL_TYPE_P (t))
    return NULL_TREE;

  /* Tree VAL must be an integer constant without overflow.  */
  if (TREE_CODE (val) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (val))
    return NULL_TREE;

  width = TYPE_PRECISION (t);
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      /* The sign bit lives in the high word; the mask selects the
	 WIDTH significant bits of the pair.  */
      hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
      lo = 0;

      mask_hi = ((unsigned HOST_WIDE_INT) -1
		 >> (2 * HOST_BITS_PER_WIDE_INT - width));
      mask_lo = -1;
    }
  else
    {
      /* The whole value fits in the low word.  */
      hi = 0;
      lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);

      mask_hi = 0;
      mask_lo = ((unsigned HOST_WIDE_INT) -1
		 >> (HOST_BITS_PER_WIDE_INT - width));
    }

  /* We mask off those bits beyond TREE_TYPE (exp) so that we can
     treat VAL as if it were unsigned.  */
  if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
      && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
    return exp;

  /* Handle extension from a narrower type: recurse on the unextended
     operand so VAL is also tested against the narrower width.  */
  if (TREE_CODE (exp) == NOP_EXPR
      && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
    return sign_bit_p (TREE_OPERAND (exp, 0), val);

  return NULL_TREE;
}
3735
3736 /* Subroutine for fold_truthop: determine if an operand is simple enough
3737 to be evaluated unconditionally. */
3738
3739 static int
3740 simple_operand_p (tree exp)
3741 {
3742 /* Strip any conversions that don't change the machine mode. */
3743 STRIP_NOPS (exp);
3744
3745 return (CONSTANT_CLASS_P (exp)
3746 || TREE_CODE (exp) == SSA_NAME
3747 || (DECL_P (exp)
3748 && ! TREE_ADDRESSABLE (exp)
3749 && ! TREE_THIS_VOLATILE (exp)
3750 && ! DECL_NONLOCAL (exp)
3751 /* Don't regard global variables as simple. They may be
3752 allocated in ways unknown to the compiler (shared memory,
3753 #pragma weak, etc). */
3754 && ! TREE_PUBLIC (exp)
3755 && ! DECL_EXTERNAL (exp)
3756 /* Loading a static variable is unduly expensive, but global
3757 registers aren't expensive. */
3758 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3759 }
3760 \f
3761 /* The following functions are subroutines to fold_range_test and allow it to
3762 try to change a logical combination of comparisons into a range test.
3763
3764 For example, both
3765 X == 2 || X == 3 || X == 4 || X == 5
3766 and
3767 X >= 2 && X <= 5
3768 are converted to
3769 (unsigned) (X - 2) <= 3
3770
3771 We describe each set of comparisons as being either inside or outside
3772 a range, using a variable named like IN_P, and then describe the
3773 range with a lower and upper bound. If one of the bounds is omitted,
3774 it represents either the highest or lowest value of the type.
3775
3776 In the comments below, we represent a range by two numbers in brackets
3777 preceded by a "+" to designate being inside that range, or a "-" to
3778 designate being outside that range, so the condition can be inverted by
3779 flipping the prefix. An omitted bound is represented by a "-". For
3780 example, "- [-, 10]" means being outside the range starting at the lowest
3781 possible value and ending at 10, in other words, being greater than 10.
3782 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3783 always false.
3784
3785 We set up things so that the missing bounds are handled in a consistent
3786 manner so neither a missing bound nor "true" and "false" need to be
3787 handled using a special case. */
3788
3789 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3790 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3791 and UPPER1_P are nonzero if the respective argument is an upper bound
3792 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3793 must be specified for a comparison. ARG1 will be converted to ARG0's
3794 type if both are specified. */
3795
3796 static tree
3797 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3798 tree arg1, int upper1_p)
3799 {
3800 tree tem;
3801 int result;
3802 int sgn0, sgn1;
3803
3804 /* If neither arg represents infinity, do the normal operation.
3805 Else, if not a comparison, return infinity. Else handle the special
3806 comparison rules. Note that most of the cases below won't occur, but
3807 are handled for consistency. */
3808
3809 if (arg0 != 0 && arg1 != 0)
3810 {
3811 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3812 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3813 STRIP_NOPS (tem);
3814 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3815 }
3816
3817 if (TREE_CODE_CLASS (code) != tcc_comparison)
3818 return 0;
3819
3820 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3821 for neither. In real maths, we cannot assume open ended ranges are
3822 the same. But, this is computer arithmetic, where numbers are finite.
3823 We can therefore make the transformation of any unbounded range with
3824 the value Z, Z being greater than any representable number. This permits
3825 us to treat unbounded ranges as equal. */
3826 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3827 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3828 switch (code)
3829 {
3830 case EQ_EXPR:
3831 result = sgn0 == sgn1;
3832 break;
3833 case NE_EXPR:
3834 result = sgn0 != sgn1;
3835 break;
3836 case LT_EXPR:
3837 result = sgn0 < sgn1;
3838 break;
3839 case LE_EXPR:
3840 result = sgn0 <= sgn1;
3841 break;
3842 case GT_EXPR:
3843 result = sgn0 > sgn1;
3844 break;
3845 case GE_EXPR:
3846 result = sgn0 >= sgn1;
3847 break;
3848 default:
3849 gcc_unreachable ();
3850 }
3851
3852 return constant_boolean_node (result, type);
3853 }
3854 \f
/* Given EXP, a logical expression, set the range it is testing into
   variables denoted by PIN_P, PLOW, and PHIGH.  Return the expression
   actually being tested.  *PLOW and *PHIGH will be made of the same type
   as the returned expression.  If EXP is not a comparison, we will most
   likely not be returning a useful value and range.

   A NULL *PLOW or *PHIGH stands for the lowest or highest representable
   value, respectively; *PIN_P says whether EXP is being tested for
   membership inside (nonzero) or outside (zero) the range.  */

static tree
make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
{
  enum tree_code code;
  tree arg0 = NULL_TREE, arg1 = NULL_TREE;
  tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
  int in_p, n_in_p;
  tree low, high, n_low, n_high;

  /* Start with simply saying "EXP != 0" and then look at the code of EXP
     and see if we can refine the range.  Some of the cases below may not
     happen, but it doesn't seem worth worrying about this.  We "continue"
     the outer loop when we've changed something; otherwise we "break"
     the switch, which will "break" the while.  */

  in_p = 0;
  low = high = build_int_cst (TREE_TYPE (exp), 0);

  while (1)
    {
      code = TREE_CODE (exp);
      exp_type = TREE_TYPE (exp);

      /* Pick out the operands relevant to this code's class so the
	 cases below can use ARG0/ARG1 uniformly.  */
      if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
	{
	  if (TREE_CODE_LENGTH (code) > 0)
	    arg0 = TREE_OPERAND (exp, 0);
	  if (TREE_CODE_CLASS (code) == tcc_comparison
	      || TREE_CODE_CLASS (code) == tcc_unary
	      || TREE_CODE_CLASS (code) == tcc_binary)
	    arg0_type = TREE_TYPE (arg0);
	  if (TREE_CODE_CLASS (code) == tcc_binary
	      || TREE_CODE_CLASS (code) == tcc_comparison
	      || (TREE_CODE_CLASS (code) == tcc_expression
		  && TREE_CODE_LENGTH (code) > 1))
	    arg1 = TREE_OPERAND (exp, 1);
	}

      switch (code)
	{
	case TRUTH_NOT_EXPR:
	  /* Negation flips inside/outside and descends into the
	     operand.  */
	  in_p = ! in_p, exp = arg0;
	  continue;

	case EQ_EXPR: case NE_EXPR:
	case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
	  /* We can only do something if the range is testing for zero
	     and if the second operand is an integer constant.  Note that
	     saying something is "in" the range we make is done by
	     complementing IN_P since it will set in the initial case of
	     being not equal to zero; "out" is leaving it alone.  */
	  if (low == 0 || high == 0
	      || ! integer_zerop (low) || ! integer_zerop (high)
	      || TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  switch (code)
	    {
	    case NE_EXPR:  /* - [c, c]  */
	      low = high = arg1;
	      break;
	    case EQ_EXPR:  /* + [c, c]  */
	      in_p = ! in_p, low = high = arg1;
	      break;
	    case GT_EXPR:  /* - [-, c] */
	      low = 0, high = arg1;
	      break;
	    case GE_EXPR:  /* + [c, -] */
	      in_p = ! in_p, low = arg1, high = 0;
	      break;
	    case LT_EXPR:  /* - [c, -] */
	      low = arg1, high = 0;
	      break;
	    case LE_EXPR:  /* + [-, c] */
	      in_p = ! in_p, low = 0, high = arg1;
	      break;
	    default:
	      gcc_unreachable ();
	    }

	  /* If this is an unsigned comparison, we also know that EXP is
	     greater than or equal to zero.  We base the range tests we make
	     on that fact, so we record it here so we can parse existing
	     range tests.  We test arg0_type since often the return type
	     of, e.g. EQ_EXPR, is boolean.  */
	  if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
	    {
	      if (! merge_ranges (&n_in_p, &n_low, &n_high,
				  in_p, low, high, 1,
				  build_int_cst (arg0_type, 0),
				  NULL_TREE))
		break;

	      in_p = n_in_p, low = n_low, high = n_high;

	      /* If the high bound is missing, but we have a nonzero low
		 bound, reverse the range so it goes from zero to the low bound
		 minus 1.  */
	      if (high == 0 && low && ! integer_zerop (low))
		{
		  in_p = ! in_p;
		  high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
				      integer_one_node, 0);
		  low = build_int_cst (arg0_type, 0);
		}
	    }

	  exp = arg0;
	  continue;

	case NEGATE_EXPR:
	  /* (-x) IN [a,b] -> x in [-b, -a]  */
	  n_low = range_binop (MINUS_EXPR, exp_type,
			       build_int_cst (exp_type, 0),
			       0, high, 1);
	  n_high = range_binop (MINUS_EXPR, exp_type,
				build_int_cst (exp_type, 0),
				0, low, 0);
	  low = n_low, high = n_high;
	  exp = arg0;
	  continue;

	case BIT_NOT_EXPR:
	  /* ~ X -> -X - 1; rewrite so the next iteration handles it
	     via the NEGATE_EXPR and PLUS/MINUS cases.  */
	  exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
			build_int_cst (exp_type, 1));
	  continue;

	case PLUS_EXPR:  case MINUS_EXPR:
	  if (TREE_CODE (arg1) != INTEGER_CST)
	    break;

	  /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
	     move a constant to the other side.  */
	  if (flag_wrapv && !TYPE_UNSIGNED (arg0_type))
	    break;

	  /* If EXP is signed, any overflow in the computation is undefined,
	     so we don't worry about it so long as our computations on
	     the bounds don't overflow.  For unsigned, overflow is defined
	     and this is exactly the right thing.  */
	  n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
			       arg0_type, low, 0, arg1, 0);
	  n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
				arg0_type, high, 1, arg1, 0);
	  if ((n_low != 0 && TREE_OVERFLOW (n_low))
	      || (n_high != 0 && TREE_OVERFLOW (n_high)))
	    break;

	  /* Check for an unsigned range which has wrapped around the maximum
	     value thus making n_high < n_low, and normalize it.  */
	  if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
	    {
	      low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
				 integer_one_node, 0);
	      high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
				  integer_one_node, 0);

	      /* If the range is of the form +/- [ x+1, x ], we won't
		 be able to normalize it.  But then, it represents the
		 whole range or the empty set, so make it
		 +/- [ -, - ].  */
	      if (tree_int_cst_equal (n_low, low)
		  && tree_int_cst_equal (n_high, high))
		low = high = 0;
	      else
		in_p = ! in_p;
	    }
	  else
	    low = n_low, high = n_high;

	  exp = arg0;
	  continue;

	case NOP_EXPR:  case NON_LVALUE_EXPR:  case CONVERT_EXPR:
	  /* A narrowing conversion discards bits, so the bounds cannot
	     simply be converted to the inner type.  */
	  if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
	    break;

	  if (! INTEGRAL_TYPE_P (arg0_type)
	      || (low != 0 && ! int_fits_type_p (low, arg0_type))
	      || (high != 0 && ! int_fits_type_p (high, arg0_type)))
	    break;

	  n_low = low, n_high = high;

	  if (n_low != 0)
	    n_low = fold_convert (arg0_type, n_low);

	  if (n_high != 0)
	    n_high = fold_convert (arg0_type, n_high);

	  /* If we're converting arg0 from an unsigned type, to exp,
	     a signed type,  we will be doing the comparison as unsigned.
	     The tests above have already verified that LOW and HIGH
	     are both positive.

	     So we have to ensure that we will handle large unsigned
	     values the same way that the current signed bounds treat
	     negative values.  */

	  if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
	    {
	      tree high_positive;
	      tree equiv_type = lang_hooks.types.type_for_mode
		(TYPE_MODE (arg0_type), 1);

	      /* A range without an upper bound is, naturally, unbounded.
		 Since convert would have cropped a very large value, use
		 the max value for the destination type.  */
	      high_positive
		= TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
		: TYPE_MAX_VALUE (arg0_type);

	      /* When the precisions match, the largest value that is
		 still positive as a signed number is the unsigned
		 maximum shifted right by one.  */
	      if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
		high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
					     fold_convert (arg0_type,
							   high_positive),
					     build_int_cst (arg0_type, 1));

	      /* If the low bound is specified, "and" the range with the
		 range for which the original unsigned value will be
		 positive.  */
	      if (low != 0)
		{
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      1, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (n_in_p == in_p);
		}
	      else
		{
		  /* Otherwise, "or" the range with the range of the input
		     that will be interpreted as negative.  */
		  if (! merge_ranges (&n_in_p, &n_low, &n_high,
				      0, n_low, n_high, 1,
				      fold_convert (arg0_type,
						    integer_zero_node),
				      high_positive))
		    break;

		  in_p = (in_p != n_in_p);
		}
	    }

	  exp = arg0;
	  low = n_low, high = n_high;
	  continue;

	default:
	  break;
	}

      break;
    }

  /* If EXP is a constant, we can evaluate whether this is true or false.  */
  if (TREE_CODE (exp) == INTEGER_CST)
    {
      in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
						 exp, 0, low, 0))
		      && integer_onep (range_binop (LE_EXPR, integer_type_node,
						    exp, 1, high, 1)));
      low = high = 0;
      exp = 0;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return exp;
}
4135 \f
/* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
   type, TYPE, return an expression to test if EXP is in (or out of, depending
   on IN_P) the range.  Return 0 if the test couldn't be created.
   A LOW or HIGH of 0 denotes an unbounded end of the range.  */

static tree
build_range_check (tree type, tree exp, int in_p, tree low, tree high)
{
  tree etype = TREE_TYPE (exp);
  tree value;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization for function pointer expressions
     on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (etype) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  /* An "out of range" test is built as the inversion of the
     corresponding "in range" test.  */
  if (! in_p)
    {
      value = build_range_check (type, exp, 1, low, high);
      if (value != 0)
	return invert_truthvalue (value);

      return 0;
    }

  /* Unbounded on both ends: always true.  */
  if (low == 0 && high == 0)
    return build_int_cst (type, 1);

  /* Only an upper bound: EXP <= HIGH.  */
  if (low == 0)
    return fold_build2 (LE_EXPR, type, exp,
			fold_convert (etype, high));

  /* Only a lower bound: EXP >= LOW.  */
  if (high == 0)
    return fold_build2 (GE_EXPR, type, exp,
			fold_convert (etype, low));

  /* A singleton range: EXP == LOW.  */
  if (operand_equal_p (low, high, 0))
    return fold_build2 (EQ_EXPR, type, exp,
			fold_convert (etype, low));

  /* [0, HIGH] can be tested with a single unsigned comparison;
     retry with EXP viewed as unsigned.  */
  if (integer_zerop (low))
    {
      if (! TYPE_UNSIGNED (etype))
	{
	  etype = lang_hooks.types.unsigned_type (etype);
	  high = fold_convert (etype, high);
	  exp = fold_convert (etype, exp);
	}
      return build_range_check (type, exp, 1, 0, high);
    }

  /* Optimize (c>=1) && (c<=127) into (signed char)c > 0.  */
  if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
    {
      unsigned HOST_WIDE_INT lo;
      HOST_WIDE_INT hi;
      int prec;

      /* Build the two-word constant 2**(prec-1) - 1, i.e. the maximum
	 of the signed type of precision PREC, to compare HIGH against.  */
      prec = TYPE_PRECISION (etype);
      if (prec <= HOST_BITS_PER_WIDE_INT)
	{
	  hi = 0;
	  lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
	}
      else
	{
	  hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
	  lo = (unsigned HOST_WIDE_INT) -1;
	}

      if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
	{
	  if (TYPE_UNSIGNED (etype))
	    {
	      etype = lang_hooks.types.signed_type (etype);
	      exp = fold_convert (etype, exp);
	    }
	  return fold_build2 (GT_EXPR, type, exp,
			      build_int_cst (etype, 0));
	}
    }

  /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
     This requires wrap-around arithmetics for the type of the expression.  */
  switch (TREE_CODE (etype))
    {
    case INTEGER_TYPE:
      /* There is no requirement that LOW be within the range of ETYPE
	 if the latter is a subtype.  It must, however, be within the base
	 type of ETYPE.  So be sure we do the subtraction in that type.  */
      if (TREE_TYPE (etype))
	etype = TREE_TYPE (etype);
      break;

    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* Do the arithmetic in an integer type of the same width.  */
      etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
					      TYPE_UNSIGNED (etype));
      break;

    default:
      break;
    }

  /* If we don't have wrap-around arithmetics upfront, try to force it.  */
  if (TREE_CODE (etype) == INTEGER_TYPE
      && !TYPE_UNSIGNED (etype) && !flag_wrapv)
    {
      tree utype, minv, maxv;

      /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
	 for the type in question, as we rely on this here.  */
      utype = lang_hooks.types.unsigned_type (etype);
      maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
      maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
			  integer_one_node, 1);
      minv = fold_convert (utype, TYPE_MIN_VALUE (etype));

      if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
				      minv, 1, maxv, 1)))
	etype = utype;
      else
	return 0;
    }

  high = fold_convert (etype, high);
  low = fold_convert (etype, low);
  exp = fold_convert (etype, exp);

  value = const_binop (MINUS_EXPR, high, low, 0);

  /* Recurse with the shifted range [0, HIGH - LOW]; the integer_zerop
     case above then finishes the job with an unsigned comparison.  */
  if (value != 0 && !TREE_OVERFLOW (value))
    return build_range_check (type,
			      fold_build2 (MINUS_EXPR, etype, exp, low),
			      1, build_int_cst (etype, 0), value);

  return 0;
}
4277 \f
4278 /* Return the predecessor of VAL in its type, handling the infinite case. */
4279
4280 static tree
4281 range_predecessor (tree val)
4282 {
4283 tree type = TREE_TYPE (val);
4284
4285 if (INTEGRAL_TYPE_P (type)
4286 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4287 return 0;
4288 else
4289 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4290 }
4291
4292 /* Return the successor of VAL in its type, handling the infinite case. */
4293
4294 static tree
4295 range_successor (tree val)
4296 {
4297 tree type = TREE_TYPE (val);
4298
4299 if (INTEGRAL_TYPE_P (type)
4300 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4301 return 0;
4302 else
4303 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4304 }
4305
/* Given two ranges, see if we can merge them into one.  Return 1 if we
   can, 0 if we can't.  Set the output range into the specified parameters.

   Each input range is (IN0_P, LOW0, HIGH0) resp. (IN1_P, LOW1, HIGH1),
   where the IN flag says whether the range is included or excluded and a
   bound of 0 denotes an unbounded end.  The merged range is stored through
   *PIN_P, *PLOW, *PHIGH in the same representation.  */

static int
merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
	      tree high0, int in1_p, tree low1, tree high1)
{
  int no_overlap;
  int subset;
  int temp;
  tree tem;
  int in_p;
  tree low, high;
  /* Whether the two lower (resp. upper) bounds are equal; a pair of 0
     bounds (both unbounded) also counts as equal.  */
  int lowequal = ((low0 == 0 && low1 == 0)
		  || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						low0, 0, low1, 0)));
  int highequal = ((high0 == 0 && high1 == 0)
		   || integer_onep (range_binop (EQ_EXPR, integer_type_node,
						 high0, 1, high1, 1)));

  /* Make range 0 be the range that starts first, or ends last if they
     start at the same value.  Swap them if it isn't.  */
  if (integer_onep (range_binop (GT_EXPR, integer_type_node,
				 low0, 0, low1, 0))
      || (lowequal
	  && integer_onep (range_binop (GT_EXPR, integer_type_node,
					high1, 1, high0, 1))))
    {
      temp = in0_p, in0_p = in1_p, in1_p = temp;
      tem = low0, low0 = low1, low1 = tem;
      tem = high0, high0 = high1, high1 = tem;
    }

  /* Now flag two cases, whether the ranges are disjoint or whether the
     second range is totally subsumed in the first.  Note that the tests
     below are simplified by the ones above.  */
  no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
					  high0, 1, low1, 0));
  subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
				      high1, 1, high0, 1));

  /* We now have four cases, depending on whether we are including or
     excluding the two ranges.  */
  if (in0_p && in1_p)
    {
      /* If they don't overlap, the result is false.  If the second range
	 is a subset it is the result.  Otherwise, the range is from the start
	 of the second to the end of the first.  */
      if (no_overlap)
	in_p = 0, low = high = 0;
      else if (subset)
	in_p = 1, low = low1, high = high1;
      else
	in_p = 1, low = low1, high = high0;
    }

  else if (in0_p && ! in1_p)
    {
      /* If they don't overlap, the result is the first range.  If they are
	 equal, the result is false.  If the second range is a subset of the
	 first, and the ranges begin at the same place, we go from just after
	 the end of the second range to the end of the first.  If the second
	 range is not a subset of the first, or if it is a subset and both
	 ranges end at the same place, the range starts at the start of the
	 first range and ends just before the second range.
	 Otherwise, we can't describe this as a single range.  */
      if (no_overlap)
	in_p = 1, low = low0, high = high0;
      else if (lowequal && highequal)
	in_p = 0, low = high = 0;
      else if (subset && lowequal)
	{
	  low = range_successor (high1);
	  high = high0;
	  in_p = (low != 0);
	}
      else if (! subset || highequal)
	{
	  low = low0;
	  high = range_predecessor (low1);
	  /* A zero bound from range_predecessor means the range is
	     empty; report it as "not in [-, -]".  */
	  in_p = (high != 0);
	}
      else
	return 0;
    }

  else if (! in0_p && in1_p)
    {
      /* If they don't overlap, the result is the second range.  If the second
	 is a subset of the first, the result is false.  Otherwise,
	 the range starts just after the first range and ends at the
	 end of the second.  */
      if (no_overlap)
	in_p = 1, low = low1, high = high1;
      else if (subset || highequal)
	in_p = 0, low = high = 0;
      else
	{
	  low = range_successor (high0);
	  high = high1;
	  in_p = (low != 0);
	}
    }

  else
    {
      /* The case where we are excluding both ranges.  Here the complex case
	 is if they don't overlap.  In that case, the only time we have a
	 range is if they are adjacent.  If the second is a subset of the
	 first, the result is the first.  Otherwise, the range to exclude
	 starts at the beginning of the first range and ends at the end of the
	 second.  */
      if (no_overlap)
	{
	  if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
					 range_successor (high0),
					 1, low1, 0)))
	    in_p = 0, low = low0, high = high1;
	  else
	    {
	      /* Canonicalize - [min, x] into - [-, x].  */
	      if (low0 && TREE_CODE (low0) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (low0)))
		  {
		  case ENUMERAL_TYPE:
		    /* Only do this for enums whose precision fills their
		       mode, so every mode value is a valid enum value.  */
		    if (TYPE_PRECISION (TREE_TYPE (low0))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (low0,
					    TYPE_MIN_VALUE (TREE_TYPE (low0))))
		      low0 = 0;
		    break;
		  case POINTER_TYPE:
		    if (TYPE_UNSIGNED (TREE_TYPE (low0))
			&& integer_zerop (low0))
		      low0 = 0;
		    break;
		  default:
		    break;
		  }

	      /* Canonicalize - [x, max] into - [x, -].  */
	      if (high1 && TREE_CODE (high1) == INTEGER_CST)
		switch (TREE_CODE (TREE_TYPE (high1)))
		  {
		  case ENUMERAL_TYPE:
		    if (TYPE_PRECISION (TREE_TYPE (high1))
			!= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
		      break;
		    /* FALLTHROUGH */
		  case INTEGER_TYPE:
		    if (tree_int_cst_equal (high1,
					    TYPE_MAX_VALUE (TREE_TYPE (high1))))
		      high1 = 0;
		    break;
		  case POINTER_TYPE:
		    /* For an unsigned pointer, "max" is the value whose
		       successor wraps to zero.  */
		    if (TYPE_UNSIGNED (TREE_TYPE (high1))
			&& integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
						       high1, 1,
						       integer_one_node, 1)))
		      high1 = 0;
		    break;
		  default:
		    break;
		  }

	      /* The ranges might be also adjacent between the maximum and
	         minimum values of the given type.  For
	         - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
	         return + [x + 1, y - 1].  */
	      if (low0 == 0 && high1 == 0)
		{
		  low = range_successor (high0);
		  high = range_predecessor (low1);
		  if (low == 0 || high == 0)
		    return 0;

		  in_p = 1;
		}
	      else
		return 0;
	    }
	}
      else if (subset)
	in_p = 0, low = low0, high = high0;
      else
	in_p = 0, low = low0, high = high1;
    }

  *pin_p = in_p, *plow = low, *phigh = high;
  return 1;
}
4500 \f
4501
/* Subroutine of fold, looking inside expressions of the form
   A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
   of the COND_EXPR.  This function is being used also to optimize
   A op B ? C : A, by reversing the comparison first.

   TYPE is the type of the whole COND_EXPR.

   Return a folded expression whose code is not a COND_EXPR
   anymore, or NULL_TREE if no folding opportunity is found.  */

static tree
fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
{
  enum tree_code comp_code = TREE_CODE (arg0);
  tree arg00 = TREE_OPERAND (arg0, 0);
  tree arg01 = TREE_OPERAND (arg0, 1);
  tree arg1_type = TREE_TYPE (arg1);
  tree tem;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  /* If we have A op 0 ? A : -A, consider applying the following
     transformations:

     A == 0? A : -A    same as -A
     A != 0? A : -A    same as A
     A >= 0? A : -A    same as abs (A)
     A > 0?  A : -A    same as abs (A)
     A <= 0? A : -A    same as -abs (A)
     A < 0?  A : -A    same as -abs (A)

     None of these transformations work for modes with signed
     zeros.  If A is +/-0, the first two transformations will
     change the sign of the result (from +0 to -0, or vice
     versa).  The last four will fix the sign of the result,
     even though the original expressions could be positive or
     negative, depending on the sign of A.

     Note that all these transformations are correct if A is
     NaN, since the two alternatives (A and -A) are also NaNs.  */
  if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
       ? real_zerop (arg01)
       : integer_zerop (arg01))
      && ((TREE_CODE (arg2) == NEGATE_EXPR
	   && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
	     /* In the case that A is of the form X-Y, '-A' (arg2) may
	        have already been folded to Y-X, check for that. */
	  || (TREE_CODE (arg1) == MINUS_EXPR
	      && TREE_CODE (arg2) == MINUS_EXPR
	      && operand_equal_p (TREE_OPERAND (arg1, 0),
				  TREE_OPERAND (arg2, 1), 0)
	      && operand_equal_p (TREE_OPERAND (arg1, 1),
				  TREE_OPERAND (arg2, 0), 0))))
    switch (comp_code)
      {
      case EQ_EXPR:
      case UNEQ_EXPR:
	tem = fold_convert (arg1_type, arg1);
	return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
      case NE_EXPR:
      case LTGT_EXPR:
	return pedantic_non_lvalue (fold_convert (type, arg1));
      case UNGE_EXPR:
      case UNGT_EXPR:
	/* The unordered comparisons can raise no exception, while ABS
	   of a NaN might; don't fold when such traps matter.  */
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case GE_EXPR:
      case GT_EXPR:
	/* ABS_EXPR wants a signed operand; view ARG1 as signed first.  */
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (lang_hooks.types.signed_type
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return pedantic_non_lvalue (fold_convert (type, tem));
      case UNLE_EXPR:
      case UNLT_EXPR:
	if (flag_trapping_math)
	  break;
	/* Fall through.  */
      case LE_EXPR:
      case LT_EXPR:
	if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
	  arg1 = fold_convert (lang_hooks.types.signed_type
			       (TREE_TYPE (arg1)), arg1);
	tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
	return negate_expr (fold_convert (type, tem));
      default:
	gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	break;
      }

  /* A != 0 ? A : 0 is simply A, unless A is -0.  Likewise
     A == 0 ? A : 0 is always 0 unless A is -0.  Note that
     both transformations are correct when A is NaN: A != 0
     is then true, and A == 0 is false.  */

  if (integer_zerop (arg01) && integer_zerop (arg2))
    {
      if (comp_code == NE_EXPR)
	return pedantic_non_lvalue (fold_convert (type, arg1));
      else if (comp_code == EQ_EXPR)
	return build_int_cst (type, 0);
    }

  /* Try some transformations of A op B ? A : B.

     A == B? A : B    same as B
     A != B? A : B    same as A
     A >= B? A : B    same as max (A, B)
     A > B?  A : B    same as max (B, A)
     A <= B? A : B    same as min (A, B)
     A < B?  A : B    same as min (B, A)

     As above, these transformations don't work in the presence
     of signed zeros.  For example, if A and B are zeros of
     opposite sign, the first two transformations will change
     the sign of the result.  In the last four, the original
     expressions give different results for (A=+0, B=-0) and
     (A=-0, B=+0), but the transformed expressions do not.

     The first two transformations are correct if either A or B
     is a NaN.  In the first transformation, the condition will
     be false, and B will indeed be chosen.  In the case of the
     second transformation, the condition A != B will be true,
     and A will be chosen.

     The conversions to max() and min() are not correct if B is
     a number and A is not.  The conditions in the original
     expressions will be false, so all four give B.  The min()
     and max() versions would give a NaN instead.  */
  if (operand_equal_for_comparison_p (arg01, arg2, arg00)
      /* Avoid these transformations if the COND_EXPR may be used
	 as an lvalue in the C++ front-end.  PR c++/19199.  */
      && (in_gimple_form
	  || (strcmp (lang_hooks.name, "GNU C++") != 0
	      && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
	  || ! maybe_lvalue_p (arg1)
	  || ! maybe_lvalue_p (arg2)))
    {
      tree comp_op0 = arg00;
      tree comp_op1 = arg01;
      tree comp_type = TREE_TYPE (comp_op0);

      /* Avoid adding NOP_EXPRs in case this is an lvalue.  */
      if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
	{
	  comp_type = type;
	  comp_op0 = arg1;
	  comp_op1 = arg2;
	}

      switch (comp_code)
	{
	case EQ_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg2));
	case NE_EXPR:
	  return pedantic_non_lvalue (fold_convert (type, arg1));
	case LE_EXPR:
	case LT_EXPR:
	case UNLE_EXPR:
	case UNLT_EXPR:
	  /* In C++ a ?: expression can be an lvalue, so put the
	     operand which will be used if they are equal first
	     so that we can convert this back to the
	     corresponding COND_EXPR.  */
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
		    ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case GE_EXPR:
	case GT_EXPR:
	case UNGE_EXPR:
	case UNGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      comp_op0 = fold_convert (comp_type, comp_op0);
	      comp_op1 = fold_convert (comp_type, comp_op1);
	      tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
		    ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
		    : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
	      return pedantic_non_lvalue (fold_convert (type, tem));
	    }
	  break;
	case UNEQ_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg2));
	  break;
	case LTGT_EXPR:
	  if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
	    return pedantic_non_lvalue (fold_convert (type, arg1));
	  break;
	default:
	  gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
	  break;
	}
    }

  /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
     we might still be able to simplify this.  For example,
     if C1 is one less or one more than C2, this might have started
     out as a MIN or MAX and been transformed by this function.
     Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE.  */

  if (INTEGRAL_TYPE_P (type)
      && TREE_CODE (arg01) == INTEGER_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    switch (comp_code)
      {
      case EQ_EXPR:
	/* We can replace A with C1 in this case.  */
	arg1 = fold_convert (type, arg01);
	return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);

      case LT_EXPR:
	/* If C1 is C2 + 1, this is min(A, C2).  The TYPE_MAX_VALUE check
	   guards against C2 + 1 overflowing.  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type, arg1, arg2));
	break;

      case LE_EXPR:
	/* If C1 is C2 - 1, this is min(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
						   type, arg1, arg2));
	break;

      case GT_EXPR:
	/* If C1 is C2 - 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (MINUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type, arg1, arg2));
	break;

      case GE_EXPR:
	/* If C1 is C2 + 1, this is max(A, C2).  */
	if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
			       OEP_ONLY_CONST)
	    && operand_equal_p (arg01,
				const_binop (PLUS_EXPR, arg2,
					     build_int_cst (type, 1), 0),
				OEP_ONLY_CONST))
	  return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
						   type, arg1, arg2));
	break;
      case NE_EXPR:
	break;
      default:
	gcc_unreachable ();
      }

  return NULL_TREE;
}
4774
4775
4776 \f
#ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
/* Nonzero means prefer rewriting short-circuit TRUTH_ANDIF/ORIF into
   non-short-circuit TRUTH_AND/OR; by default do so when branches are
   expensive.  Targets may override this definition.  */
#define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
#endif
4780
/* EXP is some logical combination of boolean tests.  See if we can
   merge it into some range test.  Return the new tree if so.

   CODE is the truth operation (TRUTH_ANDIF/AND/ORIF/OR_EXPR), TYPE the
   result type, and OP0/OP1 its two operands.  Returns 0 when no merge
   is possible.  */

static tree
fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
{
  int or_op = (code == TRUTH_ORIF_EXPR
	       || code == TRUTH_OR_EXPR);
  int in0_p, in1_p, in_p;
  tree low0, low1, low, high0, high1, high;
  /* Decompose each operand into a range test on a common expression;
     make_range returns 0 when it can't.  */
  tree lhs = make_range (op0, &in0_p, &low0, &high0);
  tree rhs = make_range (op1, &in1_p, &low1, &high1);
  tree tem;

  /* If this is an OR operation, invert both sides; we will invert
     again at the end.  */
  if (or_op)
    in0_p = ! in0_p, in1_p = ! in1_p;

  /* If both expressions are the same, if we can merge the ranges, and we
     can build the range test, return it or it inverted.  If one of the
     ranges is always true or always false, consider it to be the same
     expression as the other.  */
  if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
      && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
		       in1_p, low1, high1)
      && 0 != (tem = (build_range_check (type,
					 lhs != 0 ? lhs
					 : rhs != 0 ? rhs : integer_zero_node,
					 in_p, low, high))))
    return or_op ? invert_truthvalue (tem) : tem;

  /* On machines where the branch cost is expensive, if this is a
     short-circuited branch and the underlying object on both sides
     is the same, make a non-short-circuit operation.  */
  else if (LOGICAL_OP_NON_SHORT_CIRCUIT
	   && lhs != 0 && rhs != 0
	   && (code == TRUTH_ANDIF_EXPR
	       || code == TRUTH_ORIF_EXPR)
	   && operand_equal_p (lhs, rhs, 0))
    {
      /* If simple enough, just rewrite.  Otherwise, make a SAVE_EXPR
	 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
	 which cases we can't do this.  */
      if (simple_operand_p (lhs))
	return build2 (code == TRUTH_ANDIF_EXPR
		       ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
		       type, op0, op1);

      else if (lang_hooks.decls.global_bindings_p () == 0
	       && ! CONTAINS_PLACEHOLDER_P (lhs))
	{
	  tree common = save_expr (lhs);

	  /* Rebuild both tests against the single SAVE_EXPR, undoing the
	     earlier OR inversion of the IN flags.  */
	  if (0 != (lhs = build_range_check (type, common,
					     or_op ? ! in0_p : in0_p,
					     low0, high0))
	      && (0 != (rhs = build_range_check (type, common,
						 or_op ? ! in1_p : in1_p,
						 low1, high1))))
	    return build2 (code == TRUTH_ANDIF_EXPR
			   ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
			   type, lhs, rhs);
	}
    }

  return 0;
}
4849 \f
/* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
   bit value.  Arrange things so the extra bits will be set to zero if and
   only if C is signed-extended to its full width.  If MASK is nonzero,
   it is an INTEGER_CST that should be AND'ed with the extra bits.  */

static tree
unextend (tree c, int p, int unsignedp, tree mask)
{
  tree type = TREE_TYPE (c);
  int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
  tree temp;

  /* Nothing to do if C already fills the mode or is unsigned (no sign
     bit to propagate).  */
  if (p == modesize || unsignedp)
    return c;

  /* We work by getting just the sign bit into the low-order bit, then
     into the high-order bit, then sign-extend.  We then XOR that value
     with C.  */
  temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
  temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);

  /* We must use a signed type in order to get an arithmetic right shift.
     However, we must also avoid introducing accidental overflows, so that
     a subsequent call to integer_zerop will work.  Hence we must
     do the type conversion here.  At this point, the constant is either
     zero or one, and the conversion to a signed type can never overflow.
     We could get an overflow if this conversion is done anywhere else.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (lang_hooks.types.signed_type (type), temp);

  /* Move the bit to the sign position, then arithmetic-shift it back so
     it smears across all bits above position P - 1.  */
  temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
  temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
  if (mask != 0)
    temp = const_binop (BIT_AND_EXPR, temp,
			fold_convert (TREE_TYPE (c), mask), 0);
  /* If necessary, convert the type back to match the type of C.  */
  if (TYPE_UNSIGNED (type))
    temp = fold_convert (type, temp);

  return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
}
4891 \f
4892 /* Find ways of folding logical expressions of LHS and RHS:
4893 Try to merge two comparisons to the same innermost item.
4894 Look for range tests like "ch >= '0' && ch <= '9'".
4895 Look for combinations of simple terms on machines with expensive branches
4896 and evaluate the RHS unconditionally.
4897
4898 For example, if we have p->a == 2 && p->b == 4 and we can make an
4899 object large enough to span both A and B, we can do this with a comparison
4900 against the object ANDed with the a mask.
4901
4902 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4903 operations to do this with one comparison.
4904
4905 We check for both normal comparisons and the BIT_AND_EXPRs made this by
4906 function and the one above.
4907
4908 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4909 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4910
4911 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4912 two operands.
4913
4914 We return the simplified tree or 0 if no optimization is possible. */
4915
4916 static tree
4917 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4918 {
4919 /* If this is the "or" of two comparisons, we can do something if
4920 the comparisons are NE_EXPR. If this is the "and", we can do something
4921 if the comparisons are EQ_EXPR. I.e.,
4922 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4923
4924 WANTED_CODE is this operation code. For single bit fields, we can
4925 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4926 comparison for one-bit fields. */
4927
4928 enum tree_code wanted_code;
4929 enum tree_code lcode, rcode;
4930 tree ll_arg, lr_arg, rl_arg, rr_arg;
4931 tree ll_inner, lr_inner, rl_inner, rr_inner;
4932 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4933 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4934 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4935 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4936 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4937 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4938 enum machine_mode lnmode, rnmode;
4939 tree ll_mask, lr_mask, rl_mask, rr_mask;
4940 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4941 tree l_const, r_const;
4942 tree lntype, rntype, result;
4943 int first_bit, end_bit;
4944 int volatilep;
4945 tree orig_lhs = lhs, orig_rhs = rhs;
4946 enum tree_code orig_code = code;
4947
4948 /* Start by getting the comparison codes. Fail if anything is volatile.
4949 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4950 it were surrounded with a NE_EXPR. */
4951
4952 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4953 return 0;
4954
4955 lcode = TREE_CODE (lhs);
4956 rcode = TREE_CODE (rhs);
4957
4958 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4959 {
4960 lhs = build2 (NE_EXPR, truth_type, lhs,
4961 build_int_cst (TREE_TYPE (lhs), 0));
4962 lcode = NE_EXPR;
4963 }
4964
4965 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4966 {
4967 rhs = build2 (NE_EXPR, truth_type, rhs,
4968 build_int_cst (TREE_TYPE (rhs), 0));
4969 rcode = NE_EXPR;
4970 }
4971
4972 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4973 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4974 return 0;
4975
4976 ll_arg = TREE_OPERAND (lhs, 0);
4977 lr_arg = TREE_OPERAND (lhs, 1);
4978 rl_arg = TREE_OPERAND (rhs, 0);
4979 rr_arg = TREE_OPERAND (rhs, 1);
4980
4981 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4982 if (simple_operand_p (ll_arg)
4983 && simple_operand_p (lr_arg))
4984 {
4985 tree result;
4986 if (operand_equal_p (ll_arg, rl_arg, 0)
4987 && operand_equal_p (lr_arg, rr_arg, 0))
4988 {
4989 result = combine_comparisons (code, lcode, rcode,
4990 truth_type, ll_arg, lr_arg);
4991 if (result)
4992 return result;
4993 }
4994 else if (operand_equal_p (ll_arg, rr_arg, 0)
4995 && operand_equal_p (lr_arg, rl_arg, 0))
4996 {
4997 result = combine_comparisons (code, lcode,
4998 swap_tree_comparison (rcode),
4999 truth_type, ll_arg, lr_arg);
5000 if (result)
5001 return result;
5002 }
5003 }
5004
5005 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5006 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5007
5008 /* If the RHS can be evaluated unconditionally and its operands are
5009 simple, it wins to evaluate the RHS unconditionally on machines
5010 with expensive branches. In this case, this isn't a comparison
5011 that can be merged. Avoid doing this if the RHS is a floating-point
5012 comparison since those can trap. */
5013
5014 if (BRANCH_COST >= 2
5015 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5016 && simple_operand_p (rl_arg)
5017 && simple_operand_p (rr_arg))
5018 {
5019 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5020 if (code == TRUTH_OR_EXPR
5021 && lcode == NE_EXPR && integer_zerop (lr_arg)
5022 && rcode == NE_EXPR && integer_zerop (rr_arg)
5023 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5024 return build2 (NE_EXPR, truth_type,
5025 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5026 ll_arg, rl_arg),
5027 build_int_cst (TREE_TYPE (ll_arg), 0));
5028
5029 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5030 if (code == TRUTH_AND_EXPR
5031 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5032 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5033 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
5034 return build2 (EQ_EXPR, truth_type,
5035 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5036 ll_arg, rl_arg),
5037 build_int_cst (TREE_TYPE (ll_arg), 0));
5038
5039 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
5040 {
5041 if (code != orig_code || lhs != orig_lhs || rhs != orig_rhs)
5042 return build2 (code, truth_type, lhs, rhs);
5043 return NULL_TREE;
5044 }
5045 }
5046
5047 /* See if the comparisons can be merged. Then get all the parameters for
5048 each side. */
5049
5050 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5051 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5052 return 0;
5053
5054 volatilep = 0;
5055 ll_inner = decode_field_reference (ll_arg,
5056 &ll_bitsize, &ll_bitpos, &ll_mode,
5057 &ll_unsignedp, &volatilep, &ll_mask,
5058 &ll_and_mask);
5059 lr_inner = decode_field_reference (lr_arg,
5060 &lr_bitsize, &lr_bitpos, &lr_mode,
5061 &lr_unsignedp, &volatilep, &lr_mask,
5062 &lr_and_mask);
5063 rl_inner = decode_field_reference (rl_arg,
5064 &rl_bitsize, &rl_bitpos, &rl_mode,
5065 &rl_unsignedp, &volatilep, &rl_mask,
5066 &rl_and_mask);
5067 rr_inner = decode_field_reference (rr_arg,
5068 &rr_bitsize, &rr_bitpos, &rr_mode,
5069 &rr_unsignedp, &volatilep, &rr_mask,
5070 &rr_and_mask);
5071
5072 /* It must be true that the inner operation on the lhs of each
5073 comparison must be the same if we are to be able to do anything.
5074 Then see if we have constants. If not, the same must be true for
5075 the rhs's. */
5076 if (volatilep || ll_inner == 0 || rl_inner == 0
5077 || ! operand_equal_p (ll_inner, rl_inner, 0))
5078 return 0;
5079
5080 if (TREE_CODE (lr_arg) == INTEGER_CST
5081 && TREE_CODE (rr_arg) == INTEGER_CST)
5082 l_const = lr_arg, r_const = rr_arg;
5083 else if (lr_inner == 0 || rr_inner == 0
5084 || ! operand_equal_p (lr_inner, rr_inner, 0))
5085 return 0;
5086 else
5087 l_const = r_const = 0;
5088
5089 /* If either comparison code is not correct for our logical operation,
5090 fail. However, we can convert a one-bit comparison against zero into
5091 the opposite comparison against that bit being set in the field. */
5092
5093 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5094 if (lcode != wanted_code)
5095 {
5096 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5097 {
5098 /* Make the left operand unsigned, since we are only interested
5099 in the value of one bit. Otherwise we are doing the wrong
5100 thing below. */
5101 ll_unsignedp = 1;
5102 l_const = ll_mask;
5103 }
5104 else
5105 return 0;
5106 }
5107
5108 /* This is analogous to the code for l_const above. */
5109 if (rcode != wanted_code)
5110 {
5111 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5112 {
5113 rl_unsignedp = 1;
5114 r_const = rl_mask;
5115 }
5116 else
5117 return 0;
5118 }
5119
5120 /* See if we can find a mode that contains both fields being compared on
5121 the left. If we can't, fail. Otherwise, update all constants and masks
5122 to be relative to a field of that size. */
5123 first_bit = MIN (ll_bitpos, rl_bitpos);
5124 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5125 lnmode = get_best_mode (end_bit - first_bit, first_bit,
5126 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5127 volatilep);
5128 if (lnmode == VOIDmode)
5129 return 0;
5130
5131 lnbitsize = GET_MODE_BITSIZE (lnmode);
5132 lnbitpos = first_bit & ~ (lnbitsize - 1);
5133 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5134 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5135
5136 if (BYTES_BIG_ENDIAN)
5137 {
5138 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5139 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5140 }
5141
5142 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
5143 size_int (xll_bitpos), 0);
5144 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
5145 size_int (xrl_bitpos), 0);
5146
5147 if (l_const)
5148 {
5149 l_const = fold_convert (lntype, l_const);
5150 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5151 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
5152 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5153 fold_build1 (BIT_NOT_EXPR,
5154 lntype, ll_mask),
5155 0)))
5156 {
5157 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5158
5159 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5160 }
5161 }
5162 if (r_const)
5163 {
5164 r_const = fold_convert (lntype, r_const);
5165 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5166 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
5167 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5168 fold_build1 (BIT_NOT_EXPR,
5169 lntype, rl_mask),
5170 0)))
5171 {
5172 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5173
5174 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5175 }
5176 }
5177
5178 /* If the right sides are not constant, do the same for it. Also,
5179 disallow this optimization if a size or signedness mismatch occurs
5180 between the left and right sides. */
5181 if (l_const == 0)
5182 {
5183 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5184 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5185 /* Make sure the two fields on the right
5186 correspond to the left without being swapped. */
5187 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5188 return 0;
5189
5190 first_bit = MIN (lr_bitpos, rr_bitpos);
5191 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5192 rnmode = get_best_mode (end_bit - first_bit, first_bit,
5193 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5194 volatilep);
5195 if (rnmode == VOIDmode)
5196 return 0;
5197
5198 rnbitsize = GET_MODE_BITSIZE (rnmode);
5199 rnbitpos = first_bit & ~ (rnbitsize - 1);
5200 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5201 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5202
5203 if (BYTES_BIG_ENDIAN)
5204 {
5205 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5206 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5207 }
5208
5209 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5210 size_int (xlr_bitpos), 0);
5211 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5212 size_int (xrr_bitpos), 0);
5213
5214 /* Make a mask that corresponds to both fields being compared.
5215 Do this for both items being compared. If the operands are the
5216 same size and the bits being compared are in the same position
5217 then we can do this by masking both and comparing the masked
5218 results. */
5219 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5220 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5221 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5222 {
5223 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5224 ll_unsignedp || rl_unsignedp);
5225 if (! all_ones_mask_p (ll_mask, lnbitsize))
5226 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5227
5228 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5229 lr_unsignedp || rr_unsignedp);
5230 if (! all_ones_mask_p (lr_mask, rnbitsize))
5231 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5232
5233 return build2 (wanted_code, truth_type, lhs, rhs);
5234 }
5235
5236 /* There is still another way we can do something: If both pairs of
5237 fields being compared are adjacent, we may be able to make a wider
5238 field containing them both.
5239
5240 Note that we still must mask the lhs/rhs expressions. Furthermore,
5241 the mask must be shifted to account for the shift done by
5242 make_bit_field_ref. */
5243 if ((ll_bitsize + ll_bitpos == rl_bitpos
5244 && lr_bitsize + lr_bitpos == rr_bitpos)
5245 || (ll_bitpos == rl_bitpos + rl_bitsize
5246 && lr_bitpos == rr_bitpos + rr_bitsize))
5247 {
5248 tree type;
5249
5250 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5251 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5252 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5253 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5254
5255 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5256 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5257 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5258 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5259
5260 /* Convert to the smaller type before masking out unwanted bits. */
5261 type = lntype;
5262 if (lntype != rntype)
5263 {
5264 if (lnbitsize > rnbitsize)
5265 {
5266 lhs = fold_convert (rntype, lhs);
5267 ll_mask = fold_convert (rntype, ll_mask);
5268 type = rntype;
5269 }
5270 else if (lnbitsize < rnbitsize)
5271 {
5272 rhs = fold_convert (lntype, rhs);
5273 lr_mask = fold_convert (lntype, lr_mask);
5274 type = lntype;
5275 }
5276 }
5277
5278 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5279 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5280
5281 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5282 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5283
5284 return build2 (wanted_code, truth_type, lhs, rhs);
5285 }
5286
5287 return 0;
5288 }
5289
5290 /* Handle the case of comparisons with constants. If there is something in
5291 common between the masks, those bits of the constants must be the same.
5292 If not, the condition is always false. Test for this to avoid generating
5293 incorrect code below. */
5294 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5295 if (! integer_zerop (result)
5296 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5297 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5298 {
5299 if (wanted_code == NE_EXPR)
5300 {
5301 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5302 return constant_boolean_node (true, truth_type);
5303 }
5304 else
5305 {
5306 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5307 return constant_boolean_node (false, truth_type);
5308 }
5309 }
5310
5311 /* Construct the expression we will return. First get the component
5312 reference we will make. Unless the mask is all ones the width of
5313 that field, perform the mask operation. Then compare with the
5314 merged constant. */
5315 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5316 ll_unsignedp || rl_unsignedp);
5317
5318 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5319 if (! all_ones_mask_p (ll_mask, lnbitsize))
5320 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5321
5322 return build2 (wanted_code, truth_type, result,
5323 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5324 }
5325 \f
/* Optimize a comparison (CODE) of a MIN_EXPR or MAX_EXPR (OP0, possibly
   wrapped in sign-preserving conversions) against an integer constant
   (OP1).  TYPE is the type of the comparison result.  Return the
   simplified tree, or NULL_TREE if no optimization is possible.  */

static tree
optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0 = op0;
  enum tree_code op_code;
  tree comp_const = op1;
  tree minmax_const;
  int consts_equal, consts_lt;
  tree inner;

  /* Look through conversions that do not change the sign of OP0.  */
  STRIP_SIGN_NOPS (arg0);

  op_code = TREE_CODE (arg0);
  minmax_const = TREE_OPERAND (arg0, 1);  /* The constant bound of MIN/MAX.  */
  consts_equal = tree_int_cst_equal (minmax_const, comp_const);
  consts_lt = tree_int_cst_lt (minmax_const, comp_const);
  inner = TREE_OPERAND (arg0, 0);         /* The variable operand X.  */

  /* If something does not permit us to optimize, return the original tree.
     Both constants must be overflow-free INTEGER_CSTs and the inner
     expression must really be a MIN_EXPR or MAX_EXPR.  */
  if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
      || TREE_CODE (comp_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (comp_const)
      || TREE_CODE (minmax_const) != INTEGER_CST
      || TREE_CONSTANT_OVERFLOW (minmax_const))
    return NULL_TREE;

  /* Now handle all the various comparison codes.  We only handle EQ_EXPR
     and GT_EXPR, doing the rest with recursive calls using logical
     simplifications.  */
  switch (code)
    {
    case NE_EXPR:  case LT_EXPR:  case LE_EXPR:
      /* Fold the logically inverted comparison and invert the result,
	 e.g. m != c is !(m == c).  */
      {
	tree tem = optimize_minmax_comparison (invert_tree_comparison (code, false),
					       type, op0, op1);
	if (tem)
	  return invert_truthvalue (tem);
	return NULL_TREE;
      }

    case GE_EXPR:
      /* m >= c is (m == c) || (m > c).  */
      return
	fold_build2 (TRUTH_ORIF_EXPR, type,
		     optimize_minmax_comparison
		     (EQ_EXPR, type, arg0, comp_const),
		     optimize_minmax_comparison
		     (GT_EXPR, type, arg0, comp_const));

    case EQ_EXPR:
      if (op_code == MAX_EXPR && consts_equal)
	/* MAX (X, 0) == 0 -> X <= 0 */
	return fold_build2 (LE_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR && consts_lt)
	/* MAX (X, 0) == 5 -> X == 5 */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) == -1 -> false */
	return omit_one_operand (type, integer_zero_node, inner);

      else if (consts_equal)
	/* MIN (X, 0) == 0 -> X >= 0 */
	return fold_build2 (GE_EXPR, type, inner, comp_const);

      else if (consts_lt)
	/* MIN (X, 0) == 5 -> false */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) == -1 -> X == -1 */
	return fold_build2 (EQ_EXPR, type, inner, comp_const);

    case GT_EXPR:
      if (op_code == MAX_EXPR && (consts_equal || consts_lt))
	/* MAX (X, 0) > 0 -> X > 0
	   MAX (X, 0) > 5 -> X > 5 */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

      else if (op_code == MAX_EXPR)
	/* MAX (X, 0) > -1 -> true */
	return omit_one_operand (type, integer_one_node, inner);

      else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
	/* MIN (X, 0) > 0 -> false
	   MIN (X, 0) > 5 -> false */
	return omit_one_operand (type, integer_zero_node, inner);

      else
	/* MIN (X, 0) > -1 -> X > -1 */
	return fold_build2 (GT_EXPR, type, inner, comp_const);

    default:
      /* Other comparison codes are not handled.  */
      return NULL_TREE;
    }
}
5425 \f
5426 /* T is an integer expression that is being multiplied, divided, or taken a
5427 modulus (CODE says which and what kind of divide or modulus) by a
5428 constant C. See if we can eliminate that operation by folding it with
5429 other operations already in T. WIDE_TYPE, if non-null, is a type that
5430 should be used for the computation if wider than our type.
5431
5432 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5433 (X * 2) + (Y * 4). We must, however, be assured that either the original
5434 expression would not overflow or that overflow is undefined for the type
5435 in the language in question.
5436
5437 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5438 the machine has a multiply-accumulate insn or that this is part of an
5439 addressing calculation.
5440
5441 If we return a non-null expression, it is an equivalent form of the
5442 original computation, but need not be in the original type. */
5443
5444 static tree
5445 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5446 {
5447 /* To avoid exponential search depth, refuse to allow recursion past
5448 three levels. Beyond that (1) it's highly unlikely that we'll find
5449 something interesting and (2) we've probably processed it before
5450 when we built the inner expression. */
5451
5452 static int depth;
5453 tree ret;
5454
5455 if (depth > 3)
5456 return NULL;
5457
5458 depth++;
5459 ret = extract_muldiv_1 (t, c, code, wide_type);
5460 depth--;
5461
5462 return ret;
5463 }
5464
/* Worker for extract_muldiv: attempt to distribute the multiply/divide/
   modulus operation CODE by constant C through the expression T, using
   WIDE_TYPE for the computation when it is wider than T's type.  Return
   the rewritten expression, or 0 when no simplification is possible.  */

static tree
extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
{
  tree type = TREE_TYPE (t);
  enum tree_code tcode = TREE_CODE (t);
  /* Compute in WIDE_TYPE only when it is strictly wider than TYPE.  */
  tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
				   > GET_MODE_SIZE (TYPE_MODE (type)))
		? wide_type : type);
  tree t1, t2;
  int same_p = tcode == code;	/* T's operation matches the one applied.  */
  tree op0 = NULL_TREE, op1 = NULL_TREE;

  /* Don't deal with constants of zero here; they confuse the code below.  */
  if (integer_zerop (c))
    return NULL_TREE;

  if (TREE_CODE_CLASS (tcode) == tcc_unary)
    op0 = TREE_OPERAND (t, 0);

  if (TREE_CODE_CLASS (tcode) == tcc_binary)
    op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);

  /* Note that we need not handle conditional operations here since fold
     already handles those cases.  So just do arithmetic here.  */
  switch (tcode)
    {
    case INTEGER_CST:
      /* For a constant, we can always simplify if we are a multiply
	 or (for divide and modulus) if it is a multiple of our constant.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
	return const_binop (code, fold_convert (ctype, t),
			    fold_convert (ctype, c), 0);
      break;

    case CONVERT_EXPR:  case NON_LVALUE_EXPR:  case NOP_EXPR:
      /* If op0 is an expression ... */
      if ((COMPARISON_CLASS_P (op0)
	   || UNARY_CLASS_P (op0)
	   || BINARY_CLASS_P (op0)
	   || EXPRESSION_CLASS_P (op0))
	  /* ... and is unsigned, and its type is smaller than ctype,
	     then we cannot pass through as widening.  */
	  && ((TYPE_UNSIGNED (TREE_TYPE (op0))
	       && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
		     && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
	       && (GET_MODE_SIZE (TYPE_MODE (ctype))
		   > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
	      /* ... or this is a truncation (t is narrower than op0),
		 then we cannot pass through this narrowing.  */
	      || (GET_MODE_SIZE (TYPE_MODE (type))
		  < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
	      /* ... or signedness changes for division or modulus,
		 then we cannot pass through this conversion.  */
	      || (code != MULT_EXPR
		  && (TYPE_UNSIGNED (ctype)
		      != TYPE_UNSIGNED (TREE_TYPE (op0))))))
	break;

      /* Pass the constant down and see if we can make a simplification.  If
	 we can, replace this expression with the inner simplification for
	 possible later conversion to our or some other type.  */
      if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
	  && TREE_CODE (t2) == INTEGER_CST
	  && ! TREE_CONSTANT_OVERFLOW (t2)
	  && (0 != (t1 = extract_muldiv (op0, t2, code,
					 code == MULT_EXPR
					 ? ctype : NULL_TREE))))
	return t1;
      break;

    case ABS_EXPR:
      /* If widening the type changes it from signed to unsigned, then we
	 must avoid building ABS_EXPR itself as unsigned.  */
      if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
	{
	  tree cstype = (*lang_hooks.types.signed_type) (ctype);
	  if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
	    {
	      t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
	      return fold_convert (ctype, t1);
	    }
	  break;
	}
      /* FALLTHROUGH */
    case NEGATE_EXPR:
      /* Push the operation inside the unary operator:
	 e.g. (-X) / C -> -(X / C).  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
      break;

    case MIN_EXPR:  case MAX_EXPR:
      /* If widening the type changes the signedness, then we can't perform
	 this optimization as that changes the result.  */
      if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
	break;

      /* MIN (a, b) / 5 -> MIN (a / 5, b / 5)  */
      if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
	  && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
	{
	  /* Multiplying or dividing by a negative constant reverses the
	     ordering, so MIN becomes MAX and vice versa.  */
	  if (tree_int_cst_sgn (c) < 0)
	    tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);

	  return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			      fold_convert (ctype, t2));
	}
      break;

    case LSHIFT_EXPR:  case RSHIFT_EXPR:
      /* If the second operand is constant, this is a multiplication
	 or floor division, by a power of two, so we can treat it that
	 way unless the multiplier or divisor overflows.  Signed
	 left-shift overflow is implementation-defined rather than
	 undefined in C90, so do not convert signed left shift into
	 multiplication.  */
      if (TREE_CODE (op1) == INTEGER_CST
	  && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
	  && TREE_INT_CST_HIGH (op1) == 0
	  && 0 != (t1 = fold_convert (ctype,
				      const_binop (LSHIFT_EXPR,
						   size_one_node,
						   op1, 0)))
	  && ! TREE_OVERFLOW (t1))
	return extract_muldiv (build2 (tcode == LSHIFT_EXPR
				       ? MULT_EXPR : FLOOR_DIV_EXPR,
				       ctype, fold_convert (ctype, op0), t1),
			       c, code, wide_type);
      break;

    case PLUS_EXPR:  case MINUS_EXPR:
      /* See if we can eliminate the operation on both sides.  If we can, we
	 can return a new PLUS or MINUS.  If we can't, the only remaining
	 cases where we can do anything are if the second operand is a
	 constant.  */
      t1 = extract_muldiv (op0, c, code, wide_type);
      t2 = extract_muldiv (op1, c, code, wide_type);
      if (t1 != 0 && t2 != 0
	  && (code == MULT_EXPR
	      /* If not multiplication, we can only do this if both operands
		 are divisible by c.  */
	      || (multiple_of_p (ctype, op0, c)
		  && multiple_of_p (ctype, op1, c))))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, t2));

      /* If this was a subtraction, negate OP1 and set it to be an addition.
	 This simplifies the logic below.  */
      if (tcode == MINUS_EXPR)
	tcode = PLUS_EXPR, op1 = negate_expr (op1);

      if (TREE_CODE (op1) != INTEGER_CST)
	break;

      /* If either OP1 or C are negative, this optimization is not safe for
	 some of the division and remainder types while for others we need
	 to change the code.  */
      if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
	{
	  if (code == CEIL_DIV_EXPR)
	    code = FLOOR_DIV_EXPR;
	  else if (code == FLOOR_DIV_EXPR)
	    code = CEIL_DIV_EXPR;
	  else if (code != MULT_EXPR
		   && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
	    break;
	}

      /* If it's a multiply or a division/modulus operation of a multiple
	 of our constant, do the operation and verify it doesn't overflow.  */
      if (code == MULT_EXPR
	  || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	{
	  op1 = const_binop (code, fold_convert (ctype, op1),
			     fold_convert (ctype, c), 0);
	  /* We allow the constant to overflow with wrapping semantics.  */
	  if (op1 == 0
	      || (TREE_OVERFLOW (op1) && ! flag_wrapv))
	    break;
	}
      else
	break;

      /* If we have an unsigned type that is not a sizetype, we cannot widen
	 the operation since it will change the result if the original
	 computation overflowed.  */
      if (TYPE_UNSIGNED (ctype)
	  && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
	  && ctype != type)
	break;

      /* If we were able to eliminate our operation from the first side,
	 apply our operation to the second side and reform the PLUS.  */
      if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);

      /* The last case is if we are a multiply.  In that case, we can
	 apply the distributive law to commute the multiply and addition
	 if the multiplication of the constants doesn't overflow.  */
      if (code == MULT_EXPR)
	return fold_build2 (tcode, ctype,
			    fold_build2 (code, ctype,
					 fold_convert (ctype, op0),
					 fold_convert (ctype, c)),
			    op1);

      break;

    case MULT_EXPR:
      /* We have a special case here if we are doing something like
	 (C * 8) % 4 since we know that's zero.  */
      if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
	   || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
	  && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
	  && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	return omit_one_operand (type, integer_zero_node, op0);

      /* ... fall through ...  */

    case TRUNC_DIV_EXPR:  case CEIL_DIV_EXPR:  case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:  case EXACT_DIV_EXPR:
      /* If we can extract our operation from the LHS, do so and return a
	 new operation.  Likewise for the RHS from a MULT_EXPR.  Otherwise,
	 do something only if the second operand is a constant.  */
      if (same_p
	  && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
			    fold_convert (ctype, op1));
      else if (tcode == MULT_EXPR && code == MULT_EXPR
	       && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
			    fold_convert (ctype, t1));
      else if (TREE_CODE (op1) != INTEGER_CST)
	return 0;

      /* If these are the same operation types, we can associate them
	 assuming no overflow.  */
      if (tcode == code
	  && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
				     fold_convert (ctype, c), 0))
	  && ! TREE_OVERFLOW (t1))
	return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);

      /* If these operations "cancel" each other, we have the main
	 optimizations of this pass, which occur when either constant is a
	 multiple of the other, in which case we replace this with either an
	 operation or CODE or TCODE.

	 If we have an unsigned type that is not a sizetype, we cannot do
	 this since it will change the result if the original computation
	 overflowed.  */
      if ((! TYPE_UNSIGNED (ctype)
	   || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
	  && ! flag_wrapv
	  && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
	      || (tcode == MULT_EXPR
		  && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
		  && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
	{
	  /* E.g. (X * 8) / 4 -> X * 2, or (X * 4) / 8 -> X / 2.  */
	  if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
	    return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
				fold_convert (ctype,
					      const_binop (TRUNC_DIV_EXPR,
							   op1, c, 0)));
	  else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
	    return fold_build2 (code, ctype, fold_convert (ctype, op0),
				fold_convert (ctype,
					      const_binop (TRUNC_DIV_EXPR,
							   c, op1, 0)));
	}
      break;

    default:
      break;
    }

  return 0;
}
5745 \f
5746 /* Return a node which has the indicated constant VALUE (either 0 or
5747 1), and is of the indicated TYPE. */
5748
5749 tree
5750 constant_boolean_node (int value, tree type)
5751 {
5752 if (type == integer_type_node)
5753 return value ? integer_one_node : integer_zero_node;
5754 else if (type == boolean_type_node)
5755 return value ? boolean_true_node : boolean_false_node;
5756 else
5757 return build_int_cst (type, value);
5758 }
5759
5760
5761 /* Return true if expr looks like an ARRAY_REF and set base and
5762 offset to the appropriate trees. If there is no offset,
5763 offset is set to NULL_TREE. Base will be canonicalized to
5764 something you can get the element type from using
5765 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5766 in bytes to the base. */
5767
5768 static bool
5769 extract_array_ref (tree expr, tree *base, tree *offset)
5770 {
5771 /* One canonical form is a PLUS_EXPR with the first
5772 argument being an ADDR_EXPR with a possible NOP_EXPR
5773 attached. */
5774 if (TREE_CODE (expr) == PLUS_EXPR)
5775 {
5776 tree op0 = TREE_OPERAND (expr, 0);
5777 tree inner_base, dummy1;
5778 /* Strip NOP_EXPRs here because the C frontends and/or
5779 folders present us (int *)&x.a + 4B possibly. */
5780 STRIP_NOPS (op0);
5781 if (extract_array_ref (op0, &inner_base, &dummy1))
5782 {
5783 *base = inner_base;
5784 if (dummy1 == NULL_TREE)
5785 *offset = TREE_OPERAND (expr, 1);
5786 else
5787 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5788 dummy1, TREE_OPERAND (expr, 1));
5789 return true;
5790 }
5791 }
5792 /* Other canonical form is an ADDR_EXPR of an ARRAY_REF,
5793 which we transform into an ADDR_EXPR with appropriate
5794 offset. For other arguments to the ADDR_EXPR we assume
5795 zero offset and as such do not care about the ADDR_EXPR
5796 type and strip possible nops from it. */
5797 else if (TREE_CODE (expr) == ADDR_EXPR)
5798 {
5799 tree op0 = TREE_OPERAND (expr, 0);
5800 if (TREE_CODE (op0) == ARRAY_REF)
5801 {
5802 tree idx = TREE_OPERAND (op0, 1);
5803 *base = TREE_OPERAND (op0, 0);
5804 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5805 array_ref_element_size (op0));
5806 }
5807 else
5808 {
5809 /* Handle array-to-pointer decay as &a. */
5810 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5811 *base = TREE_OPERAND (expr, 0);
5812 else
5813 *base = expr;
5814 *offset = NULL_TREE;
5815 }
5816 return true;
5817 }
5818 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5819 else if (SSA_VAR_P (expr)
5820 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5821 {
5822 *base = expr;
5823 *offset = NULL_TREE;
5824 return true;
5825 }
5826
5827 return false;
5828 }
5829
5830
5831 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5832 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5833 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5834 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5835 COND is the first argument to CODE; otherwise (as in the example
5836 given here), it is the second argument. TYPE is the type of the
5837 original expression. Return NULL_TREE if no simplification is
5838 possible. */
5839
5840 static tree
5841 fold_binary_op_with_conditional_arg (enum tree_code code,
5842 tree type, tree op0, tree op1,
5843 tree cond, tree arg, int cond_first_p)
5844 {
5845 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5846 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5847 tree test, true_value, false_value;
5848 tree lhs = NULL_TREE;
5849 tree rhs = NULL_TREE;
5850
5851 /* This transformation is only worthwhile if we don't have to wrap
5852 arg in a SAVE_EXPR, and the operation can be simplified on at least
5853 one of the branches once its pushed inside the COND_EXPR. */
5854 if (!TREE_CONSTANT (arg))
5855 return NULL_TREE;
5856
5857 if (TREE_CODE (cond) == COND_EXPR)
5858 {
5859 test = TREE_OPERAND (cond, 0);
5860 true_value = TREE_OPERAND (cond, 1);
5861 false_value = TREE_OPERAND (cond, 2);
5862 /* If this operand throws an expression, then it does not make
5863 sense to try to perform a logical or arithmetic operation
5864 involving it. */
5865 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5866 lhs = true_value;
5867 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5868 rhs = false_value;
5869 }
5870 else
5871 {
5872 tree testtype = TREE_TYPE (cond);
5873 test = cond;
5874 true_value = constant_boolean_node (true, testtype);
5875 false_value = constant_boolean_node (false, testtype);
5876 }
5877
5878 arg = fold_convert (arg_type, arg);
5879 if (lhs == 0)
5880 {
5881 true_value = fold_convert (cond_type, true_value);
5882 if (cond_first_p)
5883 lhs = fold_build2 (code, type, true_value, arg);
5884 else
5885 lhs = fold_build2 (code, type, arg, true_value);
5886 }
5887 if (rhs == 0)
5888 {
5889 false_value = fold_convert (cond_type, false_value);
5890 if (cond_first_p)
5891 rhs = fold_build2 (code, type, false_value, arg);
5892 else
5893 rhs = fold_build2 (code, type, arg, false_value);
5894 }
5895
5896 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5897 return fold_convert (type, test);
5898 }
5899
5900 \f
5901 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5902
5903 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5904 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5905 ADDEND is the same as X.
5906
5907 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5908 and finite. The problematic cases are when X is zero, and its mode
5909 has signed zeros. In the case of rounding towards -infinity,
5910 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5911 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5912
5913 static bool
5914 fold_real_zero_addition_p (tree type, tree addend, int negate)
5915 {
5916 if (!real_zerop (addend))
5917 return false;
5918
5919 /* Don't allow the fold with -fsignaling-nans. */
5920 if (HONOR_SNANS (TYPE_MODE (type)))
5921 return false;
5922
5923 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5924 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5925 return true;
5926
5927 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5928 if (TREE_CODE (addend) == REAL_CST
5929 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5930 negate = !negate;
5931
5932 /* The mode has signed zeros, and we have to honor their sign.
5933 In this situation, there is only one case we can return true for.
5934 X - 0 is the same as X unless rounding towards -infinity is
5935 supported. */
5936 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5937 }
5938
/* Subroutine of fold() that checks comparisons of built-in math
   functions against real constants.

   FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
   operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR.  TYPE
   is the type of the result and ARG0 and ARG1 are the operands of the
   comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
		     tree type, tree arg0, tree arg1)
{
  REAL_VALUE_TYPE c;

  /* Currently only sqrt() comparisons are handled.  */
  if (BUILTIN_SQRT_P (fcode))
    {
      /* ARG is the (single) argument of the sqrt call.  */
      tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
      enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));

      c = TREE_REAL_CST (arg1);
      if (REAL_VALUE_NEGATIVE (c))
	{
	  /* sqrt(x) < y is always false, if y is negative.  */
	  if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
	    return omit_one_operand (type, integer_zero_node, arg);

	  /* sqrt(x) > y is always true, if y is negative and we
	     don't care about NaNs, i.e. negative values of x.  */
	  if (code == NE_EXPR || !HONOR_NANS (mode))
	    return omit_one_operand (type, integer_one_node, arg);

	  /* sqrt(x) > y is the same as x >= 0, if y is negative.  */
	  return fold_build2 (GE_EXPR, type, arg,
			      build_real (TREE_TYPE (arg), dconst0));
	}
      else if (code == GT_EXPR || code == GE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  /* Square the constant: sqrt(x) OP c becomes x OP c*c.  */
	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) > y is x == +Inf, when y is very large.  */
	      if (HONOR_INFINITIES (mode))
		return fold_build2 (EQ_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) > y is always false, when y is very large
		 and we don't care about infinities.  */
	      return omit_one_operand (type, integer_zero_node, arg);
	    }

	  /* sqrt(x) > c is the same as x > c*c.  */
	  return fold_build2 (code, type, arg,
			      build_real (TREE_TYPE (arg), c2));
	}
      else if (code == LT_EXPR || code == LE_EXPR)
	{
	  REAL_VALUE_TYPE c2;

	  /* Square the constant: sqrt(x) OP c becomes x OP c*c.  */
	  REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
	  real_convert (&c2, mode, &c2);

	  if (REAL_VALUE_ISINF (c2))
	    {
	      /* sqrt(x) < y is always true, when y is a very large
		 value and we don't care about NaNs or Infinities.  */
	      if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
		return omit_one_operand (type, integer_one_node, arg);

	      /* sqrt(x) < y is x != +Inf when y is very large and we
		 don't care about NaNs.  */
	      if (! HONOR_NANS (mode))
		return fold_build2 (NE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), c2));

	      /* sqrt(x) < y is x >= 0 when y is very large and we
		 don't care about Infinities.  */
	      if (! HONOR_INFINITIES (mode))
		return fold_build2 (GE_EXPR, type, arg,
				    build_real (TREE_TYPE (arg), dconst0));

	      /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large.
		 ARG is used twice below, so it needs a SAVE_EXPR, which
		 we cannot emit at file scope or inside a placeholder.  */
	      if (lang_hooks.decls.global_bindings_p () != 0
		  || CONTAINS_PLACEHOLDER_P (arg))
		return NULL_TREE;

	      arg = save_expr (arg);
	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
				  fold_build2 (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2 (NE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }

	  /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs.  */
	  if (! HONOR_NANS (mode))
	    return fold_build2 (code, type, arg,
				build_real (TREE_TYPE (arg), c2));

	  /* sqrt(x) < c is the same as x >= 0 && x < c*c.  Again ARG
	     is used twice, hence the SAVE_EXPR and scope check.  */
	  if (lang_hooks.decls.global_bindings_p () == 0
	      && ! CONTAINS_PLACEHOLDER_P (arg))
	    {
	      arg = save_expr (arg);
	      return fold_build2 (TRUTH_ANDIF_EXPR, type,
				  fold_build2 (GE_EXPR, type, arg,
					       build_real (TREE_TYPE (arg),
							   dconst0)),
				  fold_build2 (code, type, arg,
					       build_real (TREE_TYPE (arg),
							   c2)));
	    }
	}
    }

  return NULL_TREE;
}
6064
/* Subroutine of fold() that optimizes comparisons against Infinities,
   either +Inf or -Inf.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG1 must be a TREE_REAL_CST.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  enum machine_mode mode;
  REAL_VALUE_TYPE max;
  tree temp;
  bool neg;

  mode = TYPE_MODE (TREE_TYPE (arg0));

  /* For negative infinity swap the sense of the comparison.  */
  neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
  if (neg)
    code = swap_tree_comparison (code);

  switch (code)
    {
    case GT_EXPR:
      /* x > +Inf is always false, if we ignore sNaNs.  */
      if (HONOR_SNANS (mode))
	return NULL_TREE;
      return omit_one_operand (type, integer_zero_node, arg0);

    case LE_EXPR:
      /* x <= +Inf is always true, if we don't care about NaNs.  */
      if (! HONOR_NANS (mode))
	return omit_one_operand (type, integer_one_node, arg0);

      /* x <= +Inf is the same as x == x, i.e. isfinite(x).
	 ARG0 is used twice, so it needs a SAVE_EXPR, which we cannot
	 emit at file scope or inside a placeholder.  */
      if (lang_hooks.decls.global_bindings_p () == 0
	  && ! CONTAINS_PLACEHOLDER_P (arg0))
	{
	  arg0 = save_expr (arg0);
	  return fold_build2 (EQ_EXPR, type, arg0, arg0);
	}
      break;

    case EQ_EXPR:
    case GE_EXPR:
      /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));

    case LT_EXPR:
      /* x < +Inf is always equal to x <= DBL_MAX.  */
      real_maxval (&max, neg, mode);
      return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));

    case NE_EXPR:
      /* x != +Inf is always equal to !(x > DBL_MAX).  */
      real_maxval (&max, neg, mode);
      if (! HONOR_NANS (mode))
	return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
			    arg0, build_real (TREE_TYPE (arg0), max));

      /* With NaNs the negation must stay explicit, since a NaN
	 compares false both ways.  */
      temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
			  arg0, build_real (TREE_TYPE (arg0), max));
      return fold_build1 (TRUTH_NOT_EXPR, type, temp);

    default:
      break;
    }

  return NULL_TREE;
}
6142
/* Subroutine of fold() that optimizes comparisons of a division by
   a nonzero integer constant against an integer constant, i.e.
   X/C1 op C2.

   CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
   GE_EXPR or LE_EXPR.  TYPE is the type of the result and ARG0 and ARG1
   are the operands of the comparison.  ARG0 is the division X/C1, and
   both C1 and ARG1 (i.e. C2) must be INTEGER_CSTs.

   The function returns the constant folded tree if a simplification
   can be made, and NULL_TREE otherwise.  */

static tree
fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree prod, tmp, hi, lo;
  tree arg00 = TREE_OPERAND (arg0, 0);	/* X */
  tree arg01 = TREE_OPERAND (arg0, 1);	/* C1 */
  unsigned HOST_WIDE_INT lpart;
  HOST_WIDE_INT hpart;
  bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
  bool neg_overflow;
  int overflow;

  /* We have to do this the hard way to detect unsigned overflow.
     prod = int_const_binop (MULT_EXPR, arg01, arg1, 0);  */
  overflow = mul_double_with_sign (TREE_INT_CST_LOW (arg01),
				   TREE_INT_CST_HIGH (arg01),
				   TREE_INT_CST_LOW (arg1),
				   TREE_INT_CST_HIGH (arg1),
				   &lpart, &hpart, unsigned_p);
  prod = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
				-1, overflow, false);
  neg_overflow = false;

  /* Compute [LO, HI], the range of values of X for which
     X / C1 == C2 holds; a TREE_OVERFLOW bound marks that end of
     the range as unbounded.  The three cases below differ in how
     integer division rounds for the sign combinations involved.  */
  if (unsigned_p)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1), 0);
      lo = prod;

      /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0).  */
      overflow = add_double_with_sign (TREE_INT_CST_LOW (prod),
				       TREE_INT_CST_HIGH (prod),
				       TREE_INT_CST_LOW (tmp),
				       TREE_INT_CST_HIGH (tmp),
				       &lpart, &hpart, unsigned_p);
      hi = force_fit_type_double (TREE_TYPE (arg00), lpart, hpart,
				  -1, overflow | TREE_OVERFLOW (prod),
				  TREE_CONSTANT_OVERFLOW (prod));
    }
  else if (tree_int_cst_sgn (arg01) >= 0)
    {
      tmp = int_const_binop (MINUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  neg_overflow = true;
	  lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	case 0:
	  /* C2 == 0: the range is symmetric, (-(C1-1), C1-1).  */
	  lo = fold_negate_const (tmp, TREE_TYPE (arg0));
	  hi = tmp;
	  break;

	case 1:
	  hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  else
    {
      /* A negative divisor reverses the relational operators.  */
      code = swap_tree_comparison (code);

      tmp = int_const_binop (PLUS_EXPR, arg01,
			     build_int_cst (TREE_TYPE (arg01), 1), 0);
      switch (tree_int_cst_sgn (arg1))
	{
	case -1:
	  hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
	  lo = prod;
	  break;

	case 0:
	  /* C2 == 0: the range is symmetric around zero.  */
	  hi = fold_negate_const (tmp, TREE_TYPE (arg0));
	  lo = tmp;
	  break;

	case 1:
	  neg_overflow = true;
	  lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
	  hi = prod;
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Rewrite the comparison in terms of the range [LO, HI]; a bound
     carrying TREE_OVERFLOW means that side of the range extends to
     the end of the type, and NEG_OVERFLOW tells which direction the
     overflow went for the always-true/always-false cases.  */
  switch (code)
    {
    case EQ_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_zero_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2 (GE_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2 (LE_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 1, lo, hi);

    case NE_EXPR:
      if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
	return omit_one_operand (type, integer_one_node, arg00);
      if (TREE_OVERFLOW (hi))
	return fold_build2 (LT_EXPR, type, arg00, lo);
      if (TREE_OVERFLOW (lo))
	return fold_build2 (GT_EXPR, type, arg00, hi);
      return build_range_check (type, arg00, 0, lo, hi);

    case LT_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (LT_EXPR, type, arg00, lo);

    case LE_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_zero_node : integer_one_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (LE_EXPR, type, arg00, hi);

    case GT_EXPR:
      if (TREE_OVERFLOW (hi))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (GT_EXPR, type, arg00, hi);

    case GE_EXPR:
      if (TREE_OVERFLOW (lo))
	{
	  tmp = neg_overflow ? integer_one_node : integer_zero_node;
	  return omit_one_operand (type, tmp, arg00);
	}
      return fold_build2 (GE_EXPR, type, arg00, lo);

    default:
      break;
    }

  return NULL_TREE;
}
6307
6308
6309 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6310 equality/inequality test, then return a simplified form of the test
6311 using a sign testing. Otherwise return NULL. TYPE is the desired
6312 result type. */
6313
6314 static tree
6315 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6316 tree result_type)
6317 {
6318 /* If this is testing a single bit, we can optimize the test. */
6319 if ((code == NE_EXPR || code == EQ_EXPR)
6320 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6321 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6322 {
6323 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6324 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6325 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6326
6327 if (arg00 != NULL_TREE
6328 /* This is only a win if casting to a signed type is cheap,
6329 i.e. when arg00's type is not a partial mode. */
6330 && TYPE_PRECISION (TREE_TYPE (arg00))
6331 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6332 {
6333 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6334 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6335 result_type, fold_convert (stype, arg00),
6336 build_int_cst (stype, 0));
6337 }
6338 }
6339
6340 return NULL_TREE;
6341 }
6342
/* If CODE with arguments ARG0 and ARG1 represents a single bit
   equality/inequality test, then return a simplified form of
   the test using shifts and logical operations.  Otherwise return
   NULL.  RESULT_TYPE is the desired result type.  */

tree
fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
		      tree result_type)
{
  /* If this is testing a single bit, we can optimize the test.  */
  if ((code == NE_EXPR || code == EQ_EXPR)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      tree type = TREE_TYPE (arg0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      enum machine_mode operand_mode = TYPE_MODE (type);
      int ops_unsigned;
      tree signed_type, unsigned_type, intermediate_type;
      tree tem, one;

      /* First, see if we can fold the single bit test into a sign-bit
	 test.  */
      tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
						 result_type);
      if (tem)
	return tem;

      /* Otherwise we have (A & C) != 0 where C is a single bit,
	 convert that into ((A >> C2) & 1).  Where C2 = log2(C).
	 Similarly for (A & C) == 0.  */

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */
      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
#ifdef LOAD_EXTEND_OP
      ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
		      && !flag_syntax_only) ? 0 : 1;
#else
      ops_unsigned = 1;
#endif

      signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
      unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
      intermediate_type = ops_unsigned ? unsigned_type : signed_type;
      inner = fold_convert (intermediate_type, inner);

      /* Move the tested bit down to position 0.  */
      if (bitnum != 0)
	inner = build2 (RSHIFT_EXPR, intermediate_type,
			inner, size_int (bitnum));

      one = build_int_cst (intermediate_type, 1);

      /* For == 0 invert the bit so the final AND yields the result.  */
      if (code == EQ_EXPR)
	inner = fold_build2 (BIT_XOR_EXPR, intermediate_type, inner, one);

      /* Put the AND last so it can combine with more things.  */
      inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);

      /* Make sure to return the proper type.  */
      inner = fold_convert (result_type, inner);

      return inner;
    }
  return NULL_TREE;
}
6423
6424 /* Check whether we are allowed to reorder operands arg0 and arg1,
6425 such that the evaluation of arg1 occurs before arg0. */
6426
6427 static bool
6428 reorder_operands_p (tree arg0, tree arg1)
6429 {
6430 if (! flag_evaluation_order)
6431 return true;
6432 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6433 return true;
6434 return ! TREE_SIDE_EFFECTS (arg0)
6435 && ! TREE_SIDE_EFFECTS (arg1);
6436 }
6437
6438 /* Test whether it is preferable two swap two operands, ARG0 and
6439 ARG1, for example because ARG0 is an integer constant and ARG1
6440 isn't. If REORDER is true, only recommend swapping if we can
6441 evaluate the operands in reverse order. */
6442
6443 bool
6444 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6445 {
6446 STRIP_SIGN_NOPS (arg0);
6447 STRIP_SIGN_NOPS (arg1);
6448
6449 if (TREE_CODE (arg1) == INTEGER_CST)
6450 return 0;
6451 if (TREE_CODE (arg0) == INTEGER_CST)
6452 return 1;
6453
6454 if (TREE_CODE (arg1) == REAL_CST)
6455 return 0;
6456 if (TREE_CODE (arg0) == REAL_CST)
6457 return 1;
6458
6459 if (TREE_CODE (arg1) == COMPLEX_CST)
6460 return 0;
6461 if (TREE_CODE (arg0) == COMPLEX_CST)
6462 return 1;
6463
6464 if (TREE_CONSTANT (arg1))
6465 return 0;
6466 if (TREE_CONSTANT (arg0))
6467 return 1;
6468
6469 if (optimize_size)
6470 return 0;
6471
6472 if (reorder && flag_evaluation_order
6473 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6474 return 0;
6475
6476 if (DECL_P (arg1))
6477 return 0;
6478 if (DECL_P (arg0))
6479 return 1;
6480
6481 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6482 for commutative and comparison operators. Ensuring a canonical
6483 form allows the optimizers to find additional redundancies without
6484 having to explicitly check for both orderings. */
6485 if (TREE_CODE (arg0) == SSA_NAME
6486 && TREE_CODE (arg1) == SSA_NAME
6487 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6488 return 1;
6489
6490 return 0;
6491 }
6492
6493 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6494 ARG0 is extended to a wider type. */
6495
6496 static tree
6497 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6498 {
6499 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6500 tree arg1_unw;
6501 tree shorter_type, outer_type;
6502 tree min, max;
6503 bool above, below;
6504
6505 if (arg0_unw == arg0)
6506 return NULL_TREE;
6507 shorter_type = TREE_TYPE (arg0_unw);
6508
6509 #ifdef HAVE_canonicalize_funcptr_for_compare
6510 /* Disable this optimization if we're casting a function pointer
6511 type on targets that require function pointer canonicalization. */
6512 if (HAVE_canonicalize_funcptr_for_compare
6513 && TREE_CODE (shorter_type) == POINTER_TYPE
6514 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6515 return NULL_TREE;
6516 #endif
6517
6518 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6519 return NULL_TREE;
6520
6521 arg1_unw = get_unwidened (arg1, shorter_type);
6522
6523 /* If possible, express the comparison in the shorter mode. */
6524 if ((code == EQ_EXPR || code == NE_EXPR
6525 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6526 && (TREE_TYPE (arg1_unw) == shorter_type
6527 || (TREE_CODE (arg1_unw) == INTEGER_CST
6528 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6529 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6530 && int_fits_type_p (arg1_unw, shorter_type))))
6531 return fold_build2 (code, type, arg0_unw,
6532 fold_convert (shorter_type, arg1_unw));
6533
6534 if (TREE_CODE (arg1_unw) != INTEGER_CST
6535 || TREE_CODE (shorter_type) != INTEGER_TYPE
6536 || !int_fits_type_p (arg1_unw, shorter_type))
6537 return NULL_TREE;
6538
6539 /* If we are comparing with the integer that does not fit into the range
6540 of the shorter type, the result is known. */
6541 outer_type = TREE_TYPE (arg1_unw);
6542 min = lower_bound_in_type (outer_type, shorter_type);
6543 max = upper_bound_in_type (outer_type, shorter_type);
6544
6545 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6546 max, arg1_unw));
6547 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6548 arg1_unw, min));
6549
6550 switch (code)
6551 {
6552 case EQ_EXPR:
6553 if (above || below)
6554 return omit_one_operand (type, integer_zero_node, arg0);
6555 break;
6556
6557 case NE_EXPR:
6558 if (above || below)
6559 return omit_one_operand (type, integer_one_node, arg0);
6560 break;
6561
6562 case LT_EXPR:
6563 case LE_EXPR:
6564 if (above)
6565 return omit_one_operand (type, integer_one_node, arg0);
6566 else if (below)
6567 return omit_one_operand (type, integer_zero_node, arg0);
6568
6569 case GT_EXPR:
6570 case GE_EXPR:
6571 if (above)
6572 return omit_one_operand (type, integer_zero_node, arg0);
6573 else if (below)
6574 return omit_one_operand (type, integer_one_node, arg0);
6575
6576 default:
6577 break;
6578 }
6579
6580 return NULL_TREE;
6581 }
6582
/* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
   ARG0 just the signedness is changed.  */

static tree
fold_sign_changed_comparison (enum tree_code code, tree type,
			      tree arg0, tree arg1)
{
  tree arg0_inner;
  tree inner_type, outer_type;

  /* ARG0 must be a conversion.  */
  if (TREE_CODE (arg0) != NOP_EXPR
      && TREE_CODE (arg0) != CONVERT_EXPR)
    return NULL_TREE;

  outer_type = TREE_TYPE (arg0);
  arg0_inner = TREE_OPERAND (arg0, 0);
  inner_type = TREE_TYPE (arg0_inner);

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* Disable this optimization if we're casting a function pointer
     type on targets that require function pointer canonicalization.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (inner_type) == POINTER_TYPE
      && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
    return NULL_TREE;
#endif

  /* The conversion may only change signedness, not precision.  */
  if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
    return NULL_TREE;

  /* ARG1 must be a constant, or itself a conversion from the same
     inner type.  */
  if (TREE_CODE (arg1) != INTEGER_CST
      && !((TREE_CODE (arg1) == NOP_EXPR
	    || TREE_CODE (arg1) == CONVERT_EXPR)
	   && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
    return NULL_TREE;

  /* If the signedness actually differs, only equality comparisons
     are unaffected by the change of sign.  */
  if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
      && code != NE_EXPR
      && code != EQ_EXPR)
    return NULL_TREE;

  /* Re-express ARG1 in the inner type, preserving overflow flags.  */
  if (TREE_CODE (arg1) == INTEGER_CST)
    arg1 = force_fit_type_double (inner_type, TREE_INT_CST_LOW (arg1),
				  TREE_INT_CST_HIGH (arg1), 0,
				  TREE_OVERFLOW (arg1),
				  TREE_CONSTANT_OVERFLOW (arg1));
  else
    arg1 = fold_convert (inner_type, arg1);

  return fold_build2 (code, type, arg0_inner, arg1);
}
6634
/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
   step of the array.  Reconstructs s and delta in the case of s * delta
   being an integer constant (and thus already folded).
   ADDR is the address.  OP1 is the (possibly multiplicative) offset
   expression.  If the function succeeds, the new address expression is
   returned.  Otherwise NULL_TREE is returned.  */

static tree
try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
{
  tree s, delta, step;
  tree ref = TREE_OPERAND (addr, 0), pref;
  tree ret, pos;
  tree itype;

  /* Canonicalize op1 into a possibly non-constant delta
     and an INTEGER_CST s.  */
  if (TREE_CODE (op1) == MULT_EXPR)
    {
      tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);

      STRIP_NOPS (arg0);
      STRIP_NOPS (arg1);

      if (TREE_CODE (arg0) == INTEGER_CST)
	{
	  s = arg0;
	  delta = arg1;
	}
      else if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  s = arg1;
	  delta = arg0;
	}
      else
	return NULL_TREE;
    }
  else if (TREE_CODE (op1) == INTEGER_CST)
    {
      /* S * DELTA was already folded to a constant; S will be
	 reconstructed from the array element size below.  */
      delta = op1;
      s = NULL_TREE;
    }
  else
    {
      /* Simulate we are delta * 1.  */
      delta = op1;
      s = integer_one_node;
    }

  /* Walk inwards through the reference looking for an ARRAY_REF whose
     element size matches S (or divides DELTA when S is unknown).  */
  for (;; ref = TREE_OPERAND (ref, 0))
    {
      if (TREE_CODE (ref) == ARRAY_REF)
	{
	  itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
	  if (! itype)
	    continue;

	  step = array_ref_element_size (ref);
	  if (TREE_CODE (step) != INTEGER_CST)
	    continue;

	  if (s)
	    {
	      if (! tree_int_cst_equal (step, s))
		continue;
	    }
	  else
	    {
	      /* Try if delta is a multiple of step.  */
	      tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
	      if (! tmp)
		continue;
	      delta = tmp;
	    }

	  break;
	}

      if (!handled_component_p (ref))
	return NULL_TREE;
    }

  /* We found the suitable array reference.  So copy everything up to it,
     and replace the index.  */

  pref = TREE_OPERAND (addr, 0);
  ret = copy_node (pref);
  pos = ret;

  /* Copy the chain of component references down to the ARRAY_REF.  */
  while (pref != ref)
    {
      pref = TREE_OPERAND (pref, 0);
      TREE_OPERAND (pos, 0) = copy_node (pref);
      pos = TREE_OPERAND (pos, 0);
    }

  /* Fold DELTA into the array index of the copied ARRAY_REF.  */
  TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
				       fold_convert (itype,
						     TREE_OPERAND (pos, 1)),
				       fold_convert (itype, delta));

  return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
}
6738
6739
6740 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6741 means A >= Y && A != MAX, but in this case we know that
6742 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6743
6744 static tree
6745 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6746 {
6747 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6748
6749 if (TREE_CODE (bound) == LT_EXPR)
6750 a = TREE_OPERAND (bound, 0);
6751 else if (TREE_CODE (bound) == GT_EXPR)
6752 a = TREE_OPERAND (bound, 1);
6753 else
6754 return NULL_TREE;
6755
6756 typea = TREE_TYPE (a);
6757 if (!INTEGRAL_TYPE_P (typea)
6758 && !POINTER_TYPE_P (typea))
6759 return NULL_TREE;
6760
6761 if (TREE_CODE (ineq) == LT_EXPR)
6762 {
6763 a1 = TREE_OPERAND (ineq, 1);
6764 y = TREE_OPERAND (ineq, 0);
6765 }
6766 else if (TREE_CODE (ineq) == GT_EXPR)
6767 {
6768 a1 = TREE_OPERAND (ineq, 0);
6769 y = TREE_OPERAND (ineq, 1);
6770 }
6771 else
6772 return NULL_TREE;
6773
6774 if (TREE_TYPE (a1) != typea)
6775 return NULL_TREE;
6776
6777 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6778 if (!integer_onep (diff))
6779 return NULL_TREE;
6780
6781 return fold_build2 (GE_EXPR, type, a, y);
6782 }
6783
/* Fold a sum or difference of at least one multiplication.
   Returns the folded tree or NULL if no simplification could be made.  */

static tree
fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
{
  tree arg00, arg01, arg10, arg11;
  tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;

  /* (A * C) +- (B * C) -> (A+-B) * C.
     (A * C) +- A -> A * (C+-1).
     We are most concerned about the case where C is a constant,
     but other combinations show up during loop reduction.  Since
     it is not difficult, try all four possibilities.  */

  /* Decompose each operand into factors; a non-MULT operand is
     treated as itself times one.  */
  if (TREE_CODE (arg0) == MULT_EXPR)
    {
      arg00 = TREE_OPERAND (arg0, 0);
      arg01 = TREE_OPERAND (arg0, 1);
    }
  else
    {
      arg00 = arg0;
      arg01 = build_one_cst (type);
    }
  if (TREE_CODE (arg1) == MULT_EXPR)
    {
      arg10 = TREE_OPERAND (arg1, 0);
      arg11 = TREE_OPERAND (arg1, 1);
    }
  else
    {
      arg10 = arg1;
      arg11 = build_one_cst (type);
    }
  same = NULL_TREE;

  /* SAME becomes the common factor, ALT0/ALT1 the remaining ones.  */
  if (operand_equal_p (arg01, arg11, 0))
    same = arg01, alt0 = arg00, alt1 = arg10;
  else if (operand_equal_p (arg00, arg10, 0))
    same = arg00, alt0 = arg01, alt1 = arg11;
  else if (operand_equal_p (arg00, arg11, 0))
    same = arg00, alt0 = arg01, alt1 = arg10;
  else if (operand_equal_p (arg01, arg10, 0))
    same = arg01, alt0 = arg00, alt1 = arg11;

  /* No identical multiplicands; see if we can find a common
     power-of-two factor in non-power-of-two multiplies.  This
     can help in multi-dimensional array access.  */
  else if (host_integerp (arg01, 0)
	   && host_integerp (arg11, 0))
    {
      HOST_WIDE_INT int01, int11, tmp;
      bool swap = false;
      tree maybe_same;
      int01 = TREE_INT_CST_LOW (arg01);
      int11 = TREE_INT_CST_LOW (arg11);

      /* Move min of absolute values to int11.  */
      if ((int01 >= 0 ? int01 : -int01)
	  < (int11 >= 0 ? int11 : -int11))
	{
	  tmp = int01, int01 = int11, int11 = tmp;
	  alt0 = arg00, arg00 = arg10, arg10 = alt0;
	  /* Remember which original constant is the candidate common
	     factor, so the operand order can be restored below.  */
	  maybe_same = arg01;
	  swap = true;
	}
      else
	maybe_same = arg11;

      if (exact_log2 (abs (int11)) > 0 && int01 % int11 == 0)
	{
	  /* Factor INT11 out of the larger multiplicand.  */
	  alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
			      build_int_cst (TREE_TYPE (arg00),
					     int01 / int11));
	  alt1 = arg10;
	  same = maybe_same;
	  /* Undo the earlier operand swap so ALT0/ALT1 keep the
	     original left/right order (matters for MINUS_EXPR).  */
	  if (swap)
	    maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
	}
    }

  if (same)
    return fold_build2 (MULT_EXPR, type,
			fold_build2 (code, type,
				     fold_convert (type, alt0),
				     fold_convert (type, alt1)),
			fold_convert (type, same));

  return NULL_TREE;
}
6875
/* Subroutine of native_encode_expr.  Encode the INTEGER_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_int (tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  /* BYTE walks the value from least significant byte upwards; OFFSET
     is the corresponding position in the target-ordered buffer.  */
  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      /* The constant is stored in two HOST_WIDE_INT halves; pick the
	 byte from the low or high half as appropriate.  */
      if (bitpos < HOST_BITS_PER_WIDE_INT)
	value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
      else
	value = (unsigned char) (TREE_INT_CST_HIGH (expr)
				 >> (bitpos - HOST_BITS_PER_WIDE_INT));

      if (total_bytes > UNITS_PER_WORD)
	{
	  /* Multi-word value: honor both the target's word order and
	     its byte order within a word.  */
	  word = byte / UNITS_PER_WORD;
	  if (WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
6919
6920
/* Subroutine of native_encode_expr.  Encode the REAL_CST
   specified by EXPR into the buffer PTR of length LEN bytes.
   Return the number of bytes placed in the buffer, or zero
   upon failure.  */

static int
native_encode_real (tree expr, unsigned char *ptr, int len)
{
  tree type = TREE_TYPE (expr);
  int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
  int byte, offset, word, words;
  unsigned char value;

  /* There are always 32 bits in each long, no matter the size of
     the hosts long.  We handle floating point representations with
     up to 192 bits.  */
  long tmp[6];

  if (total_bytes > len)
    return 0;
  words = total_bytes / UNITS_PER_WORD;

  /* Convert the value to the target's floating point format, 32 bits
     per array element.  */
  real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));

  /* BYTE walks the encoded value from least significant byte upwards;
     OFFSET is the corresponding position in the target-ordered buffer.  */
  for (byte = 0; byte < total_bytes; byte++)
    {
      int bitpos = byte * BITS_PER_UNIT;
      value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));

      if (total_bytes > UNITS_PER_WORD)
	{
	  /* Multi-word value: floats use FLOAT_WORDS_BIG_ENDIAN for the
	     word order, BYTES_BIG_ENDIAN within a word.  */
	  word = byte / UNITS_PER_WORD;
	  if (FLOAT_WORDS_BIG_ENDIAN)
	    word = (words - 1) - word;
	  offset = word * UNITS_PER_WORD;
	  if (BYTES_BIG_ENDIAN)
	    offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
	  else
	    offset += byte % UNITS_PER_WORD;
	}
      else
	offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
      ptr[offset] = value;
    }
  return total_bytes;
}
6967
6968 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
6969 specified by EXPR into the buffer PTR of length LEN bytes.
6970 Return the number of bytes placed in the buffer, or zero
6971 upon failure. */
6972
6973 static int
6974 native_encode_complex (tree expr, unsigned char *ptr, int len)
6975 {
6976 int rsize, isize;
6977 tree part;
6978
6979 part = TREE_REALPART (expr);
6980 rsize = native_encode_expr (part, ptr, len);
6981 if (rsize == 0)
6982 return 0;
6983 part = TREE_IMAGPART (expr);
6984 isize = native_encode_expr (part, ptr+rsize, len-rsize);
6985 if (isize != rsize)
6986 return 0;
6987 return rsize + isize;
6988 }
6989
6990
6991 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
6992 specified by EXPR into the buffer PTR of length LEN bytes.
6993 Return the number of bytes placed in the buffer, or zero
6994 upon failure. */
6995
6996 static int
6997 native_encode_vector (tree expr, unsigned char *ptr, int len)
6998 {
6999 int i, size, offset, count;
7000 tree itype, elem, elements;
7001
7002 offset = 0;
7003 elements = TREE_VECTOR_CST_ELTS (expr);
7004 count = TYPE_VECTOR_SUBPARTS (TREE_TYPE (expr));
7005 itype = TREE_TYPE (TREE_TYPE (expr));
7006 size = GET_MODE_SIZE (TYPE_MODE (itype));
7007 for (i = 0; i < count; i++)
7008 {
7009 if (elements)
7010 {
7011 elem = TREE_VALUE (elements);
7012 elements = TREE_CHAIN (elements);
7013 }
7014 else
7015 elem = NULL_TREE;
7016
7017 if (elem)
7018 {
7019 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7020 return 0;
7021 }
7022 else
7023 {
7024 if (offset + size > len)
7025 return 0;
7026 memset (ptr+offset, 0, size);
7027 }
7028 offset += size;
7029 }
7030 return offset;
7031 }
7032
7033
7034 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7035 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7036 buffer PTR of length LEN bytes. Return the number of bytes
7037 placed in the buffer, or zero upon failure. */
7038
7039 static int
7040 native_encode_expr (tree expr, unsigned char *ptr, int len)
7041 {
7042 switch (TREE_CODE (expr))
7043 {
7044 case INTEGER_CST:
7045 return native_encode_int (expr, ptr, len);
7046
7047 case REAL_CST:
7048 return native_encode_real (expr, ptr, len);
7049
7050 case COMPLEX_CST:
7051 return native_encode_complex (expr, ptr, len);
7052
7053 case VECTOR_CST:
7054 return native_encode_vector (expr, ptr, len);
7055
7056 default:
7057 return 0;
7058 }
7059 }
7060
7061
7062 /* Subroutine of native_interpret_expr. Interpret the contents of
7063 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7064 If the buffer cannot be interpreted, return NULL_TREE. */
7065
7066 static tree
7067 native_interpret_int (tree type, unsigned char *ptr, int len)
7068 {
7069 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7070 int byte, offset, word, words;
7071 unsigned char value;
7072 unsigned int HOST_WIDE_INT lo = 0;
7073 HOST_WIDE_INT hi = 0;
7074
7075 if (total_bytes > len)
7076 return NULL_TREE;
7077 if (total_bytes * BITS_PER_UNIT > 2 * HOST_BITS_PER_WIDE_INT)
7078 return NULL_TREE;
7079 words = total_bytes / UNITS_PER_WORD;
7080
7081 for (byte = 0; byte < total_bytes; byte++)
7082 {
7083 int bitpos = byte * BITS_PER_UNIT;
7084 if (total_bytes > UNITS_PER_WORD)
7085 {
7086 word = byte / UNITS_PER_WORD;
7087 if (WORDS_BIG_ENDIAN)
7088 word = (words - 1) - word;
7089 offset = word * UNITS_PER_WORD;
7090 if (BYTES_BIG_ENDIAN)
7091 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7092 else
7093 offset += byte % UNITS_PER_WORD;
7094 }
7095 else
7096 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7097 value = ptr[offset];
7098
7099 if (bitpos < HOST_BITS_PER_WIDE_INT)
7100 lo |= (unsigned HOST_WIDE_INT) value << bitpos;
7101 else
7102 hi |= (unsigned HOST_WIDE_INT) value
7103 << (bitpos - HOST_BITS_PER_WIDE_INT);
7104 }
7105
7106 return build_int_cst_wide_type (type, lo, hi);
7107 }
7108
7109
7110 /* Subroutine of native_interpret_expr. Interpret the contents of
7111 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7112 If the buffer cannot be interpreted, return NULL_TREE. */
7113
7114 static tree
7115 native_interpret_real (tree type, unsigned char *ptr, int len)
7116 {
7117 enum machine_mode mode = TYPE_MODE (type);
7118 int total_bytes = GET_MODE_SIZE (mode);
7119 int byte, offset, word, words;
7120 unsigned char value;
7121 /* There are always 32 bits in each long, no matter the size of
7122 the hosts long. We handle floating point representations with
7123 up to 192 bits. */
7124 REAL_VALUE_TYPE r;
7125 long tmp[6];
7126
7127 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7128 if (total_bytes > len || total_bytes > 24)
7129 return NULL_TREE;
7130 words = total_bytes / UNITS_PER_WORD;
7131
7132 memset (tmp, 0, sizeof (tmp));
7133 for (byte = 0; byte < total_bytes; byte++)
7134 {
7135 int bitpos = byte * BITS_PER_UNIT;
7136 if (total_bytes > UNITS_PER_WORD)
7137 {
7138 word = byte / UNITS_PER_WORD;
7139 if (FLOAT_WORDS_BIG_ENDIAN)
7140 word = (words - 1) - word;
7141 offset = word * UNITS_PER_WORD;
7142 if (BYTES_BIG_ENDIAN)
7143 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7144 else
7145 offset += byte % UNITS_PER_WORD;
7146 }
7147 else
7148 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7149 value = ptr[offset];
7150
7151 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7152 }
7153
7154 real_from_target (&r, tmp, mode);
7155 return build_real (type, r);
7156 }
7157
7158
7159 /* Subroutine of native_interpret_expr. Interpret the contents of
7160 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7161 If the buffer cannot be interpreted, return NULL_TREE. */
7162
7163 static tree
7164 native_interpret_complex (tree type, unsigned char *ptr, int len)
7165 {
7166 tree etype, rpart, ipart;
7167 int size;
7168
7169 etype = TREE_TYPE (type);
7170 size = GET_MODE_SIZE (TYPE_MODE (etype));
7171 if (size * 2 > len)
7172 return NULL_TREE;
7173 rpart = native_interpret_expr (etype, ptr, size);
7174 if (!rpart)
7175 return NULL_TREE;
7176 ipart = native_interpret_expr (etype, ptr+size, size);
7177 if (!ipart)
7178 return NULL_TREE;
7179 return build_complex (type, rpart, ipart);
7180 }
7181
7182
7183 /* Subroutine of native_interpret_expr. Interpret the contents of
7184 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7185 If the buffer cannot be interpreted, return NULL_TREE. */
7186
7187 static tree
7188 native_interpret_vector (tree type, unsigned char *ptr, int len)
7189 {
7190 tree etype, elem, elements;
7191 int i, size, count;
7192
7193 etype = TREE_TYPE (type);
7194 size = GET_MODE_SIZE (TYPE_MODE (etype));
7195 count = TYPE_VECTOR_SUBPARTS (type);
7196 if (size * count > len)
7197 return NULL_TREE;
7198
7199 elements = NULL_TREE;
7200 for (i = count - 1; i >= 0; i--)
7201 {
7202 elem = native_interpret_expr (etype, ptr+(i*size), size);
7203 if (!elem)
7204 return NULL_TREE;
7205 elements = tree_cons (NULL_TREE, elem, elements);
7206 }
7207 return build_vector (type, elements);
7208 }
7209
7210
7211 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7212 the buffer PTR of length LEN as a constant of type TYPE. For
7213 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7214 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7215 return NULL_TREE. */
7216
7217 static tree
7218 native_interpret_expr (tree type, unsigned char *ptr, int len)
7219 {
7220 switch (TREE_CODE (type))
7221 {
7222 case INTEGER_TYPE:
7223 case ENUMERAL_TYPE:
7224 case BOOLEAN_TYPE:
7225 return native_interpret_int (type, ptr, len);
7226
7227 case REAL_TYPE:
7228 return native_interpret_real (type, ptr, len);
7229
7230 case COMPLEX_TYPE:
7231 return native_interpret_complex (type, ptr, len);
7232
7233 case VECTOR_TYPE:
7234 return native_interpret_vector (type, ptr, len);
7235
7236 default:
7237 return NULL_TREE;
7238 }
7239 }
7240
7241
7242 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7243 TYPE at compile-time. If we're unable to perform the conversion
7244 return NULL_TREE. */
7245
7246 static tree
7247 fold_view_convert_expr (tree type, tree expr)
7248 {
7249 /* We support up to 512-bit values (for V8DFmode). */
7250 unsigned char buffer[64];
7251 int len;
7252
7253 /* Check that the host and target are sane. */
7254 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7255 return NULL_TREE;
7256
7257 len = native_encode_expr (expr, buffer, sizeof (buffer));
7258 if (len == 0)
7259 return NULL_TREE;
7260
7261 return native_interpret_expr (type, buffer, len);
7262 }
7263
7264
/* Fold a unary expression of code CODE and type TYPE with operand
   OP0.  Return the folded expression if folding is successful.
   Otherwise, return NULL_TREE.  */

tree
fold_unary (enum tree_code code, tree type, tree op0)
{
  tree tem;
  tree arg0;
  enum tree_code_class kind = TREE_CODE_CLASS (code);

  gcc_assert (IS_EXPR_CODE_CLASS (kind)
	      && TREE_CODE_LENGTH (code) == 1);

  /* ARG0 is OP0 with conversions stripped; OP0 itself is kept for the
     transformations that must see the original operand type.  */
  arg0 = op0;
  if (arg0)
    {
      if (code == NOP_EXPR || code == CONVERT_EXPR
	  || code == FLOAT_EXPR || code == ABS_EXPR)
	{
	  /* Don't use STRIP_NOPS, because signedness of argument type
	     matters.  */
	  STRIP_SIGN_NOPS (arg0);
	}
      else
	{
	  /* Strip any conversions that don't change the mode.  This
	     is safe for every expression, except for a comparison
	     expression because its signedness is derived from its
	     operands.

	     Note that this is done as an internal manipulation within
	     the constant folder, in order to find the simplest
	     representation of the arguments so that their form can be
	     studied.  In any cases, the appropriate type conversions
	     should be put back in the tree that will get out of the
	     constant folder.  */
	  STRIP_NOPS (arg0);
	}
    }

  /* Distribute unary operations into COMPOUND_EXPR and COND_EXPR
     operands, and through comparisons.  */
  if (TREE_CODE_CLASS (code) == tcc_unary)
    {
      if (TREE_CODE (arg0) == COMPOUND_EXPR)
	return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
		       fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == COND_EXPR)
	{
	  tree arg01 = TREE_OPERAND (arg0, 1);
	  tree arg02 = TREE_OPERAND (arg0, 2);
	  if (! VOID_TYPE_P (TREE_TYPE (arg01)))
	    arg01 = fold_build1 (code, type, arg01);
	  if (! VOID_TYPE_P (TREE_TYPE (arg02)))
	    arg02 = fold_build1 (code, type, arg02);
	  tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
			     arg01, arg02);

	  /* If this was a conversion, and all we did was to move into
	     inside the COND_EXPR, bring it back out.  But leave it if
	     it is a conversion from integer to integer and the
	     result precision is no wider than a word since such a
	     conversion is cheap and may be optimized away by combine,
	     while it couldn't if it were outside the COND_EXPR.  Then return
	     so we don't get into an infinite recursion loop taking the
	     conversion out and then back in.  */

	  if ((code == NOP_EXPR || code == CONVERT_EXPR
	       || code == NON_LVALUE_EXPR)
	      && TREE_CODE (tem) == COND_EXPR
	      && TREE_CODE (TREE_OPERAND (tem, 1)) == code
	      && TREE_CODE (TREE_OPERAND (tem, 2)) == code
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
	      && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
	      && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
		  == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
	      && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
		     && (INTEGRAL_TYPE_P
			 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
		     && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
		  || flag_syntax_only))
	    tem = build1 (code, type,
			  build3 (COND_EXPR,
				  TREE_TYPE (TREE_OPERAND
					     (TREE_OPERAND (tem, 1), 0)),
				  TREE_OPERAND (tem, 0),
				  TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
				  TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
	  return tem;
	}
      else if (COMPARISON_CLASS_P (arg0))
	{
	  if (TREE_CODE (type) == BOOLEAN_TYPE)
	    {
	      arg0 = copy_node (arg0);
	      TREE_TYPE (arg0) = type;
	      return arg0;
	    }
	  else if (TREE_CODE (type) != INTEGER_TYPE)
	    return fold_build3 (COND_EXPR, type, arg0,
				fold_build1 (code, type,
					     integer_one_node),
				fold_build1 (code, type,
					     integer_zero_node));
	}
    }

  /* Dispatch on the specific unary operation.  */
  switch (code)
    {
    case NOP_EXPR:
    case FLOAT_EXPR:
    case CONVERT_EXPR:
    case FIX_TRUNC_EXPR:
      /* A conversion to the operand's own type is a no-op.  */
      if (TREE_TYPE (op0) == type)
	return op0;

      /* If we have (type) (a CMP b) and type is an integral type, return
	 new expression involving the new type.  */
      if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
	return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
			    TREE_OPERAND (op0, 1));

      /* Handle cases of two conversions in a row.  */
      if (TREE_CODE (op0) == NOP_EXPR
	  || TREE_CODE (op0) == CONVERT_EXPR)
	{
	  /* INSIDE is the original value's type, INTER the middle
	     conversion's type, and TYPE the final type.  */
	  tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
	  tree inter_type = TREE_TYPE (op0);
	  int inside_int = INTEGRAL_TYPE_P (inside_type);
	  int inside_ptr = POINTER_TYPE_P (inside_type);
	  int inside_float = FLOAT_TYPE_P (inside_type);
	  int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
	  unsigned int inside_prec = TYPE_PRECISION (inside_type);
	  int inside_unsignedp = TYPE_UNSIGNED (inside_type);
	  int inter_int = INTEGRAL_TYPE_P (inter_type);
	  int inter_ptr = POINTER_TYPE_P (inter_type);
	  int inter_float = FLOAT_TYPE_P (inter_type);
	  int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
	  unsigned int inter_prec = TYPE_PRECISION (inter_type);
	  int inter_unsignedp = TYPE_UNSIGNED (inter_type);
	  int final_int = INTEGRAL_TYPE_P (type);
	  int final_ptr = POINTER_TYPE_P (type);
	  int final_float = FLOAT_TYPE_P (type);
	  int final_vec = TREE_CODE (type) == VECTOR_TYPE;
	  unsigned int final_prec = TYPE_PRECISION (type);
	  int final_unsignedp = TYPE_UNSIGNED (type);

	  /* In addition to the cases of two conversions in a row
	     handled below, if we are converting something to its own
	     type via an object of identical or wider precision, neither
	     conversion is needed.  */
	  if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
	      && (((inter_int || inter_ptr) && final_int)
		  || (inter_float && final_float))
	      && inter_prec >= final_prec)
	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));

	  /* Likewise, if the intermediate and final types are either both
	     float or both integer, we don't need the middle conversion if
	     it is wider than the final type and doesn't change the signedness
	     (for integers).  Avoid this if the final type is a pointer
	     since then we sometimes need the inner conversion.  Likewise if
	     the outer has a precision not equal to the size of its mode.  */
	  if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
	       || (inter_float && inside_float)
	       || (inter_vec && inside_vec))
	      && inter_prec >= inside_prec
	      && (inter_float || inter_vec
		  || inter_unsignedp == inside_unsignedp)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
	      && ! final_ptr
	      && (! final_vec || inter_prec == inside_prec))
	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));

	  /* If we have a sign-extension of a zero-extended value, we can
	     replace that by a single zero-extension.  */
	  if (inside_int && inter_int && final_int
	      && inside_prec < inter_prec && inter_prec < final_prec
	      && inside_unsignedp && !inter_unsignedp)
	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));

	  /* Two conversions in a row are not needed unless:
	     - some conversion is floating-point (overstrict for now), or
	     - some conversion is a vector (overstrict for now), or
	     - the intermediate type is narrower than both initial and
	       final, or
	     - the intermediate type and innermost type differ in signedness,
	       and the outermost type is wider than the intermediate, or
	     - the initial type is a pointer type and the precisions of the
	       intermediate and final types differ, or
	     - the final type is a pointer type and the precisions of the
	       initial and intermediate types differ.
	     - the final type is a pointer type and the initial type not
	     - the initial type is a pointer to an array and the final type
	       not.  */
	  if (! inside_float && ! inter_float && ! final_float
	      && ! inside_vec && ! inter_vec && ! final_vec
	      && (inter_prec >= inside_prec || inter_prec >= final_prec)
	      && ! (inside_int && inter_int
		    && inter_unsignedp != inside_unsignedp
		    && inter_prec < final_prec)
	      && ((inter_unsignedp && inter_prec > inside_prec)
		  == (final_unsignedp && final_prec > inter_prec))
	      && ! (inside_ptr && inter_prec != final_prec)
	      && ! (final_ptr && inside_prec != inter_prec)
	      && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
		    && TYPE_MODE (type) == TYPE_MODE (inter_type))
	      && final_ptr == inside_ptr
	      && ! (inside_ptr
		    && TREE_CODE (TREE_TYPE (inside_type)) == ARRAY_TYPE
		    && TREE_CODE (TREE_TYPE (type)) != ARRAY_TYPE))
	    return fold_build1 (code, type, TREE_OPERAND (op0, 0));
	}

      /* Handle (T *)&A.B.C for A being of type T and B and C
	 living at offset zero.  This occurs frequently in
	 C++ upcasting and then accessing the base.  */
      if (TREE_CODE (op0) == ADDR_EXPR
	  && POINTER_TYPE_P (type)
	  && handled_component_p (TREE_OPERAND (op0, 0)))
	{
	  HOST_WIDE_INT bitsize, bitpos;
	  tree offset;
	  enum machine_mode mode;
	  int unsignedp, volatilep;
	  tree base = TREE_OPERAND (op0, 0);
	  base = get_inner_reference (base, &bitsize, &bitpos, &offset,
				      &mode, &unsignedp, &volatilep, false);
	  /* If the reference was to a (constant) zero offset, we can use
	     the address of the base if it has the same base type
	     as the result type.  */
	  if (! offset && bitpos == 0
	      && TYPE_MAIN_VARIANT (TREE_TYPE (type))
		  == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
	    return fold_convert (type, build_fold_addr_expr (base));
	}

      if ((TREE_CODE (op0) == MODIFY_EXPR
	   || TREE_CODE (op0) == GIMPLE_MODIFY_STMT)
	  && TREE_CONSTANT (GENERIC_TREE_OPERAND (op0, 1))
	  /* Detect assigning a bitfield.  */
	  && !(TREE_CODE (GENERIC_TREE_OPERAND (op0, 0)) == COMPONENT_REF
	       && DECL_BIT_FIELD
	       (TREE_OPERAND (GENERIC_TREE_OPERAND (op0, 0), 1))))
	{
	  /* Don't leave an assignment inside a conversion
	     unless assigning a bitfield.  */
	  tem = fold_build1 (code, type, GENERIC_TREE_OPERAND (op0, 1));
	  /* First do the assignment, then return converted constant.  */
	  tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
	  TREE_NO_WARNING (tem) = 1;
	  TREE_USED (tem) = 1;
	  return tem;
	}

      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constants (if x has signed type, the sign bit cannot be set
	 in c).  This folds extension into the BIT_AND_EXPR.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (type) != BOOLEAN_TYPE
	  && TREE_CODE (op0) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
	{
	  tree and = op0;
	  tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
	  int change = 0;

	  if (TYPE_UNSIGNED (TREE_TYPE (and))
	      || (TYPE_PRECISION (type)
		  <= TYPE_PRECISION (TREE_TYPE (and))))
	    change = 1;
	  else if (TYPE_PRECISION (TREE_TYPE (and1))
		   <= HOST_BITS_PER_WIDE_INT
		   && host_integerp (and1, 1))
	    {
	      unsigned HOST_WIDE_INT cst;

	      /* CST is zero iff the sign bit of AND1 is clear, in which
		 case widening the AND is safe for signed types too.  */
	      cst = tree_low_cst (and1, 1);
	      cst &= (HOST_WIDE_INT) -1
		     << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
	      change = (cst == 0);
#ifdef LOAD_EXTEND_OP
	      if (change
		  && !flag_syntax_only
		  && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
		      == ZERO_EXTEND))
		{
		  tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
		  and0 = fold_convert (uns, and0);
		  and1 = fold_convert (uns, and1);
		}
#endif
	    }
	  if (change)
	    {
	      tem = force_fit_type_double (type, TREE_INT_CST_LOW (and1),
					   TREE_INT_CST_HIGH (and1), 0,
					   TREE_OVERFLOW (and1),
					   TREE_CONSTANT_OVERFLOW (and1));
	      return fold_build2 (BIT_AND_EXPR, type,
				  fold_convert (type, and0), tem);
	    }
	}

      /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
	 T2 being pointers to types of the same size.  */
      if (POINTER_TYPE_P (type)
	  && BINARY_CLASS_P (arg0)
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
	  && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
	{
	  tree arg00 = TREE_OPERAND (arg0, 0);
	  tree t0 = type;
	  tree t1 = TREE_TYPE (arg00);
	  tree tt0 = TREE_TYPE (t0);
	  tree tt1 = TREE_TYPE (t1);
	  tree s0 = TYPE_SIZE (tt0);
	  tree s1 = TYPE_SIZE (tt1);

	  if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
	    return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
			   TREE_OPERAND (arg0, 1));
	}

      /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
	 of the same precision, and X is a integer type not narrower than
	 types T1 or T2, i.e. the cast (T2)X isn't an extension.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (op0) == BIT_NOT_EXPR
	  && INTEGRAL_TYPE_P (TREE_TYPE (op0))
	  && (TREE_CODE (TREE_OPERAND (op0, 0)) == NOP_EXPR
	      || TREE_CODE (TREE_OPERAND (op0, 0)) == CONVERT_EXPR)
	  && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
	{
	  tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
	  if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
	      && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
	    return fold_build1 (BIT_NOT_EXPR, type, fold_convert (type, tem));
	}

      /* Fall back to constant folding of the conversion itself.  */
      tem = fold_convert_const (code, type, arg0);
      return tem ? tem : NULL_TREE;

    case VIEW_CONVERT_EXPR:
      /* A view-convert of a view-convert only needs the innermost
	 operand.  */
      if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
	return fold_build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
      return fold_view_convert_expr (type, op0);

    case NEGATE_EXPR:
      tem = fold_negate_expr (arg0);
      if (tem)
	return fold_convert (type, tem);
      return NULL_TREE;

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      else if (TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert fabs((double)float) into (double)fabsf(float).  */
      else if (TREE_CODE (arg0) == NOP_EXPR
	       && TREE_CODE (type) == REAL_TYPE)
	{
	  tree targ0 = strip_float_extensions (arg0);
	  if (targ0 != arg0)
	    return fold_convert (type, fold_build1 (ABS_EXPR,
						    TREE_TYPE (targ0),
						    targ0));
	}
      /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on.  */
      else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
	return arg0;

      /* Strip sign ops from argument.  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = fold_strip_sign_ops (arg0);
	  if (tem)
	    return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
	}
      return NULL_TREE;

    case CONJ_EXPR:
      /* Conjugating a non-complex value is a no-op.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert (itype, TREE_OPERAND (arg0, 0));
	  tree ipart = fold_convert (itype, TREE_OPERAND (arg0, 1));
	  return fold_build2 (COMPLEX_EXPR, type, rpart, negate_expr (ipart));
	}
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree itype = TREE_TYPE (type);
	  tree rpart = fold_convert (itype, TREE_REALPART (arg0));
	  tree ipart = fold_convert (itype, TREE_IMAGPART (arg0));
	  return build_complex (type, rpart, negate_expr (ipart));
	}
      /* conj (conj (x)) == x.  */
      if (TREE_CODE (arg0) == CONJ_EXPR)
	return fold_convert (type, TREE_OPERAND (arg0, 0));
      return NULL_TREE;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
	return TREE_OPERAND (arg0, 0);
      /* Convert ~ (-A) to A - 1.  */
      else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
	return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
			    build_int_cst (type, 1));
      /* Convert ~ (A - 1) or ~ (A + -1) to -A.  */
      else if (INTEGRAL_TYPE_P (type)
	       && ((TREE_CODE (arg0) == MINUS_EXPR
		    && integer_onep (TREE_OPERAND (arg0, 1)))
		   || (TREE_CODE (arg0) == PLUS_EXPR
		       && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
	return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
      /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify.  */
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary (BIT_NOT_EXPR, type,
				     fold_convert (type,
						   TREE_OPERAND (arg0, 0)))))
	return fold_build2 (BIT_XOR_EXPR, type, tem,
			    fold_convert (type, TREE_OPERAND (arg0, 1)));
      else if (TREE_CODE (arg0) == BIT_XOR_EXPR
	       && (tem = fold_unary (BIT_NOT_EXPR, type,
				     fold_convert (type,
						   TREE_OPERAND (arg0, 1)))))
	return fold_build2 (BIT_XOR_EXPR, type,
			    fold_convert (type, TREE_OPERAND (arg0, 0)), tem);

      return NULL_TREE;

    case TRUTH_NOT_EXPR:
      /* The argument to invert_truthvalue must have Boolean type.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
	arg0 = fold_convert (boolean_type_node, arg0);

      /* Note that the operand of this must be an int
	 and its values must be 0 or 1.
	 ("true" is a fixed value perhaps depending on the language,
	 but we don't handle values other than 1 correctly yet.)  */
      tem = fold_truth_not_expr (arg0);
      if (!tem)
	return NULL_TREE;
      return fold_convert (type, tem);

    case REALPART_EXPR:
      /* The real part of a non-complex value is the value itself.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, arg0);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand (type, TREE_OPERAND (arg0, 0),
				 TREE_OPERAND (arg0, 1));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      /* realpart distributes over complex addition/subtraction.  */
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2 (TREE_CODE (arg0), itype,
			     fold_build1 (REALPART_EXPR, itype,
					  TREE_OPERAND (arg0, 0)),
			     fold_build1 (REALPART_EXPR, itype,
					  TREE_OPERAND (arg0, 1)));
	  return fold_convert (type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1 (REALPART_EXPR, itype, TREE_OPERAND (arg0, 0));
	  return fold_convert (type, tem);
	}
      /* realpart (cexpi (x)) -> cos (x).  */
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
	        fn = mathfn_built_in (type, BUILT_IN_COS);
		return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));

	      default:;
	      }
	}
      return NULL_TREE;

    case IMAGPART_EXPR:
      /* The imaginary part of a non-complex value is zero.  */
      if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
	return fold_convert (type, integer_zero_node);
      if (TREE_CODE (arg0) == COMPLEX_EXPR)
	return omit_one_operand (type, TREE_OPERAND (arg0, 1),
				 TREE_OPERAND (arg0, 0));
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      /* imagpart distributes over complex addition/subtraction.  */
      if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build2 (TREE_CODE (arg0), itype,
			     fold_build1 (IMAGPART_EXPR, itype,
					  TREE_OPERAND (arg0, 0)),
			     fold_build1 (IMAGPART_EXPR, itype,
					  TREE_OPERAND (arg0, 1)));
	  return fold_convert (type, tem);
	}
      if (TREE_CODE (arg0) == CONJ_EXPR)
	{
	  tree itype = TREE_TYPE (TREE_TYPE (arg0));
	  tem = fold_build1 (IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
	  return fold_convert (type, negate_expr (tem));
	}
      /* imagpart (cexpi (x)) -> sin (x).  */
      if (TREE_CODE (arg0) == CALL_EXPR)
	{
	  tree fn = get_callee_fndecl (arg0);
	  if (DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
	    switch (DECL_FUNCTION_CODE (fn))
	      {
	      CASE_FLT_FN (BUILT_IN_CEXPI):
	        fn = mathfn_built_in (type, BUILT_IN_SIN);
		return build_function_call_expr (fn, TREE_OPERAND (arg0, 1));

	      default:;
	      }
	}
      return NULL_TREE;

    default:
      return NULL_TREE;
    } /* switch (code) */
}
7796
7797 /* Fold a binary expression of code CODE and type TYPE with operands
7798 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
7799 Return the folded expression if folding is successful. Otherwise,
7800 return NULL_TREE. */
7801
7802 static tree
7803 fold_minmax (enum tree_code code, tree type, tree op0, tree op1)
7804 {
7805 enum tree_code compl_code;
7806
7807 if (code == MIN_EXPR)
7808 compl_code = MAX_EXPR;
7809 else if (code == MAX_EXPR)
7810 compl_code = MIN_EXPR;
7811 else
7812 gcc_unreachable ();
7813
7814 /* MIN (MAX (a, b), b) == b. */
7815 if (TREE_CODE (op0) == compl_code
7816 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
7817 return omit_one_operand (type, op1, TREE_OPERAND (op0, 0));
7818
7819 /* MIN (MAX (b, a), b) == b. */
7820 if (TREE_CODE (op0) == compl_code
7821 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
7822 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
7823 return omit_one_operand (type, op1, TREE_OPERAND (op0, 1));
7824
7825 /* MIN (a, MAX (a, b)) == a. */
7826 if (TREE_CODE (op1) == compl_code
7827 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
7828 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
7829 return omit_one_operand (type, op0, TREE_OPERAND (op1, 1));
7830
7831 /* MIN (a, MAX (b, a)) == a. */
7832 if (TREE_CODE (op1) == compl_code
7833 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
7834 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
7835 return omit_one_operand (type, op0, TREE_OPERAND (op1, 0));
7836
7837 return NULL_TREE;
7838 }
7839
/* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
   by changing CODE to reduce the magnitude of constants involved in
   ARG0 of the comparison.
   Returns a canonicalized comparison tree if a simplification was
   possible, otherwise returns NULL_TREE.

   Two shapes are handled: CST CODE arg1 (a sole constant on the left)
   and A +- CST CODE arg1.  In both cases the strictness of CODE is
   flipped (LT <-> LE, GT <-> GE) so the constant can be moved one
   closer to zero.  The caller (maybe_canonicalize_comparison) has
   already verified that signed-overflow is undefined for the operand
   type, which is what makes the +- CST adjustment valid.  */

static tree
maybe_canonicalize_comparison_1 (enum tree_code code, tree type,
				 tree arg0, tree arg1)
{
  enum tree_code code0 = TREE_CODE (arg0);
  tree t, cst0 = NULL_TREE;
  int sgn0;
  bool swap = false;

  /* Match A +- CST code arg1 and CST code arg1.  */
  if (!(((code0 == MINUS_EXPR
          || code0 == PLUS_EXPR)
         && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
        || code0 == INTEGER_CST))
    return NULL_TREE;

  /* Identify the constant in arg0 and its sign.  */
  if (code0 == INTEGER_CST)
    cst0 = arg0;
  else
    cst0 = TREE_OPERAND (arg0, 1);
  sgn0 = tree_int_cst_sgn (cst0);

  /* Overflowed constants and zero will cause problems.  */
  if (integer_zerop (cst0)
      || TREE_OVERFLOW (cst0))
    return NULL_TREE;

  /* See if we can reduce the magnitude of the constant in
     arg0 by changing the comparison code.  Only the code/sign pairs
     listed below move the constant towards zero; all other
     combinations would increase its magnitude, so we give up.  */
  if (code0 == INTEGER_CST)
    {
      /* CST <= arg1  ->  CST-1 < arg1.  */
      if (code == LE_EXPR && sgn0 == 1)
	code = LT_EXPR;
      /* -CST < arg1  ->  -CST-1 <= arg1.  */
      else if (code == LT_EXPR && sgn0 == -1)
	code = LE_EXPR;
      /* CST > arg1  ->  CST-1 >= arg1.  */
      else if (code == GT_EXPR && sgn0 == 1)
	code = GE_EXPR;
      /* -CST >= arg1  ->  -CST-1 > arg1.  */
      else if (code == GE_EXPR && sgn0 == -1)
	code = GT_EXPR;
      else
        return NULL_TREE;
      /* arg1 code' CST' might be more canonical.  */
      swap = true;
    }
  else
    {
      /* Note that the ?: below selects the arithmetic code (PLUS or
	 MINUS) that, combined with the sign of CST, actually shrinks
	 the constant's absolute value when 1 is subtracted/added.  */
      /* A - CST < arg1  ->  A - CST-1 <= arg1.  */
      if (code == LT_EXPR
	  && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = LE_EXPR;
      /* A + CST > arg1  ->  A + CST-1 >= arg1.  */
      else if (code == GT_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = GE_EXPR;
      /* A + CST <= arg1  ->  A + CST-1 < arg1.  */
      else if (code == LE_EXPR
	       && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
	code = LT_EXPR;
      /* A - CST >= arg1  ->  A - CST-1 > arg1.  */
      else if (code == GE_EXPR
	       && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
	code = GT_EXPR;
      else
	return NULL_TREE;
    }

  /* Now build the constant reduced in magnitude.  The PLUS/MINUS
     choice moves a negative constant up and a positive one down.  */
  t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
		       cst0, build_int_cst (TREE_TYPE (cst0), 1), 0);
  if (code0 != INTEGER_CST)
    t = fold_build2 (code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);

  /* If swapping might yield to a more canonical form, do so.  */
  if (swap)
    return fold_build2 (swap_tree_comparison (code), type, arg1, t);
  else
    return fold_build2 (code, type, t, arg1);
}
7929
7930 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
7931 overflow further. Try to decrease the magnitude of constants involved
7932 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
7933 and put sole constants at the second argument position.
7934 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
7935
7936 static tree
7937 maybe_canonicalize_comparison (enum tree_code code, tree type,
7938 tree arg0, tree arg1)
7939 {
7940 tree t;
7941
7942 /* In principle pointers also have undefined overflow behavior,
7943 but that causes problems elsewhere. */
7944 if ((flag_wrapv || flag_trapv)
7945 || (TYPE_UNSIGNED (TREE_TYPE (arg0))
7946 || POINTER_TYPE_P (TREE_TYPE (arg0))))
7947 return NULL_TREE;
7948
7949 /* Try canonicalization by simplifying arg0. */
7950 t = maybe_canonicalize_comparison_1 (code, type, arg0, arg1);
7951 if (t)
7952 return t;
7953
7954 /* Try canonicalization by simplifying arg1 using the swapped
7955 comparison. */
7956 code = swap_tree_comparison (code);
7957 return maybe_canonicalize_comparison_1 (code, type, arg1, arg0);
7958 }
7959
/* Subroutine of fold_binary.  This routine performs all of the
   transformations that are common to the equality/inequality
   operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary should call fold_binary.  Fold a comparison with
   tree code CODE and type TYPE with operands OP0 and OP1.  Return
   the folded comparison or NULL_TREE.  */

static tree
fold_comparison (enum tree_code code, tree type, tree op0, tree op1)
{
  tree arg0, arg1, tem;

  arg0 = op0;
  arg1 = op1;

  /* Strip only sign-preserving conversions; a comparison's meaning
     depends on the signedness of its operands.  OP0/OP1 retain the
     unstripped trees for transformations that must preserve them.  */
  STRIP_SIGN_NOPS (arg0);
  STRIP_SIGN_NOPS (arg1);

  /* Constant folding first: both operands constant.  */
  tem = fold_relational_const (code, type, arg0, arg1);
  if (tem != NULL_TREE)
    return tem;

  /* If one arg is a real or integer constant, put it last.  */
  if (tree_swap_operands_p (arg0, arg1, true))
    return fold_build2 (swap_tree_comparison (code), type, op1, op0);

  /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1.
     Valid only for signed types without -fwrapv/-ftrapv, where signed
     overflow is undefined; the combined constant must not overflow.  */
  if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
	  && !TYPE_UNSIGNED (TREE_TYPE (arg1))
	  && !(flag_wrapv || flag_trapv))
      && (TREE_CODE (arg1) == INTEGER_CST
	  && !TREE_OVERFLOW (arg1)))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;
      tree variable = TREE_OPERAND (arg0, 0);
      tree lhs;
      int lhs_add;
      /* For X - C1 the constant moves to the other side with a
	 PLUS, for X + C1 with a MINUS.  */
      lhs_add = TREE_CODE (arg0) != PLUS_EXPR;

      lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
			 TREE_TYPE (arg1), const2, const1);
      /* Only use the combined constant if it did not overflow.  */
      if (TREE_CODE (lhs) == TREE_CODE (arg1)
	  && (TREE_CODE (lhs) != INTEGER_CST
	      || !TREE_OVERFLOW (lhs)))
	return fold_build2 (code, type, variable, lhs);
    }

  /* For comparisons of pointers we can decompose it to a compile time
     comparison of the base objects and the offsets into the object.
     This requires at least one operand being an ADDR_EXPR to do more
     than the operand_equal_p test below.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == ADDR_EXPR
	  || TREE_CODE (arg1) == ADDR_EXPR))
    {
      tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
      HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
      enum machine_mode mode;
      int volatilep, unsignedp;
      bool indirect_base0 = false;

      /* Get base and offset for the access.  Strip ADDR_EXPR for
	 get_inner_reference, but put it back by stripping INDIRECT_REF
	 off the base object if possible.  */
      base0 = arg0;
      if (TREE_CODE (arg0) == ADDR_EXPR)
	{
	  base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
				       &bitsize, &bitpos0, &offset0, &mode,
				       &unsignedp, &volatilep, false);
	  if (TREE_CODE (base0) == INDIRECT_REF)
	    base0 = TREE_OPERAND (base0, 0);
	  else
	    indirect_base0 = true;
	}

      base1 = arg1;
      if (TREE_CODE (arg1) == ADDR_EXPR)
	{
	  base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
				       &bitsize, &bitpos1, &offset1, &mode,
				       &unsignedp, &volatilep, false);
	  /* We have to make sure to have an indirect/non-indirect base1
	     just the same as we did for base0.  */
	  if (TREE_CODE (base1) == INDIRECT_REF
	      && !indirect_base0)
	    base1 = TREE_OPERAND (base1, 0);
	  else if (!indirect_base0)
	    base1 = NULL_TREE;
	}
      else if (indirect_base0)
	base1 = NULL_TREE;

      /* If we have equivalent bases we might be able to simplify.  */
      if (base0 && base1
	  && operand_equal_p (base0, base1, 0))
	{
	  /* We can fold this expression to a constant if the non-constant
	     offset parts are equal.  Only the bit positions differ then.  */
	  if (offset0 == offset1
	      || (offset0 && offset1
		  && operand_equal_p (offset0, offset1, 0)))
	    {
	      switch (code)
		{
		case EQ_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 == bitpos1);
		case NE_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 != bitpos1);
		case LT_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 < bitpos1);
		case LE_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 <= bitpos1);
		case GE_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 >= bitpos1);
		case GT_EXPR:
		  return build_int_cst (boolean_type_node, bitpos0 > bitpos1);
		default:;
		}
	    }
	  /* We can simplify the comparison to a comparison of the variable
	     offset parts if the constant offset parts are equal.
	     Be careful to use signed size type here because otherwise we
	     mess with array offsets in the wrong way.  This is possible
	     because pointer arithmetic is restricted to retain within an
	     object and overflow on pointer differences is undefined as of
	     6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
	  else if (bitpos0 == bitpos1)
	    {
	      tree signed_size_type_node;
	      signed_size_type_node = signed_type_for (size_type_node);

	      /* By converting to signed size type we cover middle-end pointer
		 arithmetic which operates on unsigned pointer types of size
		 type size and ARRAY_REF offsets which are properly sign or
		 zero extended from their type in case it is narrower than
		 size type.  A missing offset stands for offset zero.  */
	      if (offset0 == NULL_TREE)
		offset0 = build_int_cst (signed_size_type_node, 0);
	      else
		offset0 = fold_convert (signed_size_type_node, offset0);
	      if (offset1 == NULL_TREE)
		offset1 = build_int_cst (signed_size_type_node, 0);
	      else
		offset1 = fold_convert (signed_size_type_node, offset1);

	      return fold_build2 (code, type, offset0, offset1);
	    }
	}
    }

  /* If this is a comparison of two exprs that look like an ARRAY_REF of the
     same object, then we can fold this to a comparison of the two offsets in
     signed size type.  This is possible because pointer arithmetic is
     restricted to retain within an object and overflow on pointer differences
     is undefined as of 6.5.6/8 and /9 with respect to the signed ptrdiff_t.  */
  if (POINTER_TYPE_P (TREE_TYPE (arg0))
      && !flag_wrapv && !flag_trapv)
    {
      tree base0, offset0, base1, offset1;

      if (extract_array_ref (arg0, &base0, &offset0)
	  && extract_array_ref (arg1, &base1, &offset1)
	  && operand_equal_p (base0, base1, 0))
	{
	  tree signed_size_type_node;
	  signed_size_type_node = signed_type_for (size_type_node);

	  /* By converting to signed size type we cover middle-end pointer
	     arithmetic which operates on unsigned pointer types of size
	     type size and ARRAY_REF offsets which are properly sign or
	     zero extended from their type in case it is narrower than
	     size type.  */
	  if (offset0 == NULL_TREE)
	    offset0 = build_int_cst (signed_size_type_node, 0);
	  else
	    offset0 = fold_convert (signed_size_type_node, offset0);
	  if (offset1 == NULL_TREE)
	    offset1 = build_int_cst (signed_size_type_node, 0);
	  else
	    offset1 = fold_convert (signed_size_type_node, offset1);

	  return fold_build2 (code, type, offset0, offset1);
	}
    }

  /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
     X CMP Y +- C2 +- C1 for signed X, Y.  This is valid if
     the resulting offset is smaller in absolute value than the
     original one.  */
  if (!(flag_wrapv || flag_trapv)
      && !TYPE_UNSIGNED (TREE_TYPE (arg0))
      && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
      && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = TREE_OPERAND (arg1, 1);
      tree variable1 = TREE_OPERAND (arg0, 0);
      tree variable2 = TREE_OPERAND (arg1, 0);
      tree cst;

      /* Put the constant on the side where it doesn't overflow and is
	 of lower absolute value than before.  The tree_int_cst_compare
	 test checks the combined constant really did shrink.  */
      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const2, const1, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
	return fold_build2 (code, type,
			    variable1,
			    fold_build2 (TREE_CODE (arg1), TREE_TYPE (arg1),
					 variable2, cst));

      cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
			     ? MINUS_EXPR : PLUS_EXPR,
			     const1, const2, 0);
      if (!TREE_OVERFLOW (cst)
	  && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
	return fold_build2 (code, type,
			    fold_build2 (TREE_CODE (arg0), TREE_TYPE (arg0),
					 variable1, cst),
			    variable2);
    }

  /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
     signed arithmetic case.  That form is created by the compiler
     often enough for folding it to be of value.  One example is in
     computing loop trip counts after Operator Strength Reduction.  */
  if (!(flag_wrapv || flag_trapv)
      && !TYPE_UNSIGNED (TREE_TYPE (arg0))
      && TREE_CODE (arg0) == MULT_EXPR
      && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
	  && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
      && integer_zerop (arg1))
    {
      tree const1 = TREE_OPERAND (arg0, 1);
      tree const2 = arg1;                       /* zero */
      tree variable1 = TREE_OPERAND (arg0, 0);
      enum tree_code cmp_code = code;

      /* A zero multiplier would have been folded away already.  */
      gcc_assert (!integer_zerop (const1));

      /* If const1 is negative we swap the sense of the comparison.  */
      if (tree_int_cst_sgn (const1) < 0)
	cmp_code = swap_tree_comparison (cmp_code);

      return fold_build2 (cmp_code, type, variable1, const2);
    }

  tem = maybe_canonicalize_comparison (code, type, arg0, arg1);
  if (tem)
    return tem;

  if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
    {
      tree targ0 = strip_float_extensions (arg0);
      tree targ1 = strip_float_extensions (arg1);
      tree newtype = TREE_TYPE (targ0);

      /* Use the wider of the two stripped types for the comparison.  */
      if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
	newtype = TREE_TYPE (targ1);

      /* Fold (double)float1 CMP (double)float2 into float1 CMP float2.  */
      if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
	return fold_build2 (code, type, fold_convert (newtype, targ0),
			    fold_convert (newtype, targ1));

      /* (-a) CMP (-b) -> b CMP a  */
      if (TREE_CODE (arg0) == NEGATE_EXPR
	  && TREE_CODE (arg1) == NEGATE_EXPR)
	return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
			    TREE_OPERAND (arg0, 0));

      if (TREE_CODE (arg1) == REAL_CST)
	{
	  REAL_VALUE_TYPE cst;
	  cst = TREE_REAL_CST (arg1);

	  /* (-a) CMP CST -> a swap(CMP) (-CST)  */
	  if (TREE_CODE (arg0) == NEGATE_EXPR)
	    return fold_build2 (swap_tree_comparison (code), type,
				TREE_OPERAND (arg0, 0),
				build_real (TREE_TYPE (arg1),
					    REAL_VALUE_NEGATE (cst)));

	  /* IEEE doesn't distinguish +0 and -0 in comparisons.  */
	  /* a CMP (-0) -> a CMP 0  */
	  if (REAL_VALUE_MINUS_ZERO (cst))
	    return fold_build2 (code, type, arg0,
				build_real (TREE_TYPE (arg1), dconst0));

	  /* x != NaN is always true, other ops are always false.  */
	  if (REAL_VALUE_ISNAN (cst)
	      && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
	    {
	      tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
	      return omit_one_operand (type, tem, arg0);
	    }

	  /* Fold comparisons against infinity.  */
	  if (REAL_VALUE_ISINF (cst))
	    {
	      tem = fold_inf_compare (code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}

      /* If this is a comparison of a real constant with a PLUS_EXPR
	 or a MINUS_EXPR of a real constant, we can convert it into a
	 comparison with a revised real constant as long as no overflow
	 occurs when unsafe_math_optimizations are enabled.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && (TREE_CODE (arg0) == PLUS_EXPR
	      || TREE_CODE (arg0) == MINUS_EXPR)
	  && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
	  && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
				      ? MINUS_EXPR : PLUS_EXPR,
				      arg1, TREE_OPERAND (arg0, 1), 0))
	  && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);

      /* Likewise, we can simplify a comparison of a real constant with
	 a MINUS_EXPR whose first operand is also a real constant, i.e.
	 (c1 - x) < c2 becomes x > c1-c2.  */
      if (flag_unsafe_math_optimizations
	  && TREE_CODE (arg1) == REAL_CST
	  && TREE_CODE (arg0) == MINUS_EXPR
	  && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
	  && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
				      arg1, 0))
	  && ! TREE_CONSTANT_OVERFLOW (tem))
	return fold_build2 (swap_tree_comparison (code), type,
			    TREE_OPERAND (arg0, 1), tem);

      /* Fold comparisons against built-in math functions.  */
      if (TREE_CODE (arg1) == REAL_CST
	  && flag_unsafe_math_optimizations
	  && ! flag_errno_math)
	{
	  enum built_in_function fcode = builtin_mathfn_code (arg0);

	  if (fcode != END_BUILTINS)
	    {
	      tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
	      if (tem != NULL_TREE)
		return tem;
	    }
	}
    }

  /* Convert foo++ == CONST into ++foo == CONST + INCR.  */
  if (TREE_CONSTANT (arg1)
      && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
	  || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
      /* This optimization is invalid for ordered comparisons
	 if CONST+INCR overflows or if foo+incr might overflow.
	 This optimization is invalid for floating point due to rounding.
	 For pointer types we assume overflow doesn't happen.  */
      && (POINTER_TYPE_P (TREE_TYPE (arg0))
	  || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
	      && (code == EQ_EXPR || code == NE_EXPR))))
    {
      tree varop, newconst;

      if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
	{
	  newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
				  arg1, TREE_OPERAND (arg0, 1));
	  varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
			  TREE_OPERAND (arg0, 0),
			  TREE_OPERAND (arg0, 1));
	}
      else
	{
	  newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
				  arg1, TREE_OPERAND (arg0, 1));
	  varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
			  TREE_OPERAND (arg0, 0),
			  TREE_OPERAND (arg0, 1));
	}


      /* If VAROP is a reference to a bitfield, we must mask
	 the constant by the width of the field.  */
      if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
	  && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
	  && host_integerp (DECL_SIZE (TREE_OPERAND
					 (TREE_OPERAND (varop, 0), 1)), 1))
	{
	  tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
	  HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
	  tree folded_compare, shift;

	  /* First check whether the comparison would come out
	     always the same.  If we don't do that we would
	     change the meaning with the masking.  */
	  folded_compare = fold_build2 (code, type,
					TREE_OPERAND (varop, 0), arg1);
	  if (TREE_CODE (folded_compare) == INTEGER_CST)
	    return omit_one_operand (type, folded_compare, varop);

	  /* Mask NEWCONST to the field's width by shifting it up
	     and back down again.  */
	  shift = build_int_cst (NULL_TREE,
				 TYPE_PRECISION (TREE_TYPE (varop)) - size);
	  shift = fold_convert (TREE_TYPE (varop), shift);
	  newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
				  newconst, shift);
	  newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
				  newconst, shift);
	}

      return fold_build2 (code, type, varop, newconst);
    }

  if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
      && (TREE_CODE (arg0) == NOP_EXPR
	  || TREE_CODE (arg0) == CONVERT_EXPR))
    {
      /* If we are widening one operand of an integer comparison,
	 see if the other operand is similarly being widened.  Perhaps we
	 can do the comparison in the narrower type.  */
      tem = fold_widened_comparison (code, type, arg0, arg1);
      if (tem)
	return tem;

      /* Or if we are changing signedness.  */
      tem = fold_sign_changed_comparison (code, type, arg0, arg1);
      if (tem)
	return tem;
    }

  /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
     constant, we can simplify it.  */
  if (TREE_CODE (arg1) == INTEGER_CST
      && (TREE_CODE (arg0) == MIN_EXPR
	  || TREE_CODE (arg0) == MAX_EXPR)
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
    {
      tem = optimize_minmax_comparison (code, type, op0, op1);
      if (tem)
	return tem;
    }

  /* Simplify comparison of something with itself.  (For IEEE
     floating-point, we can only do some of these simplifications.)  */
  if (operand_equal_p (arg0, arg1, 0))
    {
      switch (code)
	{
	case EQ_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  break;

	case GE_EXPR:
	case LE_EXPR:
	  if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
	      || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    return constant_boolean_node (1, type);
	  return fold_build2 (EQ_EXPR, type, arg0, arg1);

	case NE_EXPR:
	  /* For NE, we can only do this simplification if integer
	     or we don't honor IEEE floating point NaNs.  */
	  if (FLOAT_TYPE_P (TREE_TYPE (arg0))
	      && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
	    break;
	  /* ... fall through ...  */
	case GT_EXPR:
	case LT_EXPR:
	  return constant_boolean_node (0, type);
	default:
	  gcc_unreachable ();
	}
    }

  /* If we are comparing an expression that just has comparisons
     of two integer values, arithmetic expressions of those comparisons,
     and constants, we can simplify it.  There are only three cases
     to check: the two values can either be equal, the first can be
     greater, or the second can be greater.  Fold the expression for
     those three values.  Since each value must be 0 or 1, we have
     eight possibilities, each of which corresponds to the constant 0
     or 1 or one of the six possible comparisons.

     This handles common cases like (a > b) == 0 but also handles
     expressions like  ((x > y) - (y > x)) > 0, which supposedly
     occur in macroized code.  */

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
    {
      tree cval1 = 0, cval2 = 0;
      int save_p = 0;

      if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
	  /* Don't handle degenerate cases here; they should already
	     have been handled anyway.  */
	  && cval1 != 0 && cval2 != 0
	  && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
	  && TREE_TYPE (cval1) == TREE_TYPE (cval2)
	  && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval1))
	  && TYPE_MAX_VALUE (TREE_TYPE (cval2))
	  && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
				TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
	{
	  tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
	  tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));

	  /* We can't just pass T to eval_subst in case cval1 or cval2
	     was the same as ARG1.  */

	  tree high_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, maxval,
					   cval2, minval),
			       arg1);
	  tree equal_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, maxval,
					   cval2, maxval),
			       arg1);
	  tree low_result
		= fold_build2 (code, type,
			       eval_subst (arg0, cval1, minval,
					   cval2, maxval),
			       arg1);

	  /* All three of these results should be 0 or 1.  Confirm they are.
	     Then use those values to select the proper code to use.  */

	  if (TREE_CODE (high_result) == INTEGER_CST
	      && TREE_CODE (equal_result) == INTEGER_CST
	      && TREE_CODE (low_result) == INTEGER_CST)
	    {
	      /* Make a 3-bit mask with the high-order bit being the
		 value for `>', the next for '=', and the low for '<'.  */
	      switch ((integer_onep (high_result) * 4)
		      + (integer_onep (equal_result) * 2)
		      + integer_onep (low_result))
		{
		case 0:
		  /* Always false.  */
		  return omit_one_operand (type, integer_zero_node, arg0);
		case 1:
		  code = LT_EXPR;
		  break;
		case 2:
		  code = EQ_EXPR;
		  break;
		case 3:
		  code = LE_EXPR;
		  break;
		case 4:
		  code = GT_EXPR;
		  break;
		case 5:
		  code = NE_EXPR;
		  break;
		case 6:
		  code = GE_EXPR;
		  break;
		case 7:
		  /* Always true.  */
		  return omit_one_operand (type, integer_one_node, arg0);
		}

	      if (save_p)
		return save_expr (build2 (code, type, cval1, cval2));
	      return fold_build2 (code, type, cval1, cval2);
	    }
	}
    }

  /* Fold a comparison of the address of COMPONENT_REFs with the same
     type and component to a comparison of the address of the base
     object.  In short, &x->a OP &y->a to x OP y and
     &x->a OP &y.a to x OP &y  */
  if (TREE_CODE (arg0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
      && TREE_CODE (arg1) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
    {
      tree cref0 = TREE_OPERAND (arg0, 0);
      tree cref1 = TREE_OPERAND (arg1, 0);
      /* Pointer equality of the FIELD_DECLs implies the same component.  */
      if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
	{
	  /* Note: these locals intentionally shadow the OP0/OP1
	     parameters, which are not needed past this point.  */
	  tree op0 = TREE_OPERAND (cref0, 0);
	  tree op1 = TREE_OPERAND (cref1, 0);
	  return fold_build2 (code, type,
			      build_fold_addr_expr (op0),
			      build_fold_addr_expr (op1));
	}
    }

  /* We can fold X/C1 op C2 where C1 and C2 are integer constants
     into a single range test.  */
  if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
       || TREE_CODE (arg0) == EXACT_DIV_EXPR)
      && TREE_CODE (arg1) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
      && !integer_zerop (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
      && !TREE_OVERFLOW (arg1))
    {
      tem = fold_div_compare (code, type, arg0, arg1);
      if (tem != NULL_TREE)
	return tem;
    }

  /* Fold ~X op ~Y as Y op X.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == BIT_NOT_EXPR)
    return fold_build2 (code, type,
			TREE_OPERAND (arg1, 0),
			TREE_OPERAND (arg0, 0));

  /* Fold ~X op C as X op' ~C, where op' is the swapped comparison.  */
  if (TREE_CODE (arg0) == BIT_NOT_EXPR
      && TREE_CODE (arg1) == INTEGER_CST)
    return fold_build2 (swap_tree_comparison (code), type,
			TREE_OPERAND (arg0, 0),
			fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1));

  return NULL_TREE;
}
8596
8597
8598 /* Subroutine of fold_binary. Optimize complex multiplications of the
8599 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8600 argument EXPR represents the expression "z" of type TYPE. */
8601
8602 static tree
8603 fold_mult_zconjz (tree type, tree expr)
8604 {
8605 tree itype = TREE_TYPE (type);
8606 tree rpart, ipart, tem;
8607
8608 if (TREE_CODE (expr) == COMPLEX_EXPR)
8609 {
8610 rpart = TREE_OPERAND (expr, 0);
8611 ipart = TREE_OPERAND (expr, 1);
8612 }
8613 else if (TREE_CODE (expr) == COMPLEX_CST)
8614 {
8615 rpart = TREE_REALPART (expr);
8616 ipart = TREE_IMAGPART (expr);
8617 }
8618 else
8619 {
8620 expr = save_expr (expr);
8621 rpart = fold_build1 (REALPART_EXPR, itype, expr);
8622 ipart = fold_build1 (IMAGPART_EXPR, itype, expr);
8623 }
8624
8625 rpart = save_expr (rpart);
8626 ipart = save_expr (ipart);
8627 tem = fold_build2 (PLUS_EXPR, itype,
8628 fold_build2 (MULT_EXPR, itype, rpart, rpart),
8629 fold_build2 (MULT_EXPR, itype, ipart, ipart));
8630 return fold_build2 (COMPLEX_EXPR, type, tem,
8631 fold_convert (itype, integer_zero_node));
8632 }
8633
8634
8635 /* Fold a binary expression of code CODE and type TYPE with operands
8636 OP0 and OP1. Return the folded expression if folding is
8637 successful. Otherwise, return NULL_TREE. */
8638
8639 tree
8640 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
8641 {
8642 enum tree_code_class kind = TREE_CODE_CLASS (code);
8643 tree arg0, arg1, tem;
8644 tree t1 = NULL_TREE;
8645
8646 gcc_assert ((IS_EXPR_CODE_CLASS (kind)
8647 || IS_GIMPLE_STMT_CODE_CLASS (kind))
8648 && TREE_CODE_LENGTH (code) == 2
8649 && op0 != NULL_TREE
8650 && op1 != NULL_TREE);
8651
8652 arg0 = op0;
8653 arg1 = op1;
8654
8655 /* Strip any conversions that don't change the mode. This is
8656 safe for every expression, except for a comparison expression
8657 because its signedness is derived from its operands. So, in
8658 the latter case, only strip conversions that don't change the
8659 signedness.
8660
8661 Note that this is done as an internal manipulation within the
8662 constant folder, in order to find the simplest representation
8663 of the arguments so that their form can be studied. In any
8664 cases, the appropriate type conversions should be put back in
8665 the tree that will get out of the constant folder. */
8666
8667 if (kind == tcc_comparison)
8668 {
8669 STRIP_SIGN_NOPS (arg0);
8670 STRIP_SIGN_NOPS (arg1);
8671 }
8672 else
8673 {
8674 STRIP_NOPS (arg0);
8675 STRIP_NOPS (arg1);
8676 }
8677
8678 /* Note that TREE_CONSTANT isn't enough: static var addresses are
8679 constant but we can't do arithmetic on them. */
8680 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
8681 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
8682 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
8683 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST))
8684 {
8685 if (kind == tcc_binary)
8686 tem = const_binop (code, arg0, arg1, 0);
8687 else if (kind == tcc_comparison)
8688 tem = fold_relational_const (code, type, arg0, arg1);
8689 else
8690 tem = NULL_TREE;
8691
8692 if (tem != NULL_TREE)
8693 {
8694 if (TREE_TYPE (tem) != type)
8695 tem = fold_convert (type, tem);
8696 return tem;
8697 }
8698 }
8699
8700 /* If this is a commutative operation, and ARG0 is a constant, move it
8701 to ARG1 to reduce the number of tests below. */
8702 if (commutative_tree_code (code)
8703 && tree_swap_operands_p (arg0, arg1, true))
8704 return fold_build2 (code, type, op1, op0);
8705
8706 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
8707
8708 First check for cases where an arithmetic operation is applied to a
8709 compound, conditional, or comparison operation. Push the arithmetic
8710 operation inside the compound or conditional to see if any folding
8711 can then be done. Convert comparison to conditional for this purpose.
8712 The also optimizes non-constant cases that used to be done in
8713 expand_expr.
8714
8715 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
8716 one of the operands is a comparison and the other is a comparison, a
8717 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
8718 code below would make the expression more complex. Change it to a
8719 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
8720 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
8721
8722 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
8723 || code == EQ_EXPR || code == NE_EXPR)
8724 && ((truth_value_p (TREE_CODE (arg0))
8725 && (truth_value_p (TREE_CODE (arg1))
8726 || (TREE_CODE (arg1) == BIT_AND_EXPR
8727 && integer_onep (TREE_OPERAND (arg1, 1)))))
8728 || (truth_value_p (TREE_CODE (arg1))
8729 && (truth_value_p (TREE_CODE (arg0))
8730 || (TREE_CODE (arg0) == BIT_AND_EXPR
8731 && integer_onep (TREE_OPERAND (arg0, 1)))))))
8732 {
8733 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
8734 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
8735 : TRUTH_XOR_EXPR,
8736 boolean_type_node,
8737 fold_convert (boolean_type_node, arg0),
8738 fold_convert (boolean_type_node, arg1));
8739
8740 if (code == EQ_EXPR)
8741 tem = invert_truthvalue (tem);
8742
8743 return fold_convert (type, tem);
8744 }
8745
8746 if (TREE_CODE_CLASS (code) == tcc_binary
8747 || TREE_CODE_CLASS (code) == tcc_comparison)
8748 {
8749 if (TREE_CODE (arg0) == COMPOUND_EXPR)
8750 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
8751 fold_build2 (code, type,
8752 TREE_OPERAND (arg0, 1), op1));
8753 if (TREE_CODE (arg1) == COMPOUND_EXPR
8754 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
8755 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
8756 fold_build2 (code, type,
8757 op0, TREE_OPERAND (arg1, 1)));
8758
8759 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
8760 {
8761 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8762 arg0, arg1,
8763 /*cond_first_p=*/1);
8764 if (tem != NULL_TREE)
8765 return tem;
8766 }
8767
8768 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
8769 {
8770 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
8771 arg1, arg0,
8772 /*cond_first_p=*/0);
8773 if (tem != NULL_TREE)
8774 return tem;
8775 }
8776 }
8777
8778 switch (code)
8779 {
8780 case PLUS_EXPR:
8781 /* A + (-B) -> A - B */
8782 if (TREE_CODE (arg1) == NEGATE_EXPR)
8783 return fold_build2 (MINUS_EXPR, type,
8784 fold_convert (type, arg0),
8785 fold_convert (type, TREE_OPERAND (arg1, 0)));
8786 /* (-A) + B -> B - A */
8787 if (TREE_CODE (arg0) == NEGATE_EXPR
8788 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
8789 return fold_build2 (MINUS_EXPR, type,
8790 fold_convert (type, arg1),
8791 fold_convert (type, TREE_OPERAND (arg0, 0)));
8792 /* Convert ~A + 1 to -A. */
8793 if (INTEGRAL_TYPE_P (type)
8794 && TREE_CODE (arg0) == BIT_NOT_EXPR
8795 && integer_onep (arg1))
8796 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
8797
8798 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the
8799 same or one. */
8800 if ((TREE_CODE (arg0) == MULT_EXPR
8801 || TREE_CODE (arg1) == MULT_EXPR)
8802 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
8803 {
8804 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
8805 if (tem)
8806 return tem;
8807 }
8808
8809 if (! FLOAT_TYPE_P (type))
8810 {
8811 if (integer_zerop (arg1))
8812 return non_lvalue (fold_convert (type, arg0));
8813
8814 /* ~X + X is -1. */
8815 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8816 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8817 && !TYPE_TRAP_SIGNED (type))
8818 {
8819 t1 = build_int_cst_type (type, -1);
8820 return omit_one_operand (type, t1, arg1);
8821 }
8822
8823 /* X + ~X is -1. */
8824 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8825 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
8826 && !TYPE_TRAP_SIGNED (type))
8827 {
8828 t1 = build_int_cst_type (type, -1);
8829 return omit_one_operand (type, t1, arg0);
8830 }
8831
8832 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
8833 with a constant, and the two constants have no bits in common,
8834 we should treat this as a BIT_IOR_EXPR since this may produce more
8835 simplifications. */
8836 if (TREE_CODE (arg0) == BIT_AND_EXPR
8837 && TREE_CODE (arg1) == BIT_AND_EXPR
8838 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8839 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8840 && integer_zerop (const_binop (BIT_AND_EXPR,
8841 TREE_OPERAND (arg0, 1),
8842 TREE_OPERAND (arg1, 1), 0)))
8843 {
8844 code = BIT_IOR_EXPR;
8845 goto bit_ior;
8846 }
8847
8848 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
8849 (plus (plus (mult) (mult)) (foo)) so that we can
8850 take advantage of the factoring cases below. */
8851 if (((TREE_CODE (arg0) == PLUS_EXPR
8852 || TREE_CODE (arg0) == MINUS_EXPR)
8853 && TREE_CODE (arg1) == MULT_EXPR)
8854 || ((TREE_CODE (arg1) == PLUS_EXPR
8855 || TREE_CODE (arg1) == MINUS_EXPR)
8856 && TREE_CODE (arg0) == MULT_EXPR))
8857 {
8858 tree parg0, parg1, parg, marg;
8859 enum tree_code pcode;
8860
8861 if (TREE_CODE (arg1) == MULT_EXPR)
8862 parg = arg0, marg = arg1;
8863 else
8864 parg = arg1, marg = arg0;
8865 pcode = TREE_CODE (parg);
8866 parg0 = TREE_OPERAND (parg, 0);
8867 parg1 = TREE_OPERAND (parg, 1);
8868 STRIP_NOPS (parg0);
8869 STRIP_NOPS (parg1);
8870
8871 if (TREE_CODE (parg0) == MULT_EXPR
8872 && TREE_CODE (parg1) != MULT_EXPR)
8873 return fold_build2 (pcode, type,
8874 fold_build2 (PLUS_EXPR, type,
8875 fold_convert (type, parg0),
8876 fold_convert (type, marg)),
8877 fold_convert (type, parg1));
8878 if (TREE_CODE (parg0) != MULT_EXPR
8879 && TREE_CODE (parg1) == MULT_EXPR)
8880 return fold_build2 (PLUS_EXPR, type,
8881 fold_convert (type, parg0),
8882 fold_build2 (pcode, type,
8883 fold_convert (type, marg),
8884 fold_convert (type,
8885 parg1)));
8886 }
8887
8888 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
8889 of the array. Loop optimizer sometimes produce this type of
8890 expressions. The loop optimizer sometimes produces this type of expression. */
8891 if (TREE_CODE (arg0) == ADDR_EXPR)
8892 {
8893 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
8894 if (tem)
8895 return fold_convert (type, tem);
8896 }
8897 else if (TREE_CODE (arg1) == ADDR_EXPR)
8898 {
8899 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
8900 if (tem)
8901 return fold_convert (type, tem);
8902 }
8903 }
8904 else
8905 {
8906 /* See if ARG1 is zero and X + ARG1 reduces to X. */
8907 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
8908 return non_lvalue (fold_convert (type, arg0));
8909
8910 /* Likewise if the operands are reversed. */
8911 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
8912 return non_lvalue (fold_convert (type, arg1));
8913
8914 /* Convert X + -C into X - C. */
8915 if (TREE_CODE (arg1) == REAL_CST
8916 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
8917 {
8918 tem = fold_negate_const (arg1, type);
8919 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
8920 return fold_build2 (MINUS_EXPR, type,
8921 fold_convert (type, arg0),
8922 fold_convert (type, tem));
8923 }
8924
8925 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
8926 to __complex__ ( x, y ). This is not the same for SNaNs or
8927 if signed zeros are involved. */
8928 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8929 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
8930 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
8931 {
8932 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
8933 tree arg0r = fold_unary (REALPART_EXPR, rtype, arg0);
8934 tree arg0i = fold_unary (IMAGPART_EXPR, rtype, arg0);
8935 bool arg0rz = false, arg0iz = false;
8936 if ((arg0r && (arg0rz = real_zerop (arg0r)))
8937 || (arg0i && (arg0iz = real_zerop (arg0i))))
8938 {
8939 tree arg1r = fold_unary (REALPART_EXPR, rtype, arg1);
8940 tree arg1i = fold_unary (IMAGPART_EXPR, rtype, arg1);
8941 if (arg0rz && arg1i && real_zerop (arg1i))
8942 {
8943 tree rp = arg1r ? arg1r
8944 : build1 (REALPART_EXPR, rtype, arg1);
8945 tree ip = arg0i ? arg0i
8946 : build1 (IMAGPART_EXPR, rtype, arg0);
8947 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
8948 }
8949 else if (arg0iz && arg1r && real_zerop (arg1r))
8950 {
8951 tree rp = arg0r ? arg0r
8952 : build1 (REALPART_EXPR, rtype, arg0);
8953 tree ip = arg1i ? arg1i
8954 : build1 (IMAGPART_EXPR, rtype, arg1);
8955 return fold_build2 (COMPLEX_EXPR, type, rp, ip);
8956 }
8957 }
8958 }
8959
8960 if (flag_unsafe_math_optimizations
8961 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
8962 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
8963 && (tem = distribute_real_division (code, type, arg0, arg1)))
8964 return tem;
8965
8966 /* Convert x+x into x*2.0. */
8967 if (operand_equal_p (arg0, arg1, 0)
8968 && SCALAR_FLOAT_TYPE_P (type))
8969 return fold_build2 (MULT_EXPR, type, arg0,
8970 build_real (type, dconst2));
8971
8972 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
8973 if (flag_unsafe_math_optimizations
8974 && TREE_CODE (arg1) == PLUS_EXPR
8975 && TREE_CODE (arg0) != MULT_EXPR)
8976 {
8977 tree tree10 = TREE_OPERAND (arg1, 0);
8978 tree tree11 = TREE_OPERAND (arg1, 1);
8979 if (TREE_CODE (tree11) == MULT_EXPR
8980 && TREE_CODE (tree10) == MULT_EXPR)
8981 {
8982 tree tree0;
8983 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
8984 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
8985 }
8986 }
8987 /* Convert (b*c + d*e) + a into b*c + (d*e +a). */
8988 if (flag_unsafe_math_optimizations
8989 && TREE_CODE (arg0) == PLUS_EXPR
8990 && TREE_CODE (arg1) != MULT_EXPR)
8991 {
8992 tree tree00 = TREE_OPERAND (arg0, 0);
8993 tree tree01 = TREE_OPERAND (arg0, 1);
8994 if (TREE_CODE (tree01) == MULT_EXPR
8995 && TREE_CODE (tree00) == MULT_EXPR)
8996 {
8997 tree tree0;
8998 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
8999 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
9000 }
9001 }
9002 }
9003
9004 bit_rotate:
9005 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9006 is a rotate of A by C1 bits. */
9007 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9008 is a rotate of A by B bits. */
9009 {
9010 enum tree_code code0, code1;
9011 code0 = TREE_CODE (arg0);
9012 code1 = TREE_CODE (arg1);
9013 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9014 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9015 && operand_equal_p (TREE_OPERAND (arg0, 0),
9016 TREE_OPERAND (arg1, 0), 0)
9017 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9018 {
9019 tree tree01, tree11;
9020 enum tree_code code01, code11;
9021
9022 tree01 = TREE_OPERAND (arg0, 1);
9023 tree11 = TREE_OPERAND (arg1, 1);
9024 STRIP_NOPS (tree01);
9025 STRIP_NOPS (tree11);
9026 code01 = TREE_CODE (tree01);
9027 code11 = TREE_CODE (tree11);
9028 if (code01 == INTEGER_CST
9029 && code11 == INTEGER_CST
9030 && TREE_INT_CST_HIGH (tree01) == 0
9031 && TREE_INT_CST_HIGH (tree11) == 0
9032 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
9033 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9034 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
9035 code0 == LSHIFT_EXPR ? tree01 : tree11);
9036 else if (code11 == MINUS_EXPR)
9037 {
9038 tree tree110, tree111;
9039 tree110 = TREE_OPERAND (tree11, 0);
9040 tree111 = TREE_OPERAND (tree11, 1);
9041 STRIP_NOPS (tree110);
9042 STRIP_NOPS (tree111);
9043 if (TREE_CODE (tree110) == INTEGER_CST
9044 && 0 == compare_tree_int (tree110,
9045 TYPE_PRECISION
9046 (TREE_TYPE (TREE_OPERAND
9047 (arg0, 0))))
9048 && operand_equal_p (tree01, tree111, 0))
9049 return build2 ((code0 == LSHIFT_EXPR
9050 ? LROTATE_EXPR
9051 : RROTATE_EXPR),
9052 type, TREE_OPERAND (arg0, 0), tree01);
9053 }
9054 else if (code01 == MINUS_EXPR)
9055 {
9056 tree tree010, tree011;
9057 tree010 = TREE_OPERAND (tree01, 0);
9058 tree011 = TREE_OPERAND (tree01, 1);
9059 STRIP_NOPS (tree010);
9060 STRIP_NOPS (tree011);
9061 if (TREE_CODE (tree010) == INTEGER_CST
9062 && 0 == compare_tree_int (tree010,
9063 TYPE_PRECISION
9064 (TREE_TYPE (TREE_OPERAND
9065 (arg0, 0))))
9066 && operand_equal_p (tree11, tree011, 0))
9067 return build2 ((code0 != LSHIFT_EXPR
9068 ? LROTATE_EXPR
9069 : RROTATE_EXPR),
9070 type, TREE_OPERAND (arg0, 0), tree11);
9071 }
9072 }
9073 }
9074
9075 associate:
9076 /* In most languages, can't associate operations on floats through
9077 parentheses. Rather than remember where the parentheses were, we
9078 don't associate floats at all, unless the user has specified
9079 -funsafe-math-optimizations. */
9080
9081 if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9082 {
9083 tree var0, con0, lit0, minus_lit0;
9084 tree var1, con1, lit1, minus_lit1;
9085
9086 /* Split both trees into variables, constants, and literals. Then
9087 associate each group together, the constants with literals,
9088 then the result with variables. This increases the chances of
9089 literals being recombined later and of generating relocatable
9090 expressions for the sum of a constant and literal. */
9091 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9092 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9093 code == MINUS_EXPR);
9094
9095 /* Only do something if we found more than two objects. Otherwise,
9096 nothing has changed and we risk infinite recursion. */
9097 if (2 < ((var0 != 0) + (var1 != 0)
9098 + (con0 != 0) + (con1 != 0)
9099 + (lit0 != 0) + (lit1 != 0)
9100 + (minus_lit0 != 0) + (minus_lit1 != 0)))
9101 {
9102 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9103 if (code == MINUS_EXPR)
9104 code = PLUS_EXPR;
9105
9106 var0 = associate_trees (var0, var1, code, type);
9107 con0 = associate_trees (con0, con1, code, type);
9108 lit0 = associate_trees (lit0, lit1, code, type);
9109 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
9110
9111 /* Preserve the MINUS_EXPR if the negative part of the literal is
9112 greater than the positive part. Otherwise, the multiplicative
9113 folding code (i.e extract_muldiv) may be fooled in case
9114 unsigned constants are subtracted, like in the following
9115 example: ((X*2 + 4) - 8U)/2. */
9116 if (minus_lit0 && lit0)
9117 {
9118 if (TREE_CODE (lit0) == INTEGER_CST
9119 && TREE_CODE (minus_lit0) == INTEGER_CST
9120 && tree_int_cst_lt (lit0, minus_lit0))
9121 {
9122 minus_lit0 = associate_trees (minus_lit0, lit0,
9123 MINUS_EXPR, type);
9124 lit0 = 0;
9125 }
9126 else
9127 {
9128 lit0 = associate_trees (lit0, minus_lit0,
9129 MINUS_EXPR, type);
9130 minus_lit0 = 0;
9131 }
9132 }
9133 if (minus_lit0)
9134 {
9135 if (con0 == 0)
9136 return fold_convert (type,
9137 associate_trees (var0, minus_lit0,
9138 MINUS_EXPR, type));
9139 else
9140 {
9141 con0 = associate_trees (con0, minus_lit0,
9142 MINUS_EXPR, type);
9143 return fold_convert (type,
9144 associate_trees (var0, con0,
9145 PLUS_EXPR, type));
9146 }
9147 }
9148
9149 con0 = associate_trees (con0, lit0, code, type);
9150 return fold_convert (type, associate_trees (var0, con0,
9151 code, type));
9152 }
9153 }
9154
9155 return NULL_TREE;
9156
9157 case MINUS_EXPR:
9158 /* A - (-B) -> A + B */
9159 if (TREE_CODE (arg1) == NEGATE_EXPR)
9160 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
9161 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9162 if (TREE_CODE (arg0) == NEGATE_EXPR
9163 && (FLOAT_TYPE_P (type)
9164 || INTEGRAL_TYPE_P (type))
9165 && negate_expr_p (arg1)
9166 && reorder_operands_p (arg0, arg1))
9167 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
9168 TREE_OPERAND (arg0, 0));
9169 /* Convert -A - 1 to ~A. */
9170 if (INTEGRAL_TYPE_P (type)
9171 && TREE_CODE (arg0) == NEGATE_EXPR
9172 && integer_onep (arg1)
9173 && !TYPE_TRAP_SIGNED (type))
9174 return fold_build1 (BIT_NOT_EXPR, type,
9175 fold_convert (type, TREE_OPERAND (arg0, 0)));
9176
9177 /* Convert -1 - A to ~A. */
9178 if (INTEGRAL_TYPE_P (type)
9179 && integer_all_onesp (arg0))
9180 return fold_build1 (BIT_NOT_EXPR, type, op1);
9181
9182 if (! FLOAT_TYPE_P (type))
9183 {
9184 if (integer_zerop (arg0))
9185 return negate_expr (fold_convert (type, arg1));
9186 if (integer_zerop (arg1))
9187 return non_lvalue (fold_convert (type, arg0));
9188
9189 /* Fold A - (A & B) into ~B & A. */
9190 if (!TREE_SIDE_EFFECTS (arg0)
9191 && TREE_CODE (arg1) == BIT_AND_EXPR)
9192 {
9193 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
9194 return fold_build2 (BIT_AND_EXPR, type,
9195 fold_build1 (BIT_NOT_EXPR, type,
9196 TREE_OPERAND (arg1, 0)),
9197 arg0);
9198 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9199 return fold_build2 (BIT_AND_EXPR, type,
9200 fold_build1 (BIT_NOT_EXPR, type,
9201 TREE_OPERAND (arg1, 1)),
9202 arg0);
9203 }
9204
9205 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
9206 any power of 2 minus 1. */
9207 if (TREE_CODE (arg0) == BIT_AND_EXPR
9208 && TREE_CODE (arg1) == BIT_AND_EXPR
9209 && operand_equal_p (TREE_OPERAND (arg0, 0),
9210 TREE_OPERAND (arg1, 0), 0))
9211 {
9212 tree mask0 = TREE_OPERAND (arg0, 1);
9213 tree mask1 = TREE_OPERAND (arg1, 1);
9214 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
9215
9216 if (operand_equal_p (tem, mask1, 0))
9217 {
9218 tem = fold_build2 (BIT_XOR_EXPR, type,
9219 TREE_OPERAND (arg0, 0), mask1);
9220 return fold_build2 (MINUS_EXPR, type, tem, mask1);
9221 }
9222 }
9223 }
9224
9225 /* See if ARG1 is zero and X - ARG1 reduces to X. */
9226 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
9227 return non_lvalue (fold_convert (type, arg0));
9228
9229 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
9230 ARG0 is zero and X + ARG0 reduces to X, since that would mean
9231 (-ARG1 + ARG0) reduces to -ARG1. */
9232 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
9233 return negate_expr (fold_convert (type, arg1));
9234
9235 /* Fold &x - &x. This can happen from &x.foo - &x.
9236 This is unsafe for certain floats even in non-IEEE formats.
9237 In IEEE, it is unsafe because it does wrong for NaNs.
9238 Also note that operand_equal_p is always false if an operand
9239 is volatile. */
9240
9241 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
9242 && operand_equal_p (arg0, arg1, 0))
9243 return fold_convert (type, integer_zero_node);
9244
9245 /* A - B -> A + (-B) if B is easily negatable. */
9246 if (negate_expr_p (arg1)
9247 && ((FLOAT_TYPE_P (type)
9248 /* Avoid this transformation if B is a positive REAL_CST. */
9249 && (TREE_CODE (arg1) != REAL_CST
9250 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
9251 || INTEGRAL_TYPE_P (type)))
9252 return fold_build2 (PLUS_EXPR, type,
9253 fold_convert (type, arg0),
9254 fold_convert (type, negate_expr (arg1)));
9255
9256 /* Try folding difference of addresses. */
9257 {
9258 HOST_WIDE_INT diff;
9259
9260 if ((TREE_CODE (arg0) == ADDR_EXPR
9261 || TREE_CODE (arg1) == ADDR_EXPR)
9262 && ptr_difference_const (arg0, arg1, &diff))
9263 return build_int_cst_type (type, diff);
9264 }
9265
9266 /* Fold &a[i] - &a[j] to i-j. */
9267 if (TREE_CODE (arg0) == ADDR_EXPR
9268 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9269 && TREE_CODE (arg1) == ADDR_EXPR
9270 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9271 {
9272 tree aref0 = TREE_OPERAND (arg0, 0);
9273 tree aref1 = TREE_OPERAND (arg1, 0);
9274 if (operand_equal_p (TREE_OPERAND (aref0, 0),
9275 TREE_OPERAND (aref1, 0), 0))
9276 {
9277 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
9278 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
9279 tree esz = array_ref_element_size (aref0);
9280 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9281 return fold_build2 (MULT_EXPR, type, diff,
9282 fold_convert (type, esz));
9283
9284 }
9285 }
9286
9287 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
9288 of the array. Loop optimizer sometimes produce this type of
9289 expressions. The loop optimizer sometimes produces this type of expression. */
9290 if (TREE_CODE (arg0) == ADDR_EXPR)
9291 {
9292 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
9293 if (tem)
9294 return fold_convert (type, tem);
9295 }
9296
9297 if (flag_unsafe_math_optimizations
9298 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9299 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9300 && (tem = distribute_real_division (code, type, arg0, arg1)))
9301 return tem;
9302
9303 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the
9304 same or one. */
9305 if ((TREE_CODE (arg0) == MULT_EXPR
9306 || TREE_CODE (arg1) == MULT_EXPR)
9307 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
9308 {
9309 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
9310 if (tem)
9311 return tem;
9312 }
9313
9314 goto associate;
9315
9316 case MULT_EXPR:
9317 /* (-A) * (-B) -> A * B */
9318 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
9319 return fold_build2 (MULT_EXPR, type,
9320 fold_convert (type, TREE_OPERAND (arg0, 0)),
9321 fold_convert (type, negate_expr (arg1)));
9322 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
9323 return fold_build2 (MULT_EXPR, type,
9324 fold_convert (type, negate_expr (arg0)),
9325 fold_convert (type, TREE_OPERAND (arg1, 0)));
9326
9327 if (! FLOAT_TYPE_P (type))
9328 {
9329 if (integer_zerop (arg1))
9330 return omit_one_operand (type, arg1, arg0);
9331 if (integer_onep (arg1))
9332 return non_lvalue (fold_convert (type, arg0));
9333 /* Transform x * -1 into -x. */
9334 if (integer_all_onesp (arg1))
9335 return fold_convert (type, negate_expr (arg0));
9336 /* Transform x * -C into -x * C if x is easily negatable. */
9337 if (TREE_CODE (arg1) == INTEGER_CST
9338 && tree_int_cst_sgn (arg1) == -1
9339 && negate_expr_p (arg0)
9340 && (tem = negate_expr (arg1)) != arg1
9341 && !TREE_OVERFLOW (tem))
9342 return fold_build2 (MULT_EXPR, type,
9343 negate_expr (arg0), tem);
9344
9345 /* (a * (1 << b)) is (a << b) */
9346 if (TREE_CODE (arg1) == LSHIFT_EXPR
9347 && integer_onep (TREE_OPERAND (arg1, 0)))
9348 return fold_build2 (LSHIFT_EXPR, type, arg0,
9349 TREE_OPERAND (arg1, 1));
9350 if (TREE_CODE (arg0) == LSHIFT_EXPR
9351 && integer_onep (TREE_OPERAND (arg0, 0)))
9352 return fold_build2 (LSHIFT_EXPR, type, arg1,
9353 TREE_OPERAND (arg0, 1));
9354
9355 if (TREE_CODE (arg1) == INTEGER_CST
9356 && 0 != (tem = extract_muldiv (op0,
9357 fold_convert (type, arg1),
9358 code, NULL_TREE)))
9359 return fold_convert (type, tem);
9360
9361 /* Optimize z * conj(z) for integer complex numbers. */
9362 if (TREE_CODE (arg0) == CONJ_EXPR
9363 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9364 return fold_mult_zconjz (type, arg1);
9365 if (TREE_CODE (arg1) == CONJ_EXPR
9366 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9367 return fold_mult_zconjz (type, arg0);
9368 }
9369 else
9370 {
9371 /* Maybe fold x * 0 to 0. The expressions aren't the same
9372 when x is NaN, since x * 0 is also NaN. Nor are they the
9373 same in modes with signed zeros, since multiplying a
9374 negative value by 0 gives -0, not +0. */
9375 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9376 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9377 && real_zerop (arg1))
9378 return omit_one_operand (type, arg1, arg0);
9379 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
9380 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9381 && real_onep (arg1))
9382 return non_lvalue (fold_convert (type, arg0));
9383
9384 /* Transform x * -1.0 into -x. */
9385 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
9386 && real_minus_onep (arg1))
9387 return fold_convert (type, negate_expr (arg0));
9388
9389 /* Convert (C1/X)*C2 into (C1*C2)/X. */
9390 if (flag_unsafe_math_optimizations
9391 && TREE_CODE (arg0) == RDIV_EXPR
9392 && TREE_CODE (arg1) == REAL_CST
9393 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
9394 {
9395 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
9396 arg1, 0);
9397 if (tem)
9398 return fold_build2 (RDIV_EXPR, type, tem,
9399 TREE_OPERAND (arg0, 1));
9400 }
9401
9402 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
9403 if (operand_equal_p (arg0, arg1, 0))
9404 {
9405 tree tem = fold_strip_sign_ops (arg0);
9406 if (tem != NULL_TREE)
9407 {
9408 tem = fold_convert (type, tem);
9409 return fold_build2 (MULT_EXPR, type, tem, tem);
9410 }
9411 }
9412
9413 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9414 This is not the same for NaNs or if signed zeros are
9415 involved. */
9416 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9417 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
9418 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9419 && TREE_CODE (arg1) == COMPLEX_CST
9420 && real_zerop (TREE_REALPART (arg1)))
9421 {
9422 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9423 if (real_onep (TREE_IMAGPART (arg1)))
9424 return fold_build2 (COMPLEX_EXPR, type,
9425 negate_expr (fold_build1 (IMAGPART_EXPR,
9426 rtype, arg0)),
9427 fold_build1 (REALPART_EXPR, rtype, arg0));
9428 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9429 return fold_build2 (COMPLEX_EXPR, type,
9430 fold_build1 (IMAGPART_EXPR, rtype, arg0),
9431 negate_expr (fold_build1 (REALPART_EXPR,
9432 rtype, arg0)));
9433 }
9434
9435 /* Optimize z * conj(z) for floating point complex numbers.
9436 Guarded by flag_unsafe_math_optimizations as non-finite
9437 imaginary components don't produce scalar results. */
9438 if (flag_unsafe_math_optimizations
9439 && TREE_CODE (arg0) == CONJ_EXPR
9440 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9441 return fold_mult_zconjz (type, arg1);
9442 if (flag_unsafe_math_optimizations
9443 && TREE_CODE (arg1) == CONJ_EXPR
9444 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9445 return fold_mult_zconjz (type, arg0);
9446
9447 if (flag_unsafe_math_optimizations)
9448 {
9449 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
9450 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
9451
9452 /* Optimizations of root(...)*root(...). */
9453 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
9454 {
9455 tree rootfn, arg, arglist;
9456 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9457 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9458
9459 /* Optimize sqrt(x)*sqrt(x) as x. */
9460 if (BUILTIN_SQRT_P (fcode0)
9461 && operand_equal_p (arg00, arg10, 0)
9462 && ! HONOR_SNANS (TYPE_MODE (type)))
9463 return arg00;
9464
9465 /* Optimize root(x)*root(y) as root(x*y). */
9466 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9467 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9468 arglist = build_tree_list (NULL_TREE, arg);
9469 return build_function_call_expr (rootfn, arglist);
9470 }
9471
9472 /* Optimize expN(x)*expN(y) as expN(x+y). */
9473 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
9474 {
9475 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9476 tree arg = fold_build2 (PLUS_EXPR, type,
9477 TREE_VALUE (TREE_OPERAND (arg0, 1)),
9478 TREE_VALUE (TREE_OPERAND (arg1, 1)));
9479 tree arglist = build_tree_list (NULL_TREE, arg);
9480 return build_function_call_expr (expfn, arglist);
9481 }
9482
9483 /* Optimizations of pow(...)*pow(...). */
9484 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
9485 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
9486 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
9487 {
9488 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9489 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9490 1)));
9491 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9492 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9493 1)));
9494
9495 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
9496 if (operand_equal_p (arg01, arg11, 0))
9497 {
9498 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9499 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
9500 tree arglist = tree_cons (NULL_TREE, arg,
9501 build_tree_list (NULL_TREE,
9502 arg01));
9503 return build_function_call_expr (powfn, arglist);
9504 }
9505
9506 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
9507 if (operand_equal_p (arg00, arg10, 0))
9508 {
9509 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9510 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
9511 tree arglist = tree_cons (NULL_TREE, arg00,
9512 build_tree_list (NULL_TREE,
9513 arg));
9514 return build_function_call_expr (powfn, arglist);
9515 }
9516 }
9517
9518 /* Optimize tan(x)*cos(x) as sin(x). */
9519 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
9520 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
9521 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
9522 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
9523 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
9524 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
9525 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
9526 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
9527 {
9528 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
9529
9530 if (sinfn != NULL_TREE)
9531 return build_function_call_expr (sinfn,
9532 TREE_OPERAND (arg0, 1));
9533 }
9534
9535 /* Optimize x*pow(x,c) as pow(x,c+1). */
9536 if (fcode1 == BUILT_IN_POW
9537 || fcode1 == BUILT_IN_POWF
9538 || fcode1 == BUILT_IN_POWL)
9539 {
9540 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
9541 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
9542 1)));
9543 if (TREE_CODE (arg11) == REAL_CST
9544 && ! TREE_CONSTANT_OVERFLOW (arg11)
9545 && operand_equal_p (arg0, arg10, 0))
9546 {
9547 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
9548 REAL_VALUE_TYPE c;
9549 tree arg, arglist;
9550
9551 c = TREE_REAL_CST (arg11);
9552 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9553 arg = build_real (type, c);
9554 arglist = build_tree_list (NULL_TREE, arg);
9555 arglist = tree_cons (NULL_TREE, arg0, arglist);
9556 return build_function_call_expr (powfn, arglist);
9557 }
9558 }
9559
9560 /* Optimize pow(x,c)*x as pow(x,c+1). */
9561 if (fcode0 == BUILT_IN_POW
9562 || fcode0 == BUILT_IN_POWF
9563 || fcode0 == BUILT_IN_POWL)
9564 {
9565 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
9566 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
9567 1)));
9568 if (TREE_CODE (arg01) == REAL_CST
9569 && ! TREE_CONSTANT_OVERFLOW (arg01)
9570 && operand_equal_p (arg1, arg00, 0))
9571 {
9572 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
9573 REAL_VALUE_TYPE c;
9574 tree arg, arglist;
9575
9576 c = TREE_REAL_CST (arg01);
9577 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
9578 arg = build_real (type, c);
9579 arglist = build_tree_list (NULL_TREE, arg);
9580 arglist = tree_cons (NULL_TREE, arg1, arglist);
9581 return build_function_call_expr (powfn, arglist);
9582 }
9583 }
9584
9585 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
9586 if (! optimize_size
9587 && operand_equal_p (arg0, arg1, 0))
9588 {
9589 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9590
9591 if (powfn)
9592 {
9593 tree arg = build_real (type, dconst2);
9594 tree arglist = build_tree_list (NULL_TREE, arg);
9595 arglist = tree_cons (NULL_TREE, arg0, arglist);
9596 return build_function_call_expr (powfn, arglist);
9597 }
9598 }
9599 }
9600 }
9601 goto associate;
9602
9603 case BIT_IOR_EXPR:
9604 bit_ior:
9605 if (integer_all_onesp (arg1))
9606 return omit_one_operand (type, arg1, arg0);
9607 if (integer_zerop (arg1))
9608 return non_lvalue (fold_convert (type, arg0));
9609 if (operand_equal_p (arg0, arg1, 0))
9610 return non_lvalue (fold_convert (type, arg0));
9611
9612 /* ~X | X is -1. */
9613 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9614 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9615 {
9616 t1 = build_int_cst_type (type, -1);
9617 return omit_one_operand (type, t1, arg1);
9618 }
9619
9620 /* X | ~X is -1. */
9621 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9622 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9623 {
9624 t1 = build_int_cst_type (type, -1);
9625 return omit_one_operand (type, t1, arg0);
9626 }
9627
9628 /* Canonicalize (X & C1) | C2. */
9629 if (TREE_CODE (arg0) == BIT_AND_EXPR
9630 && TREE_CODE (arg1) == INTEGER_CST
9631 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9632 {
9633 unsigned HOST_WIDE_INT hi1, lo1, hi2, lo2, mlo, mhi;
9634 int width = TYPE_PRECISION (type);
9635 hi1 = TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1));
9636 lo1 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
9637 hi2 = TREE_INT_CST_HIGH (arg1);
9638 lo2 = TREE_INT_CST_LOW (arg1);
9639
9640 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9641 if ((hi1 & hi2) == hi1 && (lo1 & lo2) == lo1)
9642 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9643
9644 if (width > HOST_BITS_PER_WIDE_INT)
9645 {
9646 mhi = (unsigned HOST_WIDE_INT) -1
9647 >> (2 * HOST_BITS_PER_WIDE_INT - width);
9648 mlo = -1;
9649 }
9650 else
9651 {
9652 mhi = 0;
9653 mlo = (unsigned HOST_WIDE_INT) -1
9654 >> (HOST_BITS_PER_WIDE_INT - width);
9655 }
9656
9657 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9658 if ((~(hi1 | hi2) & mhi) == 0 && (~(lo1 | lo2) & mlo) == 0)
9659 return fold_build2 (BIT_IOR_EXPR, type,
9660 TREE_OPERAND (arg0, 0), arg1);
9661
9662 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2. */
9663 hi1 &= mhi;
9664 lo1 &= mlo;
9665 if ((hi1 & ~hi2) != hi1 || (lo1 & ~lo2) != lo1)
9666 return fold_build2 (BIT_IOR_EXPR, type,
9667 fold_build2 (BIT_AND_EXPR, type,
9668 TREE_OPERAND (arg0, 0),
9669 build_int_cst_wide (type,
9670 lo1 & ~lo2,
9671 hi1 & ~hi2)),
9672 arg1);
9673 }
9674
9675 /* (X & Y) | Y is (X, Y). */
9676 if (TREE_CODE (arg0) == BIT_AND_EXPR
9677 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9678 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9679 /* (X & Y) | X is (Y, X). */
9680 if (TREE_CODE (arg0) == BIT_AND_EXPR
9681 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9682 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9683 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9684 /* X | (X & Y) is (Y, X). */
9685 if (TREE_CODE (arg1) == BIT_AND_EXPR
9686 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9687 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9688 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9689 /* X | (Y & X) is (Y, X). */
9690 if (TREE_CODE (arg1) == BIT_AND_EXPR
9691 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9692 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9693 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9694
9695 t1 = distribute_bit_expr (code, type, arg0, arg1);
9696 if (t1 != NULL_TREE)
9697 return t1;
9698
9699 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
9700
9701 This results in more efficient code for machines without a NAND
9702 instruction. Combine will canonicalize to the first form
9703 which will allow use of NAND instructions provided by the
9704 backend if they exist. */
9705 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9706 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9707 {
9708 return fold_build1 (BIT_NOT_EXPR, type,
9709 build2 (BIT_AND_EXPR, type,
9710 TREE_OPERAND (arg0, 0),
9711 TREE_OPERAND (arg1, 0)));
9712 }
9713
9714 /* See if this can be simplified into a rotate first. If that
9715 is unsuccessful continue in the association code. */
9716 goto bit_rotate;
9717
9718 case BIT_XOR_EXPR:
9719 if (integer_zerop (arg1))
9720 return non_lvalue (fold_convert (type, arg0));
9721 if (integer_all_onesp (arg1))
9722 return fold_build1 (BIT_NOT_EXPR, type, arg0);
9723 if (operand_equal_p (arg0, arg1, 0))
9724 return omit_one_operand (type, integer_zero_node, arg0);
9725
9726 /* ~X ^ X is -1. */
9727 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9728 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9729 {
9730 t1 = build_int_cst_type (type, -1);
9731 return omit_one_operand (type, t1, arg1);
9732 }
9733
9734 /* X ^ ~X is -1. */
9735 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9736 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9737 {
9738 t1 = build_int_cst_type (type, -1);
9739 return omit_one_operand (type, t1, arg0);
9740 }
9741
9742 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
9743 with a constant, and the two constants have no bits in common,
9744 we should treat this as a BIT_IOR_EXPR since this may produce more
9745 simplifications. */
9746 if (TREE_CODE (arg0) == BIT_AND_EXPR
9747 && TREE_CODE (arg1) == BIT_AND_EXPR
9748 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9749 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9750 && integer_zerop (const_binop (BIT_AND_EXPR,
9751 TREE_OPERAND (arg0, 1),
9752 TREE_OPERAND (arg1, 1), 0)))
9753 {
9754 code = BIT_IOR_EXPR;
9755 goto bit_ior;
9756 }
9757
9758 /* (X | Y) ^ X -> Y & ~X.  */
9759 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9760 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9761 {
9762 tree t2 = TREE_OPERAND (arg0, 1);
9763 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9764 arg1);
9765 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9766 fold_convert (type, t1));
9767 return t1;
9768 }
9769
9770 /* (Y | X) ^ X -> Y & ~X.  */
9771 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9772 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9773 {
9774 tree t2 = TREE_OPERAND (arg0, 0);
9775 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
9776 arg1);
9777 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9778 fold_convert (type, t1));
9779 return t1;
9780 }
9781
9782 /* X ^ (X | Y) -> Y & ~X.  */
9783 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9784 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
9785 {
9786 tree t2 = TREE_OPERAND (arg1, 1);
9787 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9788 arg0);
9789 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9790 fold_convert (type, t1));
9791 return t1;
9792 }
9793
9794 /* X ^ (Y | X) -> Y & ~X.  */
9795 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9796 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
9797 {
9798 tree t2 = TREE_OPERAND (arg1, 0);
9799 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
9800 arg0);
9801 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
9802 fold_convert (type, t1));
9803 return t1;
9804 }
9805
9806 /* Convert ~X ^ ~Y to X ^ Y. */
9807 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9808 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9809 return fold_build2 (code, type,
9810 fold_convert (type, TREE_OPERAND (arg0, 0)),
9811 fold_convert (type, TREE_OPERAND (arg1, 0)));
9812
9813 /* Convert ~X ^ C to X ^ ~C. */
9814 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9815 && TREE_CODE (arg1) == INTEGER_CST)
9816 return fold_build2 (code, type,
9817 fold_convert (type, TREE_OPERAND (arg0, 0)),
9818 fold_build1 (BIT_NOT_EXPR, type, arg1));
9819
9820 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
9821 if (TREE_CODE (arg0) == BIT_AND_EXPR
9822 && integer_onep (TREE_OPERAND (arg0, 1))
9823 && integer_onep (arg1))
9824 return fold_build2 (EQ_EXPR, type, arg0,
9825 build_int_cst (TREE_TYPE (arg0), 0));
9826
9827 /* Fold (X & Y) ^ Y as ~X & Y. */
9828 if (TREE_CODE (arg0) == BIT_AND_EXPR
9829 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9830 {
9831 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9832 return fold_build2 (BIT_AND_EXPR, type,
9833 fold_build1 (BIT_NOT_EXPR, type, tem),
9834 fold_convert (type, arg1));
9835 }
9836 /* Fold (X & Y) ^ X as ~Y & X. */
9837 if (TREE_CODE (arg0) == BIT_AND_EXPR
9838 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9839 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9840 {
9841 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9842 return fold_build2 (BIT_AND_EXPR, type,
9843 fold_build1 (BIT_NOT_EXPR, type, tem),
9844 fold_convert (type, arg1));
9845 }
9846 /* Fold X ^ (X & Y) as X & ~Y. */
9847 if (TREE_CODE (arg1) == BIT_AND_EXPR
9848 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9849 {
9850 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9851 return fold_build2 (BIT_AND_EXPR, type,
9852 fold_convert (type, arg0),
9853 fold_build1 (BIT_NOT_EXPR, type, tem));
9854 }
9855 /* Fold X ^ (Y & X) as ~Y & X. */
9856 if (TREE_CODE (arg1) == BIT_AND_EXPR
9857 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9858 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9859 {
9860 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9861 return fold_build2 (BIT_AND_EXPR, type,
9862 fold_build1 (BIT_NOT_EXPR, type, tem),
9863 fold_convert (type, arg0));
9864 }
9865
9866 /* See if this can be simplified into a rotate first. If that
9867 is unsuccessful continue in the association code. */
9868 goto bit_rotate;
9869
9870 case BIT_AND_EXPR:
9871 if (integer_all_onesp (arg1))
9872 return non_lvalue (fold_convert (type, arg0));
9873 if (integer_zerop (arg1))
9874 return omit_one_operand (type, arg1, arg0);
9875 if (operand_equal_p (arg0, arg1, 0))
9876 return non_lvalue (fold_convert (type, arg0));
9877
9878 /* ~X & X is always zero. */
9879 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9880 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9881 return omit_one_operand (type, integer_zero_node, arg1);
9882
9883 /* X & ~X is always zero. */
9884 if (TREE_CODE (arg1) == BIT_NOT_EXPR
9885 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9886 return omit_one_operand (type, integer_zero_node, arg0);
9887
9888 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
9889 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9890 && TREE_CODE (arg1) == INTEGER_CST
9891 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9892 return fold_build2 (BIT_IOR_EXPR, type,
9893 fold_build2 (BIT_AND_EXPR, type,
9894 TREE_OPERAND (arg0, 0), arg1),
9895 fold_build2 (BIT_AND_EXPR, type,
9896 TREE_OPERAND (arg0, 1), arg1));
9897
9898 /* (X | Y) & Y is (X, Y). */
9899 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9900 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9901 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 0));
9902 /* (X | Y) & X is (Y, X). */
9903 if (TREE_CODE (arg0) == BIT_IOR_EXPR
9904 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9905 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9906 return omit_one_operand (type, arg1, TREE_OPERAND (arg0, 1));
9907 /* X & (X | Y) is (Y, X). */
9908 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9909 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
9910 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
9911 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 1));
9912 /* X & (Y | X) is (Y, X). */
9913 if (TREE_CODE (arg1) == BIT_IOR_EXPR
9914 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9915 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9916 return omit_one_operand (type, arg0, TREE_OPERAND (arg1, 0));
9917
9918 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
9919 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9920 && integer_onep (TREE_OPERAND (arg0, 1))
9921 && integer_onep (arg1))
9922 {
9923 tem = TREE_OPERAND (arg0, 0);
9924 return fold_build2 (EQ_EXPR, type,
9925 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9926 build_int_cst (TREE_TYPE (tem), 1)),
9927 build_int_cst (TREE_TYPE (tem), 0));
9928 }
9929 /* Fold ~X & 1 as (X & 1) == 0. */
9930 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9931 && integer_onep (arg1))
9932 {
9933 tem = TREE_OPERAND (arg0, 0);
9934 return fold_build2 (EQ_EXPR, type,
9935 fold_build2 (BIT_AND_EXPR, TREE_TYPE (tem), tem,
9936 build_int_cst (TREE_TYPE (tem), 1)),
9937 build_int_cst (TREE_TYPE (tem), 0));
9938 }
9939
9940 /* Fold (X ^ Y) & Y as ~X & Y. */
9941 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9942 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9943 {
9944 tem = fold_convert (type, TREE_OPERAND (arg0, 0));
9945 return fold_build2 (BIT_AND_EXPR, type,
9946 fold_build1 (BIT_NOT_EXPR, type, tem),
9947 fold_convert (type, arg1));
9948 }
9949 /* Fold (X ^ Y) & X as ~Y & X. */
9950 if (TREE_CODE (arg0) == BIT_XOR_EXPR
9951 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
9952 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
9953 {
9954 tem = fold_convert (type, TREE_OPERAND (arg0, 1));
9955 return fold_build2 (BIT_AND_EXPR, type,
9956 fold_build1 (BIT_NOT_EXPR, type, tem),
9957 fold_convert (type, arg1));
9958 }
9959 /* Fold X & (X ^ Y) as X & ~Y. */
9960 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9961 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9962 {
9963 tem = fold_convert (type, TREE_OPERAND (arg1, 1));
9964 return fold_build2 (BIT_AND_EXPR, type,
9965 fold_convert (type, arg0),
9966 fold_build1 (BIT_NOT_EXPR, type, tem));
9967 }
9968 /* Fold X & (Y ^ X) as ~Y & X. */
9969 if (TREE_CODE (arg1) == BIT_XOR_EXPR
9970 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
9971 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9972 {
9973 tem = fold_convert (type, TREE_OPERAND (arg1, 0));
9974 return fold_build2 (BIT_AND_EXPR, type,
9975 fold_build1 (BIT_NOT_EXPR, type, tem),
9976 fold_convert (type, arg0));
9977 }
9978
9979 t1 = distribute_bit_expr (code, type, arg0, arg1);
9980 if (t1 != NULL_TREE)
9981 return t1;
9982 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
9983 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
9984 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
9985 {
9986 unsigned int prec
9987 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
9988
9989 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
9990 && (~TREE_INT_CST_LOW (arg1)
9991 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
9992 return fold_convert (type, TREE_OPERAND (arg0, 0));
9993 }
9994
9995 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
9996
9997 This results in more efficient code for machines without a NOR
9998 instruction. Combine will canonicalize to the first form
9999 which will allow use of NOR instructions provided by the
10000 backend if they exist. */
10001 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10002 && TREE_CODE (arg1) == BIT_NOT_EXPR)
10003 {
10004 return fold_build1 (BIT_NOT_EXPR, type,
10005 build2 (BIT_IOR_EXPR, type,
10006 TREE_OPERAND (arg0, 0),
10007 TREE_OPERAND (arg1, 0)));
10008 }
10009
10010 goto associate;
10011
10012 case RDIV_EXPR:
10013 /* Don't touch a floating-point divide by zero unless the mode
10014 of the constant can represent infinity. */
10015 if (TREE_CODE (arg1) == REAL_CST
10016 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10017 && real_zerop (arg1))
10018 return NULL_TREE;
10019
10020 /* Optimize A / A to 1.0 if we don't care about
10021 NaNs or Infinities. Skip the transformation
10022 for non-real operands. */
10023 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
10024 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10025 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
10026 && operand_equal_p (arg0, arg1, 0))
10027 {
10028 tree r = build_real (TREE_TYPE (arg0), dconst1);
10029
10030 return omit_two_operands (type, r, arg0, arg1);
10031 }
10032
10033 /* The complex version of the above A / A optimization. */
10034 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10035 && operand_equal_p (arg0, arg1, 0))
10036 {
10037 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
10038 if (! HONOR_NANS (TYPE_MODE (elem_type))
10039 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
10040 {
10041 tree r = build_real (elem_type, dconst1);
10042 /* omit_two_operands will call fold_convert for us. */
10043 return omit_two_operands (type, r, arg0, arg1);
10044 }
10045 }
10046
10047 /* (-A) / (-B) -> A / B */
10048 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10049 return fold_build2 (RDIV_EXPR, type,
10050 TREE_OPERAND (arg0, 0),
10051 negate_expr (arg1));
10052 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10053 return fold_build2 (RDIV_EXPR, type,
10054 negate_expr (arg0),
10055 TREE_OPERAND (arg1, 0));
10056
10057 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
10058 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10059 && real_onep (arg1))
10060 return non_lvalue (fold_convert (type, arg0));
10061
10062 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
10063 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10064 && real_minus_onep (arg1))
10065 return non_lvalue (fold_convert (type, negate_expr (arg0)));
10066
10067 /* If ARG1 is a constant, we can convert this to a multiply by the
10068 reciprocal. This does not have the same rounding properties,
10069 so only do this if -funsafe-math-optimizations. We can actually
10070 always safely do it if ARG1 is a power of two, but it's hard to
10071 tell if it is or not in a portable manner. */
10072 if (TREE_CODE (arg1) == REAL_CST)
10073 {
10074 if (flag_unsafe_math_optimizations
10075 && 0 != (tem = const_binop (code, build_real (type, dconst1),
10076 arg1, 0)))
10077 return fold_build2 (MULT_EXPR, type, arg0, tem);
10078 /* Find the reciprocal if optimizing and the result is exact. */
10079 if (optimize)
10080 {
10081 REAL_VALUE_TYPE r;
10082 r = TREE_REAL_CST (arg1);
10083 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
10084 {
10085 tem = build_real (type, r);
10086 return fold_build2 (MULT_EXPR, type,
10087 fold_convert (type, arg0), tem);
10088 }
10089 }
10090 }
10091 /* Convert A/B/C to A/(B*C). */
10092 if (flag_unsafe_math_optimizations
10093 && TREE_CODE (arg0) == RDIV_EXPR)
10094 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
10095 fold_build2 (MULT_EXPR, type,
10096 TREE_OPERAND (arg0, 1), arg1));
10097
10098 /* Convert A/(B/C) to (A/B)*C. */
10099 if (flag_unsafe_math_optimizations
10100 && TREE_CODE (arg1) == RDIV_EXPR)
10101 return fold_build2 (MULT_EXPR, type,
10102 fold_build2 (RDIV_EXPR, type, arg0,
10103 TREE_OPERAND (arg1, 0)),
10104 TREE_OPERAND (arg1, 1));
10105
10106 /* Convert C1/(X*C2) into (C1/C2)/X. */
10107 if (flag_unsafe_math_optimizations
10108 && TREE_CODE (arg1) == MULT_EXPR
10109 && TREE_CODE (arg0) == REAL_CST
10110 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
10111 {
10112 tree tem = const_binop (RDIV_EXPR, arg0,
10113 TREE_OPERAND (arg1, 1), 0);
10114 if (tem)
10115 return fold_build2 (RDIV_EXPR, type, tem,
10116 TREE_OPERAND (arg1, 0));
10117 }
10118
10119 if (flag_unsafe_math_optimizations)
10120 {
10121 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10122 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10123
10124 /* Optimize sin(x)/cos(x) as tan(x). */
10125 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
10126 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
10127 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
10128 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10129 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10130 {
10131 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10132
10133 if (tanfn != NULL_TREE)
10134 return build_function_call_expr (tanfn,
10135 TREE_OPERAND (arg0, 1));
10136 }
10137
10138 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
10139 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
10140 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
10141 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
10142 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
10143 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
10144 {
10145 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
10146
10147 if (tanfn != NULL_TREE)
10148 {
10149 tree tmp = TREE_OPERAND (arg0, 1);
10150 tmp = build_function_call_expr (tanfn, tmp);
10151 return fold_build2 (RDIV_EXPR, type,
10152 build_real (type, dconst1), tmp);
10153 }
10154 }
10155
10156 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
10157 NaNs or Infinities. */
10158 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
10159 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
10160 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
10161 {
10162 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10163 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10164
10165 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10166 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10167 && operand_equal_p (arg00, arg01, 0))
10168 {
10169 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10170
10171 if (cosfn != NULL_TREE)
10172 return build_function_call_expr (cosfn,
10173 TREE_OPERAND (arg0, 1));
10174 }
10175 }
10176
10177 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
10178 NaNs or Infinities. */
10179 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
10180 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
10181 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
10182 {
10183 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10184 tree arg01 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10185
10186 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
10187 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
10188 && operand_equal_p (arg00, arg01, 0))
10189 {
10190 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
10191
10192 if (cosfn != NULL_TREE)
10193 {
10194 tree tmp = TREE_OPERAND (arg0, 1);
10195 tmp = build_function_call_expr (cosfn, tmp);
10196 return fold_build2 (RDIV_EXPR, type,
10197 build_real (type, dconst1),
10198 tmp);
10199 }
10200 }
10201 }
10202
10203 /* Optimize pow(x,c)/x as pow(x,c-1). */
10204 if (fcode0 == BUILT_IN_POW
10205 || fcode0 == BUILT_IN_POWF
10206 || fcode0 == BUILT_IN_POWL)
10207 {
10208 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
10209 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
10210 if (TREE_CODE (arg01) == REAL_CST
10211 && ! TREE_CONSTANT_OVERFLOW (arg01)
10212 && operand_equal_p (arg1, arg00, 0))
10213 {
10214 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10215 REAL_VALUE_TYPE c;
10216 tree arg, arglist;
10217
10218 c = TREE_REAL_CST (arg01);
10219 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
10220 arg = build_real (type, c);
10221 arglist = build_tree_list (NULL_TREE, arg);
10222 arglist = tree_cons (NULL_TREE, arg1, arglist);
10223 return build_function_call_expr (powfn, arglist);
10224 }
10225 }
10226
10227 /* Optimize x/expN(y) into x*expN(-y). */
10228 if (BUILTIN_EXPONENT_P (fcode1))
10229 {
10230 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10231 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
10232 tree arglist = build_tree_list (NULL_TREE,
10233 fold_convert (type, arg));
10234 arg1 = build_function_call_expr (expfn, arglist);
10235 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10236 }
10237
10238 /* Optimize x/pow(y,z) into x*pow(y,-z). */
10239 if (fcode1 == BUILT_IN_POW
10240 || fcode1 == BUILT_IN_POWF
10241 || fcode1 == BUILT_IN_POWL)
10242 {
10243 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
10244 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
10245 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
10246 tree neg11 = fold_convert (type, negate_expr (arg11));
10247 tree arglist = tree_cons(NULL_TREE, arg10,
10248 build_tree_list (NULL_TREE, neg11));
10249 arg1 = build_function_call_expr (powfn, arglist);
10250 return fold_build2 (MULT_EXPR, type, arg0, arg1);
10251 }
10252 }
10253 return NULL_TREE;
10254
10255 case TRUNC_DIV_EXPR:
10256 case FLOOR_DIV_EXPR:
10257 /* Simplify A / (B << N) where A and B are positive and B is
10258 a power of 2, to A >> (N + log2(B)). */
10259 if (TREE_CODE (arg1) == LSHIFT_EXPR
10260 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10261 {
10262 tree sval = TREE_OPERAND (arg1, 0);
10263 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10264 {
10265 tree sh_cnt = TREE_OPERAND (arg1, 1);
10266 unsigned long pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
10267
10268 sh_cnt = fold_build2 (PLUS_EXPR, TREE_TYPE (sh_cnt),
10269 sh_cnt, build_int_cst (NULL_TREE, pow2));
10270 return fold_build2 (RSHIFT_EXPR, type,
10271 fold_convert (type, arg0), sh_cnt);
10272 }
10273 }
10274 /* ... fall through ...  */
10275
10276 case ROUND_DIV_EXPR:
10277 case CEIL_DIV_EXPR:
10278 case EXACT_DIV_EXPR:
10279 if (integer_onep (arg1))
10280 return non_lvalue (fold_convert (type, arg0));
10281 if (integer_zerop (arg1))
10282 return NULL_TREE;
10283 /* X / -1 is -X. */
10284 if (!TYPE_UNSIGNED (type)
10285 && TREE_CODE (arg1) == INTEGER_CST
10286 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10287 && TREE_INT_CST_HIGH (arg1) == -1)
10288 return fold_convert (type, negate_expr (arg0));
10289
10290 /* Convert -A / -B to A / B when the type is signed and overflow is
10291 undefined. */
10292 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10293 && TREE_CODE (arg0) == NEGATE_EXPR
10294 && negate_expr_p (arg1))
10295 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10296 negate_expr (arg1));
10297 if (!TYPE_UNSIGNED (type) && !flag_wrapv
10298 && TREE_CODE (arg1) == NEGATE_EXPR
10299 && negate_expr_p (arg0))
10300 return fold_build2 (code, type, negate_expr (arg0),
10301 TREE_OPERAND (arg1, 0));
10302
10303 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10304 operation, EXACT_DIV_EXPR.
10305
10306 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10307 At one time others generated faster code, it's not clear if they do
10308 after the last round to changes to the DIV code in expmed.c. */
10309 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10310 && multiple_of_p (type, arg0, arg1))
10311 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
10312
10313 if (TREE_CODE (arg1) == INTEGER_CST
10314 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10315 return fold_convert (type, tem);
10316
10317 return NULL_TREE;
10318
10319 case CEIL_MOD_EXPR:
10320 case FLOOR_MOD_EXPR:
10321 case ROUND_MOD_EXPR:
10322 case TRUNC_MOD_EXPR:
10323 /* X % 1 is always zero, but be sure to preserve any side
10324 effects in X. */
10325 if (integer_onep (arg1))
10326 return omit_one_operand (type, integer_zero_node, arg0);
10327
10328 /* X % 0, return X % 0 unchanged so that we can get the
10329 proper warnings and errors. */
10330 if (integer_zerop (arg1))
10331 return NULL_TREE;
10332
10333 /* 0 % X is always zero, but be sure to preserve any side
10334 effects in X. Place this after checking for X == 0. */
10335 if (integer_zerop (arg0))
10336 return omit_one_operand (type, integer_zero_node, arg1);
10337
10338 /* X % -1 is zero. */
10339 if (!TYPE_UNSIGNED (type)
10340 && TREE_CODE (arg1) == INTEGER_CST
10341 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
10342 && TREE_INT_CST_HIGH (arg1) == -1)
10343 return omit_one_operand (type, integer_zero_node, arg0);
10344
10345 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
10346 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
10347 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
10348 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0)))
10349 {
10350 tree c = arg1;
10351 /* Also optimize A % (C << N) where C is a power of 2,
10352 to A & ((C << N) - 1). */
10353 if (TREE_CODE (arg1) == LSHIFT_EXPR)
10354 c = TREE_OPERAND (arg1, 0);
10355
10356 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
10357 {
10358 tree mask = fold_build2 (MINUS_EXPR, TREE_TYPE (arg1), arg1,
10359 build_int_cst (TREE_TYPE (arg1), 1));
10360 return fold_build2 (BIT_AND_EXPR, type,
10361 fold_convert (type, arg0),
10362 fold_convert (type, mask));
10363 }
10364 }
10365
10366 /* X % -C is the same as X % C. */
10367 if (code == TRUNC_MOD_EXPR
10368 && !TYPE_UNSIGNED (type)
10369 && TREE_CODE (arg1) == INTEGER_CST
10370 && !TREE_CONSTANT_OVERFLOW (arg1)
10371 && TREE_INT_CST_HIGH (arg1) < 0
10372 && !flag_trapv
10373 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
10374 && !sign_bit_p (arg1, arg1))
10375 return fold_build2 (code, type, fold_convert (type, arg0),
10376 fold_convert (type, negate_expr (arg1)));
10377
10378 /* X % -Y is the same as X % Y. */
10379 if (code == TRUNC_MOD_EXPR
10380 && !TYPE_UNSIGNED (type)
10381 && TREE_CODE (arg1) == NEGATE_EXPR
10382 && !flag_trapv)
10383 return fold_build2 (code, type, fold_convert (type, arg0),
10384 fold_convert (type, TREE_OPERAND (arg1, 0)));
10385
10386 if (TREE_CODE (arg1) == INTEGER_CST
10387 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
10388 return fold_convert (type, tem);
10389
10390 return NULL_TREE;
10391
10392 case LROTATE_EXPR:
10393 case RROTATE_EXPR:
10394 if (integer_all_onesp (arg0))
10395 return omit_one_operand (type, arg0, arg1);
10396 goto shift;
10397
10398 case RSHIFT_EXPR:
10399 /* Optimize -1 >> x for arithmetic right shifts. */
10400 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
10401 return omit_one_operand (type, arg0, arg1);
10402 /* ... fall through ... */
10403
10404 case LSHIFT_EXPR:
10405 shift:
10406 if (integer_zerop (arg1))
10407 return non_lvalue (fold_convert (type, arg0));
10408 if (integer_zerop (arg0))
10409 return omit_one_operand (type, arg0, arg1);
10410
10411 /* Since negative shift count is not well-defined,
10412 don't try to compute it in the compiler. */
10413 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10414 return NULL_TREE;
10415
10416 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
10417 if (TREE_CODE (op0) == code && host_integerp (arg1, false)
10418 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10419 && host_integerp (TREE_OPERAND (arg0, 1), false)
10420 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10421 {
10422 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
10423 + TREE_INT_CST_LOW (arg1));
10424
10425 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
10426 being well defined. */
10427 if (low >= TYPE_PRECISION (type))
10428 {
10429 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
10430 low = low % TYPE_PRECISION (type);
10431 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
10432 return build_int_cst (type, 0);
10433 else
10434 low = TYPE_PRECISION (type) - 1;
10435 }
10436
10437 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10438 build_int_cst (type, low));
10439 }
10440
10441 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
10442 into x & ((unsigned)-1 >> c) for unsigned types. */
10443 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
10444 || (TYPE_UNSIGNED (type)
10445 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
10446 && host_integerp (arg1, false)
10447 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
10448 && host_integerp (TREE_OPERAND (arg0, 1), false)
10449 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
10450 {
10451 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
10452 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
10453 tree lshift;
10454 tree arg00;
10455
10456 if (low0 == low1)
10457 {
10458 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
10459
10460 lshift = build_int_cst (type, -1);
10461 lshift = int_const_binop (code, lshift, arg1, 0);
10462
10463 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
10464 }
10465 }
10466
10467 /* Rewrite an LROTATE_EXPR by a constant into an
10468 RROTATE_EXPR by a new constant. */
10469 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
10470 {
10471 tree tem = build_int_cst (TREE_TYPE (arg1),
10472 GET_MODE_BITSIZE (TYPE_MODE (type)));
10473 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
10474 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
10475 }
10476
10477 /* If we have a rotate of a bit operation with the rotate count and
10478 the second operand of the bit operation both constant,
10479 permute the two operations. */
10480 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10481 && (TREE_CODE (arg0) == BIT_AND_EXPR
10482 || TREE_CODE (arg0) == BIT_IOR_EXPR
10483 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10484 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10485 return fold_build2 (TREE_CODE (arg0), type,
10486 fold_build2 (code, type,
10487 TREE_OPERAND (arg0, 0), arg1),
10488 fold_build2 (code, type,
10489 TREE_OPERAND (arg0, 1), arg1));
10490
10491 /* Two consecutive rotates adding up to the width of the mode can
10492 be ignored. */
10493 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10494 && TREE_CODE (arg0) == RROTATE_EXPR
10495 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10496 && TREE_INT_CST_HIGH (arg1) == 0
10497 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
10498 && ((TREE_INT_CST_LOW (arg1)
10499 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
10500 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
10501 return TREE_OPERAND (arg0, 0);
10502
10503 return NULL_TREE;
10504
10505 case MIN_EXPR:
10506 if (operand_equal_p (arg0, arg1, 0))
10507 return omit_one_operand (type, arg0, arg1);
10508 if (INTEGRAL_TYPE_P (type)
10509 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
10510 return omit_one_operand (type, arg1, arg0);
10511 tem = fold_minmax (MIN_EXPR, type, arg0, arg1);
10512 if (tem)
10513 return tem;
10514 goto associate;
10515
10516 case MAX_EXPR:
10517 if (operand_equal_p (arg0, arg1, 0))
10518 return omit_one_operand (type, arg0, arg1);
10519 if (INTEGRAL_TYPE_P (type)
10520 && TYPE_MAX_VALUE (type)
10521 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
10522 return omit_one_operand (type, arg1, arg0);
10523 tem = fold_minmax (MAX_EXPR, type, arg0, arg1);
10524 if (tem)
10525 return tem;
10526 goto associate;
10527
10528 case TRUTH_ANDIF_EXPR:
10529 /* Note that the operands of this must be ints
10530 and their values must be 0 or 1.
10531 ("true" is a fixed value perhaps depending on the language.) */
10532 /* If first arg is constant zero, return it. */
10533 if (integer_zerop (arg0))
10534 return fold_convert (type, arg0);
/* FALLTHRU -- every TRUTH_AND_EXPR fold below is also valid for
   TRUTH_ANDIF_EXPR (the sequence-point-sensitive one checks CODE
   explicitly).  */
10535 case TRUTH_AND_EXPR:
10536 /* If either arg is constant true, drop it. */
10537 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10538 return non_lvalue (fold_convert (type, arg1));
10539 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10540 /* Preserve sequence points. */
10541 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10542 return non_lvalue (fold_convert (type, arg0));
10543 /* If second arg is constant zero, result is zero, but first arg
10544 must be evaluated. */
10545 if (integer_zerop (arg1))
10546 return omit_one_operand (type, arg1, arg0);
10547 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10548 case will be handled here. */
10549 if (integer_zerop (arg0))
10550 return omit_one_operand (type, arg0, arg1);
10551
10552 /* !X && X is always false. */
10553 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10554 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10555 return omit_one_operand (type, integer_zero_node, arg1);
10556 /* X && !X is always false. */
10557 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10558 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10559 return omit_one_operand (type, integer_zero_node, arg0);
10560
10561 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10562 means A >= Y && A != MAX, but in this case we know that
10563 A < X <= MAX. */
10564
/* Both directions are tried; the !operand_equal_p guard prevents an
   infinite folding loop when the helper returns its input unchanged.  */
10565 if (!TREE_SIDE_EFFECTS (arg0)
10566 && !TREE_SIDE_EFFECTS (arg1))
10567 {
10568 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
10569 if (tem && !operand_equal_p (tem, arg0, 0))
10570 return fold_build2 (code, type, tem, arg1);
10571
10572 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
10573 if (tem && !operand_equal_p (tem, arg1, 0))
10574 return fold_build2 (code, type, arg0, tem);
10575 }
10576
/* Shared tail for the truth AND and OR cases; the TRUTH_ORIF_EXPR /
   TRUTH_OR_EXPR case below reaches here via "goto truth_andor".  */
10577 truth_andor:
10578 /* We only do these simplifications if we are optimizing. */
10579 if (!optimize)
10580 return NULL_TREE;
10581
10582 /* Check for things like (A || B) && (A || C). We can convert this
10583 to A || (B && C). Note that either operator can be any of the four
10584 truth and/or operations and the transformation will still be
10585 valid. Also note that we only care about order for the
10586 ANDIF and ORIF operators. If B contains side effects, this
10587 might change the truth-value of A. */
10588 if (TREE_CODE (arg0) == TREE_CODE (arg1)
10589 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
10590 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
10591 || TREE_CODE (arg0) == TRUTH_AND_EXPR
10592 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
10593 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
10594 {
10595 tree a00 = TREE_OPERAND (arg0, 0)
10596 tree a01 = TREE_OPERAND (arg0, 1);
10597 tree a10 = TREE_OPERAND (arg1, 0);
10598 tree a11 = TREE_OPERAND (arg1, 1);
/* Operand reordering is only allowed when both the inner and the outer
   operators are the non-short-circuit forms.  */
10599 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
10600 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
10601 && (code == TRUTH_AND_EXPR
10602 || code == TRUTH_OR_EXPR));
10603
10604 if (operand_equal_p (a00, a10, 0))
10605 return fold_build2 (TREE_CODE (arg0), type, a00,
10606 fold_build2 (code, type, a01, a11));
10607 else if (commutative && operand_equal_p (a00, a11, 0))
10608 return fold_build2 (TREE_CODE (arg0), type, a00,
10609 fold_build2 (code, type, a01, a10));
10610 else if (commutative && operand_equal_p (a01, a10, 0))
10611 return fold_build2 (TREE_CODE (arg0), type, a01,
10612 fold_build2 (code, type, a00, a11));
10613
10614 /* This case is tricky because we must either have commutative
10615 operators or else A10 must not have side-effects. */
10616
10617 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
10618 && operand_equal_p (a01, a11, 0))
10619 return fold_build2 (TREE_CODE (arg0), type,
10620 fold_build2 (code, type, a00, a10),
10621 a01);
10622 }
10623
10624 /* See if we can build a range comparison. */
10625 if (0 != (tem = fold_range_test (code, type, op0, op1)))
10626 return tem;
10627
10628 /* Check for the possibility of merging component references. If our
10629 lhs is another similar operation, try to merge its rhs with our
10630 rhs. Then try to merge our lhs and rhs. */
10631 if (TREE_CODE (arg0) == code
10632 && 0 != (tem = fold_truthop (code, type,
10633 TREE_OPERAND (arg0, 1), arg1)))
10634 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10635
10636 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
10637 return tem;
10638
10639 return NULL_TREE;
10640
10641 case TRUTH_ORIF_EXPR:
10642 /* Note that the operands of this must be ints
10643 and their values must be 0 or true.
10644 ("true" is a fixed value perhaps depending on the language.) */
10645 /* If first arg is constant true, return it. */
10646 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10647 return fold_convert (type, arg0);
/* FALLTHRU -- every TRUTH_OR_EXPR fold below is also valid for
   TRUTH_ORIF_EXPR (the sequence-point-sensitive one checks CODE
   explicitly).  */
10648 case TRUTH_OR_EXPR:
10649 /* If either arg is constant zero, drop it. */
10650 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10651 return non_lvalue (fold_convert (type, arg1));
10652 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10653 /* Preserve sequence points. */
10654 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10655 return non_lvalue (fold_convert (type, arg0));
10656 /* If second arg is constant true, result is true, but we must
10657 evaluate first arg. */
10658 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10659 return omit_one_operand (type, arg1, arg0);
10660 /* Likewise for first arg, but note this only occurs here for
10661 TRUTH_OR_EXPR. */
10662 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10663 return omit_one_operand (type, arg0, arg1);
10664
10665 /* !X || X is always true. */
10666 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10667 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10668 return omit_one_operand (type, integer_one_node, arg1);
10669 /* X || !X is always true. */
10670 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10671 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10672 return omit_one_operand (type, integer_one_node, arg0);
10673
/* The distribution / range-test / truthop folds are shared with the
   AND cases above.  */
10674 goto truth_andor;
10675
10676 case TRUTH_XOR_EXPR:
/* Unlike AND/OR above, XOR has no short-circuit variant here, so no
   sequence-point guards are needed in these folds.  */
10677 /* If the second arg is constant zero, drop it. */
10678 if (integer_zerop (arg1))
10679 return non_lvalue (fold_convert (type, arg0));
10680 /* If the second arg is constant true, this is a logical inversion. */
10681 if (integer_onep (arg1))
10682 {
10683 /* Only call invert_truthvalue if operand is a truth value. */
10684 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
10685 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
10686 else
10687 tem = invert_truthvalue (arg0);
10688 return non_lvalue (fold_convert (type, tem));
10689 }
/* X ^ X == 0; the equal operand is kept via omit_one_operand for its
   side effects.  */
10690 /* Identical arguments cancel to zero. */
10691 if (operand_equal_p (arg0, arg1, 0))
10692 return omit_one_operand (type, integer_zero_node, arg0);
10693
10694 /* !X ^ X is always true. */
10695 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10696 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10697 return omit_one_operand (type, integer_one_node, arg1);
10698
10699 /* X ^ !X is always true. */
10700 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10701 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10702 return omit_one_operand (type, integer_one_node, arg0);
10703
10704 return NULL_TREE;
10705
10706 case EQ_EXPR:
10707 case NE_EXPR:
/* First apply the folds shared with the ordered comparisons
   (LT/GT/LE/GE use the same entry point below).  */
10708 tem = fold_comparison (code, type, op0, op1);
10709 if (tem != NULL_TREE)
10710 return tem;
10711
/* Boolean-typed operands compared against 0/1 reduce to the operand
   itself or its logical negation.  */
10712 /* bool_var != 0 becomes bool_var. */
10713 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10714 && code == NE_EXPR)
10715 return non_lvalue (fold_convert (type, arg0));
10716
10717 /* bool_var == 1 becomes bool_var. */
10718 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10719 && code == EQ_EXPR)
10720 return non_lvalue (fold_convert (type, arg0));
10721
10722 /* bool_var != 1 becomes !bool_var. */
10723 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10724 && code == NE_EXPR)
10725 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10726
10727 /* bool_var == 0 becomes !bool_var. */
10728 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10729 && code == EQ_EXPR)
10730 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
10731
10732 /* If this is an equality comparison of the address of a non-weak
10733 object against zero, then we know the result. */
10734 if (TREE_CODE (arg0) == ADDR_EXPR
10735 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10736 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10737 && integer_zerop (arg1))
10738 return constant_boolean_node (code != EQ_EXPR, type);
10739
10740 /* If this is an equality comparison of the address of two non-weak,
10741 unaliased symbols neither of which are extern (since we do not
10742 have access to attributes for externs), then we know the result. */
10743 if (TREE_CODE (arg0) == ADDR_EXPR
10744 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
10745 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
10746 && ! lookup_attribute ("alias",
10747 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
10748 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
10749 && TREE_CODE (arg1) == ADDR_EXPR
10750 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
10751 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
10752 && ! lookup_attribute ("alias",
10753 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
10754 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
10755 {
10756 /* We know that we're looking at the address of two
10757 non-weak, unaliased, static _DECL nodes.
10758
10759 It is both wasteful and incorrect to call operand_equal_p
10760 to compare the two ADDR_EXPR nodes. It is wasteful in that
10761 all we need to do is test pointer equality for the arguments
10762 to the two ADDR_EXPR nodes. It is incorrect to use
10763 operand_equal_p as that function is NOT equivalent to a
10764 C equality test. It can in fact return false for two
10765 objects which would test as equal using the C equality
10766 operator. */
10767 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
10768 return constant_boolean_node (equal
10769 ? code == EQ_EXPR : code != EQ_EXPR,
10770 type);
10771 }
10772
10773 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
10774 a MINUS_EXPR of a constant, we can convert it into a comparison with
10775 a revised constant as long as no overflow occurs. */
10776 if (TREE_CODE (arg1) == INTEGER_CST
10777 && (TREE_CODE (arg0) == PLUS_EXPR
10778 || TREE_CODE (arg0) == MINUS_EXPR)
10779 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10780 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
10781 ? MINUS_EXPR : PLUS_EXPR,
10782 fold_convert (TREE_TYPE (arg0), arg1),
10783 TREE_OPERAND (arg0, 1), 0))
10784 && ! TREE_CONSTANT_OVERFLOW (tem))
10785 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10786
10787 /* Similarly for a NEGATE_EXPR. */
10788 if (TREE_CODE (arg0) == NEGATE_EXPR
10789 && TREE_CODE (arg1) == INTEGER_CST
10790 && 0 != (tem = negate_expr (arg1))
10791 && TREE_CODE (tem) == INTEGER_CST
10792 && ! TREE_CONSTANT_OVERFLOW (tem))
10793 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
10794
10795 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
10796 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10797 && TREE_CODE (arg1) == INTEGER_CST
10798 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10799 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
10800 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg0),
10801 fold_convert (TREE_TYPE (arg0), arg1),
10802 TREE_OPERAND (arg0, 1)));
10803
10804 /* If we have X - Y == 0, we can convert that to X == Y and similarly
10805 for !=. Don't do this for ordered comparisons due to overflow. */
10806 if (TREE_CODE (arg0) == MINUS_EXPR
10807 && integer_zerop (arg1))
10808 return fold_build2 (code, type,
10809 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
10810
10811 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
10812 if (TREE_CODE (arg0) == ABS_EXPR
10813 && (integer_zerop (arg1) || real_zerop (arg1)))
10814 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
10815
10816 /* If this is an EQ or NE comparison with zero and ARG0 is
10817 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10818 two operations, but the latter can be done in one less insn
10819 on machines that have only two-operand insns or on which a
10820 constant cannot be the first operand. */
10821 if (TREE_CODE (arg0) == BIT_AND_EXPR
10822 && integer_zerop (arg1))
10823 {
10824 tree arg00 = TREE_OPERAND (arg0, 0);
10825 tree arg01 = TREE_OPERAND (arg0, 1);
10826 if (TREE_CODE (arg00) == LSHIFT_EXPR
10827 && integer_onep (TREE_OPERAND (arg00, 0)))
10828 return
10829 fold_build2 (code, type,
10830 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10831 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
10832 arg01, TREE_OPERAND (arg00, 1)),
10833 fold_convert (TREE_TYPE (arg0),
10834 integer_one_node)),
10835 arg1);
10836 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
10837 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
10838 return
10839 fold_build2 (code, type,
10840 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10841 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
10842 arg00, TREE_OPERAND (arg01, 1)),
10843 fold_convert (TREE_TYPE (arg0),
10844 integer_one_node)),
10845 arg1);
10846 }
10847
10848 /* If this is an NE or EQ comparison of zero against the result of a
10849 signed MOD operation whose second operand is a power of 2, make
10850 the MOD operation unsigned since it is simpler and equivalent. */
10851 if (integer_zerop (arg1)
10852 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10853 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10854 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10855 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10856 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10857 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10858 {
10859 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
10860 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
10861 fold_convert (newtype,
10862 TREE_OPERAND (arg0, 0)),
10863 fold_convert (newtype,
10864 TREE_OPERAND (arg0, 1)));
10865
10866 return fold_build2 (code, type, newmod,
10867 fold_convert (newtype, arg1));
10868 }
10869
10870 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10871 C1 is a valid shift constant, and C2 is a power of two, i.e.
10872 a single bit. */
10873 if (TREE_CODE (arg0) == BIT_AND_EXPR
10874 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10875 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10876 == INTEGER_CST
10877 && integer_pow2p (TREE_OPERAND (arg0, 1))
10878 && integer_zerop (arg1))
10879 {
10880 tree itype = TREE_TYPE (arg0);
10881 unsigned HOST_WIDE_INT prec = TYPE_PRECISION (itype);
10882 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10883
10884 /* Check for a valid shift count. */
10885 if (TREE_INT_CST_HIGH (arg001) == 0
10886 && TREE_INT_CST_LOW (arg001) < prec)
10887 {
10888 tree arg01 = TREE_OPERAND (arg0, 1);
10889 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10890 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10891 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10892 can be rewritten as (X & (C2 << C1)) != 0. */
10893 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10894 {
10895 tem = fold_build2 (LSHIFT_EXPR, itype, arg01, arg001);
10896 tem = fold_build2 (BIT_AND_EXPR, itype, arg000, tem);
10897 return fold_build2 (code, type, tem, arg1);
10898 }
10899 /* Otherwise, for signed (arithmetic) shifts,
10900 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10901 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10902 else if (!TYPE_UNSIGNED (itype))
10903 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10904 arg000, build_int_cst (itype, 0));
10905 /* Otherwise, of unsigned (logical) shifts,
10906 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10907 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10908 else
10909 return omit_one_operand (type,
10910 code == EQ_EXPR ? integer_one_node
10911 : integer_zero_node,
10912 arg000);
10913 }
10914 }
10915
10916 /* If this is an NE comparison of zero with an AND of one, remove the
10917 comparison since the AND will give the correct value. */
10918 if (code == NE_EXPR
10919 && integer_zerop (arg1)
10920 && TREE_CODE (arg0) == BIT_AND_EXPR
10921 && integer_onep (TREE_OPERAND (arg0, 1)))
10922 return fold_convert (type, arg0);
10923
10924 /* If we have (A & C) == C where C is a power of 2, convert this into
10925 (A & C) != 0. Similarly for NE_EXPR. */
10926 if (TREE_CODE (arg0) == BIT_AND_EXPR
10927 && integer_pow2p (TREE_OPERAND (arg0, 1))
10928 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10929 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10930 arg0, fold_convert (TREE_TYPE (arg0),
10931 integer_zero_node));
10932
10933 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
10934 bit, then fold the expression into A < 0 or A >= 0. */
10935 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
10936 if (tem)
10937 return tem;
10938
10939 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10940 Similarly for NE_EXPR. */
10941 if (TREE_CODE (arg0) == BIT_AND_EXPR
10942 && TREE_CODE (arg1) == INTEGER_CST
10943 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10944 {
10945 tree notc = fold_build1 (BIT_NOT_EXPR,
10946 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10947 TREE_OPERAND (arg0, 1));
10948 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10949 arg1, notc);
10950 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10951 if (integer_nonzerop (dandnotc))
10952 return omit_one_operand (type, rslt, arg0);
10953 }
10954
10955 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
10956 Similarly for NE_EXPR. */
10957 if (TREE_CODE (arg0) == BIT_IOR_EXPR
10958 && TREE_CODE (arg1) == INTEGER_CST
10959 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10960 {
10961 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
10962 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
10963 TREE_OPERAND (arg0, 1), notd);
10964 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10965 if (integer_nonzerop (candnotd))
10966 return omit_one_operand (type, rslt, arg0);
10967 }
10968
10969 /* If this is a comparison of a field, we may be able to simplify it. */
10970 if ((TREE_CODE (arg0) == COMPONENT_REF
10971 || TREE_CODE (arg0) == BIT_FIELD_REF)
10972 /* Handle the constant case even without -O
10973 to make sure the warnings are given. */
10974 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10975 {
10976 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
10977 if (t1)
10978 return t1;
10979 }
10980
10981 /* Optimize comparisons of strlen vs zero to a compare of the
10982 first character of the string vs zero. To wit,
10983 strlen(ptr) == 0 => *ptr == 0
10984 strlen(ptr) != 0 => *ptr != 0
10985 Other cases should reduce to one of these two (or a constant)
10986 due to the return value of strlen being unsigned. */
10987 if (TREE_CODE (arg0) == CALL_EXPR
10988 && integer_zerop (arg1))
10989 {
10990 tree fndecl = get_callee_fndecl (arg0);
10991 tree arglist;
10992
10993 if (fndecl
10994 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10995 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10996 && (arglist = TREE_OPERAND (arg0, 1))
10997 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
10998 && ! TREE_CHAIN (arglist))
10999 {
11000 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
11001 return fold_build2 (code, type, iref,
11002 build_int_cst (TREE_TYPE (iref), 0));
11003 }
11004 }
11005
11006 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
11007 of X. Similarly fold (X >> C) == 0 into X >= 0. */
11008 if (TREE_CODE (arg0) == RSHIFT_EXPR
11009 && integer_zerop (arg1)
11010 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11011 {
11012 tree arg00 = TREE_OPERAND (arg0, 0);
11013 tree arg01 = TREE_OPERAND (arg0, 1);
11014 tree itype = TREE_TYPE (arg00);
11015 if (TREE_INT_CST_HIGH (arg01) == 0
11016 && TREE_INT_CST_LOW (arg01)
11017 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
11018 {
11019 if (TYPE_UNSIGNED (itype))
11020 {
11021 itype = lang_hooks.types.signed_type (itype);
11022 arg00 = fold_convert (itype, arg00);
11023 }
11024 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
11025 type, arg00, build_int_cst (itype, 0));
11026 }
11027 }
11028
11029 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
11030 if (integer_zerop (arg1)
11031 && TREE_CODE (arg0) == BIT_XOR_EXPR)
11032 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11033 TREE_OPERAND (arg0, 1));
11034
11035 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
11036 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11037 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11038 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11039 build_int_cst (TREE_TYPE (arg1), 0));
11040 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
11041 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11042 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11043 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11044 return fold_build2 (code, type, TREE_OPERAND (arg0, 1),
11045 build_int_cst (TREE_TYPE (arg1), 0));
11046
11047 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
11048 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11049 && TREE_CODE (arg1) == INTEGER_CST
11050 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11051 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
11052 fold_build2 (BIT_XOR_EXPR, TREE_TYPE (arg1),
11053 TREE_OPERAND (arg0, 1), arg1));
11054
11055 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
11056 (X & C) == 0 when C is a single bit. */
11057 if (TREE_CODE (arg0) == BIT_AND_EXPR
11058 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
11059 && integer_zerop (arg1)
11060 && integer_pow2p (TREE_OPERAND (arg0, 1)))
11061 {
11062 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
11063 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
11064 TREE_OPERAND (arg0, 1));
11065 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
11066 type, tem, arg1);
11067 }
11068
11069 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
11070 constant C is a power of two, i.e. a single bit. */
11071 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11072 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11073 && integer_zerop (arg1)
11074 && integer_pow2p (TREE_OPERAND (arg0, 1))
11075 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11076 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11077 {
11078 tree arg00 = TREE_OPERAND (arg0, 0);
11079 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11080 arg00, build_int_cst (TREE_TYPE (arg00), 0));
11081 }
11082
11083 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
11084 when is C is a power of two, i.e. a single bit. */
11085 if (TREE_CODE (arg0) == BIT_AND_EXPR
11086 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
11087 && integer_zerop (arg1)
11088 && integer_pow2p (TREE_OPERAND (arg0, 1))
11089 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11090 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
11091 {
11092 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
11093 tem = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg000),
11094 arg000, TREE_OPERAND (arg0, 1));
11095 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
11096 tem, build_int_cst (TREE_TYPE (tem), 0));
11097 }
11098
/* If ARG0 is provably nonzero, X == 0 folds to false and X != 0 to
   true; ARG0 is kept via omit_one_operand for its side effects.  */
11099 if (integer_zerop (arg1)
11100 && tree_expr_nonzero_p (arg0))
11101 {
11102 tree res = constant_boolean_node (code==NE_EXPR, type);
11103 return omit_one_operand (type, res, arg0);
11104 }
11105
11106 /* Fold -X op -Y as X op Y, where op is eq/ne. */
11107 if (TREE_CODE (arg0) == NEGATE_EXPR
11108 && TREE_CODE (arg1) == NEGATE_EXPR)
11109 return fold_build2 (code, type,
11110 TREE_OPERAND (arg0, 0),
11111 TREE_OPERAND (arg1, 0));
11112
11113 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
11114 if (TREE_CODE (arg0) == BIT_AND_EXPR
11115 && TREE_CODE (arg1) == BIT_AND_EXPR)
11116 {
11117 tree arg00 = TREE_OPERAND (arg0, 0);
11118 tree arg01 = TREE_OPERAND (arg0, 1);
11119 tree arg10 = TREE_OPERAND (arg1, 0);
11120 tree arg11 = TREE_OPERAND (arg1, 1);
11121 tree itype = TREE_TYPE (arg0);
11122
11123 if (operand_equal_p (arg01, arg11, 0))
11124 return fold_build2 (code, type,
11125 fold_build2 (BIT_AND_EXPR, itype,
11126 fold_build2 (BIT_XOR_EXPR, itype,
11127 arg00, arg10),
11128 arg01),
11129 build_int_cst (itype, 0));
11130
11131 if (operand_equal_p (arg01, arg10, 0))
11132 return fold_build2 (code, type,
11133 fold_build2 (BIT_AND_EXPR, itype,
11134 fold_build2 (BIT_XOR_EXPR, itype,
11135 arg00, arg11),
11136 arg01),
11137 build_int_cst (itype, 0));
11138
11139 if (operand_equal_p (arg00, arg11, 0))
11140 return fold_build2 (code, type,
11141 fold_build2 (BIT_AND_EXPR, itype,
11142 fold_build2 (BIT_XOR_EXPR, itype,
11143 arg01, arg10),
11144 arg00),
11145 build_int_cst (itype, 0));
11146
11147 if (operand_equal_p (arg00, arg10, 0))
11148 return fold_build2 (code, type,
11149 fold_build2 (BIT_AND_EXPR, itype,
11150 fold_build2 (BIT_XOR_EXPR, itype,
11151 arg01, arg11),
11152 arg00),
11153 build_int_cst (itype, 0));
11154 }
11155
11156 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11157 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11158 {
11159 tree arg00 = TREE_OPERAND (arg0, 0);
11160 tree arg01 = TREE_OPERAND (arg0, 1);
11161 tree arg10 = TREE_OPERAND (arg1, 0);
11162 tree arg11 = TREE_OPERAND (arg1, 1);
11163 tree itype = TREE_TYPE (arg0);
11164
11165 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11166 operand_equal_p guarantees no side-effects so we don't need
11167 to use omit_one_operand on Z. */
11168 if (operand_equal_p (arg01, arg11, 0))
11169 return fold_build2 (code, type, arg00, arg10);
11170 if (operand_equal_p (arg01, arg10, 0))
11171 return fold_build2 (code, type, arg00, arg11);
11172 if (operand_equal_p (arg00, arg11, 0))
11173 return fold_build2 (code, type, arg01, arg10);
11174 if (operand_equal_p (arg00, arg10, 0))
11175 return fold_build2 (code, type, arg01, arg11);
11176
11177 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11178 if (TREE_CODE (arg01) == INTEGER_CST
11179 && TREE_CODE (arg11) == INTEGER_CST)
11180 return fold_build2 (code, type,
11181 fold_build2 (BIT_XOR_EXPR, itype, arg00,
11182 fold_build2 (BIT_XOR_EXPR, itype,
11183 arg01, arg11)),
11184 arg10);
11185 }
11186 return NULL_TREE;
11187
11188 case LT_EXPR:
11189 case GT_EXPR:
11190 case LE_EXPR:
11191 case GE_EXPR:
/* First apply the folds shared with EQ/NE.  */
11192 tem = fold_comparison (code, type, op0, op1);
11193 if (tem != NULL_TREE)
11194 return tem;
11195
11196 /* Transform comparisons of the form X +- C CMP X. */
/* For integer operands this is only valid when signed overflow is
   undefined -- hence the !TYPE_UNSIGNED and !(flag_wrapv || flag_trapv)
   guards; for reals, only when signaling NaNs need not be honored.  */
11197 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11198 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11199 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11200 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
11201 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11202 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
11203 && !(flag_wrapv || flag_trapv))))
11204 {
11205 tree arg01 = TREE_OPERAND (arg0, 1);
11206 enum tree_code code0 = TREE_CODE (arg0);
11207 int is_positive;
11208
/* Sign of the constant C: -1 or 1 for reals, -1/0/1 for integers.  */
11209 if (TREE_CODE (arg01) == REAL_CST)
11210 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11211 else
11212 is_positive = tree_int_cst_sgn (arg01);
11213
11214 /* (X - c) > X becomes false. */
11215 if (code == GT_EXPR
11216 && ((code0 == MINUS_EXPR && is_positive >= 0)
11217 || (code0 == PLUS_EXPR && is_positive <= 0)))
11218 return constant_boolean_node (0, type);
11219
11220 /* Likewise (X + c) < X becomes false. */
11221 if (code == LT_EXPR
11222 && ((code0 == PLUS_EXPR && is_positive >= 0)
11223 || (code0 == MINUS_EXPR && is_positive <= 0)))
11224 return constant_boolean_node (0, type);
11225
/* The LE/GE folds additionally require no NaNs, since X <= X would
   otherwise be false for X a NaN.  */
11226 /* Convert (X - c) <= X to true. */
11227 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11228 && code == LE_EXPR
11229 && ((code0 == MINUS_EXPR && is_positive >= 0)
11230 || (code0 == PLUS_EXPR && is_positive <= 0)))
11231 return constant_boolean_node (1, type);
11232
11233 /* Convert (X + c) >= X to true. */
11234 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
11235 && code == GE_EXPR
11236 && ((code0 == PLUS_EXPR && is_positive >= 0)
11237 || (code0 == MINUS_EXPR && is_positive <= 0)))
11238 return constant_boolean_node (1, type);
11239
/* The strict-sign (is_positive > 0 / < 0) folds below hold only for
   integer constants, where c != 0 makes the comparison decidable.  */
11240 if (TREE_CODE (arg01) == INTEGER_CST)
11241 {
11242 /* Convert X + c > X and X - c < X to true for integers. */
11243 if (code == GT_EXPR
11244 && ((code0 == PLUS_EXPR && is_positive > 0)
11245 || (code0 == MINUS_EXPR && is_positive < 0)))
11246 return constant_boolean_node (1, type);
11247
11248 if (code == LT_EXPR
11249 && ((code0 == MINUS_EXPR && is_positive > 0)
11250 || (code0 == PLUS_EXPR && is_positive < 0)))
11251 return constant_boolean_node (1, type);
11252
11253 /* Convert X + c <= X and X - c >= X to false for integers. */
11254 if (code == LE_EXPR
11255 && ((code0 == PLUS_EXPR && is_positive > 0)
11256 || (code0 == MINUS_EXPR && is_positive < 0)))
11257 return constant_boolean_node (0, type);
11258
11259 if (code == GE_EXPR
11260 && ((code0 == MINUS_EXPR && is_positive > 0)
11261 || (code0 == PLUS_EXPR && is_positive < 0)))
11262 return constant_boolean_node (0, type);
11263 }
11264 }
11265
11266 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
11267 This transformation affects the cases which are handled in later
11268 optimizations involving comparisons with non-negative constants. */
/* The C > 0 requirement (tree_int_cst_sgn > 0) guarantees C - 1 cannot
   underflow, so the rewritten constant is always representable.  */
11269 if (TREE_CODE (arg1) == INTEGER_CST
11270 && TREE_CODE (arg0) != INTEGER_CST
11271 && tree_int_cst_sgn (arg1) > 0)
11272 {
11273 if (code == GE_EXPR)
11274 {
11275 arg1 = const_binop (MINUS_EXPR, arg1,
11276 build_int_cst (TREE_TYPE (arg1), 1), 0);
11277 return fold_build2 (GT_EXPR, type, arg0,
11278 fold_convert (TREE_TYPE (arg0), arg1));
11279 }
11280 if (code == LT_EXPR)
11281 {
11282 arg1 = const_binop (MINUS_EXPR, arg1,
11283 build_int_cst (TREE_TYPE (arg1), 1), 0);
11284 return fold_build2 (LE_EXPR, type, arg0,
11285 fold_convert (TREE_TYPE (arg0), arg1));
11286 }
11287 }
11288
11289 /* Comparisons with the highest or lowest possible integer of
11290 the specified precision will have known values. */
11291 {
11292 tree arg1_type = TREE_TYPE (arg1);
11293 unsigned int width = TYPE_PRECISION (arg1_type);
11294
11295 if (TREE_CODE (arg1) == INTEGER_CST
11296 && ! TREE_CONSTANT_OVERFLOW (arg1)
11297 && width <= 2 * HOST_BITS_PER_WIDE_INT
11298 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
11299 {
11300 HOST_WIDE_INT signed_max_hi;
11301 unsigned HOST_WIDE_INT signed_max_lo;
11302 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
11303
11304 if (width <= HOST_BITS_PER_WIDE_INT)
11305 {
11306 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11307 - 1;
11308 signed_max_hi = 0;
11309 max_hi = 0;
11310
11311 if (TYPE_UNSIGNED (arg1_type))
11312 {
11313 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11314 min_lo = 0;
11315 min_hi = 0;
11316 }
11317 else
11318 {
11319 max_lo = signed_max_lo;
11320 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11321 min_hi = -1;
11322 }
11323 }
11324 else
11325 {
11326 width -= HOST_BITS_PER_WIDE_INT;
11327 signed_max_lo = -1;
11328 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
11329 - 1;
11330 max_lo = -1;
11331 min_lo = 0;
11332
11333 if (TYPE_UNSIGNED (arg1_type))
11334 {
11335 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
11336 min_hi = 0;
11337 }
11338 else
11339 {
11340 max_hi = signed_max_hi;
11341 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
11342 }
11343 }
11344
11345 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
11346 && TREE_INT_CST_LOW (arg1) == max_lo)
11347 switch (code)
11348 {
11349 case GT_EXPR:
11350 return omit_one_operand (type, integer_zero_node, arg0);
11351
11352 case GE_EXPR:
11353 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11354
11355 case LE_EXPR:
11356 return omit_one_operand (type, integer_one_node, arg0);
11357
11358 case LT_EXPR:
11359 return fold_build2 (NE_EXPR, type, arg0, arg1);
11360
11361 /* The GE_EXPR and LT_EXPR cases above are not normally
11362 reached because of previous transformations. */
11363
11364 default:
11365 break;
11366 }
11367 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11368 == max_hi
11369 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
11370 switch (code)
11371 {
11372 case GT_EXPR:
11373 arg1 = const_binop (PLUS_EXPR, arg1,
11374 build_int_cst (TREE_TYPE (arg1), 1), 0);
11375 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11376 case LE_EXPR:
11377 arg1 = const_binop (PLUS_EXPR, arg1,
11378 build_int_cst (TREE_TYPE (arg1), 1), 0);
11379 return fold_build2 (NE_EXPR, type, arg0, arg1);
11380 default:
11381 break;
11382 }
11383 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11384 == min_hi
11385 && TREE_INT_CST_LOW (arg1) == min_lo)
11386 switch (code)
11387 {
11388 case LT_EXPR:
11389 return omit_one_operand (type, integer_zero_node, arg0);
11390
11391 case LE_EXPR:
11392 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11393
11394 case GE_EXPR:
11395 return omit_one_operand (type, integer_one_node, arg0);
11396
11397 case GT_EXPR:
11398 return fold_build2 (NE_EXPR, type, op0, op1);
11399
11400 default:
11401 break;
11402 }
11403 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
11404 == min_hi
11405 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
11406 switch (code)
11407 {
11408 case GE_EXPR:
11409 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11410 return fold_build2 (NE_EXPR, type, arg0, arg1);
11411 case LT_EXPR:
11412 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
11413 return fold_build2 (EQ_EXPR, type, arg0, arg1);
11414 default:
11415 break;
11416 }
11417
11418 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
11419 && TREE_INT_CST_LOW (arg1) == signed_max_lo
11420 && TYPE_UNSIGNED (arg1_type)
11421 /* We will flip the signedness of the comparison operator
11422 associated with the mode of arg1, so the sign bit is
11423 specified by this mode. Check that arg1 is the signed
11424 max associated with this sign bit. */
11425 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
11426 /* signed_type does not work on pointer types. */
11427 && INTEGRAL_TYPE_P (arg1_type))
11428 {
11429 /* The following case also applies to X < signed_max+1
11430 and X >= signed_max+1 because previous transformations. */
11431 if (code == LE_EXPR || code == GT_EXPR)
11432 {
11433 tree st0, st1;
11434 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
11435 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
11436 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
11437 type, fold_convert (st0, arg0),
11438 build_int_cst (st1, 0));
11439 }
11440 }
11441 }
11442 }
11443
11444 /* If we are comparing an ABS_EXPR with a constant, we can
11445 convert all the cases into explicit comparisons, but they may
11446 well not be faster than doing the ABS and one comparison.
11447 But ABS (X) <= C is a range comparison, which becomes a subtraction
11448 and a comparison, and is probably faster. */
11449 if (code == LE_EXPR
11450 && TREE_CODE (arg1) == INTEGER_CST
11451 && TREE_CODE (arg0) == ABS_EXPR
11452 && ! TREE_SIDE_EFFECTS (arg0)
11453 && (0 != (tem = negate_expr (arg1)))
11454 && TREE_CODE (tem) == INTEGER_CST
11455 && ! TREE_CONSTANT_OVERFLOW (tem))
11456 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11457 build2 (GE_EXPR, type,
11458 TREE_OPERAND (arg0, 0), tem),
11459 build2 (LE_EXPR, type,
11460 TREE_OPERAND (arg0, 0), arg1));
11461
11462 /* Convert ABS_EXPR<x> >= 0 to true. */
11463 if (code == GE_EXPR
11464 && tree_expr_nonnegative_p (arg0)
11465 && (integer_zerop (arg1)
11466 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11467 && real_zerop (arg1))))
11468 return omit_one_operand (type, integer_one_node, arg0);
11469
11470 /* Convert ABS_EXPR<x> < 0 to false. */
11471 if (code == LT_EXPR
11472 && tree_expr_nonnegative_p (arg0)
11473 && (integer_zerop (arg1) || real_zerop (arg1)))
11474 return omit_one_operand (type, integer_zero_node, arg0);
11475
11476 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11477 and similarly for >= into !=. */
11478 if ((code == LT_EXPR || code == GE_EXPR)
11479 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11480 && TREE_CODE (arg1) == LSHIFT_EXPR
11481 && integer_onep (TREE_OPERAND (arg1, 0)))
11482 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11483 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11484 TREE_OPERAND (arg1, 1)),
11485 build_int_cst (TREE_TYPE (arg0), 0));
11486
11487 if ((code == LT_EXPR || code == GE_EXPR)
11488 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11489 && (TREE_CODE (arg1) == NOP_EXPR
11490 || TREE_CODE (arg1) == CONVERT_EXPR)
11491 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11492 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11493 return
11494 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11495 fold_convert (TREE_TYPE (arg0),
11496 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11497 TREE_OPERAND (TREE_OPERAND (arg1, 0),
11498 1))),
11499 build_int_cst (TREE_TYPE (arg0), 0));
11500
11501 return NULL_TREE;
11502
11503 case UNORDERED_EXPR:
11504 case ORDERED_EXPR:
11505 case UNLT_EXPR:
11506 case UNLE_EXPR:
11507 case UNGT_EXPR:
11508 case UNGE_EXPR:
11509 case UNEQ_EXPR:
11510 case LTGT_EXPR:
11511 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
11512 {
11513 t1 = fold_relational_const (code, type, arg0, arg1);
11514 if (t1 != NULL_TREE)
11515 return t1;
11516 }
11517
11518 /* If the first operand is NaN, the result is constant. */
11519 if (TREE_CODE (arg0) == REAL_CST
11520 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
11521 && (code != LTGT_EXPR || ! flag_trapping_math))
11522 {
11523 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11524 ? integer_zero_node
11525 : integer_one_node;
11526 return omit_one_operand (type, t1, arg1);
11527 }
11528
11529 /* If the second operand is NaN, the result is constant. */
11530 if (TREE_CODE (arg1) == REAL_CST
11531 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
11532 && (code != LTGT_EXPR || ! flag_trapping_math))
11533 {
11534 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
11535 ? integer_zero_node
11536 : integer_one_node;
11537 return omit_one_operand (type, t1, arg0);
11538 }
11539
11540 /* Simplify unordered comparison of something with itself. */
11541 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
11542 && operand_equal_p (arg0, arg1, 0))
11543 return constant_boolean_node (1, type);
11544
11545 if (code == LTGT_EXPR
11546 && !flag_trapping_math
11547 && operand_equal_p (arg0, arg1, 0))
11548 return constant_boolean_node (0, type);
11549
11550 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11551 {
11552 tree targ0 = strip_float_extensions (arg0);
11553 tree targ1 = strip_float_extensions (arg1);
11554 tree newtype = TREE_TYPE (targ0);
11555
11556 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11557 newtype = TREE_TYPE (targ1);
11558
11559 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11560 return fold_build2 (code, type, fold_convert (newtype, targ0),
11561 fold_convert (newtype, targ1));
11562 }
11563
11564 return NULL_TREE;
11565
11566 case COMPOUND_EXPR:
11567 /* When pedantic, a compound expression can be neither an lvalue
11568 nor an integer constant expression. */
11569 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11570 return NULL_TREE;
11571 /* Don't let (0, 0) be null pointer constant. */
11572 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11573 : fold_convert (type, arg1);
11574 return pedantic_non_lvalue (tem);
11575
11576 case COMPLEX_EXPR:
11577 if ((TREE_CODE (arg0) == REAL_CST
11578 && TREE_CODE (arg1) == REAL_CST)
11579 || (TREE_CODE (arg0) == INTEGER_CST
11580 && TREE_CODE (arg1) == INTEGER_CST))
11581 return build_complex (type, arg0, arg1);
11582 return NULL_TREE;
11583
11584 case ASSERT_EXPR:
11585 /* An ASSERT_EXPR should never be passed to fold_binary. */
11586 gcc_unreachable ();
11587
11588 default:
11589 return NULL_TREE;
11590 } /* switch (code) */
11591 }
11592
11593 /* Callback for walk_tree, looking for LABEL_EXPR.
11594 Returns tree TP if it is LABEL_EXPR. Otherwise it returns NULL_TREE.
11595 Do not check the sub-tree of GOTO_EXPR. */
11596
11597 static tree
11598 contains_label_1 (tree *tp,
11599 int *walk_subtrees,
11600 void *data ATTRIBUTE_UNUSED)
11601 {
11602 switch (TREE_CODE (*tp))
11603 {
11604 case LABEL_EXPR:
11605 return *tp;
11606 case GOTO_EXPR:
11607 *walk_subtrees = 0;
11608 /* no break */
11609 default:
11610 return NULL_TREE;
11611 }
11612 }
11613
11614 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
11615 accessible from outside the sub-tree. Returns NULL_TREE if no
11616 addressable label is found. */
11617
11618 static bool
11619 contains_label_p (tree st)
11620 {
11621 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
11622 }
11623
11624 /* Fold a ternary expression of code CODE and type TYPE with operands
11625 OP0, OP1, and OP2. Return the folded expression if folding is
11626 successful. Otherwise, return NULL_TREE. */
11627
11628 tree
11629 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
11630 {
11631 tree tem;
11632 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
11633 enum tree_code_class kind = TREE_CODE_CLASS (code);
11634
11635 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11636 && TREE_CODE_LENGTH (code) == 3);
11637
11638 /* Strip any conversions that don't change the mode. This is safe
11639 for every expression, except for a comparison expression because
11640 its signedness is derived from its operands. So, in the latter
11641 case, only strip conversions that don't change the signedness.
11642
11643 Note that this is done as an internal manipulation within the
11644 constant folder, in order to find the simplest representation of
11645 the arguments so that their form can be studied. In any cases,
11646 the appropriate type conversions should be put back in the tree
11647 that will get out of the constant folder. */
11648 if (op0)
11649 {
11650 arg0 = op0;
11651 STRIP_NOPS (arg0);
11652 }
11653
11654 if (op1)
11655 {
11656 arg1 = op1;
11657 STRIP_NOPS (arg1);
11658 }
11659
11660 switch (code)
11661 {
11662 case COMPONENT_REF:
11663 if (TREE_CODE (arg0) == CONSTRUCTOR
11664 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11665 {
11666 unsigned HOST_WIDE_INT idx;
11667 tree field, value;
11668 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11669 if (field == arg1)
11670 return value;
11671 }
11672 return NULL_TREE;
11673
11674 case COND_EXPR:
11675 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11676 so all simple results must be passed through pedantic_non_lvalue. */
11677 if (TREE_CODE (arg0) == INTEGER_CST)
11678 {
11679 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11680 tem = integer_zerop (arg0) ? op2 : op1;
11681 /* Only optimize constant conditions when the selected branch
11682 has the same type as the COND_EXPR. This avoids optimizing
11683 away "c ? x : throw", where the throw has a void type.
11684 Avoid throwing away that operand which contains label. */
11685 if ((!TREE_SIDE_EFFECTS (unused_op)
11686 || !contains_label_p (unused_op))
11687 && (! VOID_TYPE_P (TREE_TYPE (tem))
11688 || VOID_TYPE_P (type)))
11689 return pedantic_non_lvalue (tem);
11690 return NULL_TREE;
11691 }
11692 if (operand_equal_p (arg1, op2, 0))
11693 return pedantic_omit_one_operand (type, arg1, arg0);
11694
11695 /* If we have A op B ? A : C, we may be able to convert this to a
11696 simpler expression, depending on the operation and the values
11697 of B and C. Signed zeros prevent all of these transformations,
11698 for reasons given above each one.
11699
11700 Also try swapping the arguments and inverting the conditional. */
11701 if (COMPARISON_CLASS_P (arg0)
11702 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11703 arg1, TREE_OPERAND (arg0, 1))
11704 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
11705 {
11706 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
11707 if (tem)
11708 return tem;
11709 }
11710
11711 if (COMPARISON_CLASS_P (arg0)
11712 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11713 op2,
11714 TREE_OPERAND (arg0, 1))
11715 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
11716 {
11717 tem = fold_truth_not_expr (arg0);
11718 if (tem && COMPARISON_CLASS_P (tem))
11719 {
11720 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
11721 if (tem)
11722 return tem;
11723 }
11724 }
11725
11726 /* If the second operand is simpler than the third, swap them
11727 since that produces better jump optimization results. */
11728 if (truth_value_p (TREE_CODE (arg0))
11729 && tree_swap_operands_p (op1, op2, false))
11730 {
11731 /* See if this can be inverted. If it can't, possibly because
11732 it was a floating-point inequality comparison, don't do
11733 anything. */
11734 tem = fold_truth_not_expr (arg0);
11735 if (tem)
11736 return fold_build3 (code, type, tem, op2, op1);
11737 }
11738
11739 /* Convert A ? 1 : 0 to simply A. */
11740 if (integer_onep (op1)
11741 && integer_zerop (op2)
11742 /* If we try to convert OP0 to our type, the
11743 call to fold will try to move the conversion inside
11744 a COND, which will recurse. In that case, the COND_EXPR
11745 is probably the best choice, so leave it alone. */
11746 && type == TREE_TYPE (arg0))
11747 return pedantic_non_lvalue (arg0);
11748
11749 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11750 over COND_EXPR in cases such as floating point comparisons. */
11751 if (integer_zerop (op1)
11752 && integer_onep (op2)
11753 && truth_value_p (TREE_CODE (arg0)))
11754 return pedantic_non_lvalue (fold_convert (type,
11755 invert_truthvalue (arg0)));
11756
11757 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11758 if (TREE_CODE (arg0) == LT_EXPR
11759 && integer_zerop (TREE_OPERAND (arg0, 1))
11760 && integer_zerop (op2)
11761 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11762 {
11763 /* sign_bit_p only checks ARG1 bits within A's precision.
11764 If <sign bit of A> has wider type than A, bits outside
11765 of A's precision in <sign bit of A> need to be checked.
11766 If they are all 0, this optimization needs to be done
11767 in unsigned A's type, if they are all 1 in signed A's type,
11768 otherwise this can't be done. */
11769 if (TYPE_PRECISION (TREE_TYPE (tem))
11770 < TYPE_PRECISION (TREE_TYPE (arg1))
11771 && TYPE_PRECISION (TREE_TYPE (tem))
11772 < TYPE_PRECISION (type))
11773 {
11774 unsigned HOST_WIDE_INT mask_lo;
11775 HOST_WIDE_INT mask_hi;
11776 int inner_width, outer_width;
11777 tree tem_type;
11778
11779 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11780 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11781 if (outer_width > TYPE_PRECISION (type))
11782 outer_width = TYPE_PRECISION (type);
11783
11784 if (outer_width > HOST_BITS_PER_WIDE_INT)
11785 {
11786 mask_hi = ((unsigned HOST_WIDE_INT) -1
11787 >> (2 * HOST_BITS_PER_WIDE_INT - outer_width));
11788 mask_lo = -1;
11789 }
11790 else
11791 {
11792 mask_hi = 0;
11793 mask_lo = ((unsigned HOST_WIDE_INT) -1
11794 >> (HOST_BITS_PER_WIDE_INT - outer_width));
11795 }
11796 if (inner_width > HOST_BITS_PER_WIDE_INT)
11797 {
11798 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
11799 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11800 mask_lo = 0;
11801 }
11802 else
11803 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
11804 >> (HOST_BITS_PER_WIDE_INT - inner_width));
11805
11806 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
11807 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
11808 {
11809 tem_type = lang_hooks.types.signed_type (TREE_TYPE (tem));
11810 tem = fold_convert (tem_type, tem);
11811 }
11812 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
11813 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
11814 {
11815 tem_type = lang_hooks.types.unsigned_type (TREE_TYPE (tem));
11816 tem = fold_convert (tem_type, tem);
11817 }
11818 else
11819 tem = NULL;
11820 }
11821
11822 if (tem)
11823 return fold_convert (type,
11824 fold_build2 (BIT_AND_EXPR,
11825 TREE_TYPE (tem), tem,
11826 fold_convert (TREE_TYPE (tem),
11827 arg1)));
11828 }
11829
11830 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11831 already handled above. */
11832 if (TREE_CODE (arg0) == BIT_AND_EXPR
11833 && integer_onep (TREE_OPERAND (arg0, 1))
11834 && integer_zerop (op2)
11835 && integer_pow2p (arg1))
11836 {
11837 tree tem = TREE_OPERAND (arg0, 0);
11838 STRIP_NOPS (tem);
11839 if (TREE_CODE (tem) == RSHIFT_EXPR
11840 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
11841 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11842 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
11843 return fold_build2 (BIT_AND_EXPR, type,
11844 TREE_OPERAND (tem, 0), arg1);
11845 }
11846
11847 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11848 is probably obsolete because the first operand should be a
11849 truth value (that's why we have the two cases above), but let's
11850 leave it in until we can confirm this for all front-ends. */
11851 if (integer_zerop (op2)
11852 && TREE_CODE (arg0) == NE_EXPR
11853 && integer_zerop (TREE_OPERAND (arg0, 1))
11854 && integer_pow2p (arg1)
11855 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11856 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11857 arg1, OEP_ONLY_CONST))
11858 return pedantic_non_lvalue (fold_convert (type,
11859 TREE_OPERAND (arg0, 0)));
11860
11861 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11862 if (integer_zerop (op2)
11863 && truth_value_p (TREE_CODE (arg0))
11864 && truth_value_p (TREE_CODE (arg1)))
11865 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11866 fold_convert (type, arg0),
11867 arg1);
11868
11869 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11870 if (integer_onep (op2)
11871 && truth_value_p (TREE_CODE (arg0))
11872 && truth_value_p (TREE_CODE (arg1)))
11873 {
11874 /* Only perform transformation if ARG0 is easily inverted. */
11875 tem = fold_truth_not_expr (arg0);
11876 if (tem)
11877 return fold_build2 (TRUTH_ORIF_EXPR, type,
11878 fold_convert (type, tem),
11879 arg1);
11880 }
11881
11882 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11883 if (integer_zerop (arg1)
11884 && truth_value_p (TREE_CODE (arg0))
11885 && truth_value_p (TREE_CODE (op2)))
11886 {
11887 /* Only perform transformation if ARG0 is easily inverted. */
11888 tem = fold_truth_not_expr (arg0);
11889 if (tem)
11890 return fold_build2 (TRUTH_ANDIF_EXPR, type,
11891 fold_convert (type, tem),
11892 op2);
11893 }
11894
11895 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11896 if (integer_onep (arg1)
11897 && truth_value_p (TREE_CODE (arg0))
11898 && truth_value_p (TREE_CODE (op2)))
11899 return fold_build2 (TRUTH_ORIF_EXPR, type,
11900 fold_convert (type, arg0),
11901 op2);
11902
11903 return NULL_TREE;
11904
11905 case CALL_EXPR:
11906 /* Check for a built-in function. */
11907 if (TREE_CODE (op0) == ADDR_EXPR
11908 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
11909 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
11910 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
11911 return NULL_TREE;
11912
11913 case BIT_FIELD_REF:
11914 if (TREE_CODE (arg0) == VECTOR_CST
11915 && type == TREE_TYPE (TREE_TYPE (arg0))
11916 && host_integerp (arg1, 1)
11917 && host_integerp (op2, 1))
11918 {
11919 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
11920 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
11921
11922 if (width != 0
11923 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
11924 && (idx % width) == 0
11925 && (idx = idx / width)
11926 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11927 {
11928 tree elements = TREE_VECTOR_CST_ELTS (arg0);
11929 while (idx-- > 0 && elements)
11930 elements = TREE_CHAIN (elements);
11931 if (elements)
11932 return TREE_VALUE (elements);
11933 else
11934 return fold_convert (type, integer_zero_node);
11935 }
11936 }
11937 return NULL_TREE;
11938
11939 default:
11940 return NULL_TREE;
11941 } /* switch (code) */
11942 }
11943
/* Perform constant folding and related simplification of EXPR.
   The related simplifications include x*1 => x, x*0 => 0, etc.,
   and application of the associative law.
   NOP_EXPR conversions may be removed freely (as long as we
   are careful not to change the type of the overall expression).
   We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
   but we can constant-fold them if they have constant operands.  */

#ifdef ENABLE_FOLD_CHECKING
/* When fold checking is enabled, the real worker is renamed fold_1 and
   a checksumming wrapper named fold (defined further below) calls it.  */
# define fold(x) fold_1 (x)
static tree fold_1 (tree);
static
#endif
tree
fold (tree expr)
{
  const tree t = expr;
  enum tree_code code = TREE_CODE (t);
  enum tree_code_class kind = TREE_CODE_CLASS (code);
  tree tem;

  /* Return right away if a constant.  */
  if (kind == tcc_constant)
    return t;

  if (IS_EXPR_CODE_CLASS (kind)
      || IS_GIMPLE_STMT_CODE_CLASS (kind))
    {
      tree type = TREE_TYPE (t);
      tree op0, op1, op2;

      /* Dispatch on the operand count to the unary/binary/ternary
	 workers; if the worker cannot simplify, return EXPR unchanged.  */
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  op0 = TREE_OPERAND (t, 0);
	  tem = fold_unary (code, type, op0);
	  return tem ? tem : expr;
	case 2:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  tem = fold_binary (code, type, op0, op1);
	  return tem ? tem : expr;
	case 3:
	  op0 = TREE_OPERAND (t, 0);
	  op1 = TREE_OPERAND (t, 1);
	  op2 = TREE_OPERAND (t, 2);
	  tem = fold_ternary (code, type, op0, op1, op2);
	  return tem ? tem : expr;
	default:
	  break;
	}
    }

  switch (code)
    {
    case CONST_DECL:
      /* A CONST_DECL folds to (the fold of) its initial value.  */
      return fold (DECL_INITIAL (t));

    default:
      return t;
    } /* switch (code) */
}
12006
12007 #ifdef ENABLE_FOLD_CHECKING
12008 #undef fold
12009
12010 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
12011 static void fold_check_failed (tree, tree);
12012 void print_fold_checksum (tree);
12013
/* When --enable-checking=fold, compute a digest of expr before
   and after actual fold call to see if fold did not accidentally
   change original expr.  */

tree
fold (tree expr)
{
  tree ret;
  struct md5_ctx ctx;
  unsigned char checksum_before[16], checksum_after[16];
  htab_t ht;

  /* HT records already-visited nodes so shared subtrees are digested
     only once (and cycles terminate).  */
  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before);
  htab_empty (ht);

  /* fold_1 is the real folder; see the #define above.  */
  ret = fold_1 (expr);

  md5_init_ctx (&ctx);
  fold_checksum_tree (expr, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after);
  htab_delete (ht);

  /* A digest mismatch means fold_1 mutated its input in place.  */
  if (memcmp (checksum_before, checksum_after, 16))
    fold_check_failed (expr, ret);

  return ret;
}
12044
12045 void
12046 print_fold_checksum (tree expr)
12047 {
12048 struct md5_ctx ctx;
12049 unsigned char checksum[16], cnt;
12050 htab_t ht;
12051
12052 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12053 md5_init_ctx (&ctx);
12054 fold_checksum_tree (expr, &ctx, ht);
12055 md5_finish_ctx (&ctx, checksum);
12056 htab_delete (ht);
12057 for (cnt = 0; cnt < 16; ++cnt)
12058 fprintf (stderr, "%02x", checksum[cnt]);
12059 putc ('\n', stderr);
12060 }
12061
/* Issue a checking-only internal error: fold modified the tree it was
   given in place.  EXPR and RET are currently unused but kept so call
   sites can pass the offending trees for future diagnostics.  */

static void
fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
{
  internal_error ("fold check: original tree changed by fold");
}
12067
/* Fold-checking helper: feed the bytes of the tree EXPR, and recursively
   those of every tree it references, into the MD5 context CTX.  HT is a
   pointer-keyed hash table of nodes already processed, so shared subtrees
   are digested once and cycles terminate.  Fields that fold is allowed
   to modify (DECL_ASSEMBLER_NAME, type caches) are masked out by hashing
   a scrubbed stack copy of the node instead of the node itself.  */

static void
fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
{
  void **slot;
  enum tree_code code;
  /* Scratch copy large enough for any node we scrub; the assert below
     checks tree_function_decl really is the largest we need.  */
  struct tree_function_decl buf;
  int i, len;

recursive_label:

  gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
	       <= sizeof (struct tree_function_decl))
	      && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
  if (expr == NULL)
    return;
  /* Skip nodes we have already digested.  */
  slot = htab_find_slot (ht, expr, INSERT);
  if (*slot != NULL)
    return;
  *slot = expr;
  code = TREE_CODE (expr);
  if (TREE_CODE_CLASS (code) == tcc_declaration
      && DECL_ASSEMBLER_NAME_SET_P (expr))
    {
      /* Allow DECL_ASSEMBLER_NAME to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = (tree) &buf;
      SET_DECL_ASSEMBLER_NAME (expr, NULL);
    }
  else if (TREE_CODE_CLASS (code) == tcc_type
	   && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
	       || TYPE_CACHED_VALUES_P (expr)
	       || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
    {
      /* Allow these fields to be modified.  */
      memcpy ((char *) &buf, expr, tree_size (expr));
      expr = (tree) &buf;
      TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
      TYPE_POINTER_TO (expr) = NULL;
      TYPE_REFERENCE_TO (expr) = NULL;
      if (TYPE_CACHED_VALUES_P (expr))
	{
	  TYPE_CACHED_VALUES_P (expr) = 0;
	  TYPE_CACHED_VALUES (expr) = NULL;
	}
    }
  /* Hash the (possibly scrubbed) node bytes, then recurse into the
     trees it points at.  */
  md5_process_bytes (expr, tree_size (expr), ctx);
  fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
  if (TREE_CODE_CLASS (code) != tcc_type
      && TREE_CODE_CLASS (code) != tcc_declaration
      && code != TREE_LIST)
    fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_constant:
      switch (code)
	{
	case STRING_CST:
	  md5_process_bytes (TREE_STRING_POINTER (expr),
			     TREE_STRING_LENGTH (expr), ctx);
	  break;
	case COMPLEX_CST:
	  fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
	  fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
	  break;
	case VECTOR_CST:
	  fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_exceptional:
      switch (code)
	{
	case TREE_LIST:
	  fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
	  fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
	  /* Iterate down the chain rather than recursing, to bound
	     stack use on long lists.  */
	  expr = TREE_CHAIN (expr);
	  goto recursive_label;
	  break;
	case TREE_VEC:
	  for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
	    fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
	  break;
	default:
	  break;
	}
      break;
    case tcc_expression:
    case tcc_reference:
    case tcc_comparison:
    case tcc_unary:
    case tcc_binary:
    case tcc_statement:
      len = TREE_CODE_LENGTH (code);
      for (i = 0; i < len; ++i)
	fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
      break;
    case tcc_declaration:
      fold_checksum_tree (DECL_NAME (expr), ctx, ht);
      fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
	{
	  fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
	  fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
	  fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
	  fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
	  fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
	}
      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
	fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);

      if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
	{
	  fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
	  fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
	  fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
	}
      break;
    case tcc_type:
      if (TREE_CODE (expr) == ENUMERAL_TYPE)
	fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
      fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
      fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
      fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
      if (INTEGRAL_TYPE_P (expr)
	  || SCALAR_FLOAT_TYPE_P (expr))
	{
	  fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
	  fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
	}
      fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
      if (TREE_CODE (expr) == RECORD_TYPE
	  || TREE_CODE (expr) == UNION_TYPE
	  || TREE_CODE (expr) == QUAL_UNION_TYPE)
	fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
      fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
      break;
    default:
      break;
    }
}
12211
12212 #endif
12213
12214 /* Fold a unary tree expression with code CODE of type TYPE with an
12215 operand OP0. Return a folded expression if successful. Otherwise,
12216 return a tree expression with code CODE of type TYPE with an
12217 operand OP0. */
12218
12219 tree
12220 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12221 {
12222 tree tem;
12223 #ifdef ENABLE_FOLD_CHECKING
12224 unsigned char checksum_before[16], checksum_after[16];
12225 struct md5_ctx ctx;
12226 htab_t ht;
12227
12228 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12229 md5_init_ctx (&ctx);
12230 fold_checksum_tree (op0, &ctx, ht);
12231 md5_finish_ctx (&ctx, checksum_before);
12232 htab_empty (ht);
12233 #endif
12234
12235 tem = fold_unary (code, type, op0);
12236 if (!tem)
12237 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
12238
12239 #ifdef ENABLE_FOLD_CHECKING
12240 md5_init_ctx (&ctx);
12241 fold_checksum_tree (op0, &ctx, ht);
12242 md5_finish_ctx (&ctx, checksum_after);
12243 htab_delete (ht);
12244
12245 if (memcmp (checksum_before, checksum_after, 16))
12246 fold_check_failed (op0, tem);
12247 #endif
12248 return tem;
12249 }
12250
12251 /* Fold a binary tree expression with code CODE of type TYPE with
12252 operands OP0 and OP1. Return a folded expression if successful.
12253 Otherwise, return a tree expression with code CODE of type TYPE
12254 with operands OP0 and OP1. */
12255
12256 tree
12257 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
12258 MEM_STAT_DECL)
12259 {
12260 tree tem;
12261 #ifdef ENABLE_FOLD_CHECKING
12262 unsigned char checksum_before_op0[16],
12263 checksum_before_op1[16],
12264 checksum_after_op0[16],
12265 checksum_after_op1[16];
12266 struct md5_ctx ctx;
12267 htab_t ht;
12268
12269 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
12270 md5_init_ctx (&ctx);
12271 fold_checksum_tree (op0, &ctx, ht);
12272 md5_finish_ctx (&ctx, checksum_before_op0);
12273 htab_empty (ht);
12274
12275 md5_init_ctx (&ctx);
12276 fold_checksum_tree (op1, &ctx, ht);
12277 md5_finish_ctx (&ctx, checksum_before_op1);
12278 htab_empty (ht);
12279 #endif
12280
12281 tem = fold_binary (code, type, op0, op1);
12282 if (!tem)
12283 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
12284
12285 #ifdef ENABLE_FOLD_CHECKING
12286 md5_init_ctx (&ctx);
12287 fold_checksum_tree (op0, &ctx, ht);
12288 md5_finish_ctx (&ctx, checksum_after_op0);
12289 htab_empty (ht);
12290
12291 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12292 fold_check_failed (op0, tem);
12293
12294 md5_init_ctx (&ctx);
12295 fold_checksum_tree (op1, &ctx, ht);
12296 md5_finish_ctx (&ctx, checksum_after_op1);
12297 htab_delete (ht);
12298
12299 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12300 fold_check_failed (op1, tem);
12301 #endif
12302 return tem;
12303 }
12304
/* Fold a ternary tree expression with code CODE of type TYPE with
   operands OP0, OP1, and OP2.  Return a folded expression if
   successful.  Otherwise, return a tree expression with code CODE of
   type TYPE with operands OP0, OP1, and OP2.  */

tree
fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
		  MEM_STAT_DECL)
{
  tree tem;
#ifdef ENABLE_FOLD_CHECKING
  /* With checking enabled, checksum all three operands before and
     after folding to verify that fold_ternary does not modify its
     arguments in place.  */
  unsigned char checksum_before_op0[16],
                checksum_before_op1[16],
                checksum_before_op2[16],
                checksum_after_op0[16],
                checksum_after_op1[16],
                checksum_after_op2[16];
  struct md5_ctx ctx;
  htab_t ht;

  /* Pointer hash table used by fold_checksum_tree to avoid walking
     shared subtrees more than once; emptied between checksums.  */
  ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op0);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op1);
  htab_empty (ht);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_before_op2);
  htab_empty (ht);
#endif

  tem = fold_ternary (code, type, op0, op1, op2);
  if (!tem)
    /* Folding did not simplify; build the plain expression.  */
    tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);

#ifdef ENABLE_FOLD_CHECKING
  md5_init_ctx (&ctx);
  fold_checksum_tree (op0, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op0);
  htab_empty (ht);

  if (memcmp (checksum_before_op0, checksum_after_op0, 16))
    fold_check_failed (op0, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op1, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op1);
  htab_empty (ht);

  if (memcmp (checksum_before_op1, checksum_after_op1, 16))
    fold_check_failed (op1, tem);

  md5_init_ctx (&ctx);
  fold_checksum_tree (op2, &ctx, ht);
  md5_finish_ctx (&ctx, checksum_after_op2);
  /* This is the last checksum; the table can be deleted rather than
     emptied.  */
  htab_delete (ht);

  if (memcmp (checksum_before_op2, checksum_after_op2, 16))
    fold_check_failed (op2, tem);
#endif
  return tem;
}
12373
/* Perform constant folding and related simplification of initializer
   expression EXPR.  These behave identically to "fold_buildN" but ignore
   potential run-time traps and exceptions that fold must preserve.  */

/* Save the flags that make fold preserve run-time traps/exceptions,
   clear them, and set folding_initializer.  Declares locals that
   END_FOLD_INIT restores from, so the two macros must be used as a
   pair within the same scope.  */
#define START_FOLD_INIT \
  int saved_signaling_nans = flag_signaling_nans;\
  int saved_trapping_math = flag_trapping_math;\
  int saved_rounding_math = flag_rounding_math;\
  int saved_trapv = flag_trapv;\
  int saved_folding_initializer = folding_initializer;\
  flag_signaling_nans = 0;\
  flag_trapping_math = 0;\
  flag_rounding_math = 0;\
  flag_trapv = 0;\
  folding_initializer = 1;

/* Restore the flags saved by START_FOLD_INIT.  */
#define END_FOLD_INIT \
  flag_signaling_nans = saved_signaling_nans;\
  flag_trapping_math = saved_trapping_math;\
  flag_rounding_math = saved_rounding_math;\
  flag_trapv = saved_trapv;\
  folding_initializer = saved_folding_initializer;

/* As fold_build1, but with trap-preserving flags suppressed for
   initializer context.  */
tree
fold_build1_initializer (enum tree_code code, tree type, tree op)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build1 (code, type, op);

  END_FOLD_INIT;
  return result;
}

/* As fold_build2, but with trap-preserving flags suppressed for
   initializer context.  */
tree
fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build2 (code, type, op0, op1);

  END_FOLD_INIT;
  return result;
}

/* As fold_build3, but with trap-preserving flags suppressed for
   initializer context.  */
tree
fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
			 tree op2)
{
  tree result;
  START_FOLD_INIT;

  result = fold_build3 (code, type, op0, op1, op2);

  END_FOLD_INIT;
  return result;
}

#undef START_FOLD_INIT
#undef END_FOLD_INIT
12436
/* Determine if first argument is a multiple of second argument.  Return 0 if
   it is not, or we cannot easily determined it to be.

   An example of the sort of thing we care about (at this point; this routine
   could surely be made more general, and expanded to do what the *_DIV_EXPR's
   fold cases do now) is discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of

     SAVE_EXPR (J * 8)

   when we know that the two SAVE_EXPR (J * 8) nodes are the same node.

   This code also handles discovering that

     SAVE_EXPR (I) * SAVE_EXPR (J * 8)

   is a multiple of 8 so we don't have to worry about dealing with a
   possible remainder.

   Note that we *look* inside a SAVE_EXPR only to determine how it was
   calculated; it is not safe for fold to do much of anything else with the
   internals of a SAVE_EXPR, since it cannot know when it will be evaluated
   at run time.  For example, the latter example above *cannot* be implemented
   as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
   evaluation time of the original SAVE_EXPR is not necessarily the same at
   the time the new expression is evaluated.  The only optimization of this
   sort that would be valid is changing

     SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)

   divided by 8 to

     SAVE_EXPR (I) * SAVE_EXPR (J)

   (where the same SAVE_EXPR (J) is used in the original and the
   transformed version).  */

static int
multiple_of_p (tree type, tree top, tree bottom)
{
  /* Any expression is trivially a multiple of itself.  */
  if (operand_equal_p (top, bottom, 0))
    return 1;

  /* The analysis below is only valid for integer types.  */
  if (TREE_CODE (type) != INTEGER_TYPE)
    return 0;

  switch (TREE_CODE (top))
    {
    case BIT_AND_EXPR:
      /* Bitwise and provides a power of two multiple.  If the mask is
	 a multiple of BOTTOM then TOP is a multiple of BOTTOM.  */
      if (!integer_pow2p (bottom))
	return 0;
      /* FALLTHRU */

    case MULT_EXPR:
      /* A product is a multiple of BOTTOM if either factor is.  */
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* A sum or difference is a multiple of BOTTOM only if both
	 operands are.  */
      return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
	      && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));

    case LSHIFT_EXPR:
      /* X << C is a multiple of BOTTOM whenever (1 << C) is, provided
	 the shift count is a constant and the shifted 1 did not
	 overflow.  */
      if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
	{
	  tree op1, t1;

	  op1 = TREE_OPERAND (top, 1);
	  /* const_binop may not detect overflow correctly,
	     so check for it explicitly here.  */
	  if (TYPE_PRECISION (TREE_TYPE (size_one_node))
	      > TREE_INT_CST_LOW (op1)
	      && TREE_INT_CST_HIGH (op1) == 0
	      && 0 != (t1 = fold_convert (type,
					  const_binop (LSHIFT_EXPR,
						       size_one_node,
						       op1, 0)))
	      && ! TREE_OVERFLOW (t1))
	    return multiple_of_p (type, t1, bottom);
	}
      return 0;

    case NOP_EXPR:
      /* Can't handle conversions from non-integral or wider integral type.  */
      if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
	  || (TYPE_PRECISION (type)
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
	return 0;

      /* .. fall through ...  */

    case SAVE_EXPR:
      /* Look through the conversion or saved expression.  */
      return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);

    case INTEGER_CST:
      /* For two constants, test divisibility directly.  Refuse negative
	 operands in an unsigned TYPE, since the remainder would be
	 computed on the wrapped-around representation.  */
      if (TREE_CODE (bottom) != INTEGER_CST
	  || (TYPE_UNSIGNED (type)
	      && (tree_int_cst_sgn (top) < 0
		  || tree_int_cst_sgn (bottom) < 0)))
	return 0;
      return integer_zerop (const_binop (TRUNC_MOD_EXPR,
					 top, bottom, 0));

    default:
      return 0;
    }
}
12549
/* Return true if `t' is known to be non-negative.  A false return
   means "don't know", not "known negative" -- the analysis is
   conservative.  */

bool
tree_expr_nonnegative_p (tree t)
{
  if (t == error_mark_node)
    return false;

  /* Values of an unsigned type are non-negative by definition.  */
  if (TYPE_UNSIGNED (TREE_TYPE (t)))
    return true;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
	 the range of this object.  */
      return ssa_name_nonnegative_p (t);

    case ABS_EXPR:
      /* We can't return 1 if flag_wrapv is set because
	 ABS_EXPR<INT_MIN> = INT_MIN.  */
      if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
	return true;
      break;

    case INTEGER_CST:
      return tree_int_cst_sgn (t) >= 0;

    case REAL_CST:
      return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case PLUS_EXPR:
      /* For floats, x + y cannot overflow into the wrong sign, so it
	 suffices that both operands be non-negative.  */
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

      /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
	 both unsigned and at least 2 bits shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    {
	      unsigned int prec = MAX (TYPE_PRECISION (inner1),
				       TYPE_PRECISION (inner2)) + 1;
	      return prec < TYPE_PRECISION (TREE_TYPE (t));
	    }
	}
      break;

    case MULT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (t)))
	{
	  /* x * x for floating point x is always non-negative.  */
	  if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
	    return true;
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
		 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
	}

      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their total bits is shorter than the result.  */
      if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
	  && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
	{
	  tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
	  tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
	  if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
	      && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
	    return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
		   < TYPE_PRECISION (TREE_TYPE (t));
	}
      return false;

    case BIT_AND_EXPR:
    case MAX_EXPR:
      /* Non-negative if either operand is: AND can only clear sign
	 bits, and MAX picks the larger value.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
    case MIN_EXPR:
    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
      /* These are non-negative when both operands are.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));

    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
    case FLOAT_EXPR:
      /* Sign is determined by the first operand alone.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case GIMPLE_MODIFY_STMT:
      /* The value is that of the second operand (the RHS).  */
      return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));

    case BIND_EXPR:
      /* The value is that of the last expression in the body.  */
      return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));

    case COND_EXPR:
      /* Both arms must be non-negative.  */
      return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
	     && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	if (TREE_CODE (outer_type) == REAL_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      {
		if (TYPE_UNSIGNED (inner_type))
		  return true;
		return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	      }
	  }
	else if (TREE_CODE (outer_type) == INTEGER_TYPE)
	  {
	    if (TREE_CODE (inner_type) == REAL_TYPE)
	      return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
	    if (TREE_CODE (inner_type) == INTEGER_TYPE)
	      /* A widening conversion from an unsigned type cannot
		 produce a negative value.  */
	      return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
		     && TYPE_UNSIGNED (inner_type);
	  }
      }
      break;

    case TARGET_EXPR:
      {
	tree temp = TARGET_EXPR_SLOT (t);
	t = TARGET_EXPR_INITIAL (t);

	/* If the initializer is non-void, then it's a normal expression
	   that will be assigned to the slot.  */
	if (!VOID_TYPE_P (t))
	  return tree_expr_nonnegative_p (t);

	/* Otherwise, the initializer sets the slot in some way.  One common
	   way is an assignment statement at the end of the initializer.  */
	while (1)
	  {
	    if (TREE_CODE (t) == BIND_EXPR)
	      t = expr_last (BIND_EXPR_BODY (t));
	    else if (TREE_CODE (t) == TRY_FINALLY_EXPR
		     || TREE_CODE (t) == TRY_CATCH_EXPR)
	      t = expr_last (TREE_OPERAND (t, 0));
	    else if (TREE_CODE (t) == STATEMENT_LIST)
	      t = expr_last (t);
	    else
	      break;
	  }
	/* The last statement assigns to the slot; check its RHS.  */
	if ((TREE_CODE (t) == MODIFY_EXPR
	     || TREE_CODE (t) == GIMPLE_MODIFY_STMT)
	    && GENERIC_TREE_OPERAND (t, 0) == temp)
	  return tree_expr_nonnegative_p (GENERIC_TREE_OPERAND (t, 1));

	return false;
      }

    case CALL_EXPR:
      {
	tree fndecl = get_callee_fndecl (t);
	tree arglist = TREE_OPERAND (t, 1);
	if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	    CASE_INT_FN (BUILT_IN_FFS):
	    CASE_INT_FN (BUILT_IN_PARITY):
	    CASE_INT_FN (BUILT_IN_POPCOUNT):
	    case BUILT_IN_BSWAP32:
	    case BUILT_IN_BSWAP64:
	      /* Always true.  */
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0.  */
	      if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
		return true;
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    CASE_FLT_FN (BUILT_IN_ASINH):
	    CASE_FLT_FN (BUILT_IN_ATAN):
	    CASE_FLT_FN (BUILT_IN_ATANH):
	    CASE_FLT_FN (BUILT_IN_CBRT):
	    CASE_FLT_FN (BUILT_IN_CEIL):
	    CASE_FLT_FN (BUILT_IN_ERF):
	    CASE_FLT_FN (BUILT_IN_EXPM1):
	    CASE_FLT_FN (BUILT_IN_FLOOR):
	    CASE_FLT_FN (BUILT_IN_FMOD):
	    CASE_FLT_FN (BUILT_IN_FREXP):
	    CASE_FLT_FN (BUILT_IN_LCEIL):
	    CASE_FLT_FN (BUILT_IN_LDEXP):
	    CASE_FLT_FN (BUILT_IN_LFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLCEIL):
	    CASE_FLT_FN (BUILT_IN_LLFLOOR):
	    CASE_FLT_FN (BUILT_IN_LLRINT):
	    CASE_FLT_FN (BUILT_IN_LLROUND):
	    CASE_FLT_FN (BUILT_IN_LRINT):
	    CASE_FLT_FN (BUILT_IN_LROUND):
	    CASE_FLT_FN (BUILT_IN_MODF):
	    CASE_FLT_FN (BUILT_IN_NEARBYINT):
	    CASE_FLT_FN (BUILT_IN_RINT):
	    CASE_FLT_FN (BUILT_IN_ROUND):
	    CASE_FLT_FN (BUILT_IN_SIGNBIT):
	    CASE_FLT_FN (BUILT_IN_SINH):
	    CASE_FLT_FN (BUILT_IN_TANH):
	    CASE_FLT_FN (BUILT_IN_TRUNC):
	      /* True if the 1st argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    CASE_FLT_FN (BUILT_IN_FMAX):
	      /* True if the 1st OR 2nd arguments are nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		|| tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_FLT_FN (BUILT_IN_FMIN):
	      /* True if the 1st AND 2nd arguments are nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist))
		&& tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_FLT_FN (BUILT_IN_COPYSIGN):
	      /* True if the 2nd argument is nonnegative.  */
	      return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the 1st argument is nonnegative or the second
		 argument is an even integer.  */
	      if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == INTEGER_CST)
		{
		  tree arg1 = TREE_VALUE (TREE_CHAIN (arglist));
		  if ((TREE_INT_CST_LOW (arg1) & 1) == 0)
		    return true;
		}
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the 1st argument is nonnegative or the second
		 argument is an even integer valued real.  */
	      if (TREE_CODE (TREE_VALUE (TREE_CHAIN (arglist))) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (TREE_VALUE (TREE_CHAIN (arglist)));
		  n = real_to_integer (&c);
		  if ((n & 1) == 0)
		    {
		      /* Verify the exponent really is an integer by
			 round-tripping it through an integer.  */
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n,
					 n < 0 ? -1 : 0, 0);
		      if (real_identical (&c, &cint))
			return true;
		    }
		}
	      return tree_expr_nonnegative_p (TREE_VALUE (arglist));

	    default:
	      break;
	    }
      }

      /* ... fall through ...  */

    default:
      if (truth_value_p (TREE_CODE (t)))
	/* Truth values evaluate to 0 or 1, which is nonnegative.  */
	return true;
    }

  /* We don't know sign of `t', so be conservative and return false.  */
  return false;
}
12850
/* Return true when T is an address and is known to be nonzero.
   For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address in rtlanal.h.
   A false return means "don't know", not "known zero".  */

bool
tree_expr_nonzero_p (tree t)
{
  tree type = TREE_TYPE (t);

  /* Doing something useful for floating point would need more work.  */
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  switch (TREE_CODE (t))
    {
    case SSA_NAME:
      /* Query VRP to see if it has recorded any information about
	 the range of this object.  */
      return ssa_name_nonzero_p (t);

    case ABS_EXPR:
      /* |x| is nonzero exactly when x is.  */
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case INTEGER_CST:
      /* We used to test for !integer_zerop here.  This does not work correctly
	 if TREE_CONSTANT_OVERFLOW (t).  */
      return (TREE_INT_CST_LOW (t) != 0
	      || TREE_INT_CST_HIGH (t) != 0);

    case PLUS_EXPR:
      /* Only valid when signed overflow is undefined; with -fwrapv a
	 sum of nonzero values can wrap to zero.  */
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	{
	  /* With the presence of negative values it is hard
	     to say something.  */
	  if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
	      || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
	    return false;
	  /* One of operands must be positive and the other non-negative.  */
	  return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
		  || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
	}
      break;

    case MULT_EXPR:
      /* With undefined signed overflow, a product of nonzero factors
	 cannot be zero.  */
      if (!TYPE_UNSIGNED (type) && !flag_wrapv)
	{
	  return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
		  && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
	}
      break;

    case NOP_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
	tree outer_type = TREE_TYPE (t);

	/* A widening (or same-width) conversion preserves nonzeroness;
	   a narrowing one may truncate to zero.  */
	return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
		&& tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
      }
      break;

    case ADDR_EXPR:
      {
	tree base = get_base_address (TREE_OPERAND (t, 0));

	if (!base)
	  return false;

	/* Weak declarations may link to NULL.  */
	if (VAR_OR_FUNCTION_DECL_P (base))
	  return !DECL_WEAK (base);

	/* Constants are never weak.  */
	if (CONSTANT_CLASS_P (base))
	  return true;

	return false;
      }

    case COND_EXPR:
      /* Both arms must be nonzero.  */
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	      && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));

    case MIN_EXPR:
      return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
	      && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));

    case MAX_EXPR:
      if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
	{
	  /* When both operands are nonzero, then MAX must be too.  */
	  if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
	    return true;

	  /* MAX where operand 0 is positive is positive.  */
	  return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
	}
      /* MAX where operand 1 is positive is positive.  */
      else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	       && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
	return true;
      break;

    case COMPOUND_EXPR:
    case MODIFY_EXPR:
    case GIMPLE_MODIFY_STMT:
    case BIND_EXPR:
      /* The value is that of the second operand.  */
      return tree_expr_nonzero_p (GENERIC_TREE_OPERAND (t, 1));

    case SAVE_EXPR:
    case NON_LVALUE_EXPR:
      return tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case BIT_IOR_EXPR:
      /* An OR is nonzero if either operand is.  */
      return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
	     || tree_expr_nonzero_p (TREE_OPERAND (t, 0));

    case CALL_EXPR:
      /* alloca never returns a null pointer.  */
      return alloca_call_p (t);

    default:
      break;
    }
  return false;
}
12976
12977 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
12978 attempt to fold the expression to a constant without modifying TYPE,
12979 OP0 or OP1.
12980
12981 If the expression could be simplified to a constant, then return
12982 the constant. If the expression would not be simplified to a
12983 constant, then return NULL_TREE. */
12984
12985 tree
12986 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
12987 {
12988 tree tem = fold_binary (code, type, op0, op1);
12989 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
12990 }
12991
12992 /* Given the components of a unary expression CODE, TYPE and OP0,
12993 attempt to fold the expression to a constant without modifying
12994 TYPE or OP0.
12995
12996 If the expression could be simplified to a constant, then return
12997 the constant. If the expression would not be simplified to a
12998 constant, then return NULL_TREE. */
12999
13000 tree
13001 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13002 {
13003 tree tem = fold_unary (code, type, op0);
13004 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13005 }
13006
/* If EXP represents referencing an element in a constant string
   (either via pointer arithmetic or array indexing), return the
   tree representing the value accessed, otherwise return NULL.  */

tree
fold_read_from_constant_string (tree exp)
{
  /* Only handle integer-typed element reads through *p or a[i].  */
  if ((TREE_CODE (exp) == INDIRECT_REF
       || TREE_CODE (exp) == ARRAY_REF)
      && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
    {
      tree exp1 = TREE_OPERAND (exp, 0);
      tree index;
      tree string;

      if (TREE_CODE (exp) == INDIRECT_REF)
	/* For *p, string_constant extracts the STRING_CST and the
	   constant byte offset, if any.  */
	string = string_constant (exp1, &index);
      else
	{
	  tree low_bound = array_ref_low_bound (exp);
	  index = fold_convert (sizetype, TREE_OPERAND (exp, 1));

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
	  if (! integer_zerop (low_bound))
	    index = size_diffop (index, fold_convert (sizetype, low_bound));

	  string = exp1;
	}

      /* Fold only when the index is a constant within the string and
	 the element really is a single byte in an integer mode, so the
	 read is exactly one char of the STRING_CST.  */
      if (string
	  && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
	  && TREE_CODE (string) == STRING_CST
	  && TREE_CODE (index) == INTEGER_CST
	  && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
	  && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
	      == MODE_INT)
	  && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
	return fold_convert (TREE_TYPE (exp),
			     build_int_cst (NULL_TREE,
					    (TREE_STRING_POINTER (string)
					     [TREE_INT_CST_LOW (index)])));
    }
  return NULL;
}
13057
13058 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13059 an integer constant or real constant.
13060
13061 TYPE is the type of the result. */
13062
13063 static tree
13064 fold_negate_const (tree arg0, tree type)
13065 {
13066 tree t = NULL_TREE;
13067
13068 switch (TREE_CODE (arg0))
13069 {
13070 case INTEGER_CST:
13071 {
13072 unsigned HOST_WIDE_INT low;
13073 HOST_WIDE_INT high;
13074 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13075 TREE_INT_CST_HIGH (arg0),
13076 &low, &high);
13077 t = force_fit_type_double (type, low, high, 1,
13078 (overflow | TREE_OVERFLOW (arg0))
13079 && !TYPE_UNSIGNED (type),
13080 TREE_CONSTANT_OVERFLOW (arg0));
13081 break;
13082 }
13083
13084 case REAL_CST:
13085 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13086 break;
13087
13088 default:
13089 gcc_unreachable ();
13090 }
13091
13092 return t;
13093 }
13094
13095 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13096 an integer constant or real constant.
13097
13098 TYPE is the type of the result. */
13099
13100 tree
13101 fold_abs_const (tree arg0, tree type)
13102 {
13103 tree t = NULL_TREE;
13104
13105 switch (TREE_CODE (arg0))
13106 {
13107 case INTEGER_CST:
13108 /* If the value is unsigned, then the absolute value is
13109 the same as the ordinary value. */
13110 if (TYPE_UNSIGNED (type))
13111 t = arg0;
13112 /* Similarly, if the value is non-negative. */
13113 else if (INT_CST_LT (integer_minus_one_node, arg0))
13114 t = arg0;
13115 /* If the value is negative, then the absolute value is
13116 its negation. */
13117 else
13118 {
13119 unsigned HOST_WIDE_INT low;
13120 HOST_WIDE_INT high;
13121 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
13122 TREE_INT_CST_HIGH (arg0),
13123 &low, &high);
13124 t = force_fit_type_double (type, low, high, -1,
13125 overflow | TREE_OVERFLOW (arg0),
13126 TREE_CONSTANT_OVERFLOW (arg0));
13127 }
13128 break;
13129
13130 case REAL_CST:
13131 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13132 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
13133 else
13134 t = arg0;
13135 break;
13136
13137 default:
13138 gcc_unreachable ();
13139 }
13140
13141 return t;
13142 }
13143
13144 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13145 constant. TYPE is the type of the result. */
13146
13147 static tree
13148 fold_not_const (tree arg0, tree type)
13149 {
13150 tree t = NULL_TREE;
13151
13152 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13153
13154 t = force_fit_type_double (type, ~TREE_INT_CST_LOW (arg0),
13155 ~TREE_INT_CST_HIGH (arg0), 0,
13156 TREE_OVERFLOW (arg0),
13157 TREE_CONSTANT_OVERFLOW (arg0));
13158
13159 return t;
13160 }
13161
/* Given CODE, a relational operator, the target type, TYPE and two
   constant operands OP0 and OP1, return the result of the
   relational operation.  If the result is not a compile time
   constant, then return NULL_TREE.  */

static tree
fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
{
  int result, invert;

  /* From here on, the only cases we handle are when the result is
     known to be a constant.  */

  if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
    {
      const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
      const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);

      /* Handle the cases where either operand is a NaN.  */
      if (real_isnan (c0) || real_isnan (c1))
	{
	  switch (code)
	    {
	    case EQ_EXPR:
	    case ORDERED_EXPR:
	      /* NaN compares unequal and unordered.  */
	      result = 0;
	      break;

	    case NE_EXPR:
	    case UNORDERED_EXPR:
	    case UNLT_EXPR:
	    case UNLE_EXPR:
	    case UNGT_EXPR:
	    case UNGE_EXPR:
	    case UNEQ_EXPR:
	      /* The unordered comparisons are satisfied by a NaN.  */
	      result = 1;
	      break;

	    case LT_EXPR:
	    case LE_EXPR:
	    case GT_EXPR:
	    case GE_EXPR:
	    case LTGT_EXPR:
	      /* These raise an invalid-operation exception on NaN, so
		 they must not be folded away when traps matter.  */
	      if (flag_trapping_math)
		return NULL_TREE;
	      result = 0;
	      break;

	    default:
	      gcc_unreachable ();
	    }

	  return constant_boolean_node (result, type);
	}

      return constant_boolean_node (real_compare (code, c0, c1), type);
    }

  /* From here on we only handle LT, LE, GT, GE, EQ and NE.

     To compute GT, swap the arguments and do LT.
     To compute GE, do LT and invert the result.
     To compute LE, swap the arguments, do LT and invert the result.
     To compute NE, do EQ and invert the result.

     Therefore, the code below must handle only EQ and LT.  */

  if (code == LE_EXPR || code == GT_EXPR)
    {
      tree tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_tree_comparison (code);
    }

  /* Note that it is safe to invert for real values here because we
     have already handled the one case that it matters.  */

  invert = 0;
  if (code == NE_EXPR || code == GE_EXPR)
    {
      invert = 1;
      code = invert_tree_comparison (code, false);
    }

  /* Compute a result for LT or EQ if args permit;
     Otherwise return T.  */
  if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
    {
      if (code == EQ_EXPR)
	result = tree_int_cst_equal (op0, op1);
      else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
	result = INT_CST_LT_UNSIGNED (op0, op1);
      else
	result = INT_CST_LT (op0, op1);
    }
  else
    /* Non-constant operands: no compile-time answer.  */
    return NULL_TREE;

  if (invert)
    result ^= 1;
  return constant_boolean_node (result, type);
}
13265
13266 /* Build an expression for the a clean point containing EXPR with type TYPE.
13267 Don't build a cleanup point expression for EXPR which don't have side
13268 effects. */
13269
13270 tree
13271 fold_build_cleanup_point_expr (tree type, tree expr)
13272 {
13273 /* If the expression does not have side effects then we don't have to wrap
13274 it with a cleanup point expression. */
13275 if (!TREE_SIDE_EFFECTS (expr))
13276 return expr;
13277
13278 /* If the expression is a return, check to see if the expression inside the
13279 return has no side effects or the right hand side of the modify expression
13280 inside the return. If either don't have side effects set we don't need to
13281 wrap the expression in a cleanup point expression. Note we don't check the
13282 left hand side of the modify because it should always be a return decl. */
13283 if (TREE_CODE (expr) == RETURN_EXPR)
13284 {
13285 tree op = TREE_OPERAND (expr, 0);
13286 if (!op || !TREE_SIDE_EFFECTS (op))
13287 return expr;
13288 op = TREE_OPERAND (op, 1);
13289 if (!TREE_SIDE_EFFECTS (op))
13290 return expr;
13291 }
13292
13293 return build1 (CLEANUP_POINT_EXPR, type, expr);
13294 }
13295
13296 /* Build an expression for the address of T. Folds away INDIRECT_REF to
13297 avoid confusing the gimplify process. */
13298
13299 tree
13300 build_fold_addr_expr_with_type (tree t, tree ptrtype)
13301 {
13302 /* The size of the object is not relevant when talking about its address. */
13303 if (TREE_CODE (t) == WITH_SIZE_EXPR)
13304 t = TREE_OPERAND (t, 0);
13305
13306 /* Note: doesn't apply to ALIGN_INDIRECT_REF */
13307 if (TREE_CODE (t) == INDIRECT_REF
13308 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
13309 {
13310 t = TREE_OPERAND (t, 0);
13311 if (TREE_TYPE (t) != ptrtype)
13312 t = build1 (NOP_EXPR, ptrtype, t);
13313 }
13314 else
13315 {
13316 tree base = t;
13317
13318 while (handled_component_p (base))
13319 base = TREE_OPERAND (base, 0);
13320 if (DECL_P (base))
13321 TREE_ADDRESSABLE (base) = 1;
13322
13323 t = build1 (ADDR_EXPR, ptrtype, t);
13324 }
13325
13326 return t;
13327 }
13328
13329 tree
13330 build_fold_addr_expr (tree t)
13331 {
13332 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
13333 }
13334
/* Given a pointer value OP0 and a type TYPE, return a simplified version
   of an indirection through OP0, or NULL_TREE if no simplification is
   possible.  */

tree
fold_indirect_ref_1 (tree type, tree op0)
{
  tree sub = op0;
  tree subtype;

  /* Look through no-op conversions so the patterns below see the
     underlying pointer expression.  */
  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  /* Only an actual pointer can be dereferenced.  */
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> to the value of the const decl.  */
      if (TREE_CODE (op) == CONST_DECL)
	return DECL_INITIAL (op);
      /* *&p => p;  make sure to handle *&"str"[cst] here.  */
      if (type == optype)
	{
	  tree fop = fold_read_from_constant_string (op);
	  if (fop)
	    return fop;
	  else
	    return op;
	}
      /* *(foo *)&fooarray => fooarray[0] */
      else if (TREE_CODE (optype) == ARRAY_TYPE
	       && type == TREE_TYPE (optype))
	{
	  /* Index the array at its lower bound, which is usually but
	     not always zero.  */
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && type == TREE_TYPE (optype))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && type == TREE_TYPE (optype))
	{
	  /* Extract the first element of the vector as a bit-field of
	     the element's width at offset zero.  */
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
  if (TREE_CODE (sub) == PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree op00 = TREE_OPERAND (sub, 0);
      tree op01 = TREE_OPERAND (sub, 1);
      tree op00type;

      STRIP_NOPS (op00);
      op00type = TREE_TYPE (op00);
      if (TREE_CODE (op00) == ADDR_EXPR
 	  && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
	  && type == TREE_TYPE (TREE_TYPE (op00type)))
	{
	  /* The constant offset must equal the size of one element,
	     i.e. the access points at the imaginary part.  */
	  tree size = TYPE_SIZE_UNIT (type);
	  if (tree_int_cst_equal (size, op01))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
	}
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && type == TREE_TYPE (TREE_TYPE (subtype)))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      /* Build the dereference of the array pointer, then index it at
	 the array's lower bound.  */
      sub = build_fold_indirect_ref (sub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
13425
13426 /* Builds an expression for an indirection through T, simplifying some
13427 cases. */
13428
13429 tree
13430 build_fold_indirect_ref (tree t)
13431 {
13432 tree type = TREE_TYPE (TREE_TYPE (t));
13433 tree sub = fold_indirect_ref_1 (type, t);
13434
13435 if (sub)
13436 return sub;
13437 else
13438 return build1 (INDIRECT_REF, type, t);
13439 }
13440
13441 /* Given an INDIRECT_REF T, return either T or a simplified version. */
13442
13443 tree
13444 fold_indirect_ref (tree t)
13445 {
13446 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
13447
13448 if (sub)
13449 return sub;
13450 else
13451 return t;
13452 }
13453
13454 /* Strip non-trapping, non-side-effecting tree nodes from an expression
13455 whose result is ignored. The type of the returned tree need not be
13456 the same as the original expression. */
13457
13458 tree
13459 fold_ignored_result (tree t)
13460 {
13461 if (!TREE_SIDE_EFFECTS (t))
13462 return integer_zero_node;
13463
13464 for (;;)
13465 switch (TREE_CODE_CLASS (TREE_CODE (t)))
13466 {
13467 case tcc_unary:
13468 t = TREE_OPERAND (t, 0);
13469 break;
13470
13471 case tcc_binary:
13472 case tcc_comparison:
13473 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13474 t = TREE_OPERAND (t, 0);
13475 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
13476 t = TREE_OPERAND (t, 1);
13477 else
13478 return t;
13479 break;
13480
13481 case tcc_expression:
13482 switch (TREE_CODE (t))
13483 {
13484 case COMPOUND_EXPR:
13485 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
13486 return t;
13487 t = TREE_OPERAND (t, 0);
13488 break;
13489
13490 case COND_EXPR:
13491 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
13492 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
13493 return t;
13494 t = TREE_OPERAND (t, 0);
13495 break;
13496
13497 default:
13498 return t;
13499 }
13500 break;
13501
13502 default:
13503 return t;
13504 }
13505 }
13506
13507 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
13508 This can only be applied to objects of a sizetype. */
13509
13510 tree
13511 round_up (tree value, int divisor)
13512 {
13513 tree div = NULL_TREE;
13514
13515 gcc_assert (divisor > 0);
13516 if (divisor == 1)
13517 return value;
13518
13519 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13520 have to do anything. Only do this when we are not given a const,
13521 because in that case, this check is more expensive than just
13522 doing it. */
13523 if (TREE_CODE (value) != INTEGER_CST)
13524 {
13525 div = build_int_cst (TREE_TYPE (value), divisor);
13526
13527 if (multiple_of_p (TREE_TYPE (value), value, div))
13528 return value;
13529 }
13530
13531 /* If divisor is a power of two, simplify this to bit manipulation. */
13532 if (divisor == (divisor & -divisor))
13533 {
13534 tree t;
13535
13536 t = build_int_cst (TREE_TYPE (value), divisor - 1);
13537 value = size_binop (PLUS_EXPR, value, t);
13538 t = build_int_cst (TREE_TYPE (value), -divisor);
13539 value = size_binop (BIT_AND_EXPR, value, t);
13540 }
13541 else
13542 {
13543 if (!div)
13544 div = build_int_cst (TREE_TYPE (value), divisor);
13545 value = size_binop (CEIL_DIV_EXPR, value, div);
13546 value = size_binop (MULT_EXPR, value, div);
13547 }
13548
13549 return value;
13550 }
13551
13552 /* Likewise, but round down. */
13553
13554 tree
13555 round_down (tree value, int divisor)
13556 {
13557 tree div = NULL_TREE;
13558
13559 gcc_assert (divisor > 0);
13560 if (divisor == 1)
13561 return value;
13562
13563 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
13564 have to do anything. Only do this when we are not given a const,
13565 because in that case, this check is more expensive than just
13566 doing it. */
13567 if (TREE_CODE (value) != INTEGER_CST)
13568 {
13569 div = build_int_cst (TREE_TYPE (value), divisor);
13570
13571 if (multiple_of_p (TREE_TYPE (value), value, div))
13572 return value;
13573 }
13574
13575 /* If divisor is a power of two, simplify this to bit manipulation. */
13576 if (divisor == (divisor & -divisor))
13577 {
13578 tree t;
13579
13580 t = build_int_cst (TREE_TYPE (value), -divisor);
13581 value = size_binop (BIT_AND_EXPR, value, t);
13582 }
13583 else
13584 {
13585 if (!div)
13586 div = build_int_cst (TREE_TYPE (value), divisor);
13587 value = size_binop (FLOOR_DIV_EXPR, value, div);
13588 value = size_binop (MULT_EXPR, value, div);
13589 }
13590
13591 return value;
13592 }
13593
13594 /* Returns the pointer to the base of the object addressed by EXP and
13595 extracts the information about the offset of the access, storing it
13596 to PBITPOS and POFFSET. */
13597
13598 static tree
13599 split_address_to_core_and_offset (tree exp,
13600 HOST_WIDE_INT *pbitpos, tree *poffset)
13601 {
13602 tree core;
13603 enum machine_mode mode;
13604 int unsignedp, volatilep;
13605 HOST_WIDE_INT bitsize;
13606
13607 if (TREE_CODE (exp) == ADDR_EXPR)
13608 {
13609 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
13610 poffset, &mode, &unsignedp, &volatilep,
13611 false);
13612 core = build_fold_addr_expr (core);
13613 }
13614 else
13615 {
13616 core = exp;
13617 *pbitpos = 0;
13618 *poffset = NULL_TREE;
13619 }
13620
13621 return core;
13622 }
13623
13624 /* Returns true if addresses of E1 and E2 differ by a constant, false
13625 otherwise. If they do, E1 - E2 is stored in *DIFF. */
13626
13627 bool
13628 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
13629 {
13630 tree core1, core2;
13631 HOST_WIDE_INT bitpos1, bitpos2;
13632 tree toffset1, toffset2, tdiff, type;
13633
13634 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
13635 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
13636
13637 if (bitpos1 % BITS_PER_UNIT != 0
13638 || bitpos2 % BITS_PER_UNIT != 0
13639 || !operand_equal_p (core1, core2, 0))
13640 return false;
13641
13642 if (toffset1 && toffset2)
13643 {
13644 type = TREE_TYPE (toffset1);
13645 if (type != TREE_TYPE (toffset2))
13646 toffset2 = fold_convert (type, toffset2);
13647
13648 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
13649 if (!cst_and_fits_in_hwi (tdiff))
13650 return false;
13651
13652 *diff = int_cst_value (tdiff);
13653 }
13654 else if (toffset1 || toffset2)
13655 {
13656 /* If only one of the offsets is non-constant, the difference cannot
13657 be a constant. */
13658 return false;
13659 }
13660 else
13661 *diff = 0;
13662
13663 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
13664 return true;
13665 }
13666
/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      /* abs (x) and -x only change the sign; reduce to the operand,
	 itself recursively stripped.  */
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      /* With sign-dependent rounding, changing an operand's sign could
	 change the magnitude of the result, so leave the expression
	 alone.  */
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      /* Strip sign operations from either operand; rebuild only if at
	 least one of them was simplified.  */
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
			    arg0 ? arg0 : TREE_OPERAND (exp, 0),
			    arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      /* Only the second operand's value matters; the first is kept
	 unchanged for its side effects.  */
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2 (COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      /* Strip sign operations from both arms; the condition operand is
	 left untouched.  */
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3 (COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
			    arg0 ? arg0 : TREE_OPERAND (exp, 1),
			    arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip copysign function call, return the 1st argument.
	       The 2nd argument is kept only for its side effects.  */
	    arg0 = TREE_VALUE (TREE_OPERAND (exp, 1));
	    arg1 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (exp, 1)));
	    return omit_one_operand (TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (TREE_VALUE (TREE_OPERAND (exp, 1)));
		if (arg0)
		  return build_function_call_expr (get_callee_fndecl (exp),
						   build_tree_list (NULL_TREE,
								    arg0));
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
This page took 0.737709 seconds and 5 git commands to generate.