/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant, an overflowable flag and prior
   overflow indicators.  It forces the value to fit the type and sets
   TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW as appropriate.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree, int);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static tree combine_comparisons (enum tree_code, enum tree_code,
                                 enum tree_code, tree, tree, tree);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (enum tree_code, tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static int multiple_of_p (tree, tree, tree);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static tree fold_div_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
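
/* Example (illustrative sketch, not compiled into GCC): with wrapping
   two's complement arithmetic, as this file assumes for HOST_WIDE_INT,
   adding two positive numbers can wrap to a negative sum, which the
   macro detects; operands of opposite sign can never overflow.  */
#if 0
static void
overflow_sum_sign_example (void)
{
  HOST_WIDE_INT a, b, sum;

  a = (HOST_WIDE_INT) ((~(unsigned HOST_WIDE_INT) 0) >> 1); /* maximum */
  b = 1;
  sum = a + b;		/* wraps to the minimum value */
  gcc_assert (OVERFLOW_SUM_SIGN (a, b, sum));

  b = -1;
  sum = a + b;		/* opposite signs: no overflow */
  gcc_assert (!OVERFLOW_SUM_SIGN (a, b, sum));
}
#endif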
\f
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
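
/* Example (illustrative sketch, not compiled into GCC): a doubleword
   value survives an encode/decode round trip, with each of the four
   words holding only HOST_BITS_PER_WIDE_INT / 2 significant bits.  */
#if 0
static void
encode_decode_example (void)
{
  HOST_WIDE_INT words[4];
  unsigned HOST_WIDE_INT low = 0x12345678, low2;
  HOST_WIDE_INT hi = 0x0abcdef0, hi2;

  encode (words, low, hi);	/* words[0] == LOWPART (low), etc.  */
  decode (words, &low2, &hi2);
  gcc_assert (low2 == low && hi2 == hi);
}
#endif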
\f
/* T is an INTEGER_CST node.  OVERFLOWABLE indicates if we are interested
   in overflow of the value; when > 0 we are only interested in signed
   overflow, when < 0 we are interested in any overflow.  OVERFLOWED
   indicates whether overflow has already occurred.  OVERFLOWED_CONST
   indicates whether constant overflow has already occurred.  We force
   T's value to be within range of T's type (by setting to 0 or 1 all
   the bits outside the type's range).  We set TREE_OVERFLOW if
   OVERFLOWED is nonzero,
   or OVERFLOWABLE is > 0 and signed overflow occurs,
   or OVERFLOWABLE is < 0 and any overflow occurs.
   We set TREE_CONSTANT_OVERFLOW if
   OVERFLOWED_CONST is nonzero,
   or we set TREE_OVERFLOW.
   We return either the original T, or a copy.  */

tree
force_fit_type (tree t, int overflowable,
                bool overflowed, bool overflowed_const)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;
  int sign_extended_type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));
  /* Size types *are* sign extended.  */
  sign_extended_type = (!TYPE_UNSIGNED (TREE_TYPE (t))
                        || (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
                            && TYPE_IS_SIZETYPE (TREE_TYPE (t))));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    high &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      high = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        low &= ~((HOST_WIDE_INT) (-1) << prec);
    }

  if (!sign_extended_type)
    /* No sign extension */;
  else if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    /* Correct width already.  */;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    {
      /* Sign extend top half? */
      if (high & ((unsigned HOST_WIDE_INT)1
                  << (prec - HOST_BITS_PER_WIDE_INT - 1)))
        high |= (HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else if (prec == HOST_BITS_PER_WIDE_INT)
    {
      if ((HOST_WIDE_INT)low < 0)
        high = -1;
    }
  else
    {
      /* Sign extend bottom half? */
      if (low & ((unsigned HOST_WIDE_INT)1 << (prec - 1)))
        {
          high = -1;
          low |= (HOST_WIDE_INT)(-1) << prec;
        }
    }

  /* If the value changed, return a new node.  */
  if (overflowed || overflowed_const
      || low != TREE_INT_CST_LOW (t) || high != TREE_INT_CST_HIGH (t))
    {
      t = build_int_cst_wide (TREE_TYPE (t), low, high);

      if (overflowed
          || overflowable < 0
          || (overflowable > 0 && sign_extended_type))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (overflowed_const)
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }

  return t;
}
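
/* Example (illustrative sketch, not compiled into GCC; the parameter
   name is hypothetical): forcing 0x1ff into a signed 8-bit type masks
   the value down to 0xff, sign extends it to -1, and, because the
   value changed with OVERFLOWABLE > 0 and a signed type, records the
   event in TREE_OVERFLOW and TREE_CONSTANT_OVERFLOW.  */
#if 0
static tree
force_fit_type_example (tree signed_8bit_type)
{
  tree t = build_int_cst_wide (signed_8bit_type, 0x1ff, 0);
  t = force_fit_type (t, 1, false, false);
  /* TREE_INT_CST_LOW (t) now holds the sign-extended -1 pattern.  */
  return t;
}
#endif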
\f
/* Add two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}
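
/* Example (illustrative sketch, not compiled into GCC): the carry out
   of the low words, computed as (l < l1), propagates into the high
   word; an all-ones low word plus 1 therefore increments *HV.  */
#if 0
static void
add_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int ovf = add_double (~(unsigned HOST_WIDE_INT) 0, 0, 1, 0, &lv, &hv);

  gcc_assert (lv == 0 && hv == 1 && !ovf);
}
#endif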

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
\f
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1] */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
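
/* Example (illustrative sketch, not compiled into GCC): multiplying
   two single-word operands can never overflow the doubleword result;
   BASE * BASE is exactly 1 << HOST_BITS_PER_WIDE_INT, so the product's
   low word is 0 and its high word is 1.  */
#if 0
static void
mul_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  int ovf = mul_double (BASE, 0, BASE, 0, &lv, &hv);

  gcc_assert (lv == 0 && hv == 1 && !ovf);
}
#endif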
\f
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}
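
/* Example (illustrative sketch, not compiled into GCC): shifting left
   across the word boundary moves low-word bits into the high word, so
   1 << HOST_BITS_PER_WIDE_INT lands in bit 0 of *HV, assuming PREC is
   the full doubleword width.  */
#if 0
static void
lshift_double_example (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  lshift_double (1, 0, HOST_BITS_PER_WIDE_INT,
                 2 * HOST_BITS_PER_WIDE_INT, &lv, &hv, 0);
  gcc_assert (lv == 0 && hv == 1);
}
#endif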

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1 << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT)prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
\f
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
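
/* Example (illustrative sketch, not compiled into GCC): since each
   rotate is built from the two logical shifts above, rotating right by
   COUNT gives the same bits as rotating left by PREC - COUNT.  */
#if 0
static void
rotate_example (void)
{
  unsigned HOST_WIDE_INT l1, l2;
  HOST_WIDE_INT h1, h2;
  unsigned int prec = 2 * HOST_BITS_PER_WIDE_INT;

  lrotate_double (0x5, 0, 1, prec, &l1, &h1);
  rrotate_double (0x5, 0, prec - 1, prec, &l2, &h2);
  gcc_assert (l1 == l2 && h1 == h2);
}
#endif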
\f
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.  It controls how the quotient is rounded to an
   integer.  Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero the extra (5th) element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0) den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;		/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num [num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
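
/* Example (illustrative sketch, not compiled into GCC): dividing -7 by
   2 in signed doubleword arithmetic under two of the rounding codes.
   TRUNC_DIV_EXPR rounds toward zero and yields -3 (remainder -1),
   while FLOOR_DIV_EXPR rounds toward negative infinity and yields -4
   (remainder 1).  */
#if 0
static void
div_and_round_double_example (void)
{
  unsigned HOST_WIDE_INT lquo, lrem;
  HOST_WIDE_INT hquo, hrem;

  div_and_round_double (TRUNC_DIV_EXPR, 0, -7, -1, 2, 0,
                        &lquo, &hquo, &lrem, &hrem);
  gcc_assert (lquo == (unsigned HOST_WIDE_INT) -3 && hquo == -1);

  div_and_round_double (FLOOR_DIV_EXPR, 0, -7, -1, 2, 0,
                        &lquo, &hquo, &lrem, &hrem);
  gcc_assert (lquo == (unsigned HOST_WIDE_INT) -4 && hquo == -1);
}
#endif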

/* If ARG2 divides ARG1 with zero remainder, carry out the division
   of type CODE and return the quotient.
   Otherwise return NULL_TREE.  */

static tree
div_if_zero_remainder (enum tree_code code, tree arg1, tree arg2)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT quol, reml;
  HOST_WIDE_INT quoh, remh;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                        &quol, &quoh, &reml, &remh);
  if (remh != 0 || reml != 0)
    return NULL_TREE;

  return build_int_cst_wide (type, quol, quoh);
}
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ASINH:
    case BUILT_IN_ASINHF:
    case BUILT_IN_ASINHL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_ATANH:
    case BUILT_IN_ATANHF:
    case BUILT_IN_ATANHL:
    case BUILT_IN_CBRT:
    case BUILT_IN_CBRTF:
    case BUILT_IN_CBRTL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_SINH:
    case BUILT_IN_SINHF:
    case BUILT_IN_SINHL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
    case BUILT_IN_TANH:
    case BUILT_IN_TANHF:
    case BUILT_IN_TANHL:
      return true;

    default:
      break;
    }
  return false;
}
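
/* Example (illustrative sketch, not compiled into GCC): sin is odd, so
   fold may rewrite -sin(x) as sin(-x); cos is even, not odd, so it is
   deliberately absent from the list above.  */
#if 0
static void
negate_mathfn_example (void)
{
  gcc_assert (negate_mathfn_p (BUILT_IN_SIN));
  gcc_assert (!negate_mathfn_p (BUILT_IN_COS));
}
#endif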

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}
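
/* Example (illustrative sketch, not compiled into GCC; the parameter
   name is hypothetical): in a signed 8-bit type, -128 is the one value
   whose negation does not fit, so it is the only INTEGER_CST this
   predicate rejects.  */
#if 0
static void
may_negate_example (tree signed_8bit_type)
{
  gcc_assert (!may_negate_without_overflow_p
                (build_int_cst (signed_8bit_type, -128)));
  gcc_assert (may_negate_without_overflow_p
                (build_int_cst (signed_8bit_type, 127)));
}
#endif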

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (! TREE_OVERFLOW (tem)
          || TYPE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return fold_convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return fold_convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 0));
              return fold_convert (type, tem);
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              tem = fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                 tem, TREE_OPERAND (t, 1));
              return fold_convert (type, tem);
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_convert (type,
                             fold_build2 (MINUS_EXPR, TREE_TYPE (t),
                                          TREE_OPERAND (t, 1),
                                          TREE_OPERAND (t, 0)));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              TREE_OPERAND (t, 0),
                                              negate_expr (tem)));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_convert (type,
                                 fold_build2 (TREE_CODE (t), TREE_TYPE (t),
                                              negate_expr (tem),
                                              TREE_OPERAND (t, 1)));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? lang_hooks.types.signed_type (type)
                           : lang_hooks.types.unsigned_type (type);
              tree temp = fold_convert (ntype, TREE_OPERAND (t, 0));
              temp = fold_build2 (RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert (type, temp);
            }
        }
      break;

    default:
      break;
    }

  tem = fold_build1 (NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert (type, tem);
}
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
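
/* Example (illustrative sketch, not compiled into GCC): splitting
   IN = A + 5 with CODE == PLUS_EXPR yields VAR == A and *LITP == 5; a
   non-literal invariant operand would land in *CONP instead.  The
   pieces can then be recombined in any order with associate_trees,
   which is how fold gathers constant parts together.  */
#if 0
static tree
split_tree_example (tree in)
{
  tree con, lit, minus_lit;
  tree var = split_tree (in, PLUS_EXPR, &con, &lit, &minus_lit, 0);

  return associate_trees (var,
                          associate_trees (con, lit, PLUS_EXPR,
                                           TREE_TYPE (in)),
                          PLUS_EXPR, TREE_TYPE (in));
}
#endif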

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t2),
                           fold_convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2 (MINUS_EXPR, type, fold_convert (type, t1),
                           fold_convert (type, TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert (type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert (type, t1);
        }

      return build2 (code, type, fold_convert (type, t1),
                     fold_convert (type, t2));
    }

  return fold_build2 (code, type, fold_convert (type, t1),
                      fold_convert (type, t2));
}
\f
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TYPE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
      /* ... fall through ...  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
      /* ... fall through ...  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      gcc_unreachable ();
    }

  t = build_int_cst_wide (TREE_TYPE (arg1), low, hi);

  if (notrunc)
    {
      /* Propagate overflow flags ourselves.  */
      if (((!uns || is_sizetype) && overflow)
          | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_OVERFLOW (t) = 1;
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
      else if (TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2))
        {
          t = copy_node (t);
          TREE_CONSTANT_OVERFLOW (t) = 1;
        }
    }
  else
    t = force_fit_type (t, 1,
                        ((!uns || is_sizetype) && overflow)
                        | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2),
                        TREE_CONSTANT_OVERFLOW (arg1)
                        | TREE_CONSTANT_OVERFLOW (arg2));

  return t;
}
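
/* Example (illustrative sketch, not compiled into GCC; the parameter
   name is hypothetical): folding 2 + 3 at compile time.  With NOTRUNC
   == 0 the result is forced back into the type's precision, so a
   result that does not fit is wrapped and flagged via force_fit_type.  */
#if 0
static tree
int_const_binop_example (tree int_type)
{
  tree five = int_const_binop (PLUS_EXPR,
                               build_int_cst (int_type, 2),
                               build_int_cst (int_type, 3), 0);

  gcc_assert (TREE_INT_CST_LOW (five) == 5);
  return five;
}
#endif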

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */

      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */

      if ((flag_rounding_math
           || (REAL_MODE_FORMAT_COMPOSITE_P (mode)
               && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            tree t1, t2, real, imag;
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t1 = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2, notrunc),
                              const_binop (MULT_EXPR, i1, i2, notrunc),
                              notrunc);
            t2 = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, i1, r2, notrunc),
                              const_binop (MULT_EXPR, r1, i2, notrunc),
                              notrunc);

            if (INTEGRAL_TYPE_P (TREE_TYPE (r1)))
              {
                real = const_binop (TRUNC_DIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (TRUNC_DIV_EXPR, t2, magsquared, notrunc);
              }
            else
              {
                real = const_binop (RDIV_EXPR, t1, magsquared, notrunc);
                imag = const_binop (RDIV_EXPR, t2, magsquared, notrunc);
                if (!real || !imag)
                  return NULL_TREE;
              }

            t = build_complex (type, real, imag);
          }
          break;

        default:
          gcc_unreachable ();
        }
      return t;
    }
  return 0;
}

/* Create a size type INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type and it must be a size type.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold_build2 (code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && type == TREE_TYPE (arg1));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = type == bitsizetype ? sbitsizetype : ssizetype;

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, fold_convert (ctype, arg0),
                       fold_convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return fold_convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, fold_convert (ctype, integer_zero_node),
                       fold_convert (ctype, size_binop (MINUS_EXPR,
                                                        arg1, arg0)));
}
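
/* Example (illustrative sketch, not compiled into GCC): sizetype is
   unsigned, so 4 - 12 computed there would wrap to a huge value; the
   function instead converts to the signed counterpart and folds the
   difference to the ssizetype constant -8.  */
#if 0
static tree
size_diffop_example (void)
{
  return size_diffop (size_int (4), size_int (12));
}
#endif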
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = build_int_cst_wide (type, TREE_INT_CST_LOW (arg1),
                          TREE_INT_CST_HIGH (arg1));

  t = force_fit_type (t,
                      /* Don't set the overflow when
                         converting a pointer.  */
                      !POINTER_TYPE_P (TREE_TYPE (arg1)),
                      (TREE_INT_CST_HIGH (arg1) < 0
                       && (TYPE_UNSIGNED (type)
                           < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                      | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  HOST_WIDE_INT high, low;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    case FIX_CEIL_EXPR:
      real_ceil (&r, VOIDmode, &x);
      break;

    case FIX_FLOOR_EXPR:
      real_floor (&r, VOIDmode, &x);
      break;

    case FIX_ROUND_EXPR:
      real_round (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      high = 0;
      low = 0;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          high = TREE_INT_CST_HIGH (lt);
          low = TREE_INT_CST_LOW (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              high = TREE_INT_CST_HIGH (ut);
              low = TREE_INT_CST_LOW (ut);
            }
        }
    }

  if (! overflow)
    REAL_VALUE_TO_INT (&low, &high, r);

  t = build_int_cst_wide (type, low, high);

  t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg1),
                      TREE_CONSTANT_OVERFLOW (arg1));
  return t;
}
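
/* Example (illustrative sketch, not compiled into GCC; the parameter
   names are hypothetical): under the Java-style rules above, a
   REAL_CST far above the target type's range saturates at
   TYPE_MAX_VALUE and the returned constant carries TREE_OVERFLOW.  */
#if 0
static tree
fp_to_int_example (tree int_type, tree huge_real_cst)
{
  tree t = fold_convert_const_int_from_real (FIX_TRUNC_EXPR, int_type,
                                             huge_real_cst);
  gcc_assert (TREE_OVERFLOW (t));
  return t;
}
#endif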

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  TREE_CONSTANT_OVERFLOW (t)
    = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
  return t;
}
1886
1887 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1888 type TYPE.  If no simplification can be done, return NULL_TREE.  */
1889
1890 static tree
1891 fold_convert_const (enum tree_code code, tree type, tree arg1)
1892 {
1893 if (TREE_TYPE (arg1) == type)
1894 return arg1;
1895
1896 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
1897 {
1898 if (TREE_CODE (arg1) == INTEGER_CST)
1899 return fold_convert_const_int_from_int (type, arg1);
1900 else if (TREE_CODE (arg1) == REAL_CST)
1901 return fold_convert_const_int_from_real (code, type, arg1);
1902 }
1903 else if (TREE_CODE (type) == REAL_TYPE)
1904 {
1905 if (TREE_CODE (arg1) == INTEGER_CST)
1906 return build_real_from_int_cst (type, arg1);
1907 if (TREE_CODE (arg1) == REAL_CST)
1908 return fold_convert_const_real_from_real (type, arg1);
1909 }
1910 return NULL_TREE;
1911 }
1912
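/* Illustrative examples of the dispatch above (not from the original
   source): an INTEGER_CST 5 converted to a REAL_TYPE folds to the
   REAL_CST 5.0 via build_real_from_int_cst, a REAL_CST converted to an
   integer type goes through fold_convert_const_int_from_real, and any
   other combination returns NULL_TREE, meaning "not folded".  */
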
1913 /* Construct a vector of zero elements of vector type TYPE. */
1914
1915 static tree
1916 build_zero_vector (tree type)
1917 {
1918 tree elem, list;
1919 int i, units;
1920
1921 elem = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1922 units = TYPE_VECTOR_SUBPARTS (type);
1923
1924 list = NULL_TREE;
1925 for (i = 0; i < units; i++)
1926 list = tree_cons (NULL_TREE, elem, list);
1927 return build_vector (type, list);
1928 }
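
/* Example (illustrative): for a four-element integer vector type this
   returns the constant vector {0, 0, 0, 0}, each element being
   integer_zero_node folded to the element type.  */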
1929
1930 /* Convert expression ARG to type TYPE. Used by the middle-end for
1931 simple conversions in preference to calling the front-end's convert. */
1932
1933 tree
1934 fold_convert (tree type, tree arg)
1935 {
1936 tree orig = TREE_TYPE (arg);
1937 tree tem;
1938
1939 if (type == orig)
1940 return arg;
1941
1942 if (TREE_CODE (arg) == ERROR_MARK
1943 || TREE_CODE (type) == ERROR_MARK
1944 || TREE_CODE (orig) == ERROR_MARK)
1945 return error_mark_node;
1946
1947 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig)
1948 || lang_hooks.types_compatible_p (TYPE_MAIN_VARIANT (type),
1949 TYPE_MAIN_VARIANT (orig)))
1950 return fold_build1 (NOP_EXPR, type, arg);
1951
1952 switch (TREE_CODE (type))
1953 {
1954 case INTEGER_TYPE: case CHAR_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1955 case POINTER_TYPE: case REFERENCE_TYPE:
1956 case OFFSET_TYPE:
1957 if (TREE_CODE (arg) == INTEGER_CST)
1958 {
1959 tem = fold_convert_const (NOP_EXPR, type, arg);
1960 if (tem != NULL_TREE)
1961 return tem;
1962 }
1963 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1964 || TREE_CODE (orig) == OFFSET_TYPE)
1965 return fold_build1 (NOP_EXPR, type, arg);
1966 if (TREE_CODE (orig) == COMPLEX_TYPE)
1967 {
1968 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
1969 return fold_convert (type, tem);
1970 }
1971 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1972 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1973 return fold_build1 (NOP_EXPR, type, arg);
1974
1975 case REAL_TYPE:
1976 if (TREE_CODE (arg) == INTEGER_CST)
1977 {
1978 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1979 if (tem != NULL_TREE)
1980 return tem;
1981 }
1982 else if (TREE_CODE (arg) == REAL_CST)
1983 {
1984 tem = fold_convert_const (NOP_EXPR, type, arg);
1985 if (tem != NULL_TREE)
1986 return tem;
1987 }
1988
1989 switch (TREE_CODE (orig))
1990 {
1991 case INTEGER_TYPE: case CHAR_TYPE:
1992 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1993 case POINTER_TYPE: case REFERENCE_TYPE:
1994 return fold_build1 (FLOAT_EXPR, type, arg);
1995
1996 case REAL_TYPE:
1997 return fold_build1 (flag_float_store ? CONVERT_EXPR : NOP_EXPR,
1998 type, arg);
1999
2000 case COMPLEX_TYPE:
2001 tem = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2002 return fold_convert (type, tem);
2003
2004 default:
2005 gcc_unreachable ();
2006 }
2007
2008 case COMPLEX_TYPE:
2009 switch (TREE_CODE (orig))
2010 {
2011 case INTEGER_TYPE: case CHAR_TYPE:
2012 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2013 case POINTER_TYPE: case REFERENCE_TYPE:
2014 case REAL_TYPE:
2015 return build2 (COMPLEX_EXPR, type,
2016 fold_convert (TREE_TYPE (type), arg),
2017 fold_convert (TREE_TYPE (type), integer_zero_node));
2018 case COMPLEX_TYPE:
2019 {
2020 tree rpart, ipart;
2021
2022 if (TREE_CODE (arg) == COMPLEX_EXPR)
2023 {
2024 rpart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 0));
2025 ipart = fold_convert (TREE_TYPE (type), TREE_OPERAND (arg, 1));
2026 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2027 }
2028
2029 arg = save_expr (arg);
2030 rpart = fold_build1 (REALPART_EXPR, TREE_TYPE (orig), arg);
2031 ipart = fold_build1 (IMAGPART_EXPR, TREE_TYPE (orig), arg);
2032 rpart = fold_convert (TREE_TYPE (type), rpart);
2033 ipart = fold_convert (TREE_TYPE (type), ipart);
2034 return fold_build2 (COMPLEX_EXPR, type, rpart, ipart);
2035 }
2036
2037 default:
2038 gcc_unreachable ();
2039 }
2040
2041 case VECTOR_TYPE:
2042 if (integer_zerop (arg))
2043 return build_zero_vector (type);
2044 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2045 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2046 || TREE_CODE (orig) == VECTOR_TYPE);
2047 return fold_build1 (VIEW_CONVERT_EXPR, type, arg);
2048
2049 case VOID_TYPE:
2050 return fold_build1 (CONVERT_EXPR, type, fold_ignored_result (arg));
2051
2052 default:
2053 gcc_unreachable ();
2054 }
2055 }
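
/* Example (illustrative): converting a REAL_TYPE value F to a complex
   type builds COMPLEX_EXPR <(T) F, (T) 0>; converting between complex
   types converts the two parts, wrapping any operand that is not
   already a COMPLEX_EXPR in a SAVE_EXPR so that REALPART_EXPR and
   IMAGPART_EXPR evaluate it only once.  */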
2056 \f
2057 /* Return false if expr can be assumed not to be an lvalue, true
2058 otherwise. */
2059
2060 static bool
2061 maybe_lvalue_p (tree x)
2062 {
2063 /* We only need to wrap lvalue tree codes. */
2064 switch (TREE_CODE (x))
2065 {
2066 case VAR_DECL:
2067 case PARM_DECL:
2068 case RESULT_DECL:
2069 case LABEL_DECL:
2070 case FUNCTION_DECL:
2071 case SSA_NAME:
2072
2073 case COMPONENT_REF:
2074 case INDIRECT_REF:
2075 case ALIGN_INDIRECT_REF:
2076 case MISALIGNED_INDIRECT_REF:
2077 case ARRAY_REF:
2078 case ARRAY_RANGE_REF:
2079 case BIT_FIELD_REF:
2080 case OBJ_TYPE_REF:
2081
2082 case REALPART_EXPR:
2083 case IMAGPART_EXPR:
2084 case PREINCREMENT_EXPR:
2085 case PREDECREMENT_EXPR:
2086 case SAVE_EXPR:
2087 case TRY_CATCH_EXPR:
2088 case WITH_CLEANUP_EXPR:
2089 case COMPOUND_EXPR:
2090 case MODIFY_EXPR:
2091 case TARGET_EXPR:
2092 case COND_EXPR:
2093 case BIND_EXPR:
2094 case MIN_EXPR:
2095 case MAX_EXPR:
2096 break;
2097
2098 default:
2099 /* Assume the worst for front-end tree codes. */
2100 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2101 break;
2102 return false;
2103 }
2104
2105 return true;
2106 }
2107
2108 /* Return an expr equal to X but certainly not valid as an lvalue. */
2109
2110 tree
2111 non_lvalue (tree x)
2112 {
2113 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2114 us. */
2115 if (in_gimple_form)
2116 return x;
2117
2118 if (! maybe_lvalue_p (x))
2119 return x;
2120 return build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
2121 }
2122
2123 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2124 Zero means allow extended lvalues. */
2125
2126 int pedantic_lvalues;
2127
2128 /* When pedantic, return an expr equal to X but certainly not valid as a
2129 pedantic lvalue. Otherwise, return X. */
2130
2131 static tree
2132 pedantic_non_lvalue (tree x)
2133 {
2134 if (pedantic_lvalues)
2135 return non_lvalue (x);
2136 else
2137 return x;
2138 }
2139 \f
2140 /* Given a tree comparison code, return the code that is the logical inverse
2141 of the given code. It is not safe to do this for floating-point
2142 comparisons, except for NE_EXPR and EQ_EXPR, so we receive the HONOR_NANS
2143 flag as well: if reversing the comparison is unsafe, return ERROR_MARK. */
2144
2145 enum tree_code
2146 invert_tree_comparison (enum tree_code code, bool honor_nans)
2147 {
2148 if (honor_nans && flag_trapping_math)
2149 return ERROR_MARK;
2150
2151 switch (code)
2152 {
2153 case EQ_EXPR:
2154 return NE_EXPR;
2155 case NE_EXPR:
2156 return EQ_EXPR;
2157 case GT_EXPR:
2158 return honor_nans ? UNLE_EXPR : LE_EXPR;
2159 case GE_EXPR:
2160 return honor_nans ? UNLT_EXPR : LT_EXPR;
2161 case LT_EXPR:
2162 return honor_nans ? UNGE_EXPR : GE_EXPR;
2163 case LE_EXPR:
2164 return honor_nans ? UNGT_EXPR : GT_EXPR;
2165 case LTGT_EXPR:
2166 return UNEQ_EXPR;
2167 case UNEQ_EXPR:
2168 return LTGT_EXPR;
2169 case UNGT_EXPR:
2170 return LE_EXPR;
2171 case UNGE_EXPR:
2172 return LT_EXPR;
2173 case UNLT_EXPR:
2174 return GE_EXPR;
2175 case UNLE_EXPR:
2176 return GT_EXPR;
2177 case ORDERED_EXPR:
2178 return UNORDERED_EXPR;
2179 case UNORDERED_EXPR:
2180 return ORDERED_EXPR;
2181 default:
2182 gcc_unreachable ();
2183 }
2184 }
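
/* Example (illustrative): invert_tree_comparison (LT_EXPR, false)
   yields GE_EXPR, whereas invert_tree_comparison (LT_EXPR, true)
   yields UNGE_EXPR, because !(x < y) must also hold when either
   operand is a NaN; with NaNs honored and -ftrapping-math in effect
   the function instead returns ERROR_MARK.  */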
2185
2186 /* Similar, but return the comparison that results if the operands are
2187 swapped. This is safe for floating-point. */
2188
2189 enum tree_code
2190 swap_tree_comparison (enum tree_code code)
2191 {
2192 switch (code)
2193 {
2194 case EQ_EXPR:
2195 case NE_EXPR:
2196 case ORDERED_EXPR:
2197 case UNORDERED_EXPR:
2198 case LTGT_EXPR:
2199 case UNEQ_EXPR:
2200 return code;
2201 case GT_EXPR:
2202 return LT_EXPR;
2203 case GE_EXPR:
2204 return LE_EXPR;
2205 case LT_EXPR:
2206 return GT_EXPR;
2207 case LE_EXPR:
2208 return GE_EXPR;
2209 case UNGT_EXPR:
2210 return UNLT_EXPR;
2211 case UNGE_EXPR:
2212 return UNLE_EXPR;
2213 case UNLT_EXPR:
2214 return UNGT_EXPR;
2215 case UNLE_EXPR:
2216 return UNGE_EXPR;
2217 default:
2218 gcc_unreachable ();
2219 }
2220 }
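
/* Example (illustrative): swapping rewrites "a < b" as "b > a", so
   swap_tree_comparison (LT_EXPR) is GT_EXPR; symmetric codes such as
   EQ_EXPR, NE_EXPR and UNORDERED_EXPR map to themselves.  */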
2221
2222
2223 /* Convert a comparison tree code from an enum tree_code representation
2224 into a compcode bit-based encoding. This function is the inverse of
2225 compcode_to_comparison. */
2226
2227 static enum comparison_code
2228 comparison_to_compcode (enum tree_code code)
2229 {
2230 switch (code)
2231 {
2232 case LT_EXPR:
2233 return COMPCODE_LT;
2234 case EQ_EXPR:
2235 return COMPCODE_EQ;
2236 case LE_EXPR:
2237 return COMPCODE_LE;
2238 case GT_EXPR:
2239 return COMPCODE_GT;
2240 case NE_EXPR:
2241 return COMPCODE_NE;
2242 case GE_EXPR:
2243 return COMPCODE_GE;
2244 case ORDERED_EXPR:
2245 return COMPCODE_ORD;
2246 case UNORDERED_EXPR:
2247 return COMPCODE_UNORD;
2248 case UNLT_EXPR:
2249 return COMPCODE_UNLT;
2250 case UNEQ_EXPR:
2251 return COMPCODE_UNEQ;
2252 case UNLE_EXPR:
2253 return COMPCODE_UNLE;
2254 case UNGT_EXPR:
2255 return COMPCODE_UNGT;
2256 case LTGT_EXPR:
2257 return COMPCODE_LTGT;
2258 case UNGE_EXPR:
2259 return COMPCODE_UNGE;
2260 default:
2261 gcc_unreachable ();
2262 }
2263 }
2264
2265 /* Convert a compcode bit-based encoding of a comparison operator back
2266 to GCC's enum tree_code representation. This function is the
2267 inverse of comparison_to_compcode. */
2268
2269 static enum tree_code
2270 compcode_to_comparison (enum comparison_code code)
2271 {
2272 switch (code)
2273 {
2274 case COMPCODE_LT:
2275 return LT_EXPR;
2276 case COMPCODE_EQ:
2277 return EQ_EXPR;
2278 case COMPCODE_LE:
2279 return LE_EXPR;
2280 case COMPCODE_GT:
2281 return GT_EXPR;
2282 case COMPCODE_NE:
2283 return NE_EXPR;
2284 case COMPCODE_GE:
2285 return GE_EXPR;
2286 case COMPCODE_ORD:
2287 return ORDERED_EXPR;
2288 case COMPCODE_UNORD:
2289 return UNORDERED_EXPR;
2290 case COMPCODE_UNLT:
2291 return UNLT_EXPR;
2292 case COMPCODE_UNEQ:
2293 return UNEQ_EXPR;
2294 case COMPCODE_UNLE:
2295 return UNLE_EXPR;
2296 case COMPCODE_UNGT:
2297 return UNGT_EXPR;
2298 case COMPCODE_LTGT:
2299 return LTGT_EXPR;
2300 case COMPCODE_UNGE:
2301 return UNGE_EXPR;
2302 default:
2303 gcc_unreachable ();
2304 }
2305 }
2306
2307 /* Return a tree for the comparison which is the combination of
2308 doing the AND or OR (depending on CODE) of the two operations LCODE
2309 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2310 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2311 if this makes the transformation invalid. */
2312
2313 tree
2314 combine_comparisons (enum tree_code code, enum tree_code lcode,
2315 enum tree_code rcode, tree truth_type,
2316 tree ll_arg, tree lr_arg)
2317 {
2318 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2319 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2320 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2321 enum comparison_code compcode;
2322
2323 switch (code)
2324 {
2325 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2326 compcode = lcompcode & rcompcode;
2327 break;
2328
2329 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2330 compcode = lcompcode | rcompcode;
2331 break;
2332
2333 default:
2334 return NULL_TREE;
2335 }
2336
2337 if (!honor_nans)
2338 {
2339 /* Eliminate unordered comparisons, as well as LTGT and ORD
2340 which are not used unless the mode has NaNs. */
2341 compcode &= ~COMPCODE_UNORD;
2342 if (compcode == COMPCODE_LTGT)
2343 compcode = COMPCODE_NE;
2344 else if (compcode == COMPCODE_ORD)
2345 compcode = COMPCODE_TRUE;
2346 }
2347 else if (flag_trapping_math)
2348 {
2349 /* Check that the original operation and the optimized ones will trap
2350 under the same condition. */
2351 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2352 && (lcompcode != COMPCODE_EQ)
2353 && (lcompcode != COMPCODE_ORD);
2354 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2355 && (rcompcode != COMPCODE_EQ)
2356 && (rcompcode != COMPCODE_ORD);
2357 bool trap = (compcode & COMPCODE_UNORD) == 0
2358 && (compcode != COMPCODE_EQ)
2359 && (compcode != COMPCODE_ORD);
2360
2361 /* In a short-circuited boolean expression the LHS might be
2362 such that the RHS, if evaluated, will never trap. For
2363 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2364 if neither x nor y is NaN. (This is a mixed blessing: for
2365 example, the expression above will never trap, hence
2366 optimizing it to x < y would be invalid). */
2367 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2368 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2369 rtrap = false;
2370
2371 /* If the comparison was short-circuited, and only the RHS
2372 trapped, we may now generate a spurious trap. */
2373 if (rtrap && !ltrap
2374 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2375 return NULL_TREE;
2376
2377 /* If we changed the conditions that cause a trap, we lose. */
2378 if ((ltrap || rtrap) != trap)
2379 return NULL_TREE;
2380 }
2381
2382 if (compcode == COMPCODE_TRUE)
2383 return constant_boolean_node (true, truth_type);
2384 else if (compcode == COMPCODE_FALSE)
2385 return constant_boolean_node (false, truth_type);
2386 else
2387 return fold_build2 (compcode_to_comparison (compcode),
2388 truth_type, ll_arg, lr_arg);
2389 }
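
/* Worked example (illustrative): for "x < y || x == y" the compcodes
   of LT_EXPR and EQ_EXPR are ORed together, giving the compcode of
   LE_EXPR, so combine_comparisons (TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
   type, x, y) folds the disjunction to "x <= y".  */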
2390
2391 /* Return nonzero if CODE is a tree code that represents a truth value. */
2392
2393 static int
2394 truth_value_p (enum tree_code code)
2395 {
2396 return (TREE_CODE_CLASS (code) == tcc_comparison
2397 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
2398 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2399 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2400 }
2401 \f
2402 /* Return nonzero if two operands (typically of the same tree node)
2403 are necessarily equal. If either argument has side-effects this
2404 function returns zero. FLAGS modifies behavior as follows:
2405
2406 If OEP_ONLY_CONST is set, only return nonzero for constants.
2407 This function tests whether the operands are indistinguishable;
2408 it does not test whether they are equal using C's == operation.
2409 The distinction is important for IEEE floating point, because
2410 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2411 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2412
2413 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2414 even though it may hold multiple values during a function.
2415 This is because a GCC tree node guarantees that nothing else is
2416 executed between the evaluation of its "operands" (which may often
2417 be evaluated in arbitrary order). Hence if the operands themselves
2418 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2419 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2420 unset means assuming isochronic (or instantaneous) tree equivalence.
2421 Unless comparing arbitrary expression trees, such as from different
2422 statements, this flag can usually be left unset.
2423
2424 If OEP_PURE_SAME is set, then pure functions with identical arguments
2425 are considered the same. It is used when the caller has other ways
2426 to ensure that global memory is unchanged in between. */
2427
2428 int
2429 operand_equal_p (tree arg0, tree arg1, unsigned int flags)
2430 {
2431 /* If either is ERROR_MARK, they aren't equal. */
2432 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK)
2433 return 0;
2434
2435 /* If both types don't have the same signedness, then we can't consider
2436 them equal. We must check this before the STRIP_NOPS calls
2437 because they may change the signedness of the arguments. */
2438 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2439 return 0;
2440
2441 STRIP_NOPS (arg0);
2442 STRIP_NOPS (arg1);
2443
2444 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2445 /* This is needed for conversions and for COMPONENT_REF.
2446 Might as well play it safe and always test this. */
2447 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2448 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2449 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2450 return 0;
2451
2452 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2453 We don't care about side effects in that case because the SAVE_EXPR
2454 takes care of that for us. In all other cases, two expressions are
2455 equal if they have no side effects. If we have two identical
2456 expressions with side effects that should be treated the same due
2457 to the only side effects being identical SAVE_EXPR's, that will
2458 be detected in the recursive calls below. */
2459 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2460 && (TREE_CODE (arg0) == SAVE_EXPR
2461 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2462 return 1;
2463
2464 /* Next handle constant cases, those for which we can return 1 even
2465 if ONLY_CONST is set. */
2466 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2467 switch (TREE_CODE (arg0))
2468 {
2469 case INTEGER_CST:
2470 return (! TREE_CONSTANT_OVERFLOW (arg0)
2471 && ! TREE_CONSTANT_OVERFLOW (arg1)
2472 && tree_int_cst_equal (arg0, arg1));
2473
2474 case REAL_CST:
2475 return (! TREE_CONSTANT_OVERFLOW (arg0)
2476 && ! TREE_CONSTANT_OVERFLOW (arg1)
2477 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2478 TREE_REAL_CST (arg1)));
2479
2480 case VECTOR_CST:
2481 {
2482 tree v1, v2;
2483
2484 if (TREE_CONSTANT_OVERFLOW (arg0)
2485 || TREE_CONSTANT_OVERFLOW (arg1))
2486 return 0;
2487
2488 v1 = TREE_VECTOR_CST_ELTS (arg0);
2489 v2 = TREE_VECTOR_CST_ELTS (arg1);
2490 while (v1 && v2)
2491 {
2492 if (!operand_equal_p (TREE_VALUE (v1), TREE_VALUE (v2),
2493 flags))
2494 return 0;
2495 v1 = TREE_CHAIN (v1);
2496 v2 = TREE_CHAIN (v2);
2497 }
2498
2499 return v1 == v2;
2500 }
2501
2502 case COMPLEX_CST:
2503 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2504 flags)
2505 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2506 flags));
2507
2508 case STRING_CST:
2509 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2510 && ! memcmp (TREE_STRING_POINTER (arg0),
2511 TREE_STRING_POINTER (arg1),
2512 TREE_STRING_LENGTH (arg0)));
2513
2514 case ADDR_EXPR:
2515 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2516 0);
2517 default:
2518 break;
2519 }
2520
2521 if (flags & OEP_ONLY_CONST)
2522 return 0;
2523
2524 /* Define macros to test an operand from arg0 and arg1 for equality and a
2525 variant that allows null and views null as being different from any
2526 non-null value. In the latter case, if either is null, they both
2527 must be; otherwise, do the normal comparison. */
2528 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2529 TREE_OPERAND (arg1, N), flags)
2530
2531 #define OP_SAME_WITH_NULL(N) \
2532 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2533 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2534
2535 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2536 {
2537 case tcc_unary:
2538 /* Two conversions are equal only if signedness and modes match. */
2539 switch (TREE_CODE (arg0))
2540 {
2541 case NOP_EXPR:
2542 case CONVERT_EXPR:
2543 case FIX_CEIL_EXPR:
2544 case FIX_TRUNC_EXPR:
2545 case FIX_FLOOR_EXPR:
2546 case FIX_ROUND_EXPR:
2547 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2548 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2549 return 0;
2550 break;
2551 default:
2552 break;
2553 }
2554
2555 return OP_SAME (0);
2556
2557
2558 case tcc_comparison:
2559 case tcc_binary:
2560 if (OP_SAME (0) && OP_SAME (1))
2561 return 1;
2562
2563 /* For commutative ops, allow the other order. */
2564 return (commutative_tree_code (TREE_CODE (arg0))
2565 && operand_equal_p (TREE_OPERAND (arg0, 0),
2566 TREE_OPERAND (arg1, 1), flags)
2567 && operand_equal_p (TREE_OPERAND (arg0, 1),
2568 TREE_OPERAND (arg1, 0), flags));
2569
2570 case tcc_reference:
2571 /* If either of the pointer (or reference) expressions we are
2572 dereferencing contain a side effect, these cannot be equal. */
2573 if (TREE_SIDE_EFFECTS (arg0)
2574 || TREE_SIDE_EFFECTS (arg1))
2575 return 0;
2576
2577 switch (TREE_CODE (arg0))
2578 {
2579 case INDIRECT_REF:
2580 case ALIGN_INDIRECT_REF:
2581 case MISALIGNED_INDIRECT_REF:
2582 case REALPART_EXPR:
2583 case IMAGPART_EXPR:
2584 return OP_SAME (0);
2585
2586 case ARRAY_REF:
2587 case ARRAY_RANGE_REF:
2588 /* Operands 2 and 3 may be null. */
2589 return (OP_SAME (0)
2590 && OP_SAME (1)
2591 && OP_SAME_WITH_NULL (2)
2592 && OP_SAME_WITH_NULL (3));
2593
2594 case COMPONENT_REF:
2595 /* Handle operand 2 the same as for ARRAY_REF. */
2596 return OP_SAME (0) && OP_SAME (1) && OP_SAME_WITH_NULL (2);
2597
2598 case BIT_FIELD_REF:
2599 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2600
2601 default:
2602 return 0;
2603 }
2604
2605 case tcc_expression:
2606 switch (TREE_CODE (arg0))
2607 {
2608 case ADDR_EXPR:
2609 case TRUTH_NOT_EXPR:
2610 return OP_SAME (0);
2611
2612 case TRUTH_ANDIF_EXPR:
2613 case TRUTH_ORIF_EXPR:
2614 return OP_SAME (0) && OP_SAME (1);
2615
2616 case TRUTH_AND_EXPR:
2617 case TRUTH_OR_EXPR:
2618 case TRUTH_XOR_EXPR:
2619 if (OP_SAME (0) && OP_SAME (1))
2620 return 1;
2621
2622 /* Otherwise take into account this is a commutative operation. */
2623 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2624 TREE_OPERAND (arg1, 1), flags)
2625 && operand_equal_p (TREE_OPERAND (arg0, 1),
2626 TREE_OPERAND (arg1, 0), flags));
2627
2628 case CALL_EXPR:
2629 /* If the CALL_EXPRs call different functions, then they
2630 clearly cannot be equal. */
2631 if (!OP_SAME (0))
2632 return 0;
2633
2634 {
2635 unsigned int cef = call_expr_flags (arg0);
2636 if (flags & OEP_PURE_SAME)
2637 cef &= ECF_CONST | ECF_PURE;
2638 else
2639 cef &= ECF_CONST;
2640 if (!cef)
2641 return 0;
2642 }
2643
2644 /* Now see if all the arguments are the same. operand_equal_p
2645 does not handle TREE_LIST, so we walk the operands here
2646 feeding them to operand_equal_p. */
2647 arg0 = TREE_OPERAND (arg0, 1);
2648 arg1 = TREE_OPERAND (arg1, 1);
2649 while (arg0 && arg1)
2650 {
2651 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1),
2652 flags))
2653 return 0;
2654
2655 arg0 = TREE_CHAIN (arg0);
2656 arg1 = TREE_CHAIN (arg1);
2657 }
2658
2659 /* If we get here and both argument lists are exhausted
2660 then the CALL_EXPRs are equal. */
2661 return ! (arg0 || arg1);
2662
2663 default:
2664 return 0;
2665 }
2666
2667 case tcc_declaration:
2668 /* Consider __builtin_sqrt equal to sqrt. */
2669 return (TREE_CODE (arg0) == FUNCTION_DECL
2670 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2671 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2672 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2673
2674 default:
2675 return 0;
2676 }
2677
2678 #undef OP_SAME
2679 #undef OP_SAME_WITH_NULL
2680 }
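
/* Usage sketch (illustrative): for side-effect-free A and B of the
   same type, operand_equal_p (A + B, B + A, 0) returns 1 because
   PLUS_EXPR is commutative; with OEP_ONLY_CONST only constant
   operands, such as matching INTEGER_CSTs without overflow, compare
   equal.  */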
2681 \f
2682 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2683 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2684
2685 When in doubt, return 0. */
2686
2687 static int
2688 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2689 {
2690 int unsignedp1, unsignedpo;
2691 tree primarg0, primarg1, primother;
2692 unsigned int correct_width;
2693
2694 if (operand_equal_p (arg0, arg1, 0))
2695 return 1;
2696
2697 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2698 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2699 return 0;
2700
2701 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2702 and see if the inner values are the same. This removes any
2703 signedness comparison, which doesn't matter here. */
2704 primarg0 = arg0, primarg1 = arg1;
2705 STRIP_NOPS (primarg0);
2706 STRIP_NOPS (primarg1);
2707 if (operand_equal_p (primarg0, primarg1, 0))
2708 return 1;
2709
2710 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2711 actual comparison operand, ARG0.
2712
2713 First throw away any conversions to wider types
2714 already present in the operands. */
2715
2716 primarg1 = get_narrower (arg1, &unsignedp1);
2717 primother = get_narrower (other, &unsignedpo);
2718
2719 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2720 if (unsignedp1 == unsignedpo
2721 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2722 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2723 {
2724 tree type = TREE_TYPE (arg0);
2725
2726 /* Make sure the shorter operand is extended the right way
2727 to match the longer operand. */
2728 primarg1 = fold_convert (lang_hooks.types.signed_or_unsigned_type
2729 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2730
2731 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2732 return 1;
2733 }
2734
2735 return 0;
2736 }
2737 \f
2738 /* See if ARG is an expression that is either a comparison or is performing
2739 arithmetic on comparisons. The comparisons must only be comparing
2740 two different values, which will be stored in *CVAL1 and *CVAL2; if
2741 they are nonzero it means that some operands have already been found.
2742 No variables may be used anywhere else in the expression except in the
2743 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2744 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2745
2746 If this is true, return 1. Otherwise, return zero. */
2747
2748 static int
2749 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2750 {
2751 enum tree_code code = TREE_CODE (arg);
2752 enum tree_code_class class = TREE_CODE_CLASS (code);
2753
2754 /* We can handle some of the tcc_expression cases here. */
2755 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2756 class = tcc_unary;
2757 else if (class == tcc_expression
2758 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2759 || code == COMPOUND_EXPR))
2760 class = tcc_binary;
2761
2762 else if (class == tcc_expression && code == SAVE_EXPR
2763 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2764 {
2765 /* If we've already found a CVAL1 or CVAL2, this expression is
2766 too complex to handle. */
2767 if (*cval1 || *cval2)
2768 return 0;
2769
2770 class = tcc_unary;
2771 *save_p = 1;
2772 }
2773
2774 switch (class)
2775 {
2776 case tcc_unary:
2777 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2778
2779 case tcc_binary:
2780 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2781 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2782 cval1, cval2, save_p));
2783
2784 case tcc_constant:
2785 return 1;
2786
2787 case tcc_expression:
2788 if (code == COND_EXPR)
2789 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2790 cval1, cval2, save_p)
2791 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2792 cval1, cval2, save_p)
2793 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2794 cval1, cval2, save_p));
2795 return 0;
2796
2797 case tcc_comparison:
2798 /* First see if we can handle the first operand, then the second. For
2799 the second operand, we know *CVAL1 can't be zero. It must be that
2800 one side of the comparison is each of the values; test for the
2801 case where this isn't true by failing if the two operands
2802 are the same. */
2803
2804 if (operand_equal_p (TREE_OPERAND (arg, 0),
2805 TREE_OPERAND (arg, 1), 0))
2806 return 0;
2807
2808 if (*cval1 == 0)
2809 *cval1 = TREE_OPERAND (arg, 0);
2810 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2811 ;
2812 else if (*cval2 == 0)
2813 *cval2 = TREE_OPERAND (arg, 0);
2814 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2815 ;
2816 else
2817 return 0;
2818
2819 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2820 ;
2821 else if (*cval2 == 0)
2822 *cval2 = TREE_OPERAND (arg, 1);
2823 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2824 ;
2825 else
2826 return 0;
2827
2828 return 1;
2829
2830 default:
2831 return 0;
2832 }
2833 }
2834 \f
2835 /* ARG is a tree that is known to contain just arithmetic operations and
2836 comparisons. Evaluate the operations in the tree substituting NEW0 for
2837 any occurrence of OLD0 as an operand of a comparison and likewise for
2838 NEW1 and OLD1. */
2839
2840 static tree
2841 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2842 {
2843 tree type = TREE_TYPE (arg);
2844 enum tree_code code = TREE_CODE (arg);
2845 enum tree_code_class class = TREE_CODE_CLASS (code);
2846
2847 /* We can handle some of the tcc_expression cases here. */
2848 if (class == tcc_expression && code == TRUTH_NOT_EXPR)
2849 class = tcc_unary;
2850 else if (class == tcc_expression
2851 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2852 class = tcc_binary;
2853
2854 switch (class)
2855 {
2856 case tcc_unary:
2857 return fold_build1 (code, type,
2858 eval_subst (TREE_OPERAND (arg, 0),
2859 old0, new0, old1, new1));
2860
2861 case tcc_binary:
2862 return fold_build2 (code, type,
2863 eval_subst (TREE_OPERAND (arg, 0),
2864 old0, new0, old1, new1),
2865 eval_subst (TREE_OPERAND (arg, 1),
2866 old0, new0, old1, new1));
2867
2868 case tcc_expression:
2869 switch (code)
2870 {
2871 case SAVE_EXPR:
2872 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2873
2874 case COMPOUND_EXPR:
2875 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2876
2877 case COND_EXPR:
2878 return fold_build3 (code, type,
2879 eval_subst (TREE_OPERAND (arg, 0),
2880 old0, new0, old1, new1),
2881 eval_subst (TREE_OPERAND (arg, 1),
2882 old0, new0, old1, new1),
2883 eval_subst (TREE_OPERAND (arg, 2),
2884 old0, new0, old1, new1));
2885 default:
2886 break;
2887 }
2888 /* Fall through - ??? */
2889
2890 case tcc_comparison:
2891 {
2892 tree arg0 = TREE_OPERAND (arg, 0);
2893 tree arg1 = TREE_OPERAND (arg, 1);
2894
2895 /* We need to check both for exact equality and tree equality. The
2896 former will be true if the operand has a side-effect. In that
2897 case, we know the operand occurred exactly once. */
2898
2899 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2900 arg0 = new0;
2901 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2902 arg0 = new1;
2903
2904 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2905 arg1 = new0;
2906 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2907 arg1 = new1;
2908
2909 return fold_build2 (code, type, arg0, arg1);
2910 }
2911
2912 default:
2913 return arg;
2914 }
2915 }
2916 \f
2917 /* Return a tree for the case when the result of an expression is RESULT
2918 converted to TYPE and OMITTED was previously an operand of the expression
2919 but is now not needed (e.g., we folded OMITTED * 0).
2920
2921 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2922 the conversion of RESULT to TYPE. */
2923
2924 tree
2925 omit_one_operand (tree type, tree result, tree omitted)
2926 {
2927 tree t = fold_convert (type, result);
2928
2929 if (TREE_SIDE_EFFECTS (omitted))
2930 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2931
2932 return non_lvalue (t);
2933 }
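
/* Example (illustrative): when "f () * 0" is folded to 0 but the call
   has side effects, omit_one_operand returns COMPOUND_EXPR <f (), 0>
   so the call is still evaluated; a side-effect-free omitted operand
   is simply dropped.  */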
2934
2935 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2936
2937 static tree
2938 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2939 {
2940 tree t = fold_convert (type, result);
2941
2942 if (TREE_SIDE_EFFECTS (omitted))
2943 return build2 (COMPOUND_EXPR, type, fold_ignored_result (omitted), t);
2944
2945 return pedantic_non_lvalue (t);
2946 }
2947
2948 /* Return a tree for the case when the result of an expression is RESULT
2949 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
2950 of the expression but are now not needed.
2951
2952 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
2953 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
2954 evaluated before OMITTED2. Otherwise, if neither has side effects,
2955 just do the conversion of RESULT to TYPE. */
2956
2957 tree
2958 omit_two_operands (tree type, tree result, tree omitted1, tree omitted2)
2959 {
2960 tree t = fold_convert (type, result);
2961
2962 if (TREE_SIDE_EFFECTS (omitted2))
2963 t = build2 (COMPOUND_EXPR, type, omitted2, t);
2964 if (TREE_SIDE_EFFECTS (omitted1))
2965 t = build2 (COMPOUND_EXPR, type, omitted1, t);
2966
2967 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue (t) : t;
2968 }
2969
2970 \f
2971 /* Return a simplified tree node for the truth-negation of ARG. This
2972 never alters ARG itself. We assume that ARG is an operation that
2973 returns a truth value (0 or 1).
2974
2975 FIXME: one would think we would fold the result, but it causes
2976 problems with the dominator optimizer. */
2977 tree
2978 invert_truthvalue (tree arg)
2979 {
2980 tree type = TREE_TYPE (arg);
2981 enum tree_code code = TREE_CODE (arg);
2982
2983 if (code == ERROR_MARK)
2984 return arg;
2985
2986 /* If this is a comparison, we can simply invert it, except for
2987 floating-point non-equality comparisons, in which case we just
2988 enclose a TRUTH_NOT_EXPR around what we have. */
2989
2990 if (TREE_CODE_CLASS (code) == tcc_comparison)
2991 {
2992 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
2993 if (FLOAT_TYPE_P (op_type)
2994 && flag_trapping_math
2995 && code != ORDERED_EXPR && code != UNORDERED_EXPR
2996 && code != NE_EXPR && code != EQ_EXPR)
2997 return build1 (TRUTH_NOT_EXPR, type, arg);
2998 else
2999 {
3000 code = invert_tree_comparison (code,
3001 HONOR_NANS (TYPE_MODE (op_type)));
3002 if (code == ERROR_MARK)
3003 return build1 (TRUTH_NOT_EXPR, type, arg);
3004 else
3005 return build2 (code, type,
3006 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
3007 }
3008 }
3009
3010 switch (code)
3011 {
3012 case INTEGER_CST:
3013 return constant_boolean_node (integer_zerop (arg), type);
3014
3015 case TRUTH_AND_EXPR:
3016 return build2 (TRUTH_OR_EXPR, type,
3017 invert_truthvalue (TREE_OPERAND (arg, 0)),
3018 invert_truthvalue (TREE_OPERAND (arg, 1)));
3019
3020 case TRUTH_OR_EXPR:
3021 return build2 (TRUTH_AND_EXPR, type,
3022 invert_truthvalue (TREE_OPERAND (arg, 0)),
3023 invert_truthvalue (TREE_OPERAND (arg, 1)));
3024
3025 case TRUTH_XOR_EXPR:
3026 /* Here we can invert either operand. We invert the first operand
3027 unless the second operand is a TRUTH_NOT_EXPR in which case our
3028 result is the XOR of the first operand with the inside of the
3029 negation of the second operand. */
3030
3031 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3032 return build2 (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3033 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3034 else
3035 return build2 (TRUTH_XOR_EXPR, type,
3036 invert_truthvalue (TREE_OPERAND (arg, 0)),
3037 TREE_OPERAND (arg, 1));
3038
3039 case TRUTH_ANDIF_EXPR:
3040 return build2 (TRUTH_ORIF_EXPR, type,
3041 invert_truthvalue (TREE_OPERAND (arg, 0)),
3042 invert_truthvalue (TREE_OPERAND (arg, 1)));
3043
3044 case TRUTH_ORIF_EXPR:
3045 return build2 (TRUTH_ANDIF_EXPR, type,
3046 invert_truthvalue (TREE_OPERAND (arg, 0)),
3047 invert_truthvalue (TREE_OPERAND (arg, 1)));
3048
3049 case TRUTH_NOT_EXPR:
3050 return TREE_OPERAND (arg, 0);
3051
3052 case COND_EXPR:
3053 {
3054 tree arg1 = TREE_OPERAND (arg, 1);
3055 tree arg2 = TREE_OPERAND (arg, 2);
3056 /* A COND_EXPR may have a throw as one operand, which
3057 then has void type. Just leave void operands
3058 as they are. */
3059 return build3 (COND_EXPR, type, TREE_OPERAND (arg, 0),
3060 VOID_TYPE_P (TREE_TYPE (arg1))
3061 ? arg1 : invert_truthvalue (arg1),
3062 VOID_TYPE_P (TREE_TYPE (arg2))
3063 ? arg2 : invert_truthvalue (arg2));
3064 }
3065
3066 case COMPOUND_EXPR:
3067 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
3068 invert_truthvalue (TREE_OPERAND (arg, 1)));
3069
3070 case NON_LVALUE_EXPR:
3071 return invert_truthvalue (TREE_OPERAND (arg, 0));
3072
3073 case NOP_EXPR:
3074 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3075 break;
3076
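      /* For non-boolean types, fall through and push the negation
	 inside the conversion, as for CONVERT_EXPR.  */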
3077 case CONVERT_EXPR:
3078 case FLOAT_EXPR:
3079 return build1 (TREE_CODE (arg), type,
3080 invert_truthvalue (TREE_OPERAND (arg, 0)));
3081
3082 case BIT_AND_EXPR:
3083 if (!integer_onep (TREE_OPERAND (arg, 1)))
3084 break;
3085 return build2 (EQ_EXPR, type, arg,
3086 fold_convert (type, integer_zero_node));
3087
3088 case SAVE_EXPR:
3089 return build1 (TRUTH_NOT_EXPR, type, arg);
3090
3091 case CLEANUP_POINT_EXPR:
3092 return build1 (CLEANUP_POINT_EXPR, type,
3093 invert_truthvalue (TREE_OPERAND (arg, 0)));
3094
3095 default:
3096 break;
3097 }
3098 gcc_assert (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE);
3099 return build1 (TRUTH_NOT_EXPR, type, arg);
3100 }
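
/* Example (illustrative): by De Morgan's laws the negation of
   "a && b" is built as "!a || !b"; !(x < y) on floats becomes
   "x >= y" when NaNs cannot occur, UNGE when they can, and with
   -ftrapping-math the comparison is simply wrapped in a
   TRUTH_NOT_EXPR.  */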
3101
3102 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3103 operands are another bit-wise operation with a common input. If so,
3104 distribute the bit operations to save an operation and possibly two if
3105 constants are involved. For example, convert
3106 (A | B) & (A | C) into A | (B & C)
3107 Further simplification will occur if B and C are constants.
3108
3109 If this optimization cannot be done, 0 will be returned. */
3110
3111 static tree
3112 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
3113 {
3114 tree common;
3115 tree left, right;
3116
3117 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3118 || TREE_CODE (arg0) == code
3119 || (TREE_CODE (arg0) != BIT_AND_EXPR
3120 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3121 return 0;
3122
3123 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3124 {
3125 common = TREE_OPERAND (arg0, 0);
3126 left = TREE_OPERAND (arg0, 1);
3127 right = TREE_OPERAND (arg1, 1);
3128 }
3129 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3130 {
3131 common = TREE_OPERAND (arg0, 0);
3132 left = TREE_OPERAND (arg0, 1);
3133 right = TREE_OPERAND (arg1, 0);
3134 }
3135 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3136 {
3137 common = TREE_OPERAND (arg0, 1);
3138 left = TREE_OPERAND (arg0, 0);
3139 right = TREE_OPERAND (arg1, 1);
3140 }
3141 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3142 {
3143 common = TREE_OPERAND (arg0, 1);
3144 left = TREE_OPERAND (arg0, 0);
3145 right = TREE_OPERAND (arg1, 0);
3146 }
3147 else
3148 return 0;
3149
3150 return fold_build2 (TREE_CODE (arg0), type, common,
3151 fold_build2 (code, type, left, right));
3152 }
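
/* Worked example (illustrative): for (x | 3) & (x | 5) the common
   operand is x, so this returns x | (3 & 5), which constant-folds to
   x | 1, turning three bit operations into one.  */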
3153
3154 /* Knowing that ARG0 and ARG1 are each either a MULT_EXPR or an RDIV_EXPR,
3155 simplify a binary operation with code CODE. This optimization is unsafe. */
3156 static tree
3157 distribute_real_division (enum tree_code code, tree type, tree arg0, tree arg1)
3158 {
3159 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3160 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3161
3162 /* (A / C) +- (B / C) -> (A +- B) / C. */
3163 if (mul0 == mul1
3164 && operand_equal_p (TREE_OPERAND (arg0, 1),
3165 TREE_OPERAND (arg1, 1), 0))
3166 return fold_build2 (mul0 ? MULT_EXPR : RDIV_EXPR, type,
3167 fold_build2 (code, type,
3168 TREE_OPERAND (arg0, 0),
3169 TREE_OPERAND (arg1, 0)),
3170 TREE_OPERAND (arg0, 1));
3171
3172 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3173 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3174 TREE_OPERAND (arg1, 0), 0)
3175 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3176 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3177 {
3178 REAL_VALUE_TYPE r0, r1;
3179 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3180 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3181 if (!mul0)
3182 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3183 if (!mul1)
3184 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3185 real_arithmetic (&r0, code, &r0, &r1);
3186 return fold_build2 (MULT_EXPR, type,
3187 TREE_OPERAND (arg0, 0),
3188 build_real (type, r0));
3189 }
3190
3191 return NULL_TREE;
3192 }
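
/* Worked example (illustrative; the result may round differently,
   which is why the transformation is labeled unsafe): the expression
   (a / 2.0) + (a / 4.0) matches the second pattern above and becomes
   a * (0.5 + 0.25), i.e. a * 0.75.  */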
3193 \f
3194 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3195 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3196
3197 static tree
3198 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
3199 int unsignedp)
3200 {
3201 tree result;
3202
3203 if (bitpos == 0)
3204 {
3205 tree size = TYPE_SIZE (TREE_TYPE (inner));
3206 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3207 || POINTER_TYPE_P (TREE_TYPE (inner)))
3208 && host_integerp (size, 0)
3209 && tree_low_cst (size, 0) == bitsize)
3210 return fold_convert (type, inner);
3211 }
3212
3213 result = build3 (BIT_FIELD_REF, type, inner,
3214 size_int (bitsize), bitsize_int (bitpos));
3215
3216 BIT_FIELD_REF_UNSIGNED (result) = unsignedp;
3217
3218 return result;
3219 }
3220
3221 /* Optimize a bit-field compare.
3222
3223 There are two cases: First is a compare against a constant and the
3224 second is a comparison of two items where the fields are at the same
3225 bit position relative to the start of a chunk (byte, halfword, word)
3226 large enough to contain it. In these cases we can avoid the shift
3227 implicit in bitfield extractions.
3228
3229 For constants, we emit a compare of the shifted constant with the
3230 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3231 compared. For two fields at the same position, we do the ANDs with the
3232 similar mask and compare the result of the ANDs.
3233
3234 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3235 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3236 are the left and right operands of the comparison, respectively.
3237
3238 If the optimization described above can be done, we return the resulting
3239 tree. Otherwise we return zero. */
3240
3241 static tree
3242 optimize_bit_field_compare (enum tree_code code, tree compare_type,
3243 tree lhs, tree rhs)
3244 {
3245 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3246 tree type = TREE_TYPE (lhs);
3247 tree signed_type, unsigned_type;
3248 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3249 enum machine_mode lmode, rmode, nmode;
3250 int lunsignedp, runsignedp;
3251 int lvolatilep = 0, rvolatilep = 0;
3252 tree linner, rinner = NULL_TREE;
3253 tree mask;
3254 tree offset;
3255
3256 /* Get all the information about the extractions being done. If the bit size
3257 is the same as the size of the underlying object, we aren't doing an
3258 extraction at all and so can do nothing. We also don't want to
3259 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3260 then will no longer be able to replace it. */
3261 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3262 &lunsignedp, &lvolatilep, false);
3263 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3264 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3265 return 0;
3266
3267 if (!const_p)
3268 {
3269 /* If this is not a constant, we can only do something if bit positions,
3270 sizes, and signedness are the same. */
3271 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3272 &runsignedp, &rvolatilep, false);
3273
3274 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3275 || lunsignedp != runsignedp || offset != 0
3276 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3277 return 0;
3278 }
3279
3280 /* See if we can find a mode to refer to this field. We should be able to,
3281 but fail if we can't. */
3282 nmode = get_best_mode (lbitsize, lbitpos,
3283 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3284 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3285 TYPE_ALIGN (TREE_TYPE (rinner))),
3286 word_mode, lvolatilep || rvolatilep);
3287 if (nmode == VOIDmode)
3288 return 0;
3289
3290 /* Set signed and unsigned types of the precision of this mode for the
3291 shifts below. */
3292 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3293 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3294
3295 /* Compute the bit position and size for the new reference and our offset
3296 within it. If the new reference is the same size as the original, we
3297 won't optimize anything, so return zero. */
3298 nbitsize = GET_MODE_BITSIZE (nmode);
3299 nbitpos = lbitpos & ~ (nbitsize - 1);
3300 lbitpos -= nbitpos;
3301 if (nbitsize == lbitsize)
3302 return 0;
3303
3304 if (BYTES_BIG_ENDIAN)
3305 lbitpos = nbitsize - lbitsize - lbitpos;
3306
3307 /* Make the mask to be used against the extracted field. */
3308 mask = build_int_cst (unsigned_type, -1);
3309 mask = force_fit_type (mask, 0, false, false);
3310 mask = fold_convert (unsigned_type, mask);
3311 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
3312 mask = const_binop (RSHIFT_EXPR, mask,
3313 size_int (nbitsize - lbitsize - lbitpos), 0);
3314
3315 if (! const_p)
3316 /* If not comparing with constant, just rework the comparison
3317 and return. */
3318 return build2 (code, compare_type,
3319 build2 (BIT_AND_EXPR, unsigned_type,
3320 make_bit_field_ref (linner, unsigned_type,
3321 nbitsize, nbitpos, 1),
3322 mask),
3323 build2 (BIT_AND_EXPR, unsigned_type,
3324 make_bit_field_ref (rinner, unsigned_type,
3325 nbitsize, nbitpos, 1),
3326 mask));
3327
3328 /* Otherwise, we are handling the constant case. See if the constant is too
3329 big for the field. Warn and return a tree for 0 (false) if so. We do
3330 this not only for its own sake, but to avoid having to test for this
3331 error case below. If we didn't, we might generate wrong code.
3332
3333 For unsigned fields, the constant shifted right by the field length should
3334 be all zero. For signed fields, the high-order bits should agree with
3335 the sign bit. */
3336
3337 if (lunsignedp)
3338 {
3339 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3340 fold_convert (unsigned_type, rhs),
3341 size_int (lbitsize), 0)))
3342 {
3343 warning (0, "comparison is always %d due to width of bit-field",
3344 code == NE_EXPR);
3345 return constant_boolean_node (code == NE_EXPR, compare_type);
3346 }
3347 }
3348 else
3349 {
3350 tree tem = const_binop (RSHIFT_EXPR, fold_convert (signed_type, rhs),
3351 size_int (lbitsize - 1), 0);
3352 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3353 {
3354 warning (0, "comparison is always %d due to width of bit-field",
3355 code == NE_EXPR);
3356 return constant_boolean_node (code == NE_EXPR, compare_type);
3357 }
3358 }
3359
3360 /* Single-bit compares should always be against zero. */
3361 if (lbitsize == 1 && ! integer_zerop (rhs))
3362 {
3363 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3364 rhs = fold_convert (type, integer_zero_node);
3365 }
3366
3367 /* Make a new bitfield reference, shift the constant over the
3368 appropriate number of bits and mask it with the computed mask
3369 (in case this was a signed field). If we changed it, make a new one. */
3370 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
3371 if (lvolatilep)
3372 {
3373 TREE_SIDE_EFFECTS (lhs) = 1;
3374 TREE_THIS_VOLATILE (lhs) = 1;
3375 }
3376
3377 rhs = const_binop (BIT_AND_EXPR,
3378 const_binop (LSHIFT_EXPR,
3379 fold_convert (unsigned_type, rhs),
3380 size_int (lbitpos), 0),
3381 mask, 0);
3382
3383 return build2 (code, compare_type,
3384 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask),
3385 rhs);
3386 }
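
/* Example (illustrative, layout-dependent): given
     struct s { unsigned f : 3; } x;
   a test such as "x.f == 5" can be rewritten to load a whole
   mode-sized chunk, AND it with the mask covering the three field
   bits, and compare against 5 shifted into the field's position,
   avoiding the shift implicit in extracting the bit-field itself.  */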
3387 \f
3388 /* Subroutine for fold_truthop: decode a field reference.
3389
3390 If EXP is a comparison reference, we return the innermost reference.
3391
3392 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3393 set to the starting bit number.
3394
3395 If the innermost field can be completely contained in a mode-sized
3396 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3397
3398 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3399 otherwise it is not changed.
3400
3401 *PUNSIGNEDP is set to the signedness of the field.
3402
3403 *PMASK is set to the mask used. This is either contained in a
3404 BIT_AND_EXPR or derived from the width of the field.
3405
3406 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3407
3408 Return 0 if this is not a component reference or is one that we can't
3409 do anything with. */
3410
3411 static tree
3412 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
3413 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3414 int *punsignedp, int *pvolatilep,
3415 tree *pmask, tree *pand_mask)
3416 {
3417 tree outer_type = 0;
3418 tree and_mask = 0;
3419 tree mask, inner, offset;
3420 tree unsigned_type;
3421 unsigned int precision;
3422
3423 /* All the optimizations using this function assume integer fields.
3424 There are problems with FP fields since the type_for_size call
3425 below can fail for, e.g., XFmode. */
3426 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3427 return 0;
3428
3429 /* We are interested in the bare arrangement of bits, so strip everything
3430 that doesn't affect the machine mode. However, record the type of the
3431 outermost expression if it may matter below. */
3432 if (TREE_CODE (exp) == NOP_EXPR
3433 || TREE_CODE (exp) == CONVERT_EXPR
3434 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3435 outer_type = TREE_TYPE (exp);
3436 STRIP_NOPS (exp);
3437
3438 if (TREE_CODE (exp) == BIT_AND_EXPR)
3439 {
3440 and_mask = TREE_OPERAND (exp, 1);
3441 exp = TREE_OPERAND (exp, 0);
3442 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3443 if (TREE_CODE (and_mask) != INTEGER_CST)
3444 return 0;
3445 }
3446
3447 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3448 punsignedp, pvolatilep, false);
3449 if ((inner == exp && and_mask == 0)
3450 || *pbitsize < 0 || offset != 0
3451 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3452 return 0;
3453
3454 /* If the number of bits in the reference is the same as the bitsize of
3455 the outer type, then the outer type gives the signedness. Otherwise
3456 (in case of a small bitfield) the signedness is unchanged. */
3457 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3458 *punsignedp = TYPE_UNSIGNED (outer_type);
3459
3460 /* Compute the mask to access the bitfield. */
3461 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3462 precision = TYPE_PRECISION (unsigned_type);
3463
3464 mask = build_int_cst (unsigned_type, -1);
3465 mask = force_fit_type (mask, 0, false, false);
3466
3467 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3468 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
3469
3470 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3471 if (and_mask != 0)
3472 mask = fold_build2 (BIT_AND_EXPR, unsigned_type,
3473 fold_convert (unsigned_type, and_mask), mask);
3474
3475 *pmask = mask;
3476 *pand_mask = and_mask;
3477 return inner;
3478 }
3479
3480 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3481 bit positions. */
3482
3483 static int
3484 all_ones_mask_p (tree mask, int size)
3485 {
3486 tree type = TREE_TYPE (mask);
3487 unsigned int precision = TYPE_PRECISION (type);
3488 tree tmask;
3489
3490 tmask = build_int_cst (lang_hooks.types.signed_type (type), -1);
3491 tmask = force_fit_type (tmask, 0, false, false);
3492
3493 return
3494 tree_int_cst_equal (mask,
3495 const_binop (RSHIFT_EXPR,
3496 const_binop (LSHIFT_EXPR, tmask,
3497 size_int (precision - size),
3498 0),
3499 size_int (precision - size), 0));
3500 }
3501
3502 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3503 represents the sign bit of EXP's type. If EXP represents a sign
3504 or zero extension, also test VAL against the unextended type.
3505 The return value is the (sub)expression whose sign bit is VAL,
3506 or NULL_TREE otherwise. */
3507
3508 static tree
3509 sign_bit_p (tree exp, tree val)
3510 {
3511 unsigned HOST_WIDE_INT mask_lo, lo;
3512 HOST_WIDE_INT mask_hi, hi;
3513 int width;
3514 tree t;
3515
3516 /* Tree EXP must have an integral type. */
3517 t = TREE_TYPE (exp);
3518 if (! INTEGRAL_TYPE_P (t))
3519 return NULL_TREE;
3520
3521 /* Tree VAL must be an integer constant. */
3522 if (TREE_CODE (val) != INTEGER_CST
3523 || TREE_CONSTANT_OVERFLOW (val))
3524 return NULL_TREE;
3525
3526 width = TYPE_PRECISION (t);
3527 if (width > HOST_BITS_PER_WIDE_INT)
3528 {
3529 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3530 lo = 0;
3531
3532 mask_hi = ((unsigned HOST_WIDE_INT) -1
3533 >> (2 * HOST_BITS_PER_WIDE_INT - width));
3534 mask_lo = -1;
3535 }
3536 else
3537 {
3538 hi = 0;
3539 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3540
3541 mask_hi = 0;
3542 mask_lo = ((unsigned HOST_WIDE_INT) -1
3543 >> (HOST_BITS_PER_WIDE_INT - width));
3544 }
3545
3546 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3547 treat VAL as if it were unsigned. */
3548 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3549 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3550 return exp;
3551
3552 /* Handle extension from a narrower type. */
3553 if (TREE_CODE (exp) == NOP_EXPR
3554 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3555 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3556
3557 return NULL_TREE;
3558 }
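
/* Example (illustrative): for a 32-bit integer EXP, sign_bit_p
   returns EXP when VAL is the constant with only bit 31 set (INT_MIN,
   or 0x80000000 viewed as unsigned); the masking above deliberately
   makes the test insensitive to VAL's own signedness.  */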
3559
3560 /* Subroutine for fold_truthop: determine if an operand is simple enough
3561 to be evaluated unconditionally. */
3562
3563 static int
3564 simple_operand_p (tree exp)
3565 {
3566 /* Strip any conversions that don't change the machine mode. */
3567 STRIP_NOPS (exp);
3568
3569 return (CONSTANT_CLASS_P (exp)
3570 || TREE_CODE (exp) == SSA_NAME
3571 || (DECL_P (exp)
3572 && ! TREE_ADDRESSABLE (exp)
3573 && ! TREE_THIS_VOLATILE (exp)
3574 && ! DECL_NONLOCAL (exp)
3575 /* Don't regard global variables as simple. They may be
3576 allocated in ways unknown to the compiler (shared memory,
3577 #pragma weak, etc). */
3578 && ! TREE_PUBLIC (exp)
3579 && ! DECL_EXTERNAL (exp)
3580 /* Loading a static variable is unduly expensive, but global
3581 registers aren't expensive. */
3582 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3583 }
3584 \f
3585 /* The following functions are subroutines to fold_range_test and allow it to
3586 try to change a logical combination of comparisons into a range test.
3587
3588 For example, both
3589 X == 2 || X == 3 || X == 4 || X == 5
3590 and
3591 X >= 2 && X <= 5
3592 are converted to
3593 (unsigned) (X - 2) <= 3
3594
3595 We describe each set of comparisons as being either inside or outside
3596 a range, using a variable named like IN_P, and then describe the
3597 range with a lower and upper bound. If one of the bounds is omitted,
3598 it represents either the highest or lowest value of the type.
3599
3600 In the comments below, we represent a range by two numbers in brackets
3601 preceded by a "+" to designate being inside that range, or a "-" to
3602 designate being outside that range, so the condition can be inverted by
3603 flipping the prefix. An omitted bound is represented by a "-". For
3604 example, "- [-, 10]" means being outside the range starting at the lowest
3605 possible value and ending at 10, in other words, being greater than 10.
3606 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3607 always false.
3608
3609 We set up things so that the missing bounds are handled in a consistent
3610 manner so neither a missing bound nor "true" and "false" need to be
3611 handled using a special case. */
3612
3613 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3614 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3615 and UPPER1_P are nonzero if the respective argument is an upper bound
3616 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3617 must be specified for a comparison. ARG1 will be converted to ARG0's
3618 type if both are specified. */
3619
3620 static tree
3621 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3622 tree arg1, int upper1_p)
3623 {
3624 tree tem;
3625 int result;
3626 int sgn0, sgn1;
3627
3628 /* If neither arg represents infinity, do the normal operation.
3629 Else, if not a comparison, return infinity. Else handle the special
3630 comparison rules. Note that most of the cases below won't occur, but
3631 are handled for consistency. */
3632
3633 if (arg0 != 0 && arg1 != 0)
3634 {
3635 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3636 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3637 STRIP_NOPS (tem);
3638 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3639 }
3640
3641 if (TREE_CODE_CLASS (code) != tcc_comparison)
3642 return 0;
3643
3644 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3645 for neither. In real maths, we cannot assume open ended ranges are
3646 the same. But, this is computer arithmetic, where numbers are finite.
3647      We can therefore treat an unbounded upper bound as a value Z greater
3648      than any representable number, and an unbounded lower bound as -Z.
3649      This permits us to treat unbounded ranges as equal.  */
3650 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3651 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3652 switch (code)
3653 {
3654 case EQ_EXPR:
3655 result = sgn0 == sgn1;
3656 break;
3657 case NE_EXPR:
3658 result = sgn0 != sgn1;
3659 break;
3660 case LT_EXPR:
3661 result = sgn0 < sgn1;
3662 break;
3663 case LE_EXPR:
3664 result = sgn0 <= sgn1;
3665 break;
3666 case GT_EXPR:
3667 result = sgn0 > sgn1;
3668 break;
3669 case GE_EXPR:
3670 result = sgn0 >= sgn1;
3671 break;
3672 default:
3673 gcc_unreachable ();
3674 }
3675
3676 return constant_boolean_node (result, type);
3677 }
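
/* For example, with both arguments omitted and both marked as upper
   bounds, SGN0 == SGN1 == 1, so EQ_EXPR yields true: two unbounded
   upper ends compare equal.  Likewise an omitted lower bound (SGN -1)
   compares less than any finite argument (SGN 0), which is what lets
   "+ [-, -]" behave as always true with no special casing.  */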
3678 \f
3679 /* Given EXP, a logical expression, set the range it is testing into
3680 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3681 actually being tested. *PLOW and *PHIGH will be made of the same type
3682 as the returned expression. If EXP is not a comparison, we will most
3683 likely not be returning a useful value and range. */
3684
3685 static tree
3686 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3687 {
3688 enum tree_code code;
3689 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
3690 tree exp_type = NULL_TREE, arg0_type = NULL_TREE;
3691 int in_p, n_in_p;
3692 tree low, high, n_low, n_high;
3693
3694 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3695 and see if we can refine the range. Some of the cases below may not
3696 happen, but it doesn't seem worth worrying about this. We "continue"
3697 the outer loop when we've changed something; otherwise we "break"
3698 the switch, which will "break" the while. */
3699
3700 in_p = 0;
3701 low = high = fold_convert (TREE_TYPE (exp), integer_zero_node);
3702
3703 while (1)
3704 {
3705 code = TREE_CODE (exp);
3706 exp_type = TREE_TYPE (exp);
3707
3708 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3709 {
3710 if (TREE_CODE_LENGTH (code) > 0)
3711 arg0 = TREE_OPERAND (exp, 0);
3712 if (TREE_CODE_CLASS (code) == tcc_comparison
3713 || TREE_CODE_CLASS (code) == tcc_unary
3714 || TREE_CODE_CLASS (code) == tcc_binary)
3715 arg0_type = TREE_TYPE (arg0);
3716 if (TREE_CODE_CLASS (code) == tcc_binary
3717 || TREE_CODE_CLASS (code) == tcc_comparison
3718 || (TREE_CODE_CLASS (code) == tcc_expression
3719 && TREE_CODE_LENGTH (code) > 1))
3720 arg1 = TREE_OPERAND (exp, 1);
3721 }
3722
3723 switch (code)
3724 {
3725 case TRUTH_NOT_EXPR:
3726 in_p = ! in_p, exp = arg0;
3727 continue;
3728
3729 case EQ_EXPR: case NE_EXPR:
3730 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3731 /* We can only do something if the range is testing for zero
3732 and if the second operand is an integer constant. Note that
3733          saying something is "in" the range we make is done by
3734          complementing IN_P, since the initial state of being not
3735          equal to zero is an "out" test; "out" leaves IN_P alone.  */
3736 if (low == 0 || high == 0
3737 || ! integer_zerop (low) || ! integer_zerop (high)
3738 || TREE_CODE (arg1) != INTEGER_CST)
3739 break;
3740
3741 switch (code)
3742 {
3743 case NE_EXPR: /* - [c, c] */
3744 low = high = arg1;
3745 break;
3746 case EQ_EXPR: /* + [c, c] */
3747 in_p = ! in_p, low = high = arg1;
3748 break;
3749 case GT_EXPR: /* - [-, c] */
3750 low = 0, high = arg1;
3751 break;
3752 case GE_EXPR: /* + [c, -] */
3753 in_p = ! in_p, low = arg1, high = 0;
3754 break;
3755 case LT_EXPR: /* - [c, -] */
3756 low = arg1, high = 0;
3757 break;
3758 case LE_EXPR: /* + [-, c] */
3759 in_p = ! in_p, low = 0, high = arg1;
3760 break;
3761 default:
3762 gcc_unreachable ();
3763 }
3764
3765 /* If this is an unsigned comparison, we also know that EXP is
3766 greater than or equal to zero. We base the range tests we make
3767 on that fact, so we record it here so we can parse existing
3768 range tests. We test arg0_type since often the return type
3769 of, e.g. EQ_EXPR, is boolean. */
3770 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3771 {
3772 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3773 in_p, low, high, 1,
3774 fold_convert (arg0_type, integer_zero_node),
3775 NULL_TREE))
3776 break;
3777
3778 in_p = n_in_p, low = n_low, high = n_high;
3779
3780 /* If the high bound is missing, but we have a nonzero low
3781 bound, reverse the range so it goes from zero to the low bound
3782 minus 1. */
3783 if (high == 0 && low && ! integer_zerop (low))
3784 {
3785 in_p = ! in_p;
3786 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3787 integer_one_node, 0);
3788 low = fold_convert (arg0_type, integer_zero_node);
3789 }
3790 }
3791
3792 exp = arg0;
3793 continue;
3794
3795 case NEGATE_EXPR:
3796 /* (-x) IN [a,b] -> x in [-b, -a] */
3797 n_low = range_binop (MINUS_EXPR, exp_type,
3798 fold_convert (exp_type, integer_zero_node),
3799 0, high, 1);
3800 n_high = range_binop (MINUS_EXPR, exp_type,
3801 fold_convert (exp_type, integer_zero_node),
3802 0, low, 0);
3803 low = n_low, high = n_high;
3804 exp = arg0;
3805 continue;
3806
3807 case BIT_NOT_EXPR:
3808 /* ~ X -> -X - 1 */
3809 exp = build2 (MINUS_EXPR, exp_type, negate_expr (arg0),
3810 fold_convert (exp_type, integer_one_node));
3811 continue;
3812
3813 case PLUS_EXPR: case MINUS_EXPR:
3814 if (TREE_CODE (arg1) != INTEGER_CST)
3815 break;
3816
3817 /* If EXP is signed, any overflow in the computation is undefined,
3818 so we don't worry about it so long as our computations on
3819 the bounds don't overflow. For unsigned, overflow is defined
3820 and this is exactly the right thing. */
3821 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3822 arg0_type, low, 0, arg1, 0);
3823 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3824 arg0_type, high, 1, arg1, 0);
3825 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3826 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3827 break;
3828
3829 /* Check for an unsigned range which has wrapped around the maximum
3830 value thus making n_high < n_low, and normalize it. */
3831 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3832 {
3833 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
3834 integer_one_node, 0);
3835 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
3836 integer_one_node, 0);
3837
3838 /* If the range is of the form +/- [ x+1, x ], we won't
3839 be able to normalize it. But then, it represents the
3840 whole range or the empty set, so make it
3841 +/- [ -, - ]. */
3842 if (tree_int_cst_equal (n_low, low)
3843 && tree_int_cst_equal (n_high, high))
3844 low = high = 0;
3845 else
3846 in_p = ! in_p;
3847 }
3848 else
3849 low = n_low, high = n_high;
3850
3851 exp = arg0;
3852 continue;
3853
3854 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3855 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
3856 break;
3857
3858 if (! INTEGRAL_TYPE_P (arg0_type)
3859 || (low != 0 && ! int_fits_type_p (low, arg0_type))
3860 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
3861 break;
3862
3863 n_low = low, n_high = high;
3864
3865 if (n_low != 0)
3866 n_low = fold_convert (arg0_type, n_low);
3867
3868 if (n_high != 0)
3869 n_high = fold_convert (arg0_type, n_high);
3870
3871
3872          /* If we're converting arg0 from an unsigned type to exp's
3873             signed type, we will be doing the comparison as unsigned.
3874 The tests above have already verified that LOW and HIGH
3875 are both positive.
3876
3877 So we have to ensure that we will handle large unsigned
3878 values the same way that the current signed bounds treat
3879 negative values. */
3880
3881 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
3882 {
3883 tree high_positive;
3884 tree equiv_type = lang_hooks.types.type_for_mode
3885 (TYPE_MODE (arg0_type), 1);
3886
3887 /* A range without an upper bound is, naturally, unbounded.
3888 Since convert would have cropped a very large value, use
3889 the max value for the destination type. */
3890 high_positive
3891 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3892 : TYPE_MAX_VALUE (arg0_type);
3893
3894 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
3895 high_positive = fold_build2 (RSHIFT_EXPR, arg0_type,
3896 fold_convert (arg0_type,
3897 high_positive),
3898 fold_convert (arg0_type,
3899 integer_one_node));
3900
3901 /* If the low bound is specified, "and" the range with the
3902 range for which the original unsigned value will be
3903 positive. */
3904 if (low != 0)
3905 {
3906 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3907 1, n_low, n_high, 1,
3908 fold_convert (arg0_type,
3909 integer_zero_node),
3910 high_positive))
3911 break;
3912
3913 in_p = (n_in_p == in_p);
3914 }
3915 else
3916 {
3917 /* Otherwise, "or" the range with the range of the input
3918 that will be interpreted as negative. */
3919 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3920 0, n_low, n_high, 1,
3921 fold_convert (arg0_type,
3922 integer_zero_node),
3923 high_positive))
3924 break;
3925
3926 in_p = (in_p != n_in_p);
3927 }
3928 }
3929
3930 exp = arg0;
3931 low = n_low, high = n_high;
3932 continue;
3933
3934 default:
3935 break;
3936 }
3937
3938 break;
3939 }
3940
3941 /* If EXP is a constant, we can evaluate whether this is true or false. */
3942 if (TREE_CODE (exp) == INTEGER_CST)
3943 {
3944 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3945 exp, 0, low, 0))
3946 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3947 exp, 1, high, 1)));
3948 low = high = 0;
3949 exp = 0;
3950 }
3951
3952 *pin_p = in_p, *plow = low, *phigh = high;
3953 return exp;
3954 }
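
/* As a short trace of the loop above: for a signed X, make_range
   applied to X + 2 > 6 first hits the GT_EXPR case, producing
   "- [-, 6]", then the PLUS_EXPR case subtracts 2 from the bounds,
   producing "- [-, 4]".  The function returns X with *PIN_P == 0,
   *PLOW == 0 (missing) and *PHIGH == 4, i.e. the test X > 4.  */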
3955 \f
3956 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3957 type, TYPE, return an expression to test if EXP is in (or out of, depending
3958 on IN_P) the range. Return 0 if the test couldn't be created. */
3959
3960 static tree
3961 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3962 {
3963 tree etype = TREE_TYPE (exp);
3964 tree value;
3965
3966 #ifdef HAVE_canonicalize_funcptr_for_compare
3967 /* Disable this optimization for function pointer expressions
3968 on targets that require function pointer canonicalization. */
3969 if (HAVE_canonicalize_funcptr_for_compare
3970 && TREE_CODE (etype) == POINTER_TYPE
3971 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
3972 return NULL_TREE;
3973 #endif
3974
3975 if (! in_p)
3976 {
3977 value = build_range_check (type, exp, 1, low, high);
3978 if (value != 0)
3979 return invert_truthvalue (value);
3980
3981 return 0;
3982 }
3983
3984 if (low == 0 && high == 0)
3985 return fold_convert (type, integer_one_node);
3986
3987 if (low == 0)
3988 return fold_build2 (LE_EXPR, type, exp,
3989 fold_convert (etype, high));
3990
3991 if (high == 0)
3992 return fold_build2 (GE_EXPR, type, exp,
3993 fold_convert (etype, low));
3994
3995 if (operand_equal_p (low, high, 0))
3996 return fold_build2 (EQ_EXPR, type, exp,
3997 fold_convert (etype, low));
3998
3999 if (integer_zerop (low))
4000 {
4001 if (! TYPE_UNSIGNED (etype))
4002 {
4003 etype = lang_hooks.types.unsigned_type (etype);
4004 high = fold_convert (etype, high);
4005 exp = fold_convert (etype, exp);
4006 }
4007 return build_range_check (type, exp, 1, 0, high);
4008 }
4009
4010 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4011 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4012 {
4013 unsigned HOST_WIDE_INT lo;
4014 HOST_WIDE_INT hi;
4015 int prec;
4016
4017 prec = TYPE_PRECISION (etype);
4018 if (prec <= HOST_BITS_PER_WIDE_INT)
4019 {
4020 hi = 0;
4021 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4022 }
4023 else
4024 {
4025 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4026 lo = (unsigned HOST_WIDE_INT) -1;
4027 }
4028
4029 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4030 {
4031 if (TYPE_UNSIGNED (etype))
4032 {
4033 etype = lang_hooks.types.signed_type (etype);
4034 exp = fold_convert (etype, exp);
4035 }
4036 return fold_build2 (GT_EXPR, type, exp,
4037 fold_convert (etype, integer_zero_node));
4038 }
4039 }
4040
4041 value = const_binop (MINUS_EXPR, high, low, 0);
4042 if (value != 0 && (!flag_wrapv || TREE_OVERFLOW (value))
4043 && ! TYPE_UNSIGNED (etype))
4044 {
4045 tree utype, minv, maxv;
4046
4047 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4048 for the type in question, as we rely on this here. */
4049 switch (TREE_CODE (etype))
4050 {
4051 case INTEGER_TYPE:
4052 case ENUMERAL_TYPE:
4053 case CHAR_TYPE:
4054 /* There is no requirement that LOW be within the range of ETYPE
4055 if the latter is a subtype. It must, however, be within the base
4056 type of ETYPE. So be sure we do the subtraction in that type. */
4057 if (TREE_TYPE (etype))
4058 etype = TREE_TYPE (etype);
4059 utype = lang_hooks.types.unsigned_type (etype);
4060 maxv = fold_convert (utype, TYPE_MAX_VALUE (etype));
4061 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4062 integer_one_node, 1);
4063 minv = fold_convert (utype, TYPE_MIN_VALUE (etype));
4064 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4065 minv, 1, maxv, 1)))
4066 {
4067 etype = utype;
4068 high = fold_convert (etype, high);
4069 low = fold_convert (etype, low);
4070 exp = fold_convert (etype, exp);
4071 value = const_binop (MINUS_EXPR, high, low, 0);
4072 }
4073 break;
4074 default:
4075 break;
4076 }
4077 }
4078
4079 if (value != 0 && ! TREE_OVERFLOW (value))
4080 {
4081 /* There is no requirement that LOW be within the range of ETYPE
4082 if the latter is a subtype. It must, however, be within the base
4083 type of ETYPE. So be sure we do the subtraction in that type. */
4084 if (INTEGRAL_TYPE_P (etype) && TREE_TYPE (etype))
4085 {
4086 etype = TREE_TYPE (etype);
4087 exp = fold_convert (etype, exp);
4088 low = fold_convert (etype, low);
4089 value = fold_convert (etype, value);
4090 }
4091
4092 return build_range_check (type,
4093 fold_build2 (MINUS_EXPR, etype, exp, low),
4094 1, build_int_cst (etype, 0), value);
4095 }
4096
4097 return 0;
4098 }
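
/* For instance, a request for "+ [2, 5]" on an int X reaches the
   final case above with VALUE = 5 - 2 = 3 and recurses on X - 2
   against "+ [0, 3]"; the integer_zerop (low) case then switches to
   the unsigned type, yielding (unsigned) (X - 2) <= 3, the canonical
   form quoted at the head of this group of subroutines.  */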
4099 \f
4100 /* Given two ranges, see if we can merge them into one. Return 1 if we
4101 can, 0 if we can't. Set the output range into the specified parameters. */
4102
4103 static int
4104 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4105 tree high0, int in1_p, tree low1, tree high1)
4106 {
4107 int no_overlap;
4108 int subset;
4109 int temp;
4110 tree tem;
4111 int in_p;
4112 tree low, high;
4113 int lowequal = ((low0 == 0 && low1 == 0)
4114 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4115 low0, 0, low1, 0)));
4116 int highequal = ((high0 == 0 && high1 == 0)
4117 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4118 high0, 1, high1, 1)));
4119
4120 /* Make range 0 be the range that starts first, or ends last if they
4121 start at the same value. Swap them if it isn't. */
4122 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4123 low0, 0, low1, 0))
4124 || (lowequal
4125 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4126 high1, 1, high0, 1))))
4127 {
4128 temp = in0_p, in0_p = in1_p, in1_p = temp;
4129 tem = low0, low0 = low1, low1 = tem;
4130 tem = high0, high0 = high1, high1 = tem;
4131 }
4132
4133 /* Now flag two cases, whether the ranges are disjoint or whether the
4134 second range is totally subsumed in the first. Note that the tests
4135 below are simplified by the ones above. */
4136 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4137 high0, 1, low1, 0));
4138 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4139 high1, 1, high0, 1));
4140
4141 /* We now have four cases, depending on whether we are including or
4142 excluding the two ranges. */
4143 if (in0_p && in1_p)
4144 {
4145 /* If they don't overlap, the result is false. If the second range
4146 is a subset it is the result. Otherwise, the range is from the start
4147 of the second to the end of the first. */
4148 if (no_overlap)
4149 in_p = 0, low = high = 0;
4150 else if (subset)
4151 in_p = 1, low = low1, high = high1;
4152 else
4153 in_p = 1, low = low1, high = high0;
4154 }
4155
4156 else if (in0_p && ! in1_p)
4157 {
4158 /* If they don't overlap, the result is the first range. If they are
4159 equal, the result is false. If the second range is a subset of the
4160 first, and the ranges begin at the same place, we go from just after
4161 the end of the first range to the end of the second. If the second
4162 range is not a subset of the first, or if it is a subset and both
4163 ranges end at the same place, the range starts at the start of the
4164 first range and ends just before the second range.
4165 Otherwise, we can't describe this as a single range. */
4166 if (no_overlap)
4167 in_p = 1, low = low0, high = high0;
4168 else if (lowequal && highequal)
4169 in_p = 0, low = high = 0;
4170 else if (subset && lowequal)
4171 {
4172 in_p = 1, high = high0;
4173 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
4174 integer_one_node, 0);
4175 }
4176 else if (! subset || highequal)
4177 {
4178 in_p = 1, low = low0;
4179 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4180 integer_one_node, 0);
4181 }
4182 else
4183 return 0;
4184 }
4185
4186 else if (! in0_p && in1_p)
4187 {
4188 /* If they don't overlap, the result is the second range. If the second
4189 is a subset of the first, the result is false. Otherwise,
4190 the range starts just after the first range and ends at the
4191 end of the second. */
4192 if (no_overlap)
4193 in_p = 1, low = low1, high = high1;
4194 else if (subset || highequal)
4195 in_p = 0, low = high = 0;
4196 else
4197 {
4198 in_p = 1, high = high1;
4199 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4200 integer_one_node, 0);
4201 }
4202 }
4203
4204 else
4205 {
4206 /* The case where we are excluding both ranges. Here the complex case
4207 is if they don't overlap. In that case, the only time we have a
4208 range is if they are adjacent. If the second is a subset of the
4209 first, the result is the first. Otherwise, the range to exclude
4210 starts at the beginning of the first range and ends at the end of the
4211 second. */
4212 if (no_overlap)
4213 {
4214 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4215 range_binop (PLUS_EXPR, NULL_TREE,
4216 high0, 1,
4217 integer_one_node, 1),
4218 1, low1, 0)))
4219 in_p = 0, low = low0, high = high1;
4220 else
4221 {
4222 /* Canonicalize - [min, x] into - [-, x]. */
4223 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4224 switch (TREE_CODE (TREE_TYPE (low0)))
4225 {
4226 case ENUMERAL_TYPE:
4227 if (TYPE_PRECISION (TREE_TYPE (low0))
4228 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4229 break;
4230 /* FALLTHROUGH */
4231 case INTEGER_TYPE:
4232 case CHAR_TYPE:
4233 if (tree_int_cst_equal (low0,
4234 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4235 low0 = 0;
4236 break;
4237 case POINTER_TYPE:
4238 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4239 && integer_zerop (low0))
4240 low0 = 0;
4241 break;
4242 default:
4243 break;
4244 }
4245
4246 /* Canonicalize - [x, max] into - [x, -]. */
4247 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4248 switch (TREE_CODE (TREE_TYPE (high1)))
4249 {
4250 case ENUMERAL_TYPE:
4251 if (TYPE_PRECISION (TREE_TYPE (high1))
4252 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4253 break;
4254 /* FALLTHROUGH */
4255 case INTEGER_TYPE:
4256 case CHAR_TYPE:
4257 if (tree_int_cst_equal (high1,
4258 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4259 high1 = 0;
4260 break;
4261 case POINTER_TYPE:
4262 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4263 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4264 high1, 1,
4265 integer_one_node, 1)))
4266 high1 = 0;
4267 break;
4268 default:
4269 break;
4270 }
4271
4272 /* The ranges might be also adjacent between the maximum and
4273 minimum values of the given type. For
4274 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4275 return + [x + 1, y - 1]. */
4276 if (low0 == 0 && high1 == 0)
4277 {
4278 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
4279 integer_one_node, 1);
4280 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
4281 integer_one_node, 0);
4282 if (low == 0 || high == 0)
4283 return 0;
4284
4285 in_p = 1;
4286 }
4287 else
4288 return 0;
4289 }
4290 }
4291 else if (subset)
4292 in_p = 0, low = low0, high = high0;
4293 else
4294 in_p = 0, low = low0, high = high1;
4295 }
4296
4297 *pin_p = in_p, *plow = low, *phigh = high;
4298 return 1;
4299 }
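
/* Worked instances of the first (in0_p && in1_p) case above: merging
   + [2, 5] with + [4, 9] overlaps without subsumption and gives
   + [4, 5]; merging + [2, 9] with + [4, 5] hits the subset case and
   gives + [4, 5]; merging + [2, 3] with + [6, 9] does not overlap and
   gives the always-false range.  For an "or" of ranges,
   fold_range_test inverts both inputs and the result.  */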
4300 \f
4301
4302 /* Subroutine of fold, looking inside expressions of the form
4303 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4304 of the COND_EXPR. This function is being used also to optimize
4305 A op B ? C : A, by reversing the comparison first.
4306
4307 Return a folded expression whose code is not a COND_EXPR
4308 anymore, or NULL_TREE if no folding opportunity is found. */
4309
4310 static tree
4311 fold_cond_expr_with_comparison (tree type, tree arg0, tree arg1, tree arg2)
4312 {
4313 enum tree_code comp_code = TREE_CODE (arg0);
4314 tree arg00 = TREE_OPERAND (arg0, 0);
4315 tree arg01 = TREE_OPERAND (arg0, 1);
4316 tree arg1_type = TREE_TYPE (arg1);
4317 tree tem;
4318
4319 STRIP_NOPS (arg1);
4320 STRIP_NOPS (arg2);
4321
4322 /* If we have A op 0 ? A : -A, consider applying the following
4323 transformations:
4324
4325 A == 0? A : -A same as -A
4326 A != 0? A : -A same as A
4327 A >= 0? A : -A same as abs (A)
4328 A > 0? A : -A same as abs (A)
4329 A <= 0? A : -A same as -abs (A)
4330 A < 0? A : -A same as -abs (A)
4331
4332 None of these transformations work for modes with signed
4333 zeros. If A is +/-0, the first two transformations will
4334 change the sign of the result (from +0 to -0, or vice
4335 versa). The last four will fix the sign of the result,
4336 even though the original expressions could be positive or
4337 negative, depending on the sign of A.
4338
4339 Note that all these transformations are correct if A is
4340 NaN, since the two alternatives (A and -A) are also NaNs. */
4341 if ((FLOAT_TYPE_P (TREE_TYPE (arg01))
4342 ? real_zerop (arg01)
4343 : integer_zerop (arg01))
4344 && ((TREE_CODE (arg2) == NEGATE_EXPR
4345 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4346 /* In the case that A is of the form X-Y, '-A' (arg2) may
4347 have already been folded to Y-X, check for that. */
4348 || (TREE_CODE (arg1) == MINUS_EXPR
4349 && TREE_CODE (arg2) == MINUS_EXPR
4350 && operand_equal_p (TREE_OPERAND (arg1, 0),
4351 TREE_OPERAND (arg2, 1), 0)
4352 && operand_equal_p (TREE_OPERAND (arg1, 1),
4353 TREE_OPERAND (arg2, 0), 0))))
4354 switch (comp_code)
4355 {
4356 case EQ_EXPR:
4357 case UNEQ_EXPR:
4358 tem = fold_convert (arg1_type, arg1);
4359 return pedantic_non_lvalue (fold_convert (type, negate_expr (tem)));
4360 case NE_EXPR:
4361 case LTGT_EXPR:
4362 return pedantic_non_lvalue (fold_convert (type, arg1));
4363 case UNGE_EXPR:
4364 case UNGT_EXPR:
4365 if (flag_trapping_math)
4366 break;
4367 /* Fall through. */
4368 case GE_EXPR:
4369 case GT_EXPR:
4370 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4371 arg1 = fold_convert (lang_hooks.types.signed_type
4372 (TREE_TYPE (arg1)), arg1);
4373 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4374 return pedantic_non_lvalue (fold_convert (type, tem));
4375 case UNLE_EXPR:
4376 case UNLT_EXPR:
4377 if (flag_trapping_math)
4378 break;
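          /* Fall through.  */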
4379 case LE_EXPR:
4380 case LT_EXPR:
4381 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4382 arg1 = fold_convert (lang_hooks.types.signed_type
4383 (TREE_TYPE (arg1)), arg1);
4384 tem = fold_build1 (ABS_EXPR, TREE_TYPE (arg1), arg1);
4385 return negate_expr (fold_convert (type, tem));
4386 default:
4387 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4388 break;
4389 }
4390
4391 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4392 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4393 both transformations are correct when A is NaN: A != 0
4394 is then true, and A == 0 is false. */
4395
4396 if (integer_zerop (arg01) && integer_zerop (arg2))
4397 {
4398 if (comp_code == NE_EXPR)
4399 return pedantic_non_lvalue (fold_convert (type, arg1));
4400 else if (comp_code == EQ_EXPR)
4401 return fold_convert (type, integer_zero_node);
4402 }
4403
4404 /* Try some transformations of A op B ? A : B.
4405
4406 A == B? A : B same as B
4407 A != B? A : B same as A
4408 A >= B? A : B same as max (A, B)
4409 A > B? A : B same as max (B, A)
4410 A <= B? A : B same as min (A, B)
4411 A < B? A : B same as min (B, A)
4412
4413 As above, these transformations don't work in the presence
4414 of signed zeros. For example, if A and B are zeros of
4415 opposite sign, the first two transformations will change
4416 the sign of the result. In the last four, the original
4417 expressions give different results for (A=+0, B=-0) and
4418 (A=-0, B=+0), but the transformed expressions do not.
4419
4420 The first two transformations are correct if either A or B
4421 is a NaN. In the first transformation, the condition will
4422 be false, and B will indeed be chosen. In the case of the
4423 second transformation, the condition A != B will be true,
4424 and A will be chosen.
4425
4426 The conversions to max() and min() are not correct if B is
4427 a number and A is not. The conditions in the original
4428 expressions will be false, so all four give B. The min()
4429 and max() versions would give a NaN instead. */
4430 if (operand_equal_for_comparison_p (arg01, arg2, arg00)
4431 /* Avoid these transformations if the COND_EXPR may be used
4432 as an lvalue in the C++ front-end. PR c++/19199. */
4433 && (in_gimple_form
4434 || strcmp (lang_hooks.name, "GNU C++") != 0
4435 || ! maybe_lvalue_p (arg1)
4436 || ! maybe_lvalue_p (arg2)))
4437 {
4438 tree comp_op0 = arg00;
4439 tree comp_op1 = arg01;
4440 tree comp_type = TREE_TYPE (comp_op0);
4441
4442 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4443 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4444 {
4445 comp_type = type;
4446 comp_op0 = arg1;
4447 comp_op1 = arg2;
4448 }
4449
4450 switch (comp_code)
4451 {
4452 case EQ_EXPR:
4453 return pedantic_non_lvalue (fold_convert (type, arg2));
4454 case NE_EXPR:
4455 return pedantic_non_lvalue (fold_convert (type, arg1));
4456 case LE_EXPR:
4457 case LT_EXPR:
4458 case UNLE_EXPR:
4459 case UNLT_EXPR:
4460 /* In C++ a ?: expression can be an lvalue, so put the
4461 operand which will be used if they are equal first
4462 so that we can convert this back to the
4463 corresponding COND_EXPR. */
4464 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4465 {
4466 comp_op0 = fold_convert (comp_type, comp_op0);
4467 comp_op1 = fold_convert (comp_type, comp_op1);
4468 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4469 ? fold_build2 (MIN_EXPR, comp_type, comp_op0, comp_op1)
4470 : fold_build2 (MIN_EXPR, comp_type, comp_op1, comp_op0);
4471 return pedantic_non_lvalue (fold_convert (type, tem));
4472 }
4473 break;
4474 case GE_EXPR:
4475 case GT_EXPR:
4476 case UNGE_EXPR:
4477 case UNGT_EXPR:
4478 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4479 {
4480 comp_op0 = fold_convert (comp_type, comp_op0);
4481 comp_op1 = fold_convert (comp_type, comp_op1);
4482 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4483 ? fold_build2 (MAX_EXPR, comp_type, comp_op0, comp_op1)
4484 : fold_build2 (MAX_EXPR, comp_type, comp_op1, comp_op0);
4485 return pedantic_non_lvalue (fold_convert (type, tem));
4486 }
4487 break;
4488 case UNEQ_EXPR:
4489 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4490 return pedantic_non_lvalue (fold_convert (type, arg2));
4491 break;
4492 case LTGT_EXPR:
4493 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4494 return pedantic_non_lvalue (fold_convert (type, arg1));
4495 break;
4496 default:
4497 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4498 break;
4499 }
4500 }
4501
4502 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4503 we might still be able to simplify this. For example,
4504 if C1 is one less or one more than C2, this might have started
4505 out as a MIN or MAX and been transformed by this function.
4506 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4507
4508 if (INTEGRAL_TYPE_P (type)
4509 && TREE_CODE (arg01) == INTEGER_CST
4510 && TREE_CODE (arg2) == INTEGER_CST)
4511 switch (comp_code)
4512 {
4513 case EQ_EXPR:
4514 /* We can replace A with C1 in this case. */
4515 arg1 = fold_convert (type, arg01);
4516 return fold_build3 (COND_EXPR, type, arg0, arg1, arg2);
4517
4518 case LT_EXPR:
4519 /* If C1 is C2 + 1, this is min(A, C2). */
4520 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4521 OEP_ONLY_CONST)
4522 && operand_equal_p (arg01,
4523 const_binop (PLUS_EXPR, arg2,
4524 integer_one_node, 0),
4525 OEP_ONLY_CONST))
4526 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4527 type, arg1, arg2));
4528 break;
4529
4530 case LE_EXPR:
4531 /* If C1 is C2 - 1, this is min(A, C2). */
4532 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4533 OEP_ONLY_CONST)
4534 && operand_equal_p (arg01,
4535 const_binop (MINUS_EXPR, arg2,
4536 integer_one_node, 0),
4537 OEP_ONLY_CONST))
4538 return pedantic_non_lvalue (fold_build2 (MIN_EXPR,
4539 type, arg1, arg2));
4540 break;
4541
4542 case GT_EXPR:
4543 /* If C1 is C2 - 1, this is max(A, C2). */
4544 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4545 OEP_ONLY_CONST)
4546 && operand_equal_p (arg01,
4547 const_binop (MINUS_EXPR, arg2,
4548 integer_one_node, 0),
4549 OEP_ONLY_CONST))
4550 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4551 type, arg1, arg2));
4552 break;
4553
4554 case GE_EXPR:
4555 /* If C1 is C2 + 1, this is max(A, C2). */
4556 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4557 OEP_ONLY_CONST)
4558 && operand_equal_p (arg01,
4559 const_binop (PLUS_EXPR, arg2,
4560 integer_one_node, 0),
4561 OEP_ONLY_CONST))
4562 return pedantic_non_lvalue (fold_build2 (MAX_EXPR,
4563 type, arg1, arg2));
4564 break;
4565 case NE_EXPR:
4566 break;
4567 default:
4568 gcc_unreachable ();
4569 }
4570
4571 return NULL_TREE;
4572 }
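
/* Taken together, the transformations above turn, e.g., x > 0 ? x : -x
   into ABS_EXPR <x>, x < y ? x : y into a MIN_EXPR when NaNs need not
   be honored, and x < 10 ? x : 9 (C1 == C2 + 1) into MIN_EXPR <x, 9>
   via the final switch.  */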
4573
4574
4575 \f
4576 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4577 #define LOGICAL_OP_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
4578 #endif
4579
4580 /* EXP is some logical combination of boolean tests. See if we can
4581 merge it into some range test. Return the new tree if so. */
4582
4583 static tree
4584 fold_range_test (enum tree_code code, tree type, tree op0, tree op1)
4585 {
4586 int or_op = (code == TRUTH_ORIF_EXPR
4587 || code == TRUTH_OR_EXPR);
4588 int in0_p, in1_p, in_p;
4589 tree low0, low1, low, high0, high1, high;
4590 tree lhs = make_range (op0, &in0_p, &low0, &high0);
4591 tree rhs = make_range (op1, &in1_p, &low1, &high1);
4592 tree tem;
4593
4594 /* If this is an OR operation, invert both sides; we will invert
4595 again at the end. */
4596 if (or_op)
4597 in0_p = ! in0_p, in1_p = ! in1_p;
4598
4599 /* If both expressions are the same, if we can merge the ranges, and we
4600 can build the range test, return it or it inverted. If one of the
4601 ranges is always true or always false, consider it to be the same
4602 expression as the other. */
4603 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4604 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4605 in1_p, low1, high1)
4606 && 0 != (tem = (build_range_check (type,
4607 lhs != 0 ? lhs
4608 : rhs != 0 ? rhs : integer_zero_node,
4609 in_p, low, high))))
4610 return or_op ? invert_truthvalue (tem) : tem;
4611
4612 /* On machines where the branch cost is expensive, if this is a
4613 short-circuited branch and the underlying object on both sides
4614 is the same, make a non-short-circuit operation. */
4615 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4616 && lhs != 0 && rhs != 0
4617 && (code == TRUTH_ANDIF_EXPR
4618 || code == TRUTH_ORIF_EXPR)
4619 && operand_equal_p (lhs, rhs, 0))
4620 {
4621 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4622 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4623 which cases we can't do this. */
4624 if (simple_operand_p (lhs))
4625 return build2 (code == TRUTH_ANDIF_EXPR
4626 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4627 type, op0, op1);
4628
4629 else if (lang_hooks.decls.global_bindings_p () == 0
4630 && ! CONTAINS_PLACEHOLDER_P (lhs))
4631 {
4632 tree common = save_expr (lhs);
4633
4634 if (0 != (lhs = build_range_check (type, common,
4635 or_op ? ! in0_p : in0_p,
4636 low0, high0))
4637 && (0 != (rhs = build_range_check (type, common,
4638 or_op ? ! in1_p : in1_p,
4639 low1, high1))))
4640 return build2 (code == TRUTH_ANDIF_EXPR
4641 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4642 type, lhs, rhs);
4643 }
4644 }
4645
4646 return 0;
4647 }
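
/* For example, i >= 2 && i <= 5 merges through the first branch into
   the single test (unsigned) (i - 2) <= 3.  By contrast,
   i != 2 && i != 5 cannot be described by one range; if I is a simple
   operand and LOGICAL_OP_NON_SHORT_CIRCUIT holds, the second branch
   instead rewrites the TRUTH_ANDIF_EXPR into a TRUTH_AND_EXPR so both
   comparisons are evaluated unconditionally.  */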
4648 \f
4649 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
4650 bit value. Arrange things so the extra bits will be set to zero if and
4651    only if C is sign-extended to its full width.  If MASK is nonzero,
4652 it is an INTEGER_CST that should be AND'ed with the extra bits. */
4653
4654 static tree
4655 unextend (tree c, int p, int unsignedp, tree mask)
4656 {
4657 tree type = TREE_TYPE (c);
4658 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
4659 tree temp;
4660
4661 if (p == modesize || unsignedp)
4662 return c;
4663
4664 /* We work by getting just the sign bit into the low-order bit, then
4665 into the high-order bit, then sign-extend. We then XOR that value
4666 with C. */
4667 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
4668 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
4669
4670 /* We must use a signed type in order to get an arithmetic right shift.
4671 However, we must also avoid introducing accidental overflows, so that
4672 a subsequent call to integer_zerop will work. Hence we must
4673 do the type conversion here. At this point, the constant is either
4674 zero or one, and the conversion to a signed type can never overflow.
4675 We could get an overflow if this conversion is done anywhere else. */
4676 if (TYPE_UNSIGNED (type))
4677 temp = fold_convert (lang_hooks.types.signed_type (type), temp);
4678
4679 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
4680 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
4681 if (mask != 0)
4682 temp = const_binop (BIT_AND_EXPR, temp,
4683 fold_convert (TREE_TYPE (c), mask), 0);
4684 /* If necessary, convert the type back to match the type of C. */
4685 if (TYPE_UNSIGNED (type))
4686 temp = fold_convert (type, temp);
4687
4688 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
4689 }
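
/* A worked example with P == 8 in a 32-bit mode: for the sign-extended
   constant C == 0xffffffff (an 8-bit -1), TEMP becomes 1, then
   0x80000000 after the left shift, then 0xffffff00 after the arithmetic
   right shift, and the final XOR returns 0x000000ff -- the extra bits
   end up zero exactly because C was sign-extended.  A zero-extended
   C == 0x000000ff instead comes back as 0xffffffff.  */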
4690 \f
4691 /* Find ways of folding logical expressions of LHS and RHS:
4692 Try to merge two comparisons to the same innermost item.
4693 Look for range tests like "ch >= '0' && ch <= '9'".
4694 Look for combinations of simple terms on machines with expensive branches
4695 and evaluate the RHS unconditionally.
4696
4697 For example, if we have p->a == 2 && p->b == 4 and we can make an
4698 object large enough to span both A and B, we can do this with a comparison
4699    against the object ANDed with a mask.
4700
4701 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
4702 operations to do this with one comparison.
4703
4704    We check for both normal comparisons and the BIT_AND_EXPRs made by
4705    this function and the one above.
4706
4707 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
4708 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
4709
4710 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
4711 two operands.
4712
4713 We return the simplified tree or 0 if no optimization is possible. */
4714
4715 static tree
4716 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
4717 {
4718 /* If this is the "or" of two comparisons, we can do something if
4719 the comparisons are NE_EXPR. If this is the "and", we can do something
4720 if the comparisons are EQ_EXPR. I.e.,
4721 (a->b == 2 && a->c == 4) can become (a->new == NEW).
4722
4723 WANTED_CODE is this operation code. For single bit fields, we can
4724 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
4725 comparison for one-bit fields. */
4726
4727 enum tree_code wanted_code;
4728 enum tree_code lcode, rcode;
4729 tree ll_arg, lr_arg, rl_arg, rr_arg;
4730 tree ll_inner, lr_inner, rl_inner, rr_inner;
4731 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
4732 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
4733 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
4734 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
4735 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
4736 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
4737 enum machine_mode lnmode, rnmode;
4738 tree ll_mask, lr_mask, rl_mask, rr_mask;
4739 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
4740 tree l_const, r_const;
4741 tree lntype, rntype, result;
4742 int first_bit, end_bit;
4743 int volatilep;
4744
4745 /* Start by getting the comparison codes. Fail if anything is volatile.
4746 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
4747 it were surrounded with a NE_EXPR. */
4748
4749 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
4750 return 0;
4751
4752 lcode = TREE_CODE (lhs);
4753 rcode = TREE_CODE (rhs);
4754
4755 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
4756 {
4757 lhs = build2 (NE_EXPR, truth_type, lhs,
4758 fold_convert (TREE_TYPE (lhs), integer_zero_node));
4759 lcode = NE_EXPR;
4760 }
4761
4762 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
4763 {
4764 rhs = build2 (NE_EXPR, truth_type, rhs,
4765 fold_convert (TREE_TYPE (rhs), integer_zero_node));
4766 rcode = NE_EXPR;
4767 }
4768
4769 if (TREE_CODE_CLASS (lcode) != tcc_comparison
4770 || TREE_CODE_CLASS (rcode) != tcc_comparison)
4771 return 0;
4772
4773 ll_arg = TREE_OPERAND (lhs, 0);
4774 lr_arg = TREE_OPERAND (lhs, 1);
4775 rl_arg = TREE_OPERAND (rhs, 0);
4776 rr_arg = TREE_OPERAND (rhs, 1);
4777
4778 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
4779 if (simple_operand_p (ll_arg)
4780 && simple_operand_p (lr_arg))
4781 {
4782 tree result;
4783 if (operand_equal_p (ll_arg, rl_arg, 0)
4784 && operand_equal_p (lr_arg, rr_arg, 0))
4785 {
4786 result = combine_comparisons (code, lcode, rcode,
4787 truth_type, ll_arg, lr_arg);
4788 if (result)
4789 return result;
4790 }
4791 else if (operand_equal_p (ll_arg, rr_arg, 0)
4792 && operand_equal_p (lr_arg, rl_arg, 0))
4793 {
4794 result = combine_comparisons (code, lcode,
4795 swap_tree_comparison (rcode),
4796 truth_type, ll_arg, lr_arg);
4797 if (result)
4798 return result;
4799 }
4800 }
4801
4802 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
4803 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
4804
4805 /* If the RHS can be evaluated unconditionally and its operands are
4806 simple, it wins to evaluate the RHS unconditionally on machines
4807 with expensive branches. In this case, this isn't a comparison
4808 that can be merged. Avoid doing this if the RHS is a floating-point
4809 comparison since those can trap. */
4810
4811 if (BRANCH_COST >= 2
4812 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
4813 && simple_operand_p (rl_arg)
4814 && simple_operand_p (rr_arg))
4815 {
4816 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
4817 if (code == TRUTH_OR_EXPR
4818 && lcode == NE_EXPR && integer_zerop (lr_arg)
4819 && rcode == NE_EXPR && integer_zerop (rr_arg)
4820 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4821 return build2 (NE_EXPR, truth_type,
4822 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4823 ll_arg, rl_arg),
4824 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4825
4826 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
4827 if (code == TRUTH_AND_EXPR
4828 && lcode == EQ_EXPR && integer_zerop (lr_arg)
4829 && rcode == EQ_EXPR && integer_zerop (rr_arg)
4830 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
4831 return build2 (EQ_EXPR, truth_type,
4832 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
4833 ll_arg, rl_arg),
4834 fold_convert (TREE_TYPE (ll_arg), integer_zero_node));
4835
4836 if (LOGICAL_OP_NON_SHORT_CIRCUIT)
4837 return build2 (code, truth_type, lhs, rhs);
4838 }
4839
4840 /* See if the comparisons can be merged. Then get all the parameters for
4841 each side. */
4842
4843 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
4844 || (rcode != EQ_EXPR && rcode != NE_EXPR))
4845 return 0;
4846
4847 volatilep = 0;
4848 ll_inner = decode_field_reference (ll_arg,
4849 &ll_bitsize, &ll_bitpos, &ll_mode,
4850 &ll_unsignedp, &volatilep, &ll_mask,
4851 &ll_and_mask);
4852 lr_inner = decode_field_reference (lr_arg,
4853 &lr_bitsize, &lr_bitpos, &lr_mode,
4854 &lr_unsignedp, &volatilep, &lr_mask,
4855 &lr_and_mask);
4856 rl_inner = decode_field_reference (rl_arg,
4857 &rl_bitsize, &rl_bitpos, &rl_mode,
4858 &rl_unsignedp, &volatilep, &rl_mask,
4859 &rl_and_mask);
4860 rr_inner = decode_field_reference (rr_arg,
4861 &rr_bitsize, &rr_bitpos, &rr_mode,
4862 &rr_unsignedp, &volatilep, &rr_mask,
4863 &rr_and_mask);
4864
4865   /* The inner operation on the lhs of each comparison must be the
4866      same if we are to be able to do anything.
4867 Then see if we have constants. If not, the same must be true for
4868 the rhs's. */
4869 if (volatilep || ll_inner == 0 || rl_inner == 0
4870 || ! operand_equal_p (ll_inner, rl_inner, 0))
4871 return 0;
4872
4873 if (TREE_CODE (lr_arg) == INTEGER_CST
4874 && TREE_CODE (rr_arg) == INTEGER_CST)
4875 l_const = lr_arg, r_const = rr_arg;
4876 else if (lr_inner == 0 || rr_inner == 0
4877 || ! operand_equal_p (lr_inner, rr_inner, 0))
4878 return 0;
4879 else
4880 l_const = r_const = 0;
4881
4882 /* If either comparison code is not correct for our logical operation,
4883 fail. However, we can convert a one-bit comparison against zero into
4884 the opposite comparison against that bit being set in the field. */
4885
4886 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
4887 if (lcode != wanted_code)
4888 {
4889 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
4890 {
4891 /* Make the left operand unsigned, since we are only interested
4892 in the value of one bit. Otherwise we are doing the wrong
4893 thing below. */
4894 ll_unsignedp = 1;
4895 l_const = ll_mask;
4896 }
4897 else
4898 return 0;
4899 }
4900
4901 /* This is analogous to the code for l_const above. */
4902 if (rcode != wanted_code)
4903 {
4904 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
4905 {
4906 rl_unsignedp = 1;
4907 r_const = rl_mask;
4908 }
4909 else
4910 return 0;
4911 }
4912
4913 /* After this point all optimizations will generate bit-field
4914 references, which we might not want. */
4915 if (! lang_hooks.can_use_bit_fields_p ())
4916 return 0;
4917
4918 /* See if we can find a mode that contains both fields being compared on
4919 the left. If we can't, fail. Otherwise, update all constants and masks
4920 to be relative to a field of that size. */
4921 first_bit = MIN (ll_bitpos, rl_bitpos);
4922 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
4923 lnmode = get_best_mode (end_bit - first_bit, first_bit,
4924 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
4925 volatilep);
4926 if (lnmode == VOIDmode)
4927 return 0;
4928
4929 lnbitsize = GET_MODE_BITSIZE (lnmode);
4930 lnbitpos = first_bit & ~ (lnbitsize - 1);
4931 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
4932 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
4933
4934 if (BYTES_BIG_ENDIAN)
4935 {
4936 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
4937 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
4938 }
4939
4940 ll_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, ll_mask),
4941 size_int (xll_bitpos), 0);
4942 rl_mask = const_binop (LSHIFT_EXPR, fold_convert (lntype, rl_mask),
4943 size_int (xrl_bitpos), 0);
4944
4945 if (l_const)
4946 {
4947 l_const = fold_convert (lntype, l_const);
4948 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4949 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4950 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4951 fold_build1 (BIT_NOT_EXPR,
4952 lntype, ll_mask),
4953 0)))
4954 {
4955 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4956
4957 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4958 }
4959 }
4960 if (r_const)
4961 {
4962 r_const = fold_convert (lntype, r_const);
4963 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4964 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4965 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4966 fold_build1 (BIT_NOT_EXPR,
4967 lntype, rl_mask),
4968 0)))
4969 {
4970 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
4971
4972 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
4973 }
4974 }
4975
4976   /* If the right sides are not constant, do the same for them.  Also,
4977 disallow this optimization if a size or signedness mismatch occurs
4978 between the left and right sides. */
4979 if (l_const == 0)
4980 {
4981 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4982 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4983 /* Make sure the two fields on the right
4984 correspond to the left without being swapped. */
4985 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4986 return 0;
4987
4988 first_bit = MIN (lr_bitpos, rr_bitpos);
4989 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4990 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4991 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4992 volatilep);
4993 if (rnmode == VOIDmode)
4994 return 0;
4995
4996 rnbitsize = GET_MODE_BITSIZE (rnmode);
4997 rnbitpos = first_bit & ~ (rnbitsize - 1);
4998 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
4999 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5000
5001 if (BYTES_BIG_ENDIAN)
5002 {
5003 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5004 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5005 }
5006
5007 lr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, lr_mask),
5008 size_int (xlr_bitpos), 0);
5009 rr_mask = const_binop (LSHIFT_EXPR, fold_convert (rntype, rr_mask),
5010 size_int (xrr_bitpos), 0);
5011
5012 /* Make a mask that corresponds to both fields being compared.
5013 Do this for both items being compared. If the operands are the
5014 same size and the bits being compared are in the same position
5015 then we can do this by masking both and comparing the masked
5016 results. */
5017 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5018 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
5019 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5020 {
5021 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5022 ll_unsignedp || rl_unsignedp);
5023 if (! all_ones_mask_p (ll_mask, lnbitsize))
5024 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5025
5026 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
5027 lr_unsignedp || rr_unsignedp);
5028 if (! all_ones_mask_p (lr_mask, rnbitsize))
5029 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5030
5031 return build2 (wanted_code, truth_type, lhs, rhs);
5032 }
5033
5034 /* There is still another way we can do something: If both pairs of
5035 fields being compared are adjacent, we may be able to make a wider
5036 field containing them both.
5037
5038 Note that we still must mask the lhs/rhs expressions. Furthermore,
5039 the mask must be shifted to account for the shift done by
5040 make_bit_field_ref. */
5041 if ((ll_bitsize + ll_bitpos == rl_bitpos
5042 && lr_bitsize + lr_bitpos == rr_bitpos)
5043 || (ll_bitpos == rl_bitpos + rl_bitsize
5044 && lr_bitpos == rr_bitpos + rr_bitsize))
5045 {
5046 tree type;
5047
5048 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
5049 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5050 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
5051 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5052
5053 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5054 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
5055 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5056 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
5057
5058 /* Convert to the smaller type before masking out unwanted bits. */
5059 type = lntype;
5060 if (lntype != rntype)
5061 {
5062 if (lnbitsize > rnbitsize)
5063 {
5064 lhs = fold_convert (rntype, lhs);
5065 ll_mask = fold_convert (rntype, ll_mask);
5066 type = rntype;
5067 }
5068 else if (lnbitsize < rnbitsize)
5069 {
5070 rhs = fold_convert (lntype, rhs);
5071 lr_mask = fold_convert (lntype, lr_mask);
5072 type = lntype;
5073 }
5074 }
5075
5076 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5077 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5078
5079 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5080 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5081
5082 return build2 (wanted_code, truth_type, lhs, rhs);
5083 }
5084
5085 return 0;
5086 }
5087
5088   /* Handle the case of comparisons with constants.  If there is something in
5089      common between the masks, those bits of the constants must be the same;
5090      if not, the condition is always false (or, for the NE_EXPR case, always
5091      true).  Test for this to avoid generating incorrect code below.  */
5092 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
5093 if (! integer_zerop (result)
5094 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
5095 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
5096 {
5097 if (wanted_code == NE_EXPR)
5098 {
5099 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5100 return constant_boolean_node (true, truth_type);
5101 }
5102 else
5103 {
5104 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5105 return constant_boolean_node (false, truth_type);
5106 }
5107 }
5108
5109 /* Construct the expression we will return. First get the component
5110 reference we will make. Unless the mask is all ones the width of
5111 that field, perform the mask operation. Then compare with the
5112 merged constant. */
5113 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
5114 ll_unsignedp || rl_unsignedp);
5115
5116 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
5117 if (! all_ones_mask_p (ll_mask, lnbitsize))
5118 result = build2 (BIT_AND_EXPR, lntype, result, ll_mask);
5119
5120 return build2 (wanted_code, truth_type, result,
5121 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
5122 }
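
/* An end-to-end example of the masking above: given

     struct s { unsigned a : 4; unsigned b : 4; } *p;

   the test p->a == 2 && p->b == 4 can be folded into a single load of
   the byte containing both fields compared against the merged constant
   -- 0x42 on a little-endian target, 0x24 on a big-endian one -- with
   no BIT_AND_EXPR needed since the combined mask covers every bit.  */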
5123 \f
5124 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5125 constant. */
5126
5127 static tree
5128 optimize_minmax_comparison (enum tree_code code, tree type, tree op0, tree op1)
5129 {
5130 tree arg0 = op0;
5131 enum tree_code op_code;
5132 tree comp_const = op1;
5133 tree minmax_const;
5134 int consts_equal, consts_lt;
5135 tree inner;
5136
5137 STRIP_SIGN_NOPS (arg0);
5138
5139 op_code = TREE_CODE (arg0);
5140 minmax_const = TREE_OPERAND (arg0, 1);
5141 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5142 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5143 inner = TREE_OPERAND (arg0, 0);
5144
5145   /* If something does not permit us to optimize, return NULL_TREE.  */
5146 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5147 || TREE_CODE (comp_const) != INTEGER_CST
5148 || TREE_CONSTANT_OVERFLOW (comp_const)
5149 || TREE_CODE (minmax_const) != INTEGER_CST
5150 || TREE_CONSTANT_OVERFLOW (minmax_const))
5151 return NULL_TREE;
5152
5153 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5154 and GT_EXPR, doing the rest with recursive calls using logical
5155 simplifications. */
5156 switch (code)
5157 {
5158 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5159 {
5160 /* FIXME: We should be able to invert code without building a
5161 scratch tree node, but doing so would require us to
5162 duplicate a part of invert_truthvalue here. */
5163 tree tem = invert_truthvalue (build2 (code, type, op0, op1));
5164 tem = optimize_minmax_comparison (TREE_CODE (tem),
5165 TREE_TYPE (tem),
5166 TREE_OPERAND (tem, 0),
5167 TREE_OPERAND (tem, 1));
5168 return invert_truthvalue (tem);
5169 }
5170
5171 case GE_EXPR:
5172 return
5173 fold_build2 (TRUTH_ORIF_EXPR, type,
5174 optimize_minmax_comparison
5175 (EQ_EXPR, type, arg0, comp_const),
5176 optimize_minmax_comparison
5177 (GT_EXPR, type, arg0, comp_const));
5178
5179 case EQ_EXPR:
5180 if (op_code == MAX_EXPR && consts_equal)
5181 /* MAX (X, 0) == 0 -> X <= 0 */
5182 return fold_build2 (LE_EXPR, type, inner, comp_const);
5183
5184 else if (op_code == MAX_EXPR && consts_lt)
5185 /* MAX (X, 0) == 5 -> X == 5 */
5186 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5187
5188 else if (op_code == MAX_EXPR)
5189 /* MAX (X, 0) == -1 -> false */
5190 return omit_one_operand (type, integer_zero_node, inner);
5191
5192 else if (consts_equal)
5193 /* MIN (X, 0) == 0 -> X >= 0 */
5194 return fold_build2 (GE_EXPR, type, inner, comp_const);
5195
5196 else if (consts_lt)
5197 /* MIN (X, 0) == 5 -> false */
5198 return omit_one_operand (type, integer_zero_node, inner);
5199
5200 else
5201 /* MIN (X, 0) == -1 -> X == -1 */
5202 return fold_build2 (EQ_EXPR, type, inner, comp_const);
5203
5204 case GT_EXPR:
5205 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5206 /* MAX (X, 0) > 0 -> X > 0
5207 MAX (X, 0) > 5 -> X > 5 */
5208 return fold_build2 (GT_EXPR, type, inner, comp_const);
5209
5210 else if (op_code == MAX_EXPR)
5211 /* MAX (X, 0) > -1 -> true */
5212 return omit_one_operand (type, integer_one_node, inner);
5213
5214 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5215 /* MIN (X, 0) > 0 -> false
5216 MIN (X, 0) > 5 -> false */
5217 return omit_one_operand (type, integer_zero_node, inner);
5218
5219 else
5220 /* MIN (X, 0) > -1 -> X > -1 */
5221 return fold_build2 (GT_EXPR, type, inner, comp_const);
5222
5223 default:
5224 return NULL_TREE;
5225 }
5226 }
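
/* For example, MIN (X, 10) < 5 is handled by inverting it to
   MIN (X, 10) >= 5, splitting the GE_EXPR into the EQ_EXPR and GT_EXPR
   cases (each of which reduces to a test of X against 5), and inverting
   the result back, ultimately yielding X < 5 -- correct, since the
   minimum is below 5 exactly when X is.  */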
5227 \f
5228 /* T is an integer expression that is being multiplied or divided by, or
5229    taken modulo, a constant C (CODE says which, and what kind of divide or
5230    modulus).  See if we can eliminate that operation by folding it with
5231 other operations already in T. WIDE_TYPE, if non-null, is a type that
5232 should be used for the computation if wider than our type.
5233
5234 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5235 (X * 2) + (Y * 4). We must, however, be assured that either the original
5236 expression would not overflow or that overflow is undefined for the type
5237 in the language in question.
5238
5239 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
5240 the machine has a multiply-accumulate insn or that this is part of an
5241 addressing calculation.
5242
5243 If we return a non-null expression, it is an equivalent form of the
5244 original computation, but need not be in the original type. */
5245
5246 static tree
5247 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
5248 {
5249 /* To avoid exponential search depth, refuse to allow recursion past
5250 three levels. Beyond that (1) it's highly unlikely that we'll find
5251 something interesting and (2) we've probably processed it before
5252 when we built the inner expression. */
5253
5254 static int depth;
5255 tree ret;
5256
5257 if (depth > 3)
5258 return NULL;
5259
5260 depth++;
5261 ret = extract_muldiv_1 (t, c, code, wide_type);
5262 depth--;
5263
5264 return ret;
5265 }
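
/* The cap matters because several cases in extract_muldiv_1 recurse
   into both operands, so an unlimited walk over a tree of depth N
   could visit on the order of 2**N nodes.  Three levels are enough to
   catch shapes like the (X * 8) + (Y * 16) example above while
   keeping the search cheap.  */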
5266
5267 static tree
5268 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
5269 {
5270 tree type = TREE_TYPE (t);
5271 enum tree_code tcode = TREE_CODE (t);
5272 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5273 > GET_MODE_SIZE (TYPE_MODE (type)))
5274 ? wide_type : type);
5275 tree t1, t2;
5276 int same_p = tcode == code;
5277 tree op0 = NULL_TREE, op1 = NULL_TREE;
5278
5279 /* Don't deal with constants of zero here; they confuse the code below. */
5280 if (integer_zerop (c))
5281 return NULL_TREE;
5282
5283 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5284 op0 = TREE_OPERAND (t, 0);
5285
5286 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5287 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5288
5289 /* Note that we need not handle conditional operations here since fold
5290 already handles those cases. So just do arithmetic here. */
5291 switch (tcode)
5292 {
5293 case INTEGER_CST:
5294 /* For a constant, we can always simplify if we are a multiply
5295 or (for divide and modulus) if it is a multiple of our constant. */
5296 if (code == MULT_EXPR
5297 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
5298 return const_binop (code, fold_convert (ctype, t),
5299 fold_convert (ctype, c), 0);
5300 break;
5301
5302 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
5303 /* If op0 is an expression ... */
5304 if ((COMPARISON_CLASS_P (op0)
5305 || UNARY_CLASS_P (op0)
5306 || BINARY_CLASS_P (op0)
5307 || EXPRESSION_CLASS_P (op0))
5308 /* ... and is unsigned, and its type is smaller than ctype,
5309 then we cannot pass through this widening. */
5310 && ((TYPE_UNSIGNED (TREE_TYPE (op0))
5311 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
5312 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
5313 && (GET_MODE_SIZE (TYPE_MODE (ctype))
5314 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
5315 /* ... or this is a truncation (t is narrower than op0),
5316 then we cannot pass through this narrowing. */
5317 || (GET_MODE_SIZE (TYPE_MODE (type))
5318 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
5319 /* ... or signedness changes for division or modulus,
5320 then we cannot pass through this conversion. */
5321 || (code != MULT_EXPR
5322 && (TYPE_UNSIGNED (ctype)
5323 != TYPE_UNSIGNED (TREE_TYPE (op0))))))
5324 break;
5325
5326 /* Pass the constant down and see if we can make a simplification. If
5327 we can, replace this expression with the inner simplification for
5328 possible later conversion to our type or some other type. */
5329 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5330 && TREE_CODE (t2) == INTEGER_CST
5331 && ! TREE_CONSTANT_OVERFLOW (t2)
5332 && (0 != (t1 = extract_muldiv (op0, t2, code,
5333 code == MULT_EXPR
5334 ? ctype : NULL_TREE))))
5335 return t1;
5336 break;
5337
5338 case ABS_EXPR:
5339 /* If widening the type changes it from signed to unsigned, then we
5340 must avoid building ABS_EXPR itself as unsigned. */
5341 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5342 {
5343 tree cstype = (*lang_hooks.types.signed_type) (ctype);
5344 if ((t1 = extract_muldiv (op0, c, code, cstype)) != 0)
5345 {
5346 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5347 return fold_convert (ctype, t1);
5348 }
5349 break;
5350 }
5351 /* FALLTHROUGH */
5352 case NEGATE_EXPR:
5353 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5354 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5355 break;
5356
5357 case MIN_EXPR: case MAX_EXPR:
5358 /* If widening the type changes the signedness, then we can't perform
5359 this optimization as that changes the result. */
5360 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5361 break;
5362
5363 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5364 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
5365 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
5366 {
5367 if (tree_int_cst_sgn (c) < 0)
5368 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5369
5370 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5371 fold_convert (ctype, t2));
5372 }
5373 break;
5374
5375 case LSHIFT_EXPR: case RSHIFT_EXPR:
5376 /* If the second operand is constant, this is a multiplication
5377 or floor division by a power of two, so we can treat it that
5378 way unless the multiplier or divisor overflows. Signed
5379 left-shift overflow is implementation-defined rather than
5380 undefined in C90, so do not convert signed left shift into
5381 multiplication. */
5382 if (TREE_CODE (op1) == INTEGER_CST
5383 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5384 /* const_binop may not detect overflow correctly,
5385 so check for it explicitly here. */
5386 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5387 && TREE_INT_CST_HIGH (op1) == 0
5388 && 0 != (t1 = fold_convert (ctype,
5389 const_binop (LSHIFT_EXPR,
5390 size_one_node,
5391 op1, 0)))
5392 && ! TREE_OVERFLOW (t1))
5393 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5394 ? MULT_EXPR : FLOOR_DIV_EXPR,
5395 ctype, fold_convert (ctype, op0), t1),
5396 c, code, wide_type);
5397 break;
5398
5399 case PLUS_EXPR: case MINUS_EXPR:
5400 /* See if we can eliminate the operation on both sides. If we can, we
5401 can return a new PLUS or MINUS. If we can't, the only remaining
5402 cases where we can do anything are when the second operand is a
5403 constant. */
5404 t1 = extract_muldiv (op0, c, code, wide_type);
5405 t2 = extract_muldiv (op1, c, code, wide_type);
5406 if (t1 != 0 && t2 != 0
5407 && (code == MULT_EXPR
5408 /* If not multiplication, we can only do this if both operands
5409 are divisible by c. */
5410 || (multiple_of_p (ctype, op0, c)
5411 && multiple_of_p (ctype, op1, c))))
5412 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5413 fold_convert (ctype, t2));
5414
5415 /* If this was a subtraction, negate OP1 and set it to be an addition.
5416 This simplifies the logic below. */
5417 if (tcode == MINUS_EXPR)
5418 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5419
5420 if (TREE_CODE (op1) != INTEGER_CST)
5421 break;
5422
5423 /* If either OP1 or C is negative, this optimization is not safe for
5424 some of the division and remainder types, while for others we need
5425 to change the code. */
5426 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5427 {
5428 if (code == CEIL_DIV_EXPR)
5429 code = FLOOR_DIV_EXPR;
5430 else if (code == FLOOR_DIV_EXPR)
5431 code = CEIL_DIV_EXPR;
5432 else if (code != MULT_EXPR
5433 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5434 break;
5435 }
5436
5437 /* If it's a multiply, or a division/modulus operation on a multiple
5438 of our constant, do the operation and verify it doesn't overflow. */
5439 if (code == MULT_EXPR
5440 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5441 {
5442 op1 = const_binop (code, fold_convert (ctype, op1),
5443 fold_convert (ctype, c), 0);
5444 /* We allow the constant to overflow with wrapping semantics. */
5445 if (op1 == 0
5446 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
5447 break;
5448 }
5449 else
5450 break;
5451
5452 /* If we have an unsigned type that is not a sizetype, we cannot widen
5453 the operation since it will change the result if the original
5454 computation overflowed. */
5455 if (TYPE_UNSIGNED (ctype)
5456 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
5457 && ctype != type)
5458 break;
5459
5460 /* If we were able to eliminate our operation from the first side,
5461 apply our operation to the second side and reform the PLUS. */
5462 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5463 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5464
5465 /* The last case is if we are a multiply. In that case, we can
5466 apply the distributive law to commute the multiply and addition
5467 if the multiplication of the constants doesn't overflow. */
5468 if (code == MULT_EXPR)
5469 return fold_build2 (tcode, ctype,
5470 fold_build2 (code, ctype,
5471 fold_convert (ctype, op0),
5472 fold_convert (ctype, c)),
5473 op1);
5474
5475 break;
5476
5477 case MULT_EXPR:
5478 /* We have a special case here if we are doing something like
5479 (C * 8) % 4 since we know that's zero. */
5480 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5481 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5482 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5483 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5484 return omit_one_operand (type, integer_zero_node, op0);
5485
5486 /* ... fall through ... */
5487
5488 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5489 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5490 /* If we can extract our operation from the LHS, do so and return a
5491 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5492 do something only if the second operand is a constant. */
5493 if (same_p
5494 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
5495 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5496 fold_convert (ctype, op1));
5497 else if (tcode == MULT_EXPR && code == MULT_EXPR
5498 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
5499 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5500 fold_convert (ctype, t1));
5501 else if (TREE_CODE (op1) != INTEGER_CST)
5502 return 0;
5503
5504 /* If these are the same operation types, we can associate them
5505 assuming no overflow. */
5506 if (tcode == code
5507 && 0 != (t1 = const_binop (MULT_EXPR, fold_convert (ctype, op1),
5508 fold_convert (ctype, c), 0))
5509 && ! TREE_OVERFLOW (t1))
5510 return fold_build2 (tcode, ctype, fold_convert (ctype, op0), t1);
5511
5512 /* If these operations "cancel" each other, we have the main
5513 optimizations of this pass, which occur when either constant is a
5514 multiple of the other, in which case we replace this with either an
5515 operation of CODE or TCODE.
5516
5517 If we have an unsigned type that is not a sizetype, we cannot do
5518 this since it will change the result if the original computation
5519 overflowed. */
5520 if ((! TYPE_UNSIGNED (ctype)
5521 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
5522 && ! flag_wrapv
5523 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
5524 || (tcode == MULT_EXPR
5525 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
5526 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
5527 {
5528 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
5529 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5530 fold_convert (ctype,
5531 const_binop (TRUNC_DIV_EXPR,
5532 op1, c, 0)));
5533 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
5534 return fold_build2 (code, ctype, fold_convert (ctype, op0),
5535 fold_convert (ctype,
5536 const_binop (TRUNC_DIV_EXPR,
5537 c, op1, 0)));
5538 }
5539 break;
5540
5541 default:
5542 break;
5543 }
5544
5545 return 0;
5546 }
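
/* Editorial sketch, not part of fold-const.c (name hypothetical): the
   rewrite extract_muldiv aims for, on plain integers.  Assuming the
   original computation cannot overflow, the division is folded into
   the inner multiplications and disappears.  */

static long
example_extract_muldiv (long x, long y)
{
  /* ((x * 8) + (y * 16)) / 4 becomes (x * 2) + (y * 4).  */
  return x * 2 + y * 4;
}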
5547 \f
5548 /* Return a node which has the indicated constant VALUE (either 0 or
5549 1), and is of the indicated TYPE. */
5550
5551 tree
5552 constant_boolean_node (int value, tree type)
5553 {
5554 if (type == integer_type_node)
5555 return value ? integer_one_node : integer_zero_node;
5556 else if (type == boolean_type_node)
5557 return value ? boolean_true_node : boolean_false_node;
5558 else
5559 return build_int_cst (type, value);
5560 }
5561
5562
5563 /* Return true if expr looks like an ARRAY_REF and set base and
5564 offset to the appropriate trees. If there is no offset,
5565 offset is set to NULL_TREE. Base will be canonicalized to
5566 something you can get the element type from using
5567 TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
5568 in bytes from the base. */
5569
5570 static bool
5571 extract_array_ref (tree expr, tree *base, tree *offset)
5572 {
5573 /* One canonical form is a PLUS_EXPR with the first
5574 argument being an ADDR_EXPR with a possible NOP_EXPR
5575 attached. */
5576 if (TREE_CODE (expr) == PLUS_EXPR)
5577 {
5578 tree op0 = TREE_OPERAND (expr, 0);
5579 tree inner_base, dummy1;
5580 /* Strip NOP_EXPRs here because the C frontends and/or
5581 folders may present us with (int *)&x.a + 4B. */
5582 STRIP_NOPS (op0);
5583 if (extract_array_ref (op0, &inner_base, &dummy1))
5584 {
5585 *base = inner_base;
5586 if (dummy1 == NULL_TREE)
5587 *offset = TREE_OPERAND (expr, 1);
5588 else
5589 *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
5590 dummy1, TREE_OPERAND (expr, 1));
5591 return true;
5592 }
5593 }
5594 /* Another canonical form is an ADDR_EXPR of an ARRAY_REF,
5595 which we transform into an ADDR_EXPR with appropriate
5596 offset. For other arguments to the ADDR_EXPR we assume
5597 zero offset and as such do not care about the ADDR_EXPR
5598 type and strip possible nops from it. */
5599 else if (TREE_CODE (expr) == ADDR_EXPR)
5600 {
5601 tree op0 = TREE_OPERAND (expr, 0);
5602 if (TREE_CODE (op0) == ARRAY_REF)
5603 {
5604 tree idx = TREE_OPERAND (op0, 1);
5605 *base = TREE_OPERAND (op0, 0);
5606 *offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
5607 array_ref_element_size (op0));
5608 }
5609 else
5610 {
5611 /* Handle array-to-pointer decay as &a. */
5612 if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE)
5613 *base = TREE_OPERAND (expr, 0);
5614 else
5615 *base = expr;
5616 *offset = NULL_TREE;
5617 }
5618 return true;
5619 }
5620 /* The next canonical form is a VAR_DECL with POINTER_TYPE. */
5621 else if (SSA_VAR_P (expr)
5622 && TREE_CODE (TREE_TYPE (expr)) == POINTER_TYPE)
5623 {
5624 *base = expr;
5625 *offset = NULL_TREE;
5626 return true;
5627 }
5628
5629 return false;
5630 }
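
/* Editorial sketch, not part of fold-const.c (name hypothetical): the
   base/offset split extract_array_ref computes, expressed as plain
   pointer arithmetic.  */

static int *
example_array_ref_split (int a[], long i)
{
  /* &a[i] decomposes into base 'a' plus byte offset i * sizeof (int).  */
  return (int *) ((char *) a + i * (long) sizeof (int));
}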
5631
5632
5633 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
5634 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
5635 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
5636 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
5637 COND is the first argument to CODE; otherwise (as in the example
5638 given here), it is the second argument. TYPE is the type of the
5639 original expression. Return NULL_TREE if no simplification is
5640 possible. */
5641
5642 static tree
5643 fold_binary_op_with_conditional_arg (enum tree_code code,
5644 tree type, tree op0, tree op1,
5645 tree cond, tree arg, int cond_first_p)
5646 {
5647 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
5648 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
5649 tree test, true_value, false_value;
5650 tree lhs = NULL_TREE;
5651 tree rhs = NULL_TREE;
5652
5653 /* This transformation is only worthwhile if we don't have to wrap
5654 arg in a SAVE_EXPR, and the operation can be simplified on at least
5655 one of the branches once it is pushed inside the COND_EXPR. */
5656 if (!TREE_CONSTANT (arg))
5657 return NULL_TREE;
5658
5659 if (TREE_CODE (cond) == COND_EXPR)
5660 {
5661 test = TREE_OPERAND (cond, 0);
5662 true_value = TREE_OPERAND (cond, 1);
5663 false_value = TREE_OPERAND (cond, 2);
5664 /* If this operand throws an exception, then it does not make
5665 sense to try to perform a logical or arithmetic operation
5666 involving it. */
5667 if (VOID_TYPE_P (TREE_TYPE (true_value)))
5668 lhs = true_value;
5669 if (VOID_TYPE_P (TREE_TYPE (false_value)))
5670 rhs = false_value;
5671 }
5672 else
5673 {
5674 tree testtype = TREE_TYPE (cond);
5675 test = cond;
5676 true_value = constant_boolean_node (true, testtype);
5677 false_value = constant_boolean_node (false, testtype);
5678 }
5679
5680 arg = fold_convert (arg_type, arg);
5681 if (lhs == 0)
5682 {
5683 true_value = fold_convert (cond_type, true_value);
5684 if (cond_first_p)
5685 lhs = fold_build2 (code, type, true_value, arg);
5686 else
5687 lhs = fold_build2 (code, type, arg, true_value);
5688 }
5689 if (rhs == 0)
5690 {
5691 false_value = fold_convert (cond_type, false_value);
5692 if (cond_first_p)
5693 rhs = fold_build2 (code, type, false_value, arg);
5694 else
5695 rhs = fold_build2 (code, type, arg, false_value);
5696 }
5697
5698 test = fold_build3 (COND_EXPR, type, test, lhs, rhs);
5699 return fold_convert (type, test);
5700 }
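
/* Editorial sketch, not part of fold-const.c (name hypothetical):
   pushing a binary operation inside a COND_EXPR as above.  The addend
   is constant, so no SAVE_EXPR is needed and each branch can fold
   further on its own.  */

static long
example_op_with_conditional (int b, long x, long y)
{
  /* 4 + (b ? x : y) becomes b ? (4 + x) : (4 + y).  */
  return b ? (4 + x) : (4 + y);
}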
5701
5702 \f
5703 /* Subroutine of fold() that checks for the addition of +/- 0.0.
5704
5705 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
5706 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
5707 ADDEND is the same as X.
5708
5709 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
5710 and finite. The problematic cases are when X is zero, and its mode
5711 has signed zeros. In the case of rounding towards -infinity,
5712 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
5713 modes, X + 0 is not the same as X because -0 + 0 is 0. */
5714
5715 static bool
5716 fold_real_zero_addition_p (tree type, tree addend, int negate)
5717 {
5718 if (!real_zerop (addend))
5719 return false;
5720
5721 /* Don't allow the fold with -fsignaling-nans. */
5722 if (HONOR_SNANS (TYPE_MODE (type)))
5723 return false;
5724
5725 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
5726 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
5727 return true;
5728
5729 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
5730 if (TREE_CODE (addend) == REAL_CST
5731 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
5732 negate = !negate;
5733
5734 /* The mode has signed zeros, and we have to honor their sign.
5735 In this situation, there is only one case we can return true for.
5736 X - 0 is the same as X unless rounding towards -infinity is
5737 supported. */
5738 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
5739 }
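
/* Editorial sketch, not part of fold-const.c (name hypothetical): why
   the fold is restricted when signed zeros are honored.  Under the
   default round-to-nearest mode, an exact zero sum of operands with
   opposite signs is +0.0, so folding X + 0.0 to X would lose the sign
   of a -0.0 result; X - 0.0 only misbehaves when rounding towards
   -infinity.  */

static double
example_signed_zero_addition (void)
{
  double minus_zero = -0.0;
  return minus_zero + 0.0;  /* +0.0, not -0.0 */
}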
5740
5741 /* Subroutine of fold() that checks comparisons of built-in math
5742 functions against real constants.
5743
5744 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
5745 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
5746 is the type of the result and ARG0 and ARG1 are the operands of the
5747 comparison. ARG1 must be a TREE_REAL_CST.
5748
5749 The function returns the constant folded tree if a simplification
5750 can be made, and NULL_TREE otherwise. */
5751
5752 static tree
5753 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
5754 tree type, tree arg0, tree arg1)
5755 {
5756 REAL_VALUE_TYPE c;
5757
5758 if (BUILTIN_SQRT_P (fcode))
5759 {
5760 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
5761 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
5762
5763 c = TREE_REAL_CST (arg1);
5764 if (REAL_VALUE_NEGATIVE (c))
5765 {
5766 /* sqrt(x) < y is always false, if y is negative. */
5767 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
5768 return omit_one_operand (type, integer_zero_node, arg);
5769
5770 /* sqrt(x) > y is always true, if y is negative and we
5771 don't care about NaNs, i.e. negative values of x. */
5772 if (code == NE_EXPR || !HONOR_NANS (mode))
5773 return omit_one_operand (type, integer_one_node, arg);
5774
5775 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
5776 return fold_build2 (GE_EXPR, type, arg,
5777 build_real (TREE_TYPE (arg), dconst0));
5778 }
5779 else if (code == GT_EXPR || code == GE_EXPR)
5780 {
5781 REAL_VALUE_TYPE c2;
5782
5783 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5784 real_convert (&c2, mode, &c2);
5785
5786 if (REAL_VALUE_ISINF (c2))
5787 {
5788 /* sqrt(x) > y is x == +Inf, when y is very large. */
5789 if (HONOR_INFINITIES (mode))
5790 return fold_build2 (EQ_EXPR, type, arg,
5791 build_real (TREE_TYPE (arg), c2));
5792
5793 /* sqrt(x) > y is always false, when y is very large
5794 and we don't care about infinities. */
5795 return omit_one_operand (type, integer_zero_node, arg);
5796 }
5797
5798 /* sqrt(x) > c is the same as x > c*c. */
5799 return fold_build2 (code, type, arg,
5800 build_real (TREE_TYPE (arg), c2));
5801 }
5802 else if (code == LT_EXPR || code == LE_EXPR)
5803 {
5804 REAL_VALUE_TYPE c2;
5805
5806 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
5807 real_convert (&c2, mode, &c2);
5808
5809 if (REAL_VALUE_ISINF (c2))
5810 {
5811 /* sqrt(x) < y is always true, when y is a very large
5812 value and we don't care about NaNs or Infinities. */
5813 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
5814 return omit_one_operand (type, integer_one_node, arg);
5815
5816 /* sqrt(x) < y is x != +Inf when y is very large and we
5817 don't care about NaNs. */
5818 if (! HONOR_NANS (mode))
5819 return fold_build2 (NE_EXPR, type, arg,
5820 build_real (TREE_TYPE (arg), c2));
5821
5822 /* sqrt(x) < y is x >= 0 when y is very large and we
5823 don't care about Infinities. */
5824 if (! HONOR_INFINITIES (mode))
5825 return fold_build2 (GE_EXPR, type, arg,
5826 build_real (TREE_TYPE (arg), dconst0));
5827
5828 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
5829 if (lang_hooks.decls.global_bindings_p () != 0
5830 || CONTAINS_PLACEHOLDER_P (arg))
5831 return NULL_TREE;
5832
5833 arg = save_expr (arg);
5834 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5835 fold_build2 (GE_EXPR, type, arg,
5836 build_real (TREE_TYPE (arg),
5837 dconst0)),
5838 fold_build2 (NE_EXPR, type, arg,
5839 build_real (TREE_TYPE (arg),
5840 c2)));
5841 }
5842
5843 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
5844 if (! HONOR_NANS (mode))
5845 return fold_build2 (code, type, arg,
5846 build_real (TREE_TYPE (arg), c2));
5847
5848 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
5849 if (lang_hooks.decls.global_bindings_p () == 0
5850 && ! CONTAINS_PLACEHOLDER_P (arg))
5851 {
5852 arg = save_expr (arg);
5853 return fold_build2 (TRUTH_ANDIF_EXPR, type,
5854 fold_build2 (GE_EXPR, type, arg,
5855 build_real (TREE_TYPE (arg),
5856 dconst0)),
5857 fold_build2 (code, type, arg,
5858 build_real (TREE_TYPE (arg),
5859 c2)));
5860 }
5861 }
5862 }
5863
5864 return NULL_TREE;
5865 }
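
/* Editorial sketch, not part of fold-const.c (name hypothetical): the
   sqrt comparison fold on concrete constants.  sqrt (x) > 3.0 becomes
   x > 9.0: both sides agree for every x >= 0, 3.0 * 3.0 is exact, and
   a NaN argument makes both comparisons false.  */

static int
example_sqrt_compare (double x)
{
  return x > 9.0;  /* replaces sqrt (x) > 3.0; no libm call left */
}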
5866
5867 /* Subroutine of fold() that optimizes comparisons against Infinities,
5868 either +Inf or -Inf.
5869
5870 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5871 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5872 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
5873
5874 The function returns the constant folded tree if a simplification
5875 can be made, and NULL_TREE otherwise. */
5876
5877 static tree
5878 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5879 {
5880 enum machine_mode mode;
5881 REAL_VALUE_TYPE max;
5882 tree temp;
5883 bool neg;
5884
5885 mode = TYPE_MODE (TREE_TYPE (arg0));
5886
5887 /* For negative infinity swap the sense of the comparison. */
5888 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
5889 if (neg)
5890 code = swap_tree_comparison (code);
5891
5892 switch (code)
5893 {
5894 case GT_EXPR:
5895 /* x > +Inf is always false, if we ignore sNaNs. */
5896 if (HONOR_SNANS (mode))
5897 return NULL_TREE;
5898 return omit_one_operand (type, integer_zero_node, arg0);
5899
5900 case LE_EXPR:
5901 /* x <= +Inf is always true, if we don't care about NaNs. */
5902 if (! HONOR_NANS (mode))
5903 return omit_one_operand (type, integer_one_node, arg0);
5904
5905 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
5906 if (lang_hooks.decls.global_bindings_p () == 0
5907 && ! CONTAINS_PLACEHOLDER_P (arg0))
5908 {
5909 arg0 = save_expr (arg0);
5910 return fold_build2 (EQ_EXPR, type, arg0, arg0);
5911 }
5912 break;
5913
5914 case EQ_EXPR:
5915 case GE_EXPR:
5916 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5917 real_maxval (&max, neg, mode);
5918 return fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5919 arg0, build_real (TREE_TYPE (arg0), max));
5920
5921 case LT_EXPR:
5922 /* x < +Inf is always equal to x <= DBL_MAX. */
5923 real_maxval (&max, neg, mode);
5924 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5925 arg0, build_real (TREE_TYPE (arg0), max));
5926
5927 case NE_EXPR:
5928 /* x != +Inf is always equal to !(x > DBL_MAX). */
5929 real_maxval (&max, neg, mode);
5930 if (! HONOR_NANS (mode))
5931 return fold_build2 (neg ? GE_EXPR : LE_EXPR, type,
5932 arg0, build_real (TREE_TYPE (arg0), max));
5933
5934 /* The transformation below creates non-gimple code and thus is
5935 not appropriate if we are in gimple form. */
5936 if (in_gimple_form)
5937 return NULL_TREE;
5938
5939 temp = fold_build2 (neg ? LT_EXPR : GT_EXPR, type,
5940 arg0, build_real (TREE_TYPE (arg0), max));
5941 return fold_build1 (TRUTH_NOT_EXPR, type, temp);
5942
5943 default:
5944 break;
5945 }
5946
5947 return NULL_TREE;
5948 }
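
/* Editorial sketch, not part of fold-const.c (name hypothetical;
   <float.h> supplies DBL_MAX): comparing against +Inf without an
   infinity constant, as above.  x < +Inf holds exactly when
   x <= DBL_MAX, for finite, infinite and NaN x alike.  */

#include <float.h>

static int
example_inf_compare (double x)
{
  return x <= DBL_MAX;  /* equivalent to x < +Inf */
}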
5949
5950 /* Subroutine of fold() that optimizes comparisons of a division by
5951 a nonzero integer constant against an integer constant, i.e.
5952 X/C1 op C2.
5953
5954 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
5955 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
5956 are the operands of the comparison. ARG1 must be an INTEGER_CST.
5957
5958 The function returns the constant folded tree if a simplification
5959 can be made, and NULL_TREE otherwise. */
5960
5961 static tree
5962 fold_div_compare (enum tree_code code, tree type, tree arg0, tree arg1)
5963 {
5964 tree prod, tmp, hi, lo;
5965 tree arg00 = TREE_OPERAND (arg0, 0);
5966 tree arg01 = TREE_OPERAND (arg0, 1);
5967 unsigned HOST_WIDE_INT lpart;
5968 HOST_WIDE_INT hpart;
5969 int overflow;
5970
5971 /* We have to do this the hard way to detect unsigned overflow.
5972 prod = int_const_binop (MULT_EXPR, arg01, arg1, 0); */
5973 overflow = mul_double (TREE_INT_CST_LOW (arg01),
5974 TREE_INT_CST_HIGH (arg01),
5975 TREE_INT_CST_LOW (arg1),
5976 TREE_INT_CST_HIGH (arg1), &lpart, &hpart);
5977 prod = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5978 prod = force_fit_type (prod, -1, overflow, false);
5979
5980 if (TYPE_UNSIGNED (TREE_TYPE (arg0)))
5981 {
5982 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5983 lo = prod;
5984
5985 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp, 0). */
5986 overflow = add_double (TREE_INT_CST_LOW (prod),
5987 TREE_INT_CST_HIGH (prod),
5988 TREE_INT_CST_LOW (tmp),
5989 TREE_INT_CST_HIGH (tmp),
5990 &lpart, &hpart);
5991 hi = build_int_cst_wide (TREE_TYPE (arg00), lpart, hpart);
5992 hi = force_fit_type (hi, -1, overflow | TREE_OVERFLOW (prod),
5993 TREE_CONSTANT_OVERFLOW (prod));
5994 }
5995 else if (tree_int_cst_sgn (arg01) >= 0)
5996 {
5997 tmp = int_const_binop (MINUS_EXPR, arg01, integer_one_node, 0);
5998 switch (tree_int_cst_sgn (arg1))
5999 {
6000 case -1:
6001 lo = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6002 hi = prod;
6003 break;
6004
6005 case 0:
6006 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6007 hi = tmp;
6008 break;
6009
6010 case 1:
6011 hi = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6012 lo = prod;
6013 break;
6014
6015 default:
6016 gcc_unreachable ();
6017 }
6018 }
6019 else
6020 {
6021 /* A negative divisor reverses the relational operators. */
6022 code = swap_tree_comparison (code);
6023
6024 tmp = int_const_binop (PLUS_EXPR, arg01, integer_one_node, 0);
6025 switch (tree_int_cst_sgn (arg1))
6026 {
6027 case -1:
6028 hi = int_const_binop (MINUS_EXPR, prod, tmp, 0);
6029 lo = prod;
6030 break;
6031
6032 case 0:
6033 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6034 lo = tmp;
6035 break;
6036
6037 case 1:
6038 lo = int_const_binop (PLUS_EXPR, prod, tmp, 0);
6039 hi = prod;
6040 break;
6041
6042 default:
6043 gcc_unreachable ();
6044 }
6045 }
6046
6047 switch (code)
6048 {
6049 case EQ_EXPR:
6050 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6051 return omit_one_operand (type, integer_zero_node, arg00);
6052 if (TREE_OVERFLOW (hi))
6053 return fold_build2 (GE_EXPR, type, arg00, lo);
6054 if (TREE_OVERFLOW (lo))
6055 return fold_build2 (LE_EXPR, type, arg00, hi);
6056 return build_range_check (type, arg00, 1, lo, hi);
6057
6058 case NE_EXPR:
6059 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6060 return omit_one_operand (type, integer_one_node, arg00);
6061 if (TREE_OVERFLOW (hi))
6062 return fold_build2 (LT_EXPR, type, arg00, lo);
6063 if (TREE_OVERFLOW (lo))
6064 return fold_build2 (GT_EXPR, type, arg00, hi);
6065 return build_range_check (type, arg00, 0, lo, hi);
6066
6067 case LT_EXPR:
6068 if (TREE_OVERFLOW (lo))
6069 return omit_one_operand (type, integer_zero_node, arg00);
6070 return fold_build2 (LT_EXPR, type, arg00, lo);
6071
6072 case LE_EXPR:
6073 if (TREE_OVERFLOW (hi))
6074 return omit_one_operand (type, integer_one_node, arg00);
6075 return fold_build2 (LE_EXPR, type, arg00, hi);
6076
6077 case GT_EXPR:
6078 if (TREE_OVERFLOW (hi))
6079 return omit_one_operand (type, integer_zero_node, arg00);
6080 return fold_build2 (GT_EXPR, type, arg00, hi);
6081
6082 case GE_EXPR:
6083 if (TREE_OVERFLOW (lo))
6084 return omit_one_operand (type, integer_one_node, arg00);
6085 return fold_build2 (GE_EXPR, type, arg00, lo);
6086
6087 default:
6088 break;
6089 }
6090
6091 return NULL_TREE;
6092 }
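
/* Editorial sketch, not part of fold-const.c (name hypothetical): the
   range check fold_div_compare builds, on concrete unsigned values.
   With C1 == 3 and C2 == 2, prod is 6 and hi is 6 + (3 - 1).  */

static int
example_div_compare (unsigned int x)
{
  /* x / 3 == 2 holds exactly when 6 <= x && x <= 8.  */
  return x >= 6 && x <= 8;
}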
6093
6094
6095 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6096 equality/inequality test, then return a simplified form of the test
6097 using a sign test. Otherwise return NULL. TYPE is the desired
6098 result type. */
6099
6100 static tree
6101 fold_single_bit_test_into_sign_test (enum tree_code code, tree arg0, tree arg1,
6102 tree result_type)
6103 {
6104 /* If this is testing a single bit, we can optimize the test. */
6105 if ((code == NE_EXPR || code == EQ_EXPR)
6106 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6107 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6108 {
6109 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6110 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6111 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6112
6113 if (arg00 != NULL_TREE
6114 /* This is only a win if casting to a signed type is cheap,
6115 i.e. when arg00's type is not a partial mode. */
6116 && TYPE_PRECISION (TREE_TYPE (arg00))
6117 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6118 {
6119 tree stype = lang_hooks.types.signed_type (TREE_TYPE (arg00));
6120 return fold_build2 (code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6121 result_type, fold_convert (stype, arg00),
6122 fold_convert (stype, integer_zero_node));
6123 }
6124 }
6125
6126 return NULL_TREE;
6127 }
6128
6129 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6130 equality/inequality test, then return a simplified form of
6131 the test using shifts and logical operations. Otherwise return
6132 NULL. TYPE is the desired result type. */
6133
6134 tree
6135 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
6136 tree result_type)
6137 {
6138 /* If this is testing a single bit, we can optimize the test. */
6139 if ((code == NE_EXPR || code == EQ_EXPR)
6140 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6141 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6142 {
6143 tree inner = TREE_OPERAND (arg0, 0);
6144 tree type = TREE_TYPE (arg0);
6145 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6146 enum machine_mode operand_mode = TYPE_MODE (type);
6147 int ops_unsigned;
6148 tree signed_type, unsigned_type, intermediate_type;
6149 tree tem;
6150
6151 /* First, see if we can fold the single bit test into a sign-bit
6152 test. */
6153 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1,
6154 result_type);
6155 if (tem)
6156 return tem;
6157
6158 /* Otherwise we have (A & C) != 0 where C is a single bit,
6159 convert that into ((A >> C2) & 1), where C2 = log2(C).
6160 Similarly for (A & C) == 0. */
6161
6162 /* If INNER is a right shift of a constant and it plus BITNUM does
6163 not overflow, adjust BITNUM and INNER. */
6164 if (TREE_CODE (inner) == RSHIFT_EXPR
6165 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6166 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6167 && bitnum < TYPE_PRECISION (type)
6168 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6169 TYPE_PRECISION (type) - bitnum))
6170 {
6171 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6172 inner = TREE_OPERAND (inner, 0);
6173 }
6174
6175 /* If we are going to be able to omit the AND below, we must do our
6176 operations as unsigned. If we must use the AND, we have a choice.
6177 Normally unsigned is faster, but for some machines signed is. */
6178 #ifdef LOAD_EXTEND_OP
6179 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6180 && !flag_syntax_only) ? 0 : 1;
6181 #else
6182 ops_unsigned = 1;
6183 #endif
6184
6185 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6186 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6187 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6188 inner = fold_convert (intermediate_type, inner);
6189
6190 if (bitnum != 0)
6191 inner = build2 (RSHIFT_EXPR, intermediate_type,
6192 inner, size_int (bitnum));
6193
6194 if (code == EQ_EXPR)
6195 inner = fold_build2 (BIT_XOR_EXPR, intermediate_type,
6196 inner, integer_one_node);
6197
6198 /* Put the AND last so it can combine with more things. */
6199 inner = build2 (BIT_AND_EXPR, intermediate_type,
6200 inner, integer_one_node);
6201
6202 /* Make sure to return the proper type. */
6203 inner = fold_convert (result_type, inner);
6204
6205 return inner;
6206 }
6207 return NULL_TREE;
6208 }
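
/* Editorial sketch, not part of fold-const.c (names hypothetical): the
   two single-bit rewrites above.  A test of the sign bit becomes a
   signed comparison with zero; any other single-bit test becomes a
   shift and mask.  */

static int
example_sign_bit_test (int a)
{
  /* (a & 0x80000000) != 0, where the mask is a's sign bit, folds to
     a < 0.  */
  return a < 0;
}

static int
example_single_bit_test (unsigned int a)
{
  /* (a & 8) != 0 folds to (a >> 3) & 1, since 8 == 1 << 3.  */
  return (a >> 3) & 1;
}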
6209
6210 /* Check whether we are allowed to reorder operands arg0 and arg1,
6211 such that the evaluation of arg1 occurs before arg0. */
6212
6213 static bool
6214 reorder_operands_p (tree arg0, tree arg1)
6215 {
6216 if (! flag_evaluation_order)
6217 return true;
6218 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6219 return true;
6220 return ! TREE_SIDE_EFFECTS (arg0)
6221 && ! TREE_SIDE_EFFECTS (arg1);
6222 }
6223
6224 /* Test whether it is preferable to swap two operands, ARG0 and
6225 ARG1, for example because ARG0 is an integer constant and ARG1
6226 isn't. If REORDER is true, only recommend swapping if we can
6227 evaluate the operands in reverse order. */
6228
6229 bool
6230 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
6231 {
6232 STRIP_SIGN_NOPS (arg0);
6233 STRIP_SIGN_NOPS (arg1);
6234
6235 if (TREE_CODE (arg1) == INTEGER_CST)
6236 return 0;
6237 if (TREE_CODE (arg0) == INTEGER_CST)
6238 return 1;
6239
6240 if (TREE_CODE (arg1) == REAL_CST)
6241 return 0;
6242 if (TREE_CODE (arg0) == REAL_CST)
6243 return 1;
6244
6245 if (TREE_CODE (arg1) == COMPLEX_CST)
6246 return 0;
6247 if (TREE_CODE (arg0) == COMPLEX_CST)
6248 return 1;
6249
6250 if (TREE_CONSTANT (arg1))
6251 return 0;
6252 if (TREE_CONSTANT (arg0))
6253 return 1;
6254
6255 if (optimize_size)
6256 return 0;
6257
6258 if (reorder && flag_evaluation_order
6259 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6260 return 0;
6261
6262 if (DECL_P (arg1))
6263 return 0;
6264 if (DECL_P (arg0))
6265 return 1;
6266
6267 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6268 for commutative and comparison operators. Ensuring a canonical
6269 form allows the optimizers to find additional redundancies without
6270 having to explicitly check for both orderings. */
6271 if (TREE_CODE (arg0) == SSA_NAME
6272 && TREE_CODE (arg1) == SSA_NAME
6273 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6274 return 1;
6275
6276 return 0;
6277 }
6278
6279 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6280 ARG0 is extended to a wider type. */
6281
6282 static tree
6283 fold_widened_comparison (enum tree_code code, tree type, tree arg0, tree arg1)
6284 {
6285 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6286 tree arg1_unw;
6287 tree shorter_type, outer_type;
6288 tree min, max;
6289 bool above, below;
6290
6291 if (arg0_unw == arg0)
6292 return NULL_TREE;
6293 shorter_type = TREE_TYPE (arg0_unw);
6294
6295 #ifdef HAVE_canonicalize_funcptr_for_compare
6296 /* Disable this optimization if we're casting a function pointer
6297 type on targets that require function pointer canonicalization. */
6298 if (HAVE_canonicalize_funcptr_for_compare
6299 && TREE_CODE (shorter_type) == POINTER_TYPE
6300 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6301 return NULL_TREE;
6302 #endif
6303
6304 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6305 return NULL_TREE;
6306
6307 arg1_unw = get_unwidened (arg1, shorter_type);
6308
6309 /* If possible, express the comparison in the shorter mode. */
6310 if ((code == EQ_EXPR || code == NE_EXPR
6311 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6312 && (TREE_TYPE (arg1_unw) == shorter_type
6313 || (TREE_CODE (arg1_unw) == INTEGER_CST
6314 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6315 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6316 && int_fits_type_p (arg1_unw, shorter_type))))
6317 return fold_build2 (code, type, arg0_unw,
6318 fold_convert (shorter_type, arg1_unw));
6319
6320 if (TREE_CODE (arg1_unw) != INTEGER_CST
6321 || TREE_CODE (shorter_type) != INTEGER_TYPE
6322 || !int_fits_type_p (arg1_unw, shorter_type))
6323 return NULL_TREE;
6324
6325 /* If we are comparing with an integer that does not fit into the range
6326 of the shorter type, the result is known. */
6327 outer_type = TREE_TYPE (arg1_unw);
6328 min = lower_bound_in_type (outer_type, shorter_type);
6329 max = upper_bound_in_type (outer_type, shorter_type);
6330
6331 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6332 max, arg1_unw));
6333 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6334 arg1_unw, min));
6335
6336 switch (code)
6337 {
6338 case EQ_EXPR:
6339 if (above || below)
6340 return omit_one_operand (type, integer_zero_node, arg0);
6341 break;
6342
6343 case NE_EXPR:
6344 if (above || below)
6345 return omit_one_operand (type, integer_one_node, arg0);
6346 break;
6347
6348 case LT_EXPR:
6349 case LE_EXPR:
6350 if (above)
6351 return omit_one_operand (type, integer_one_node, arg0);
6352 else if (below)
6353 return omit_one_operand (type, integer_zero_node, arg0);
6354 break;
6355 case GT_EXPR:
6356 case GE_EXPR:
6357 if (above)
6358 return omit_one_operand (type, integer_zero_node, arg0);
6359 else if (below)
6360 return omit_one_operand (type, integer_one_node, arg0);
6361 break;
6362 default:
6363 break;
6364 }
6365
6366 return NULL_TREE;
6367 }
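
/* Editorial sketch, not part of fold-const.c (name hypothetical): a
   widened comparison whose outcome follows from the ranges involved.  */

static int
example_widened_compare (short s)
{
  /* (int) s < 100000 is always true, since 100000 exceeds the largest
     short (32767); the comparison folds to 1.  */
  return (int) s < 100000;
}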
6368
6369 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6370 ARG0 just the signedness is changed. */
6371
6372 static tree
6373 fold_sign_changed_comparison (enum tree_code code, tree type,
6374 tree arg0, tree arg1)
6375 {
6376 tree arg0_inner, tmp;
6377 tree inner_type, outer_type;
6378
6379 if (TREE_CODE (arg0) != NOP_EXPR
6380 && TREE_CODE (arg0) != CONVERT_EXPR)
6381 return NULL_TREE;
6382
6383 outer_type = TREE_TYPE (arg0);
6384 arg0_inner = TREE_OPERAND (arg0, 0);
6385 inner_type = TREE_TYPE (arg0_inner);
6386
6387 #ifdef HAVE_canonicalize_funcptr_for_compare
6388 /* Disable this optimization if we're casting a function pointer
6389 type on targets that require function pointer canonicalization. */
6390 if (HAVE_canonicalize_funcptr_for_compare
6391 && TREE_CODE (inner_type) == POINTER_TYPE
6392 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6393 return NULL_TREE;
6394 #endif
6395
6396 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6397 return NULL_TREE;
6398
6399 if (TREE_CODE (arg1) != INTEGER_CST
6400 && !((TREE_CODE (arg1) == NOP_EXPR
6401 || TREE_CODE (arg1) == CONVERT_EXPR)
6402 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6403 return NULL_TREE;
6404
6405 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6406 && code != NE_EXPR
6407 && code != EQ_EXPR)
6408 return NULL_TREE;
6409
6410 if (TREE_CODE (arg1) == INTEGER_CST)
6411 {
6412 tmp = build_int_cst_wide (inner_type,
6413 TREE_INT_CST_LOW (arg1),
6414 TREE_INT_CST_HIGH (arg1));
6415 arg1 = force_fit_type (tmp, 0,
6416 TREE_OVERFLOW (arg1),
6417 TREE_CONSTANT_OVERFLOW (arg1));
6418 }
6419 else
6420 arg1 = fold_convert (inner_type, arg1);
6421
6422 return fold_build2 (code, type, arg0_inner, arg1);
6423 }
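
/* Editorial sketch, not part of fold-const.c (name hypothetical): a
   comparison where the cast changes only signedness.  Equality of
   same-precision integers depends only on their bit patterns, so the
   cast can be dropped once the constant is reinterpreted in the inner
   type.  */

static int
example_sign_changed_compare (int x)
{
  /* (unsigned int) x == 5u is the same test as x == 5.  */
  return x == 5;
}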
6424
6425 /* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
6426 the step of the array. Reconstructs s and delta in the case of s * delta
6427 being an integer constant (and thus already folded).
6428 ADDR is the address. OP1 is the multiplicative expression.
6429 If the function succeeds, the new address expression is returned. Otherwise
6430 NULL_TREE is returned. */
6431
6432 static tree
6433 try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
6434 {
6435 tree s, delta, step;
6436 tree ref = TREE_OPERAND (addr, 0), pref;
6437 tree ret, pos;
6438 tree itype;
6439
6440 /* Canonicalize op1 into a possibly non-constant delta
6441 and an INTEGER_CST s. */
6442 if (TREE_CODE (op1) == MULT_EXPR)
6443 {
6444 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6445
6446 STRIP_NOPS (arg0);
6447 STRIP_NOPS (arg1);
6448
6449 if (TREE_CODE (arg0) == INTEGER_CST)
6450 {
6451 s = arg0;
6452 delta = arg1;
6453 }
6454 else if (TREE_CODE (arg1) == INTEGER_CST)
6455 {
6456 s = arg1;
6457 delta = arg0;
6458 }
6459 else
6460 return NULL_TREE;
6461 }
6462 else if (TREE_CODE (op1) == INTEGER_CST)
6463 {
6464 delta = op1;
6465 s = NULL_TREE;
6466 }
6467 else
6468 {
6469 /* Pretend the expression is delta * 1. */
6470 delta = op1;
6471 s = integer_one_node;
6472 }
6473
6474 for (;; ref = TREE_OPERAND (ref, 0))
6475 {
6476 if (TREE_CODE (ref) == ARRAY_REF)
6477 {
6478 itype = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
6479 if (! itype)
6480 continue;
6481
6482 step = array_ref_element_size (ref);
6483 if (TREE_CODE (step) != INTEGER_CST)
6484 continue;
6485
6486 if (s)
6487 {
6488 if (! tree_int_cst_equal (step, s))
6489 continue;
6490 }
6491 else
6492 {
6493 /* Check whether delta is a multiple of step. */
6494 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, delta, step);
6495 if (! tmp)
6496 continue;
6497 delta = tmp;
6498 }
6499
6500 break;
6501 }
6502
6503 if (!handled_component_p (ref))
6504 return NULL_TREE;
6505 }
6506
6507 /* We found a suitable array reference. Copy everything up to it,
6508 and replace the index. */
6509
6510 pref = TREE_OPERAND (addr, 0);
6511 ret = copy_node (pref);
6512 pos = ret;
6513
6514 while (pref != ref)
6515 {
6516 pref = TREE_OPERAND (pref, 0);
6517 TREE_OPERAND (pos, 0) = copy_node (pref);
6518 pos = TREE_OPERAND (pos, 0);
6519 }
6520
6521 TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
6522 fold_convert (itype,
6523 TREE_OPERAND (pos, 1)),
6524 fold_convert (itype, delta));
6525
6526 return fold_build1 (ADDR_EXPR, TREE_TYPE (addr), ret);
6527 }
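
/* Editorial sketch, not part of fold-const.c (name hypothetical):
   moving a multiplication by the element size into the array index,
   as above.  */

static int *
example_move_mult_to_index (int a[], long i, long j)
{
  /* (int *) ((char *) &a[i] + j * sizeof (int)) becomes &a[i + j].  */
  return &a[i + j];
}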
6528
6529
6530 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6531 means A >= Y && A != MAX, but in this case we know that
6532 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6533
6534 static tree
6535 fold_to_nonsharp_ineq_using_bound (tree ineq, tree bound)
6536 {
6537 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6538
6539 if (TREE_CODE (bound) == LT_EXPR)
6540 a = TREE_OPERAND (bound, 0);
6541 else if (TREE_CODE (bound) == GT_EXPR)
6542 a = TREE_OPERAND (bound, 1);
6543 else
6544 return NULL_TREE;
6545
6546 typea = TREE_TYPE (a);
6547 if (!INTEGRAL_TYPE_P (typea)
6548 && !POINTER_TYPE_P (typea))
6549 return NULL_TREE;
6550
6551 if (TREE_CODE (ineq) == LT_EXPR)
6552 {
6553 a1 = TREE_OPERAND (ineq, 1);
6554 y = TREE_OPERAND (ineq, 0);
6555 }
6556 else if (TREE_CODE (ineq) == GT_EXPR)
6557 {
6558 a1 = TREE_OPERAND (ineq, 0);
6559 y = TREE_OPERAND (ineq, 1);
6560 }
6561 else
6562 return NULL_TREE;
6563
6564 if (TREE_TYPE (a1) != typea)
6565 return NULL_TREE;
6566
6567 diff = fold_build2 (MINUS_EXPR, typea, a1, a);
6568 if (!integer_onep (diff))
6569 return NULL_TREE;
6570
6571 return fold_build2 (GE_EXPR, type, a, y);
6572 }
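
/* Editorial sketch, not part of fold-const.c (name hypothetical): the
   non-sharp inequality fold above.  a + 1 > y would normally mean
   a >= y unless a is the type's maximum, but the conjoined bound
   a < x rules that maximum out (and signed overflow is undefined, so
   a + 1 cannot wrap).  */

static int
example_nonsharp_ineq (long a, long x, long y)
{
  /* a < x && a + 1 > y rewritten without the addition:  */
  return a < x && a >= y;
}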
6573
6574 /* Fold a sum or difference where at least one operand is a multiplication.
6575 Returns the folded tree or NULL if no simplification could be made. */
6576
6577 static tree
6578 fold_plusminus_mult_expr (enum tree_code code, tree type, tree arg0, tree arg1)
6579 {
6580 tree arg00, arg01, arg10, arg11;
6581 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6582
6583 /* (A * C) +- (B * C) -> (A+-B) * C.
6584 (A * C) +- A -> A * (C+-1).
6585 We are most concerned about the case where C is a constant,
6586 but other combinations show up during loop reduction. Since
6587 it is not difficult, try all four possibilities. */
6588
6589 if (TREE_CODE (arg0) == MULT_EXPR)
6590 {
6591 arg00 = TREE_OPERAND (arg0, 0);
6592 arg01 = TREE_OPERAND (arg0, 1);
6593 }
6594 else
6595 {
6596 arg00 = arg0;
6597 if (!FLOAT_TYPE_P (type))
6598 arg01 = build_int_cst (type, 1);
6599 else
6600 arg01 = build_real (type, dconst1);
6601 }
6602 if (TREE_CODE (arg1) == MULT_EXPR)
6603 {
6604 arg10 = TREE_OPERAND (arg1, 0);
6605 arg11 = TREE_OPERAND (arg1, 1);
6606 }
6607 else
6608 {
6609 arg10 = arg1;
6610 if (!FLOAT_TYPE_P (type))
6611 arg11 = build_int_cst (type, 1);
6612 else
6613 arg11 = build_real (type, dconst1);
6614 }
6615 same = NULL_TREE;
6616
6617 if (operand_equal_p (arg01, arg11, 0))
6618 same = arg01, alt0 = arg00, alt1 = arg10;
6619 else if (operand_equal_p (arg00, arg10, 0))
6620 same = arg00, alt0 = arg01, alt1 = arg11;
6621 else if (operand_equal_p (arg00, arg11, 0))
6622 same = arg00, alt0 = arg01, alt1 = arg10;
6623 else if (operand_equal_p (arg01, arg10, 0))
6624 same = arg01, alt0 = arg00, alt1 = arg11;
6625
6626 /* No identical multiplicands; see if we can find a common
6627 power-of-two factor in non-power-of-two multiplies. This
6628 can help in multi-dimensional array access. */
6629 else if (host_integerp (arg01, 0)
6630 && host_integerp (arg11, 0))
6631 {
6632 HOST_WIDE_INT int01, int11, tmp;
6633 bool swap = false;
6634 tree maybe_same;
6635 int01 = TREE_INT_CST_LOW (arg01);
6636 int11 = TREE_INT_CST_LOW (arg11);
6637
6638 /* Move min of absolute values to int11. */
6639 if ((int01 >= 0 ? int01 : -int01)
6640 < (int11 >= 0 ? int11 : -int11))
6641 {
6642 tmp = int01, int01 = int11, int11 = tmp;
6643 alt0 = arg00, arg00 = arg10, arg10 = alt0;
6644 maybe_same = arg01;
6645 swap = true;
6646 }
6647 else
6648 maybe_same = arg11;
6649
6650 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
6651 {
6652 alt0 = fold_build2 (MULT_EXPR, TREE_TYPE (arg00), arg00,
6653 build_int_cst (TREE_TYPE (arg00),
6654 int01 / int11));
6655 alt1 = arg10;
6656 same = maybe_same;
6657 if (swap)
6658 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
6659 }
6660 }
6661
6662 if (same)
6663 return fold_build2 (MULT_EXPR, type,
6664 fold_build2 (code, type,
6665 fold_convert (type, alt0),
6666 fold_convert (type, alt1)),
6667 fold_convert (type, same));
6668
6669 return NULL_TREE;
6670 }
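
/* Editorial sketch, not part of fold-const.c (name hypothetical): the
   distributive rewrite above, trading two multiplies for one.  */

static long
example_plusminus_mult (long a, long b, long c)
{
  /* a * c + b * c becomes (a + b) * c; similarly a * c + a becomes
     a * (c + 1).  */
  return (a + b) * c;
}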
6671
6672 /* Fold a unary expression of code CODE and type TYPE with operand
6673 OP0. Return the folded expression if folding is successful.
6674 Otherwise, return NULL_TREE. */
6675
6676 tree
6677 fold_unary (enum tree_code code, tree type, tree op0)
6678 {
6679 tree tem;
6680 tree arg0;
6681 enum tree_code_class kind = TREE_CODE_CLASS (code);
6682
6683 gcc_assert (IS_EXPR_CODE_CLASS (kind)
6684 && TREE_CODE_LENGTH (code) == 1);
6685
6686 arg0 = op0;
6687 if (arg0)
6688 {
6689 if (code == NOP_EXPR || code == CONVERT_EXPR
6690 || code == FLOAT_EXPR || code == ABS_EXPR)
6691 {
6692 /* Don't use STRIP_NOPS, because signedness of argument type
6693 matters. */
6694 STRIP_SIGN_NOPS (arg0);
6695 }
6696 else
6697 {
6698 /* Strip any conversions that don't change the mode. This
6699 is safe for every expression, except for a comparison
6700 expression because its signedness is derived from its
6701 operands.
6702
6703 Note that this is done as an internal manipulation within
6704 the constant folder, in order to find the simplest
6705 representation of the arguments so that their form can be
6706 studied. In any case, the appropriate type conversions
6707 should be put back in the tree that will get out of the
6708 constant folder. */
6709 STRIP_NOPS (arg0);
6710 }
6711 }
6712
6713 if (TREE_CODE_CLASS (code) == tcc_unary)
6714 {
6715 if (TREE_CODE (arg0) == COMPOUND_EXPR)
6716 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
6717 fold_build1 (code, type, TREE_OPERAND (arg0, 1)));
6718 else if (TREE_CODE (arg0) == COND_EXPR)
6719 {
6720 tree arg01 = TREE_OPERAND (arg0, 1);
6721 tree arg02 = TREE_OPERAND (arg0, 2);
6722 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
6723 arg01 = fold_build1 (code, type, arg01);
6724 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
6725 arg02 = fold_build1 (code, type, arg02);
6726 tem = fold_build3 (COND_EXPR, type, TREE_OPERAND (arg0, 0),
6727 arg01, arg02);
6728
6729 /* If this was a conversion, and all we did was to move it
6730 inside the COND_EXPR, bring it back out. But leave it if
6731 it is a conversion from integer to integer and the
6732 result precision is no wider than a word since such a
6733 conversion is cheap and may be optimized away by combine,
6734 while it couldn't if it were outside the COND_EXPR. Then return
6735 so we don't get into an infinite recursion loop taking the
6736 conversion out and then back in. */
6737
6738 if ((code == NOP_EXPR || code == CONVERT_EXPR
6739 || code == NON_LVALUE_EXPR)
6740 && TREE_CODE (tem) == COND_EXPR
6741 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
6742 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
6743 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
6744 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
6745 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
6746 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
6747 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
6748 && (INTEGRAL_TYPE_P
6749 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
6750 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
6751 || flag_syntax_only))
6752 tem = build1 (code, type,
6753 build3 (COND_EXPR,
6754 TREE_TYPE (TREE_OPERAND
6755 (TREE_OPERAND (tem, 1), 0)),
6756 TREE_OPERAND (tem, 0),
6757 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
6758 TREE_OPERAND (TREE_OPERAND (tem, 2), 0)));
6759 return tem;
6760 }
6761 else if (COMPARISON_CLASS_P (arg0))
6762 {
6763 if (TREE_CODE (type) == BOOLEAN_TYPE)
6764 {
6765 arg0 = copy_node (arg0);
6766 TREE_TYPE (arg0) = type;
6767 return arg0;
6768 }
6769 else if (TREE_CODE (type) != INTEGER_TYPE)
6770 return fold_build3 (COND_EXPR, type, arg0,
6771 fold_build1 (code, type,
6772 integer_one_node),
6773 fold_build1 (code, type,
6774 integer_zero_node));
6775 }
6776 }
6777
6778 switch (code)
6779 {
6780 case NOP_EXPR:
6781 case FLOAT_EXPR:
6782 case CONVERT_EXPR:
6783 case FIX_TRUNC_EXPR:
6784 case FIX_CEIL_EXPR:
6785 case FIX_FLOOR_EXPR:
6786 case FIX_ROUND_EXPR:
6787 if (TREE_TYPE (op0) == type)
6788 return op0;
6789
6790 /* If we have (type) (a CMP b) and type is an integral type, return a
6791 new expression involving the new type. */
6792 if (COMPARISON_CLASS_P (op0) && INTEGRAL_TYPE_P (type))
6793 return fold_build2 (TREE_CODE (op0), type, TREE_OPERAND (op0, 0),
6794 TREE_OPERAND (op0, 1));
6795
6796 /* Handle cases of two conversions in a row. */
6797 if (TREE_CODE (op0) == NOP_EXPR
6798 || TREE_CODE (op0) == CONVERT_EXPR)
6799 {
6800 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
6801 tree inter_type = TREE_TYPE (op0);
6802 int inside_int = INTEGRAL_TYPE_P (inside_type);
6803 int inside_ptr = POINTER_TYPE_P (inside_type);
6804 int inside_float = FLOAT_TYPE_P (inside_type);
6805 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
6806 unsigned int inside_prec = TYPE_PRECISION (inside_type);
6807 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
6808 int inter_int = INTEGRAL_TYPE_P (inter_type);
6809 int inter_ptr = POINTER_TYPE_P (inter_type);
6810 int inter_float = FLOAT_TYPE_P (inter_type);
6811 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
6812 unsigned int inter_prec = TYPE_PRECISION (inter_type);
6813 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
6814 int final_int = INTEGRAL_TYPE_P (type);
6815 int final_ptr = POINTER_TYPE_P (type);
6816 int final_float = FLOAT_TYPE_P (type);
6817 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
6818 unsigned int final_prec = TYPE_PRECISION (type);
6819 int final_unsignedp = TYPE_UNSIGNED (type);
6820
6821 /* In addition to the cases of two conversions in a row
6822 handled below, if we are converting something to its own
6823 type via an object of identical or wider precision, neither
6824 conversion is needed. */
6825 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
6826 && ((inter_int && final_int) || (inter_float && final_float))
6827 && inter_prec >= final_prec)
6828 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6829
6830 /* Likewise, if the intermediate and final types are either both
6831 float or both integer, we don't need the middle conversion if
6832 it is wider than the final type and doesn't change the signedness
6833 (for integers). Avoid this if the final type is a pointer
6834 since then we sometimes need the inner conversion. Likewise if
6835 the outer has a precision not equal to the size of its mode. */
6836 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
6837 || (inter_float && inside_float)
6838 || (inter_vec && inside_vec))
6839 && inter_prec >= inside_prec
6840 && (inter_float || inter_vec
6841 || inter_unsignedp == inside_unsignedp)
6842 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6843 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6844 && ! final_ptr
6845 && (! final_vec || inter_prec == inside_prec))
6846 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6847
6848 /* If we have a sign-extension of a zero-extended value, we can
6849 replace that by a single zero-extension. */
6850 if (inside_int && inter_int && final_int
6851 && inside_prec < inter_prec && inter_prec < final_prec
6852 && inside_unsignedp && !inter_unsignedp)
6853 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6854
6855 /* Two conversions in a row are not needed unless:
6856 - some conversion is floating-point (overstrict for now), or
6857 - some conversion is a vector (overstrict for now), or
6858 - the intermediate type is narrower than both initial and
6859 final, or
6860 - the intermediate type and innermost type differ in signedness,
6861 and the outermost type is wider than the intermediate, or
6862 - the initial type is a pointer type and the precisions of the
6863 intermediate and final types differ, or
6864 - the final type is a pointer type and the precisions of the
6865 initial and intermediate types differ. */
6866 if (! inside_float && ! inter_float && ! final_float
6867 && ! inside_vec && ! inter_vec && ! final_vec
6868 && (inter_prec > inside_prec || inter_prec > final_prec)
6869 && ! (inside_int && inter_int
6870 && inter_unsignedp != inside_unsignedp
6871 && inter_prec < final_prec)
6872 && ((inter_unsignedp && inter_prec > inside_prec)
6873 == (final_unsignedp && final_prec > inter_prec))
6874 && ! (inside_ptr && inter_prec != final_prec)
6875 && ! (final_ptr && inside_prec != inter_prec)
6876 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (type))
6877 && TYPE_MODE (type) == TYPE_MODE (inter_type))
6878 && ! final_ptr)
6879 return fold_build1 (code, type, TREE_OPERAND (op0, 0));
6880 }
6881
6882 /* Handle (T *)&A.B.C for A being of type T and B and C
6883 living at offset zero. This occurs frequently in
6884 C++ upcasting and then accessing the base. */
6885 if (TREE_CODE (op0) == ADDR_EXPR
6886 && POINTER_TYPE_P (type)
6887 && handled_component_p (TREE_OPERAND (op0, 0)))
6888 {
6889 HOST_WIDE_INT bitsize, bitpos;
6890 tree offset;
6891 enum machine_mode mode;
6892 int unsignedp, volatilep;
6893 tree base = TREE_OPERAND (op0, 0);
6894 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
6895 &mode, &unsignedp, &volatilep, false);
6896 /* If the reference was to a (constant) zero offset, we can use
6897 the address of the base if it has the same base type
6898 as the result type. */
6899 if (! offset && bitpos == 0
6900 && TYPE_MAIN_VARIANT (TREE_TYPE (type))
6901 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
6902 return fold_convert (type, build_fold_addr_expr (base));
6903 }
6904
6905 if (TREE_CODE (op0) == MODIFY_EXPR
6906 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
6907 /* Detect assigning a bitfield. */
6908 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
6909 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
6910 {
6911 /* Don't leave an assignment inside a conversion
6912 unless assigning a bitfield. */
6913 tem = fold_build1 (code, type, TREE_OPERAND (op0, 1));
6914 /* First do the assignment, then return converted constant. */
6915 tem = build2 (COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
6916 TREE_NO_WARNING (tem) = 1;
6917 TREE_USED (tem) = 1;
6918 return tem;
6919 }
6920
6921 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
6922 constant (if x has signed type, the sign bit cannot be set
6923 in c). This folds extension into the BIT_AND_EXPR. */
6924 if (INTEGRAL_TYPE_P (type)
6925 && TREE_CODE (type) != BOOLEAN_TYPE
6926 && TREE_CODE (op0) == BIT_AND_EXPR
6927 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
6928 {
6929 tree and = op0;
6930 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
6931 int change = 0;
6932
6933 if (TYPE_UNSIGNED (TREE_TYPE (and))
6934 || (TYPE_PRECISION (type)
6935 <= TYPE_PRECISION (TREE_TYPE (and))))
6936 change = 1;
6937 else if (TYPE_PRECISION (TREE_TYPE (and1))
6938 <= HOST_BITS_PER_WIDE_INT
6939 && host_integerp (and1, 1))
6940 {
6941 unsigned HOST_WIDE_INT cst;
6942
6943 cst = tree_low_cst (and1, 1);
6944 cst &= (HOST_WIDE_INT) -1
6945 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
6946 change = (cst == 0);
6947 #ifdef LOAD_EXTEND_OP
6948 if (change
6949 && !flag_syntax_only
6950 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
6951 == ZERO_EXTEND))
6952 {
6953 tree uns = lang_hooks.types.unsigned_type (TREE_TYPE (and0));
6954 and0 = fold_convert (uns, and0);
6955 and1 = fold_convert (uns, and1);
6956 }
6957 #endif
6958 }
6959 if (change)
6960 {
6961 tem = build_int_cst_wide (type, TREE_INT_CST_LOW (and1),
6962 TREE_INT_CST_HIGH (and1));
6963 tem = force_fit_type (tem, 0, TREE_OVERFLOW (and1),
6964 TREE_CONSTANT_OVERFLOW (and1));
6965 return fold_build2 (BIT_AND_EXPR, type,
6966 fold_convert (type, and0), tem);
6967 }
6968 }
6969
6970 /* Convert (T1)((T2)X op Y) into (T1)X op Y, for pointer types T1 and
6971 T2 being pointers to types of the same size. */
6972 if (POINTER_TYPE_P (type)
6973 && BINARY_CLASS_P (arg0)
6974 && TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
6975 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6976 {
6977 tree arg00 = TREE_OPERAND (arg0, 0);
6978 tree t0 = type;
6979 tree t1 = TREE_TYPE (arg00);
6980 tree tt0 = TREE_TYPE (t0);
6981 tree tt1 = TREE_TYPE (t1);
6982 tree s0 = TYPE_SIZE (tt0);
6983 tree s1 = TYPE_SIZE (tt1);
6984
6985 if (s0 && s1 && operand_equal_p (s0, s1, OEP_ONLY_CONST))
6986 return build2 (TREE_CODE (arg0), t0, fold_convert (t0, arg00),
6987 TREE_OPERAND (arg0, 1));
6988 }
6989
6990 tem = fold_convert_const (code, type, arg0);
6991 return tem ? tem : NULL_TREE;
6992
6993 case VIEW_CONVERT_EXPR:
6994 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
6995 return build1 (VIEW_CONVERT_EXPR, type, TREE_OPERAND (op0, 0));
6996 return NULL_TREE;
6997
6998 case NEGATE_EXPR:
6999 if (negate_expr_p (arg0))
7000 return fold_convert (type, negate_expr (arg0));
7001 /* Convert - (~A) to A + 1. */
7002 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == BIT_NOT_EXPR)
7003 return fold_build2 (PLUS_EXPR, type, TREE_OPERAND (arg0, 0),
7004 build_int_cst (type, 1));
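 /* As an illustration: in two's complement, ~A == -A - 1, so
    - (~A) == A + 1; e.g. with A = 5, ~A is -6 and - (-6) is 6. */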
7005 return NULL_TREE;
7006
7007 case ABS_EXPR:
7008 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
7009 return fold_abs_const (arg0, type);
7010 else if (TREE_CODE (arg0) == NEGATE_EXPR)
7011 return fold_build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
7012 /* Convert fabs((double)float) into (double)fabsf(float). */
7013 else if (TREE_CODE (arg0) == NOP_EXPR
7014 && TREE_CODE (type) == REAL_TYPE)
7015 {
7016 tree targ0 = strip_float_extensions (arg0);
7017 if (targ0 != arg0)
7018 return fold_convert (type, fold_build1 (ABS_EXPR,
7019 TREE_TYPE (targ0),
7020 targ0));
7021 }
7022 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
7023 else if (tree_expr_nonnegative_p (arg0) || TREE_CODE (arg0) == ABS_EXPR)
7024 return arg0;
7025
7026 /* Strip sign ops from argument. */
7027 if (TREE_CODE (type) == REAL_TYPE)
7028 {
7029 tem = fold_strip_sign_ops (arg0);
7030 if (tem)
7031 return fold_build1 (ABS_EXPR, type, fold_convert (type, tem));
7032 }
7033 return NULL_TREE;
7034
7035 case CONJ_EXPR:
7036 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7037 return fold_convert (type, arg0);
7038 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7039 return build2 (COMPLEX_EXPR, type,
7040 TREE_OPERAND (arg0, 0),
7041 negate_expr (TREE_OPERAND (arg0, 1)));
7042 else if (TREE_CODE (arg0) == COMPLEX_CST)
7043 return build_complex (type, TREE_REALPART (arg0),
7044 negate_expr (TREE_IMAGPART (arg0)));
7045 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7046 return fold_build2 (TREE_CODE (arg0), type,
7047 fold_build1 (CONJ_EXPR, type,
7048 TREE_OPERAND (arg0, 0)),
7049 fold_build1 (CONJ_EXPR, type,
7050 TREE_OPERAND (arg0, 1)));
7051 else if (TREE_CODE (arg0) == CONJ_EXPR)
7052 return TREE_OPERAND (arg0, 0);
7053 return NULL_TREE;
7054
7055 case BIT_NOT_EXPR:
7056 if (TREE_CODE (arg0) == INTEGER_CST)
7057 return fold_not_const (arg0, type);
7058 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
7059 return TREE_OPERAND (arg0, 0);
7060 /* Convert ~ (-A) to A - 1. */
7061 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
7062 return fold_build2 (MINUS_EXPR, type, TREE_OPERAND (arg0, 0),
7063 build_int_cst (type, 1));
7064 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
7065 else if (INTEGRAL_TYPE_P (type)
7066 && ((TREE_CODE (arg0) == MINUS_EXPR
7067 && integer_onep (TREE_OPERAND (arg0, 1)))
7068 || (TREE_CODE (arg0) == PLUS_EXPR
7069 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
7070 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
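 /* The two rewrites above are both instances of the two's complement
    identity ~X == -X - 1: taking X = -A gives ~ (-A) == A - 1, and
    taking X = A - 1 gives ~ (A - 1) == -A. */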
7071 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7072 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7073 && (tem = fold_unary (BIT_NOT_EXPR, type,
7074 fold_convert (type,
7075 TREE_OPERAND (arg0, 0)))))
7076 return fold_build2 (BIT_XOR_EXPR, type, tem,
7077 fold_convert (type, TREE_OPERAND (arg0, 1)));
7078 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7079 && (tem = fold_unary (BIT_NOT_EXPR, type,
7080 fold_convert (type,
7081 TREE_OPERAND (arg0, 1)))))
7082 return fold_build2 (BIT_XOR_EXPR, type,
7083 fold_convert (type, TREE_OPERAND (arg0, 0)), tem);
7084
7085 return NULL_TREE;
7086
7087 case TRUTH_NOT_EXPR:
7088 /* The argument to invert_truthvalue must have Boolean type. */
7089 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
7090 arg0 = fold_convert (boolean_type_node, arg0);
7091
7092 /* Note that the operand of this must be an int
7093 and its values must be 0 or 1.
7094 ("true" is a fixed value perhaps depending on the language,
7095 but we don't handle values other than 1 correctly yet.) */
7096 tem = invert_truthvalue (arg0);
7097 /* Avoid infinite recursion. */
7098 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
7099 return NULL_TREE;
7100 return fold_convert (type, tem);
7101
7102 case REALPART_EXPR:
7103 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7104 return NULL_TREE;
7105 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7106 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
7107 TREE_OPERAND (arg0, 1));
7108 else if (TREE_CODE (arg0) == COMPLEX_CST)
7109 return TREE_REALPART (arg0);
7110 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7111 return fold_build2 (TREE_CODE (arg0), type,
7112 fold_build1 (REALPART_EXPR, type,
7113 TREE_OPERAND (arg0, 0)),
7114 fold_build1 (REALPART_EXPR, type,
7115 TREE_OPERAND (arg0, 1)));
7116 return NULL_TREE;
7117
7118 case IMAGPART_EXPR:
7119 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
7120 return fold_convert (type, integer_zero_node);
7121 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
7122 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
7123 TREE_OPERAND (arg0, 0));
7124 else if (TREE_CODE (arg0) == COMPLEX_CST)
7125 return TREE_IMAGPART (arg0);
7126 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
7127 return fold_build2 (TREE_CODE (arg0), type,
7128 fold_build1 (IMAGPART_EXPR, type,
7129 TREE_OPERAND (arg0, 0)),
7130 fold_build1 (IMAGPART_EXPR, type,
7131 TREE_OPERAND (arg0, 1)));
7132 return NULL_TREE;
7133
7134 default:
7135 return NULL_TREE;
7136 } /* switch (code) */
7137 }
7138
7139 /* Fold a binary expression of code CODE and type TYPE with operands
7140 OP0 and OP1. Return the folded expression if folding is
7141 successful. Otherwise, return NULL_TREE. */
7142
7143 tree
7144 fold_binary (enum tree_code code, tree type, tree op0, tree op1)
7145 {
7146 tree t1 = NULL_TREE;
7147 tree tem;
7148 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
7149 enum tree_code_class kind = TREE_CODE_CLASS (code);
7150
7151 /* WINS will be nonzero when the switch is done
7152 if all operands are constant. */
7153 int wins = 1;
7154
7155 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7156 && TREE_CODE_LENGTH (code) == 2);
7157
7158 arg0 = op0;
7159 arg1 = op1;
7160
7161 if (arg0)
7162 {
7163 tree subop;
7164
7165 /* Strip any conversions that don't change the mode. This is
7166 safe for every expression, except for a comparison expression
7167 because its signedness is derived from its operands. So, in
7168 the latter case, only strip conversions that don't change the
7169 signedness.
7170
7171 Note that this is done as an internal manipulation within the
7172 constant folder, in order to find the simplest representation
7173 of the arguments so that their form can be studied. In any
7174 cases, the appropriate type conversions should be put back in
7175 the tree that will get out of the constant folder. */
7176 if (kind == tcc_comparison)
7177 STRIP_SIGN_NOPS (arg0);
7178 else
7179 STRIP_NOPS (arg0);
7180
7181 if (TREE_CODE (arg0) == COMPLEX_CST)
7182 subop = TREE_REALPART (arg0);
7183 else
7184 subop = arg0;
7185
7186 if (TREE_CODE (subop) != INTEGER_CST
7187 && TREE_CODE (subop) != REAL_CST)
7188 /* Note that TREE_CONSTANT isn't enough:
7189 static var addresses are constant but we can't
7190 do arithmetic on them. */
7191 wins = 0;
7192 }
7193
7194 if (arg1)
7195 {
7196 tree subop;
7197
7198 /* Strip any conversions that don't change the mode. This is
7199 safe for every expression, except for a comparison expression
7200 because its signedness is derived from its operands. So, in
7201 the latter case, only strip conversions that don't change the
7202 signedness.
7203
7204 Note that this is done as an internal manipulation within the
7205 constant folder, in order to find the simplest representation
7206 of the arguments so that their form can be studied. In any
7207 cases, the appropriate type conversions should be put back in
7208 the tree that will get out of the constant folder. */
7209 if (kind == tcc_comparison)
7210 STRIP_SIGN_NOPS (arg1);
7211 else
7212 STRIP_NOPS (arg1);
7213
7214 if (TREE_CODE (arg1) == COMPLEX_CST)
7215 subop = TREE_REALPART (arg1);
7216 else
7217 subop = arg1;
7218
7219 if (TREE_CODE (subop) != INTEGER_CST
7220 && TREE_CODE (subop) != REAL_CST)
7221 /* Note that TREE_CONSTANT isn't enough:
7222 static var addresses are constant but we can't
7223 do arithmetic on them. */
7224 wins = 0;
7225 }
7226
7227 /* If this is a commutative operation, and ARG0 is a constant, move it
7228 to ARG1 to reduce the number of tests below. */
7229 if (commutative_tree_code (code)
7230 && tree_swap_operands_p (arg0, arg1, true))
7231 return fold_build2 (code, type, op1, op0);
7232
7233 /* Now WINS is set as described above,
7234 ARG0 is the first operand of EXPR,
7235 and ARG1 is the second operand (if it has more than one operand).
7236
7237 First check for cases where an arithmetic operation is applied to a
7238 compound, conditional, or comparison operation. Push the arithmetic
7239 operation inside the compound or conditional to see if any folding
7240 can then be done. Convert comparison to conditional for this purpose.
7241 This also optimizes non-constant cases that used to be done in
7242 expand_expr.
7243
7244 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
7245 one of the operands is a truth value and the other is a truth value
7246 or a BIT_AND_EXPR with the constant 1. In that case, the
7247 code below would make the expression more complex. Change it to a
7248 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
7249 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
7250
7251 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
7252 || code == EQ_EXPR || code == NE_EXPR)
7253 && ((truth_value_p (TREE_CODE (arg0))
7254 && (truth_value_p (TREE_CODE (arg1))
7255 || (TREE_CODE (arg1) == BIT_AND_EXPR
7256 && integer_onep (TREE_OPERAND (arg1, 1)))))
7257 || (truth_value_p (TREE_CODE (arg1))
7258 && (truth_value_p (TREE_CODE (arg0))
7259 || (TREE_CODE (arg0) == BIT_AND_EXPR
7260 && integer_onep (TREE_OPERAND (arg0, 1)))))))
7261 {
7262 tem = fold_build2 (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
7263 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
7264 : TRUTH_XOR_EXPR,
7265 boolean_type_node,
7266 fold_convert (boolean_type_node, arg0),
7267 fold_convert (boolean_type_node, arg1));
7268
7269 if (code == EQ_EXPR)
7270 tem = invert_truthvalue (tem);
7271
7272 return fold_convert (type, tem);
7273 }
7274
7275 if (TREE_CODE_CLASS (code) == tcc_binary
7276 || TREE_CODE_CLASS (code) == tcc_comparison)
7277 {
7278 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7279 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7280 fold_build2 (code, type,
7281 TREE_OPERAND (arg0, 1), op1));
7282 if (TREE_CODE (arg1) == COMPOUND_EXPR
7283 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
7284 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
7285 fold_build2 (code, type,
7286 op0, TREE_OPERAND (arg1, 1)));
7287
7288 if (TREE_CODE (arg0) == COND_EXPR || COMPARISON_CLASS_P (arg0))
7289 {
7290 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7291 arg0, arg1,
7292 /*cond_first_p=*/1);
7293 if (tem != NULL_TREE)
7294 return tem;
7295 }
7296
7297 if (TREE_CODE (arg1) == COND_EXPR || COMPARISON_CLASS_P (arg1))
7298 {
7299 tem = fold_binary_op_with_conditional_arg (code, type, op0, op1,
7300 arg1, arg0,
7301 /*cond_first_p=*/0);
7302 if (tem != NULL_TREE)
7303 return tem;
7304 }
7305 }
7306
7307 switch (code)
7308 {
7309 case PLUS_EXPR:
7310 /* A + (-B) -> A - B */
7311 if (TREE_CODE (arg1) == NEGATE_EXPR)
7312 return fold_build2 (MINUS_EXPR, type,
7313 fold_convert (type, arg0),
7314 fold_convert (type, TREE_OPERAND (arg1, 0)));
7315 /* (-A) + B -> B - A */
7316 if (TREE_CODE (arg0) == NEGATE_EXPR
7317 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
7318 return fold_build2 (MINUS_EXPR, type,
7319 fold_convert (type, arg1),
7320 fold_convert (type, TREE_OPERAND (arg0, 0)));
7321 /* Convert ~A + 1 to -A. */
7322 if (INTEGRAL_TYPE_P (type)
7323 && TREE_CODE (arg0) == BIT_NOT_EXPR
7324 && integer_onep (arg1))
7325 return fold_build1 (NEGATE_EXPR, type, TREE_OPERAND (arg0, 0));
7326
7327 /* Handle (A1 * C1) + (A2 * C2), where A1 and A2, or C1 and C2,
7328 are the same or one. */
7329 if ((TREE_CODE (arg0) == MULT_EXPR
7330 || TREE_CODE (arg1) == MULT_EXPR)
7331 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7332 {
7333 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
7334 if (tem)
7335 return tem;
7336 }
7337
7338 if (! FLOAT_TYPE_P (type))
7339 {
7340 if (integer_zerop (arg1))
7341 return non_lvalue (fold_convert (type, arg0));
7342
7343 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
7344 with a constant, and the two constants have no bits in common,
7345 we should treat this as a BIT_IOR_EXPR since this may produce more
7346 simplifications. */
7347 if (TREE_CODE (arg0) == BIT_AND_EXPR
7348 && TREE_CODE (arg1) == BIT_AND_EXPR
7349 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7350 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
7351 && integer_zerop (const_binop (BIT_AND_EXPR,
7352 TREE_OPERAND (arg0, 1),
7353 TREE_OPERAND (arg1, 1), 0)))
7354 {
7355 code = BIT_IOR_EXPR;
7356 goto bit_ior;
7357 }
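 /* E.g. (x & 0xF0) + (x & 0x0F) becomes (x & 0xF0) | (x & 0x0F):
    with disjoint masks no bit position can produce a carry, so
    addition and inclusive-or compute the same value. */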
7358
7359 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
7360 (plus (plus (mult) (mult)) (foo)) so that we can
7361 take advantage of the factoring cases below. */
7362 if (((TREE_CODE (arg0) == PLUS_EXPR
7363 || TREE_CODE (arg0) == MINUS_EXPR)
7364 && TREE_CODE (arg1) == MULT_EXPR)
7365 || ((TREE_CODE (arg1) == PLUS_EXPR
7366 || TREE_CODE (arg1) == MINUS_EXPR)
7367 && TREE_CODE (arg0) == MULT_EXPR))
7368 {
7369 tree parg0, parg1, parg, marg;
7370 enum tree_code pcode;
7371
7372 if (TREE_CODE (arg1) == MULT_EXPR)
7373 parg = arg0, marg = arg1;
7374 else
7375 parg = arg1, marg = arg0;
7376 pcode = TREE_CODE (parg);
7377 parg0 = TREE_OPERAND (parg, 0);
7378 parg1 = TREE_OPERAND (parg, 1);
7379 STRIP_NOPS (parg0);
7380 STRIP_NOPS (parg1);
7381
7382 if (TREE_CODE (parg0) == MULT_EXPR
7383 && TREE_CODE (parg1) != MULT_EXPR)
7384 return fold_build2 (pcode, type,
7385 fold_build2 (PLUS_EXPR, type,
7386 fold_convert (type, parg0),
7387 fold_convert (type, marg)),
7388 fold_convert (type, parg1));
7389 if (TREE_CODE (parg0) != MULT_EXPR
7390 && TREE_CODE (parg1) == MULT_EXPR)
7391 return fold_build2 (PLUS_EXPR, type,
7392 fold_convert (type, parg0),
7393 fold_build2 (pcode, type,
7394 fold_convert (type, marg),
7395 fold_convert (type,
7396 parg1)));
7397 }
7398
7399 /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is the step
7400 of the array. The loop optimizer sometimes produces this type of
7401 expression. */
7402 if (TREE_CODE (arg0) == ADDR_EXPR)
7403 {
7404 tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
7405 if (tem)
7406 return fold_convert (type, tem);
7407 }
7408 else if (TREE_CODE (arg1) == ADDR_EXPR)
7409 {
7410 tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
7411 if (tem)
7412 return fold_convert (type, tem);
7413 }
7414 }
7415 else
7416 {
7417 /* See if ARG1 is zero and X + ARG1 reduces to X. */
7418 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
7419 return non_lvalue (fold_convert (type, arg0));
7420
7421 /* Likewise if the operands are reversed. */
7422 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7423 return non_lvalue (fold_convert (type, arg1));
7424
7425 /* Convert X + -C into X - C. */
7426 if (TREE_CODE (arg1) == REAL_CST
7427 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
7428 {
7429 tem = fold_negate_const (arg1, type);
7430 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
7431 return fold_build2 (MINUS_EXPR, type,
7432 fold_convert (type, arg0),
7433 fold_convert (type, tem));
7434 }
7435
7436 if (flag_unsafe_math_optimizations
7437 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7438 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7439 && (tem = distribute_real_division (code, type, arg0, arg1)))
7440 return tem;
7441
7442 /* Convert x+x into x*2.0. */
7443 if (operand_equal_p (arg0, arg1, 0)
7444 && SCALAR_FLOAT_TYPE_P (type))
7445 return fold_build2 (MULT_EXPR, type, arg0,
7446 build_real (type, dconst2));
7447
7448 /* Convert a + (b*c + d*e) into (a + b*c) + d*e. */
7449 if (flag_unsafe_math_optimizations
7450 && TREE_CODE (arg1) == PLUS_EXPR
7451 && TREE_CODE (arg0) != MULT_EXPR)
7452 {
7453 tree tree10 = TREE_OPERAND (arg1, 0);
7454 tree tree11 = TREE_OPERAND (arg1, 1);
7455 if (TREE_CODE (tree11) == MULT_EXPR
7456 && TREE_CODE (tree10) == MULT_EXPR)
7457 {
7458 tree tree0;
7459 tree0 = fold_build2 (PLUS_EXPR, type, arg0, tree10);
7460 return fold_build2 (PLUS_EXPR, type, tree0, tree11);
7461 }
7462 }
7463 /* Convert (b*c + d*e) + a into b*c + (d*e + a). */
7464 if (flag_unsafe_math_optimizations
7465 && TREE_CODE (arg0) == PLUS_EXPR
7466 && TREE_CODE (arg1) != MULT_EXPR)
7467 {
7468 tree tree00 = TREE_OPERAND (arg0, 0);
7469 tree tree01 = TREE_OPERAND (arg0, 1);
7470 if (TREE_CODE (tree01) == MULT_EXPR
7471 && TREE_CODE (tree00) == MULT_EXPR)
7472 {
7473 tree tree0;
7474 tree0 = fold_build2 (PLUS_EXPR, type, tree01, arg1);
7475 return fold_build2 (PLUS_EXPR, type, tree00, tree0);
7476 }
7477 }
7478 }
7479
7480 bit_rotate:
7481 /* (A << C1) + (A >> C2) is a rotate of A by C1 bits if A is
7482 unsigned and C1+C2 is the size of A. */
7483 /* (A << B) + (A >> (Z - B)) is a rotate of A by B bits if A is
7484 unsigned and Z is the size of A. */
7485 {
7486 enum tree_code code0, code1;
7487 code0 = TREE_CODE (arg0);
7488 code1 = TREE_CODE (arg1);
7489 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
7490 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
7491 && operand_equal_p (TREE_OPERAND (arg0, 0),
7492 TREE_OPERAND (arg1, 0), 0)
7493 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
7494 {
7495 tree tree01, tree11;
7496 enum tree_code code01, code11;
7497
7498 tree01 = TREE_OPERAND (arg0, 1);
7499 tree11 = TREE_OPERAND (arg1, 1);
7500 STRIP_NOPS (tree01);
7501 STRIP_NOPS (tree11);
7502 code01 = TREE_CODE (tree01);
7503 code11 = TREE_CODE (tree11);
7504 if (code01 == INTEGER_CST
7505 && code11 == INTEGER_CST
7506 && TREE_INT_CST_HIGH (tree01) == 0
7507 && TREE_INT_CST_HIGH (tree11) == 0
7508 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
7509 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
7510 return build2 (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
7511 code0 == LSHIFT_EXPR ? tree01 : tree11);
7512 else if (code11 == MINUS_EXPR)
7513 {
7514 tree tree110, tree111;
7515 tree110 = TREE_OPERAND (tree11, 0);
7516 tree111 = TREE_OPERAND (tree11, 1);
7517 STRIP_NOPS (tree110);
7518 STRIP_NOPS (tree111);
7519 if (TREE_CODE (tree110) == INTEGER_CST
7520 && 0 == compare_tree_int (tree110,
7521 TYPE_PRECISION
7522 (TREE_TYPE (TREE_OPERAND
7523 (arg0, 0))))
7524 && operand_equal_p (tree01, tree111, 0))
7525 return build2 ((code0 == LSHIFT_EXPR
7526 ? LROTATE_EXPR
7527 : RROTATE_EXPR),
7528 type, TREE_OPERAND (arg0, 0), tree01);
7529 }
7530 else if (code01 == MINUS_EXPR)
7531 {
7532 tree tree010, tree011;
7533 tree010 = TREE_OPERAND (tree01, 0);
7534 tree011 = TREE_OPERAND (tree01, 1);
7535 STRIP_NOPS (tree010);
7536 STRIP_NOPS (tree011);
7537 if (TREE_CODE (tree010) == INTEGER_CST
7538 && 0 == compare_tree_int (tree010,
7539 TYPE_PRECISION
7540 (TREE_TYPE (TREE_OPERAND
7541 (arg0, 0))))
7542 && operand_equal_p (tree11, tree011, 0))
7543 return build2 ((code0 != LSHIFT_EXPR
7544 ? LROTATE_EXPR
7545 : RROTATE_EXPR),
7546 type, TREE_OPERAND (arg0, 0), tree11);
7547 }
7548 }
7549 }
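 /* As a concrete instance: for a 32-bit unsigned x, both
    (x << 3) + (x >> 29) and (x << 3) + (x >> (32 - 3)) are
    recognized here as x rotated left by 3. */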
7550
7551 associate:
7552 /* In most languages, we can't associate operations on floats through
7553 parentheses. Rather than remember where the parentheses were, we
7554 don't associate floats at all, unless the user has specified
7555 -funsafe-math-optimizations. */
7556
7557 if (! wins
7558 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7559 {
7560 tree var0, con0, lit0, minus_lit0;
7561 tree var1, con1, lit1, minus_lit1;
7562
7563 /* Split both trees into variables, constants, and literals. Then
7564 associate each group together, the constants with literals,
7565 then the result with variables. This increases the chances of
7566 literals being recombined later and of generating relocatable
7567 expressions for the sum of a constant and literal. */
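 /* Roughly, for arg0 = (x + 3) and arg1 = (y + 2) with code
    MINUS_EXPR, the split yields x and y as variable parts and the
    literals 3 and -2 (arg1's parts come back negated), which
    recombine below as (x - y) + 1. */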
7568 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
7569 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
7570 code == MINUS_EXPR);
7571
7572 /* Only do something if we found more than two objects. Otherwise,
7573 nothing has changed and we risk infinite recursion. */
7574 if (2 < ((var0 != 0) + (var1 != 0)
7575 + (con0 != 0) + (con1 != 0)
7576 + (lit0 != 0) + (lit1 != 0)
7577 + (minus_lit0 != 0) + (minus_lit1 != 0)))
7578 {
7579 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
7580 if (code == MINUS_EXPR)
7581 code = PLUS_EXPR;
7582
7583 var0 = associate_trees (var0, var1, code, type);
7584 con0 = associate_trees (con0, con1, code, type);
7585 lit0 = associate_trees (lit0, lit1, code, type);
7586 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
7587
7588 /* Preserve the MINUS_EXPR if the negative part of the literal is
7589 greater than the positive part. Otherwise, the multiplicative
7590 folding code (i.e. extract_muldiv) may be fooled when
7591 unsigned constants are subtracted, as in the following
7592 example: ((X*2 + 4) - 8U)/2. */
7593 if (minus_lit0 && lit0)
7594 {
7595 if (TREE_CODE (lit0) == INTEGER_CST
7596 && TREE_CODE (minus_lit0) == INTEGER_CST
7597 && tree_int_cst_lt (lit0, minus_lit0))
7598 {
7599 minus_lit0 = associate_trees (minus_lit0, lit0,
7600 MINUS_EXPR, type);
7601 lit0 = 0;
7602 }
7603 else
7604 {
7605 lit0 = associate_trees (lit0, minus_lit0,
7606 MINUS_EXPR, type);
7607 minus_lit0 = 0;
7608 }
7609 }
7610 if (minus_lit0)
7611 {
7612 if (con0 == 0)
7613 return fold_convert (type,
7614 associate_trees (var0, minus_lit0,
7615 MINUS_EXPR, type));
7616 else
7617 {
7618 con0 = associate_trees (con0, minus_lit0,
7619 MINUS_EXPR, type);
7620 return fold_convert (type,
7621 associate_trees (var0, con0,
7622 PLUS_EXPR, type));
7623 }
7624 }
7625
7626 con0 = associate_trees (con0, lit0, code, type);
7627 return fold_convert (type, associate_trees (var0, con0,
7628 code, type));
7629 }
7630 }
7631
7632 binary:
7633 if (wins)
7634 t1 = const_binop (code, arg0, arg1, 0);
7635 if (t1 != NULL_TREE)
7636 {
7637 /* The return value should always have
7638 the same type as the original expression. */
7639 if (TREE_TYPE (t1) != type)
7640 t1 = fold_convert (type, t1);
7641
7642 return t1;
7643 }
7644 return NULL_TREE;
7645
7646 case MINUS_EXPR:
7647 /* A - (-B) -> A + B */
7648 if (TREE_CODE (arg1) == NEGATE_EXPR)
7649 return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
7650 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
7651 if (TREE_CODE (arg0) == NEGATE_EXPR
7652 && (FLOAT_TYPE_P (type)
7653 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
7654 && negate_expr_p (arg1)
7655 && reorder_operands_p (arg0, arg1))
7656 return fold_build2 (MINUS_EXPR, type, negate_expr (arg1),
7657 TREE_OPERAND (arg0, 0));
7658 /* Convert -A - 1 to ~A. */
7659 if (INTEGRAL_TYPE_P (type)
7660 && TREE_CODE (arg0) == NEGATE_EXPR
7661 && integer_onep (arg1))
7662 return fold_build1 (BIT_NOT_EXPR, type, TREE_OPERAND (arg0, 0));
7663
7664 /* Convert -1 - A to ~A. */
7665 if (INTEGRAL_TYPE_P (type)
7666 && integer_all_onesp (arg0))
7667 return fold_build1 (BIT_NOT_EXPR, type, arg1);
7668
7669 if (! FLOAT_TYPE_P (type))
7670 {
7671 if (! wins && integer_zerop (arg0))
7672 return negate_expr (fold_convert (type, arg1));
7673 if (integer_zerop (arg1))
7674 return non_lvalue (fold_convert (type, arg0));
7675
7676 /* Fold A - (A & B) into ~B & A. */
7677 if (!TREE_SIDE_EFFECTS (arg0)
7678 && TREE_CODE (arg1) == BIT_AND_EXPR)
7679 {
7680 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
7681 return fold_build2 (BIT_AND_EXPR, type,
7682 fold_build1 (BIT_NOT_EXPR, type,
7683 TREE_OPERAND (arg1, 0)),
7684 arg0);
7685 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
7686 return fold_build2 (BIT_AND_EXPR, type,
7687 fold_build1 (BIT_NOT_EXPR, type,
7688 TREE_OPERAND (arg1, 1)),
7689 arg0);
7690 }
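 /* This is safe because A & B has bits set only where A does, so the
    subtraction merely clears those bits: e.g. A = 13 (0b1101),
    B = 6 (0b0110) gives A - (A & B) == 13 - 4 == 9 == A & ~B. */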
7691
7692 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
7693 any power of 2 minus 1. */
7694 if (TREE_CODE (arg0) == BIT_AND_EXPR
7695 && TREE_CODE (arg1) == BIT_AND_EXPR
7696 && operand_equal_p (TREE_OPERAND (arg0, 0),
7697 TREE_OPERAND (arg1, 0), 0))
7698 {
7699 tree mask0 = TREE_OPERAND (arg0, 1);
7700 tree mask1 = TREE_OPERAND (arg1, 1);
7701 tree tem = fold_build1 (BIT_NOT_EXPR, type, mask0);
7702
7703 if (operand_equal_p (tem, mask1, 0))
7704 {
7705 tem = fold_build2 (BIT_XOR_EXPR, type,
7706 TREE_OPERAND (arg0, 0), mask1);
7707 return fold_build2 (MINUS_EXPR, type, tem, mask1);
7708 }
7709 }
7710 }
7711
7712 /* See if ARG1 is zero and X - ARG1 reduces to X. */
7713 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
7714 return non_lvalue (fold_convert (type, arg0));
7715
7716 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
7717 ARG0 is zero and X + ARG0 reduces to X, since that would mean
7718 (-ARG1 + ARG0) reduces to -ARG1. */
7719 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
7720 return negate_expr (fold_convert (type, arg1));
7721
7722 /* Fold &x - &x. This can happen from &x.foo - &x.
7723 This is unsafe for certain floats even in non-IEEE formats.
7724 In IEEE, it is unsafe because it gives the wrong result for NaNs.
7725 Also note that operand_equal_p is always false if an operand
7726 is volatile. */
7727
7728 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
7729 && operand_equal_p (arg0, arg1, 0))
7730 return fold_convert (type, integer_zero_node);
7731
7732 /* A - B -> A + (-B) if B is easily negatable. */
7733 if (!wins && negate_expr_p (arg1)
7734 && ((FLOAT_TYPE_P (type)
7735 /* Avoid this transformation if B is a positive REAL_CST. */
7736 && (TREE_CODE (arg1) != REAL_CST
7737 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
7738 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
7739 return fold_build2 (PLUS_EXPR, type,
7740 fold_convert (type, arg0),
7741 fold_convert (type, negate_expr (arg1)));
7742
7743 /* Try folding difference of addresses. */
7744 {
7745 HOST_WIDE_INT diff;
7746
7747 if ((TREE_CODE (arg0) == ADDR_EXPR
7748 || TREE_CODE (arg1) == ADDR_EXPR)
7749 && ptr_difference_const (arg0, arg1, &diff))
7750 return build_int_cst_type (type, diff);
7751 }
7752
7753 /* Fold &a[i] - &a[j] to i-j. */
7754 if (TREE_CODE (arg0) == ADDR_EXPR
7755 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
7756 && TREE_CODE (arg1) == ADDR_EXPR
7757 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
7758 {
7759 tree aref0 = TREE_OPERAND (arg0, 0);
7760 tree aref1 = TREE_OPERAND (arg1, 0);
7761 if (operand_equal_p (TREE_OPERAND (aref0, 0),
7762 TREE_OPERAND (aref1, 0), 0))
7763 {
7764 tree op0 = fold_convert (type, TREE_OPERAND (aref0, 1));
7765 tree op1 = fold_convert (type, TREE_OPERAND (aref1, 1));
7766 tree esz = array_ref_element_size (aref0);
7767 tree diff = build2 (MINUS_EXPR, type, op0, op1);
7768 return fold_build2 (MULT_EXPR, type, diff,
7769 fold_convert (type, esz));
7770
7771 }
7772 }
7773
7774 /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is the step
7775 of the array. The loop optimizer sometimes produces this type of
7776 expression. */
7777 if (TREE_CODE (arg0) == ADDR_EXPR)
7778 {
7779 tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
7780 if (tem)
7781 return fold_convert (type, tem);
7782 }
7783
7784 if (flag_unsafe_math_optimizations
7785 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
7786 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
7787 && (tem = distribute_real_division (code, type, arg0, arg1)))
7788 return tem;
7789
7790 /* Handle (A1 * C1) - (A2 * C2), where A1 and A2, or C1 and C2,
7791 are the same or one. */
7792 if ((TREE_CODE (arg0) == MULT_EXPR
7793 || TREE_CODE (arg1) == MULT_EXPR)
7794 && (!FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
7795 {
7796 tree tem = fold_plusminus_mult_expr (code, type, arg0, arg1);
7797 if (tem)
7798 return tem;
7799 }
7800
7801 goto associate;
7802
7803 case MULT_EXPR:
7804 /* (-A) * (-B) -> A * B */
7805 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
7806 return fold_build2 (MULT_EXPR, type,
7807 TREE_OPERAND (arg0, 0),
7808 negate_expr (arg1));
7809 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
7810 return fold_build2 (MULT_EXPR, type,
7811 negate_expr (arg0),
7812 TREE_OPERAND (arg1, 0));
7813
7814 if (! FLOAT_TYPE_P (type))
7815 {
7816 if (integer_zerop (arg1))
7817 return omit_one_operand (type, arg1, arg0);
7818 if (integer_onep (arg1))
7819 return non_lvalue (fold_convert (type, arg0));
7820 /* Transform x * -1 into -x. */
7821 if (integer_all_onesp (arg1))
7822 return fold_convert (type, negate_expr (arg0));
7823
7824 /* (a * (1 << b)) is (a << b) */
7825 if (TREE_CODE (arg1) == LSHIFT_EXPR
7826 && integer_onep (TREE_OPERAND (arg1, 0)))
7827 return fold_build2 (LSHIFT_EXPR, type, arg0,
7828 TREE_OPERAND (arg1, 1));
7829 if (TREE_CODE (arg0) == LSHIFT_EXPR
7830 && integer_onep (TREE_OPERAND (arg0, 0)))
7831 return fold_build2 (LSHIFT_EXPR, type, arg1,
7832 TREE_OPERAND (arg0, 1));
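 /* Both directions use the fact that 1 << b is the b-th power of
    two, so multiplying by it is the same as shifting left by b;
    e.g. 5 * (1 << 3) == 5 << 3 == 40. */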
7833
7834 if (TREE_CODE (arg1) == INTEGER_CST
7835 && 0 != (tem = extract_muldiv (op0,
7836 fold_convert (type, arg1),
7837 code, NULL_TREE)))
7838 return fold_convert (type, tem);
7839
7840 }
7841 else
7842 {
7843 /* Maybe fold x * 0 to 0. The expressions aren't the same
7844 when x is NaN, since x * 0 is also NaN. Nor are they the
7845 same in modes with signed zeros, since multiplying a
7846 negative value by 0 gives -0, not +0. */
7847 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
7848 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
7849 && real_zerop (arg1))
7850 return omit_one_operand (type, arg1, arg0);
7851 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
7852 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7853 && real_onep (arg1))
7854 return non_lvalue (fold_convert (type, arg0));
7855
7856 /* Transform x * -1.0 into -x. */
7857 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
7858 && real_minus_onep (arg1))
7859 return fold_convert (type, negate_expr (arg0));
7860
7861 /* Convert (C1/X)*C2 into (C1*C2)/X. */
7862 if (flag_unsafe_math_optimizations
7863 && TREE_CODE (arg0) == RDIV_EXPR
7864 && TREE_CODE (arg1) == REAL_CST
7865 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
7866 {
7867 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
7868 arg1, 0);
7869 if (tem)
7870 return fold_build2 (RDIV_EXPR, type, tem,
7871 TREE_OPERAND (arg0, 1));
7872 }
7873
7874 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
7875 if (operand_equal_p (arg0, arg1, 0))
7876 {
7877 tree tem = fold_strip_sign_ops (arg0);
7878 if (tem != NULL_TREE)
7879 {
7880 tem = fold_convert (type, tem);
7881 return fold_build2 (MULT_EXPR, type, tem, tem);
7882 }
7883 }
7884
7885 if (flag_unsafe_math_optimizations)
7886 {
7887 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
7888 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
7889
7890 /* Optimizations of root(...)*root(...). */
7891 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
7892 {
7893 tree rootfn, arg, arglist;
7894 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7895 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7896
7897 /* Optimize sqrt(x)*sqrt(x) as x. */
7898 if (BUILTIN_SQRT_P (fcode0)
7899 && operand_equal_p (arg00, arg10, 0)
7900 && ! HONOR_SNANS (TYPE_MODE (type)))
7901 return arg00;
7902
7903 /* Optimize root(x)*root(y) as root(x*y). */
7904 rootfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7905 arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7906 arglist = build_tree_list (NULL_TREE, arg);
7907 return build_function_call_expr (rootfn, arglist);
7908 }
7909
7910 /* Optimize expN(x)*expN(y) as expN(x+y). */
7911 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
7912 {
7913 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7914 tree arg = fold_build2 (PLUS_EXPR, type,
7915 TREE_VALUE (TREE_OPERAND (arg0, 1)),
7916 TREE_VALUE (TREE_OPERAND (arg1, 1)));
7917 tree arglist = build_tree_list (NULL_TREE, arg);
7918 return build_function_call_expr (expfn, arglist);
7919 }
7920
7921 /* Optimizations of pow(...)*pow(...). */
7922 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
7923 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
7924 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
7925 {
7926 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
7927 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
7928 1)));
7929 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7930 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7931 1)));
7932
7933 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
7934 if (operand_equal_p (arg01, arg11, 0))
7935 {
7936 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7937 tree arg = fold_build2 (MULT_EXPR, type, arg00, arg10);
7938 tree arglist = tree_cons (NULL_TREE, arg,
7939 build_tree_list (NULL_TREE,
7940 arg01));
7941 return build_function_call_expr (powfn, arglist);
7942 }
7943
7944 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
7945 if (operand_equal_p (arg00, arg10, 0))
7946 {
7947 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
7948 tree arg = fold_build2 (PLUS_EXPR, type, arg01, arg11);
7949 tree arglist = tree_cons (NULL_TREE, arg00,
7950 build_tree_list (NULL_TREE,
7951 arg));
7952 return build_function_call_expr (powfn, arglist);
7953 }
7954 }
7955
7956 /* Optimize tan(x)*cos(x) as sin(x). */
7957 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
7958 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
7959 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
7960 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
7961 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
7962 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
7963 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
7964 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
7965 {
7966 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
7967
7968 if (sinfn != NULL_TREE)
7969 return build_function_call_expr (sinfn,
7970 TREE_OPERAND (arg0, 1));
7971 }
7972
7973 /* Optimize x*pow(x,c) as pow(x,c+1). */
7974 if (fcode1 == BUILT_IN_POW
7975 || fcode1 == BUILT_IN_POWF
7976 || fcode1 == BUILT_IN_POWL)
7977 {
7978 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
7979 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
7980 1)));
7981 if (TREE_CODE (arg11) == REAL_CST
7982 && ! TREE_CONSTANT_OVERFLOW (arg11)
7983 && operand_equal_p (arg0, arg10, 0))
7984 {
7985 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
7986 REAL_VALUE_TYPE c;
7987 tree arg, arglist;
7988
7989 c = TREE_REAL_CST (arg11);
7990 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
7991 arg = build_real (type, c);
7992 arglist = build_tree_list (NULL_TREE, arg);
7993 arglist = tree_cons (NULL_TREE, arg0, arglist);
7994 return build_function_call_expr (powfn, arglist);
7995 }
7996 }
7997
7998 /* Optimize pow(x,c)*x as pow(x,c+1). */
7999 if (fcode0 == BUILT_IN_POW
8000 || fcode0 == BUILT_IN_POWF
8001 || fcode0 == BUILT_IN_POWL)
8002 {
8003 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8004 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
8005 1)));
8006 if (TREE_CODE (arg01) == REAL_CST
8007 && ! TREE_CONSTANT_OVERFLOW (arg01)
8008 && operand_equal_p (arg1, arg00, 0))
8009 {
8010 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8011 REAL_VALUE_TYPE c;
8012 tree arg, arglist;
8013
8014 c = TREE_REAL_CST (arg01);
8015 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
8016 arg = build_real (type, c);
8017 arglist = build_tree_list (NULL_TREE, arg);
8018 arglist = tree_cons (NULL_TREE, arg1, arglist);
8019 return build_function_call_expr (powfn, arglist);
8020 }
8021 }
8022
8023 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
8024 if (! optimize_size
8025 && operand_equal_p (arg0, arg1, 0))
8026 {
8027 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
8028
8029 if (powfn)
8030 {
8031 tree arg = build_real (type, dconst2);
8032 tree arglist = build_tree_list (NULL_TREE, arg);
8033 arglist = tree_cons (NULL_TREE, arg0, arglist);
8034 return build_function_call_expr (powfn, arglist);
8035 }
8036 }
8037 }
8038 }
8039 goto associate;
8040
8041 case BIT_IOR_EXPR:
8042 bit_ior:
8043 if (integer_all_onesp (arg1))
8044 return omit_one_operand (type, arg1, arg0);
8045 if (integer_zerop (arg1))
8046 return non_lvalue (fold_convert (type, arg0));
8047 if (operand_equal_p (arg0, arg1, 0))
8048 return non_lvalue (fold_convert (type, arg0));
8049
8050 /* ~X | X is -1. */
8051 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8052 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8053 {
8054 t1 = build_int_cst (type, -1);
8055 t1 = force_fit_type (t1, 0, false, false);
8056 return omit_one_operand (type, t1, arg1);
8057 }
8058
8059 /* X | ~X is -1. */
8060 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8061 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8062 {
8063 t1 = build_int_cst (type, -1);
8064 t1 = force_fit_type (t1, 0, false, false);
8065 return omit_one_operand (type, t1, arg0);
8066 }
8067
8068 t1 = distribute_bit_expr (code, type, arg0, arg1);
8069 if (t1 != NULL_TREE)
8070 return t1;
8071
8072 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
8073
8074 This results in more efficient code for machines without a NAND
8075 instruction. Combine will canonicalize to the first form
8076 which will allow use of NAND instructions provided by the
8077 backend if they exist. */
8078 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8079 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8080 {
8081 return fold_build1 (BIT_NOT_EXPR, type,
8082 build2 (BIT_AND_EXPR, type,
8083 TREE_OPERAND (arg0, 0),
8084 TREE_OPERAND (arg1, 0)));
8085 }
8086
8087 /* See if this can be simplified into a rotate first. If that
8088 is unsuccessful continue in the association code. */
8089 goto bit_rotate;
8090
8091 case BIT_XOR_EXPR:
8092 if (integer_zerop (arg1))
8093 return non_lvalue (fold_convert (type, arg0));
8094 if (integer_all_onesp (arg1))
8095 return fold_build1 (BIT_NOT_EXPR, type, arg0);
8096 if (operand_equal_p (arg0, arg1, 0))
8097 return omit_one_operand (type, integer_zero_node, arg0);
8098
8099 /* ~X ^ X is -1. */
8100 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8101 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8102 {
8103 t1 = build_int_cst (type, -1);
8104 t1 = force_fit_type (t1, 0, false, false);
8105 return omit_one_operand (type, t1, arg1);
8106 }
8107
8108 /* X ^ ~X is -1. */
8109 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8110 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8111 {
8112 t1 = build_int_cst (type, -1);
8113 t1 = force_fit_type (t1, 0, false, false);
8114 return omit_one_operand (type, t1, arg0);
8115 }
8116
8117 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
8118 with a constant, and the two constants have no bits in common,
8119 we should treat this as a BIT_IOR_EXPR since this may produce more
8120 simplifications. */
8121 if (TREE_CODE (arg0) == BIT_AND_EXPR
8122 && TREE_CODE (arg1) == BIT_AND_EXPR
8123 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8124 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8125 && integer_zerop (const_binop (BIT_AND_EXPR,
8126 TREE_OPERAND (arg0, 1),
8127 TREE_OPERAND (arg1, 1), 0)))
8128 {
8129 code = BIT_IOR_EXPR;
8130 goto bit_ior;
8131 }
8132
8133 /* (X | Y) ^ X -> Y & ~X */
8134 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8135 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8136 {
8137 tree t2 = TREE_OPERAND (arg0, 1);
8138 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8139 arg1);
8140 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8141 fold_convert (type, t1));
8142 return t1;
8143 }
8144
8145 /* (Y | X) ^ X -> Y & ~X */
8146 if (TREE_CODE (arg0) == BIT_IOR_EXPR
8147 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
8148 {
8149 tree t2 = TREE_OPERAND (arg0, 0);
8150 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8151 arg1);
8152 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8153 fold_convert (type, t1));
8154 return t1;
8155 }
8156
8157 /* X ^ (X | Y) -> Y & ~X */
8158 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8159 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
8160 {
8161 tree t2 = TREE_OPERAND (arg1, 1);
8162 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8163 arg0);
8164 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8165 fold_convert (type, t1));
8166 return t1;
8167 }
8168
8169 /* X ^ (Y | X) -> Y & ~X */
8170 if (TREE_CODE (arg1) == BIT_IOR_EXPR
8171 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
8172 {
8173 tree t2 = TREE_OPERAND (arg1, 0);
8174 t1 = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg0),
8175 arg0);
8176 t1 = fold_build2 (BIT_AND_EXPR, type, fold_convert (type, t2),
8177 fold_convert (type, t1));
8178 return t1;
8179 }
8180
8181 /* Convert ~X ^ ~Y to X ^ Y. */
8182 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8183 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8184 return fold_build2 (code, type,
8185 fold_convert (type, TREE_OPERAND (arg0, 0)),
8186 fold_convert (type, TREE_OPERAND (arg1, 0)));
8187
8188 /* See if this can be simplified into a rotate first. If that
8189 is unsuccessful continue in the association code. */
8190 goto bit_rotate;
8191
8192 case BIT_AND_EXPR:
8193 if (integer_all_onesp (arg1))
8194 return non_lvalue (fold_convert (type, arg0));
8195 if (integer_zerop (arg1))
8196 return omit_one_operand (type, arg1, arg0);
8197 if (operand_equal_p (arg0, arg1, 0))
8198 return non_lvalue (fold_convert (type, arg0));
8199
8200 /* ~X & X is always zero. */
8201 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8202 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8203 return omit_one_operand (type, integer_zero_node, arg1);
8204
8205 /* X & ~X is always zero. */
8206 if (TREE_CODE (arg1) == BIT_NOT_EXPR
8207 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8208 return omit_one_operand (type, integer_zero_node, arg0);
8209
8210 t1 = distribute_bit_expr (code, type, arg0, arg1);
8211 if (t1 != NULL_TREE)
8212 return t1;
8213 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
8214 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
8215 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
8216 {
8217 unsigned int prec
8218 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
8219
8220 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
8221 && (~TREE_INT_CST_LOW (arg1)
8222 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
8223 return fold_convert (type, TREE_OPERAND (arg0, 0));
8224 }
8225
8226 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
8227
8228 This results in more efficient code for machines without a NOR
8229 instruction. Combine will canonicalize to the first form
8230 which will allow use of NOR instructions provided by the
8231 backend if they exist. */
8232 if (TREE_CODE (arg0) == BIT_NOT_EXPR
8233 && TREE_CODE (arg1) == BIT_NOT_EXPR)
8234 {
8235 return fold_build1 (BIT_NOT_EXPR, type,
8236 build2 (BIT_IOR_EXPR, type,
8237 TREE_OPERAND (arg0, 0),
8238 TREE_OPERAND (arg1, 0)));
8239 }
8240
8241 goto associate;
8242
8243 case RDIV_EXPR:
8244 /* Don't touch a floating-point divide by zero unless the mode
8245 of the constant can represent infinity. */
8246 if (TREE_CODE (arg1) == REAL_CST
8247 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
8248 && real_zerop (arg1))
8249 return NULL_TREE;
8250
8251 /* Optimize A / A to 1.0 if we don't care about
8252 NaNs or Infinities. */
8253 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
8254 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
8255 && operand_equal_p (arg0, arg1, 0))
8256 {
8257 tree r = build_real (TREE_TYPE (arg0), dconst1);
8258
8259 return omit_two_operands (type, r, arg0, arg1);
8260 }
8261
8262 /* (-A) / (-B) -> A / B */
8263 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
8264 return fold_build2 (RDIV_EXPR, type,
8265 TREE_OPERAND (arg0, 0),
8266 negate_expr (arg1));
8267 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
8268 return fold_build2 (RDIV_EXPR, type,
8269 negate_expr (arg0),
8270 TREE_OPERAND (arg1, 0));
8271
8272 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
8273 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8274 && real_onep (arg1))
8275 return non_lvalue (fold_convert (type, arg0));
8276
8277 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
8278 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
8279 && real_minus_onep (arg1))
8280 return non_lvalue (fold_convert (type, negate_expr (arg0)));
8281
8282 /* If ARG1 is a constant, we can convert this to a multiply by the
8283 reciprocal. This does not have the same rounding properties,
8284 so only do this if -funsafe-math-optimizations. We can actually
8285 always safely do it if ARG1 is a power of two, but it's hard to
8286 tell if it is or not in a portable manner. */
8287 if (TREE_CODE (arg1) == REAL_CST)
8288 {
8289 if (flag_unsafe_math_optimizations
8290 && 0 != (tem = const_binop (code, build_real (type, dconst1),
8291 arg1, 0)))
8292 return fold_build2 (MULT_EXPR, type, arg0, tem);
8293 /* Find the reciprocal if optimizing and the result is exact. */
8294 if (optimize)
8295 {
8296 REAL_VALUE_TYPE r;
8297 r = TREE_REAL_CST (arg1);
8298 if (exact_real_inverse (TYPE_MODE (TREE_TYPE (arg0)), &r))
8299 {
8300 tem = build_real (type, r);
8301 return fold_build2 (MULT_EXPR, type,
8302 fold_convert (type, arg0), tem);
8303 }
8304 }
8305 }
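 /* E.g. x / 4.0 can always become x * 0.25, because 0.25 is exactly
    representable and the product rounds the same way; x / 3.0 may
    only become x * (1.0 / 3.0) under -funsafe-math-optimizations,
    since 1.0 / 3.0 is inexact. */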
8306 /* Convert A/B/C to A/(B*C). */
8307 if (flag_unsafe_math_optimizations
8308 && TREE_CODE (arg0) == RDIV_EXPR)
8309 return fold_build2 (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
8310 fold_build2 (MULT_EXPR, type,
8311 TREE_OPERAND (arg0, 1), arg1));
8312
8313 /* Convert A/(B/C) to (A/B)*C. */
8314 if (flag_unsafe_math_optimizations
8315 && TREE_CODE (arg1) == RDIV_EXPR)
8316 return fold_build2 (MULT_EXPR, type,
8317 fold_build2 (RDIV_EXPR, type, arg0,
8318 TREE_OPERAND (arg1, 0)),
8319 TREE_OPERAND (arg1, 1));
8320
8321 /* Convert C1/(X*C2) into (C1/C2)/X. */
8322 if (flag_unsafe_math_optimizations
8323 && TREE_CODE (arg1) == MULT_EXPR
8324 && TREE_CODE (arg0) == REAL_CST
8325 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
8326 {
8327 tree tem = const_binop (RDIV_EXPR, arg0,
8328 TREE_OPERAND (arg1, 1), 0);
8329 if (tem)
8330 return fold_build2 (RDIV_EXPR, type, tem,
8331 TREE_OPERAND (arg1, 0));
8332 }
8333
8334 if (flag_unsafe_math_optimizations)
8335 {
8336 enum built_in_function fcode = builtin_mathfn_code (arg1);
8337 /* Optimize x/expN(y) into x*expN(-y). */
8338 if (BUILTIN_EXPONENT_P (fcode))
8339 {
8340 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8341 tree arg = negate_expr (TREE_VALUE (TREE_OPERAND (arg1, 1)));
8342 tree arglist = build_tree_list (NULL_TREE,
8343 fold_convert (type, arg));
8344 arg1 = build_function_call_expr (expfn, arglist);
8345 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8346 }
8347
8348 /* Optimize x/pow(y,z) into x*pow(y,-z). */
8349 if (fcode == BUILT_IN_POW
8350 || fcode == BUILT_IN_POWF
8351 || fcode == BUILT_IN_POWL)
8352 {
8353 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
8354 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
8355 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
8356 tree neg11 = fold_convert (type, negate_expr (arg11));
8357 tree arglist = tree_cons (NULL_TREE, arg10,
8358 build_tree_list (NULL_TREE, neg11));
8359 arg1 = build_function_call_expr (powfn, arglist);
8360 return fold_build2 (MULT_EXPR, type, arg0, arg1);
8361 }
8362 }
8363
8364 if (flag_unsafe_math_optimizations)
8365 {
8366 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
8367 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
8368
8369 /* Optimize sin(x)/cos(x) as tan(x). */
8370 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
8371 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
8372 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
8373 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8374 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8375 {
8376 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8377
8378 if (tanfn != NULL_TREE)
8379 return build_function_call_expr (tanfn,
8380 TREE_OPERAND (arg0, 1));
8381 }
8382
8383 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
8384 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
8385 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
8386 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
8387 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
8388 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
8389 {
8390 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
8391
8392 if (tanfn != NULL_TREE)
8393 {
8394 tree tmp = TREE_OPERAND (arg0, 1);
8395 tmp = build_function_call_expr (tanfn, tmp);
8396 return fold_build2 (RDIV_EXPR, type,
8397 build_real (type, dconst1), tmp);
8398 }
8399 }
8400
8401 /* Optimize pow(x,c)/x as pow(x,c-1). */
8402 if (fcode0 == BUILT_IN_POW
8403 || fcode0 == BUILT_IN_POWF
8404 || fcode0 == BUILT_IN_POWL)
8405 {
8406 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
8407 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
8408 if (TREE_CODE (arg01) == REAL_CST
8409 && ! TREE_CONSTANT_OVERFLOW (arg01)
8410 && operand_equal_p (arg1, arg00, 0))
8411 {
8412 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
8413 REAL_VALUE_TYPE c;
8414 tree arg, arglist;
8415
8416 c = TREE_REAL_CST (arg01);
8417 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
8418 arg = build_real (type, c);
8419 arglist = build_tree_list (NULL_TREE, arg);
8420 arglist = tree_cons (NULL_TREE, arg1, arglist);
8421 return build_function_call_expr (powfn, arglist);
8422 }
8423 }
8424 }
8425 goto binary;
8426
8427 case TRUNC_DIV_EXPR:
8428 case ROUND_DIV_EXPR:
8429 case FLOOR_DIV_EXPR:
8430 case CEIL_DIV_EXPR:
8431 case EXACT_DIV_EXPR:
8432 if (integer_onep (arg1))
8433 return non_lvalue (fold_convert (type, arg0));
8434 if (integer_zerop (arg1))
8435 return NULL_TREE;
8436 /* X / -1 is -X. */
8437 if (!TYPE_UNSIGNED (type)
8438 && TREE_CODE (arg1) == INTEGER_CST
8439 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8440 && TREE_INT_CST_HIGH (arg1) == -1)
8441 return fold_convert (type, negate_expr (arg0));
8442
8443 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
8444 operation, EXACT_DIV_EXPR.
8445
8446 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
8447 At one time others generated faster code; it's not clear whether they
8448 still do after the last round of changes to the DIV code in expmed.c. */
8449 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
8450 && multiple_of_p (type, arg0, arg1))
8451 return fold_build2 (EXACT_DIV_EXPR, type, arg0, arg1);
8452
8453 if (TREE_CODE (arg1) == INTEGER_CST
8454 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8455 return fold_convert (type, tem);
8456
8457 goto binary;
8458
8459 case CEIL_MOD_EXPR:
8460 case FLOOR_MOD_EXPR:
8461 case ROUND_MOD_EXPR:
8462 case TRUNC_MOD_EXPR:
8463 /* X % 1 is always zero, but be sure to preserve any side
8464 effects in X. */
8465 if (integer_onep (arg1))
8466 return omit_one_operand (type, integer_zero_node, arg0);
8467
8468 /* X % 0, return X % 0 unchanged so that we can get the
8469 proper warnings and errors. */
8470 if (integer_zerop (arg1))
8471 return NULL_TREE;
8472
8473 /* 0 % X is always zero, but be sure to preserve any side
8474 effects in X. Place this after checking for X == 0. */
8475 if (integer_zerop (arg0))
8476 return omit_one_operand (type, integer_zero_node, arg1);
8477
8478 /* X % -1 is zero. */
8479 if (!TYPE_UNSIGNED (type)
8480 && TREE_CODE (arg1) == INTEGER_CST
8481 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
8482 && TREE_INT_CST_HIGH (arg1) == -1)
8483 return omit_one_operand (type, integer_zero_node, arg0);
8484
8485 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
8486 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
8487 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
8488 && (TYPE_UNSIGNED (type) || tree_expr_nonnegative_p (arg0))
8489 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) >= 0)
8490 {
8491 unsigned HOST_WIDE_INT high, low;
8492 tree mask;
8493 int l;
8494
8495 l = tree_log2 (arg1);
8496 if (l >= HOST_BITS_PER_WIDE_INT)
8497 {
8498 high = ((unsigned HOST_WIDE_INT) 1
8499 << (l - HOST_BITS_PER_WIDE_INT)) - 1;
8500 low = -1;
8501 }
8502 else
8503 {
8504 high = 0;
8505 low = ((unsigned HOST_WIDE_INT) 1 << l) - 1;
8506 }
8507
8508 mask = build_int_cst_wide (type, low, high);
8509 return fold_build2 (BIT_AND_EXPR, type,
8510 fold_convert (type, arg0), mask);
8511 }
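 /* E.g. for non-negative X, X % 8 folds to X & 7: tree_log2 (8)
    is 3, so the mask built above is (1 << 3) - 1 == 7. */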
8512
8513 /* X % -C is the same as X % C. */
8514 if (code == TRUNC_MOD_EXPR
8515 && !TYPE_UNSIGNED (type)
8516 && TREE_CODE (arg1) == INTEGER_CST
8517 && !TREE_CONSTANT_OVERFLOW (arg1)
8518 && TREE_INT_CST_HIGH (arg1) < 0
8519 && !flag_trapv
8520 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
8521 && !sign_bit_p (arg1, arg1))
8522 return fold_build2 (code, type, fold_convert (type, arg0),
8523 fold_convert (type, negate_expr (arg1)));
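 /* With truncating division the remainder takes the sign of the
    dividend, so the divisor's sign is irrelevant: e.g.
    7 % -3 == 7 % 3 == 1. */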
8524
8525 /* X % -Y is the same as X % Y. */
8526 if (code == TRUNC_MOD_EXPR
8527 && !TYPE_UNSIGNED (type)
8528 && TREE_CODE (arg1) == NEGATE_EXPR
8529 && !flag_trapv)
8530 return fold_build2 (code, type, fold_convert (type, arg0),
8531 fold_convert (type, TREE_OPERAND (arg1, 0)));
8532
8533 if (TREE_CODE (arg1) == INTEGER_CST
8534 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE)))
8535 return fold_convert (type, tem);
8536
8537 goto binary;
8538
8539 case LROTATE_EXPR:
8540 case RROTATE_EXPR:
8541 if (integer_all_onesp (arg0))
8542 return omit_one_operand (type, arg0, arg1);
8543 goto shift;
8544
8545 case RSHIFT_EXPR:
8546 /* Optimize -1 >> x for arithmetic right shifts. */
8547 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type))
8548 return omit_one_operand (type, arg0, arg1);
8549 /* ... fall through ... */
8550
8551 case LSHIFT_EXPR:
8552 shift:
8553 if (integer_zerop (arg1))
8554 return non_lvalue (fold_convert (type, arg0));
8555 if (integer_zerop (arg0))
8556 return omit_one_operand (type, arg0, arg1);
8557
8558 /* Since a negative shift count is not well-defined,
8559 don't try to compute it in the compiler. */
8560 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
8561 return NULL_TREE;
8562
8563 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
8564 if (TREE_CODE (arg0) == code && host_integerp (arg1, false)
8565 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8566 && host_integerp (TREE_OPERAND (arg0, 1), false)
8567 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8568 {
8569 HOST_WIDE_INT low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
8570 + TREE_INT_CST_LOW (arg1));
8571
8572 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
8573 being well defined. */
8574 if (low >= TYPE_PRECISION (type))
8575 {
8576 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
8577 low = low % TYPE_PRECISION (type);
8578 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
8579 return build_int_cst (type, 0);
8580 else
8581 low = TYPE_PRECISION (type) - 1;
8582 }
8583
8584 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8585 build_int_cst (type, low));
8586 }
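      /* E.g. "(x << 3) << 5" folds to "x << 8".  If the counts sum to
	 the precision or more, rotates wrap modulo the precision, left
	 and unsigned right shifts fold to 0, and the count of an
	 arithmetic right shift is clamped to precision - 1.  */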
8587
8588 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
8589 into x & ((unsigned)-1 >> c) for unsigned types. */
8590 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
8591 || (TYPE_UNSIGNED (type)
8592 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
8593 && host_integerp (arg1, false)
8594 && TREE_INT_CST_LOW (arg1) < TYPE_PRECISION (type)
8595 && host_integerp (TREE_OPERAND (arg0, 1), false)
8596 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < TYPE_PRECISION (type))
8597 {
8598 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
8599 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
8600 tree lshift;
8601 tree arg00;
8602
8603 if (low0 == low1)
8604 {
8605 arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
8606
8607 lshift = build_int_cst (type, -1);
8608 lshift = int_const_binop (code, lshift, arg1, 0);
8609
8610 return fold_build2 (BIT_AND_EXPR, type, arg00, lshift);
8611 }
8612 }
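      /* E.g. with 32-bit operands, "(x >> 4) << 4" folds to
	 "x & 0xfffffff0", and for unsigned x, "(x << 4) >> 4" folds to
	 "x & 0x0fffffff".  */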
8613
8614 /* Rewrite an LROTATE_EXPR by a constant into an
8615 RROTATE_EXPR by a new constant. */
8616 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
8617 {
8618 tree tem = build_int_cst (NULL_TREE,
8619 GET_MODE_BITSIZE (TYPE_MODE (type)));
8620 tem = fold_convert (TREE_TYPE (arg1), tem);
8621 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
8622 return fold_build2 (RROTATE_EXPR, type, arg0, tem);
8623 }
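      /* E.g. in a 32-bit mode, a left rotate by 3 becomes a right
	 rotate by 32 - 3 == 29.  */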
8624
8625 /* If we have a rotate of a bit operation with the rotate count and
8626 the second operand of the bit operation both constant,
8627 permute the two operations. */
8628 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8629 && (TREE_CODE (arg0) == BIT_AND_EXPR
8630 || TREE_CODE (arg0) == BIT_IOR_EXPR
8631 || TREE_CODE (arg0) == BIT_XOR_EXPR)
8632 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8633 return fold_build2 (TREE_CODE (arg0), type,
8634 fold_build2 (code, type,
8635 TREE_OPERAND (arg0, 0), arg1),
8636 fold_build2 (code, type,
8637 TREE_OPERAND (arg0, 1), arg1));
8638
8639 /* Two consecutive rotates adding up to the width of the mode can
8640 be ignored. */
8641 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
8642 && TREE_CODE (arg0) == RROTATE_EXPR
8643 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8644 && TREE_INT_CST_HIGH (arg1) == 0
8645 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
8646 && ((TREE_INT_CST_LOW (arg1)
8647 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
8648 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
8649 return TREE_OPERAND (arg0, 0);
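      /* E.g. in a 32-bit mode, rotating x right by 10 and then right
	 by 22 leaves x unchanged, so only the inner operand remains.  */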
8650
8651 goto binary;
8652
8653 case MIN_EXPR:
8654 if (operand_equal_p (arg0, arg1, 0))
8655 return omit_one_operand (type, arg0, arg1);
8656 if (INTEGRAL_TYPE_P (type)
8657 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
8658 return omit_one_operand (type, arg1, arg0);
8659 goto associate;
8660
8661 case MAX_EXPR:
8662 if (operand_equal_p (arg0, arg1, 0))
8663 return omit_one_operand (type, arg0, arg1);
8664 if (INTEGRAL_TYPE_P (type)
8665 && TYPE_MAX_VALUE (type)
8666 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
8667 return omit_one_operand (type, arg1, arg0);
8668 goto associate;
8669
8670 case TRUTH_ANDIF_EXPR:
8671 /* Note that the operands of this must be ints
8672 and their values must be 0 or 1.
8673 ("true" is a fixed value perhaps depending on the language.) */
8674 /* If first arg is constant zero, return it. */
8675 if (integer_zerop (arg0))
8676 return fold_convert (type, arg0);
8677 case TRUTH_AND_EXPR:
8678 /* If either arg is constant true, drop it. */
8679 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8680 return non_lvalue (fold_convert (type, arg1));
8681 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
8682 /* Preserve sequence points. */
8683 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8684 return non_lvalue (fold_convert (type, arg0));
8685 /* If second arg is constant zero, result is zero, but first arg
8686 must be evaluated. */
8687 if (integer_zerop (arg1))
8688 return omit_one_operand (type, arg1, arg0);
8689 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
8690 case will be handled here. */
8691 if (integer_zerop (arg0))
8692 return omit_one_operand (type, arg0, arg1);
8693
8694 /* !X && X is always false. */
8695 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8696 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8697 return omit_one_operand (type, integer_zero_node, arg1);
8698 /* X && !X is always false. */
8699 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8700 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8701 return omit_one_operand (type, integer_zero_node, arg0);
8702
8703 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
8704 means A >= Y && A != MAX, but in this case we know that
8705 A < X <= MAX. */
8706
8707 if (!TREE_SIDE_EFFECTS (arg0)
8708 && !TREE_SIDE_EFFECTS (arg1))
8709 {
8710 tem = fold_to_nonsharp_ineq_using_bound (arg0, arg1);
8711 if (tem && !operand_equal_p (tem, arg0, 0))
8712 return fold_build2 (code, type, tem, arg1);
8713
8714 tem = fold_to_nonsharp_ineq_using_bound (arg1, arg0);
8715 if (tem && !operand_equal_p (tem, arg1, 0))
8716 return fold_build2 (code, type, arg0, tem);
8717 }
8718
8719 truth_andor:
8720 /* We only do these simplifications if we are optimizing. */
8721 if (!optimize)
8722 return NULL_TREE;
8723
8724 /* Check for things like (A || B) && (A || C). We can convert this
8725 to A || (B && C). Note that either operator can be any of the four
8726 truth and/or operations and the transformation will still be
8727 valid. Also note that we only care about order for the
8728 ANDIF and ORIF operators. If B contains side effects, this
8729 might change the truth-value of A. */
8730 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8731 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8732 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8733 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8734 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8735 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8736 {
8737 tree a00 = TREE_OPERAND (arg0, 0);
8738 tree a01 = TREE_OPERAND (arg0, 1);
8739 tree a10 = TREE_OPERAND (arg1, 0);
8740 tree a11 = TREE_OPERAND (arg1, 1);
8741 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8742 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8743 && (code == TRUTH_AND_EXPR
8744 || code == TRUTH_OR_EXPR));
8745
8746 if (operand_equal_p (a00, a10, 0))
8747 return fold_build2 (TREE_CODE (arg0), type, a00,
8748 fold_build2 (code, type, a01, a11));
8749 else if (commutative && operand_equal_p (a00, a11, 0))
8750 return fold_build2 (TREE_CODE (arg0), type, a00,
8751 fold_build2 (code, type, a01, a10));
8752 else if (commutative && operand_equal_p (a01, a10, 0))
8753 return fold_build2 (TREE_CODE (arg0), type, a01,
8754 fold_build2 (code, type, a00, a11));
8755
8756 /* This case is tricky because we must either have commutative
8757 operators or else A10 must not have side-effects. */
8758
8759 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8760 && operand_equal_p (a01, a11, 0))
8761 return fold_build2 (TREE_CODE (arg0), type,
8762 fold_build2 (code, type, a00, a10),
8763 a01);
8764 }
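      /* E.g. "(a || b) && (a || c)" folds to "a || (b && c)", so A is
	 evaluated only once.  */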
8765
8766 /* See if we can build a range comparison. */
8767 if (0 != (tem = fold_range_test (code, type, op0, op1)))
8768 return tem;
8769
8770 /* Check for the possibility of merging component references. If our
8771 lhs is another similar operation, try to merge its rhs with our
8772 rhs. Then try to merge our lhs and rhs. */
8773 if (TREE_CODE (arg0) == code
8774 && 0 != (tem = fold_truthop (code, type,
8775 TREE_OPERAND (arg0, 1), arg1)))
8776 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
8777
8778 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
8779 return tem;
8780
8781 return NULL_TREE;
8782
8783 case TRUTH_ORIF_EXPR:
8784 /* Note that the operands of this must be ints
8785 and their values must be 0 or true.
8786 ("true" is a fixed value perhaps depending on the language.) */
8787 /* If first arg is constant true, return it. */
8788 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8789 return fold_convert (type, arg0);
8790 case TRUTH_OR_EXPR:
8791 /* If either arg is constant zero, drop it. */
8792 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
8793 return non_lvalue (fold_convert (type, arg1));
8794 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
8795 /* Preserve sequence points. */
8796 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
8797 return non_lvalue (fold_convert (type, arg0));
8798 /* If second arg is constant true, result is true, but we must
8799 evaluate first arg. */
8800 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
8801 return omit_one_operand (type, arg1, arg0);
8802 /* Likewise for first arg, but note this only occurs here for
8803 TRUTH_OR_EXPR. */
8804 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
8805 return omit_one_operand (type, arg0, arg1);
8806
8807 /* !X || X is always true. */
8808 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8809 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8810 return omit_one_operand (type, integer_one_node, arg1);
8811 /* X || !X is always true. */
8812 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8813 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8814 return omit_one_operand (type, integer_one_node, arg0);
8815
8816 goto truth_andor;
8817
8818 case TRUTH_XOR_EXPR:
8819 /* If the second arg is constant zero, drop it. */
8820 if (integer_zerop (arg1))
8821 return non_lvalue (fold_convert (type, arg0));
8822 /* If the second arg is constant true, this is a logical inversion. */
8823 if (integer_onep (arg1))
8824 {
8825 /* Only call invert_truthvalue if operand is a truth value. */
8826 if (TREE_CODE (TREE_TYPE (arg0)) != BOOLEAN_TYPE)
8827 tem = fold_build1 (TRUTH_NOT_EXPR, TREE_TYPE (arg0), arg0);
8828 else
8829 tem = invert_truthvalue (arg0);
8830 return non_lvalue (fold_convert (type, tem));
8831 }
8832 /* Identical arguments cancel to zero. */
8833 if (operand_equal_p (arg0, arg1, 0))
8834 return omit_one_operand (type, integer_zero_node, arg0);
8835
8836 /* !X ^ X is always true. */
8837 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
8838 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
8839 return omit_one_operand (type, integer_one_node, arg1);
8840
8841 /* X ^ !X is always true. */
8842 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
8843 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
8844 return omit_one_operand (type, integer_one_node, arg0);
8845
8846 return NULL_TREE;
8847
8848 case EQ_EXPR:
8849 case NE_EXPR:
8850 case LT_EXPR:
8851 case GT_EXPR:
8852 case LE_EXPR:
8853 case GE_EXPR:
8854 /* If one arg is a real or integer constant, put it last. */
8855 if (tree_swap_operands_p (arg0, arg1, true))
8856 return fold_build2 (swap_tree_comparison (code), type, op1, op0);
8857
8858 /* ~a != C becomes a != ~C where C is a constant. Likewise for ==. */
8859 if (TREE_CODE (arg0) == BIT_NOT_EXPR && TREE_CODE (arg1) == INTEGER_CST
8860 && (code == NE_EXPR || code == EQ_EXPR))
8861 return fold_build2 (code, type, TREE_OPERAND (arg0, 0),
8862 fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1),
8863 arg1));
8864
8865 /* bool_var != 0 becomes bool_var. */
8866 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8867 && code == NE_EXPR)
8868 return non_lvalue (fold_convert (type, arg0));
8869
8870 /* bool_var == 1 becomes bool_var. */
8871 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8872 && code == EQ_EXPR)
8873 return non_lvalue (fold_convert (type, arg0));
8874
8875 /* bool_var != 1 becomes !bool_var. */
8876 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
8877 && code == NE_EXPR)
8878 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
8879
8880 /* bool_var == 0 becomes !bool_var. */
8881 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
8882 && code == EQ_EXPR)
8883 return fold_build1 (TRUTH_NOT_EXPR, type, arg0);
8884
8885 /* If this is an equality comparison of the address of a non-weak
8886 object against zero, then we know the result. */
8887 if ((code == EQ_EXPR || code == NE_EXPR)
8888 && TREE_CODE (arg0) == ADDR_EXPR
8889 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8890 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8891 && integer_zerop (arg1))
8892 return constant_boolean_node (code != EQ_EXPR, type);
8893
8894 /* If this is an equality comparison of the address of two non-weak,
8895 unaliased symbols neither of which are extern (since we do not
8896 have access to attributes for externs), then we know the result. */
8897 if ((code == EQ_EXPR || code == NE_EXPR)
8898 && TREE_CODE (arg0) == ADDR_EXPR
8899 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
8900 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
8901 && ! lookup_attribute ("alias",
8902 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
8903 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
8904 && TREE_CODE (arg1) == ADDR_EXPR
8905 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
8906 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
8907 && ! lookup_attribute ("alias",
8908 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
8909 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
8910 {
8911 /* We know that we're looking at the address of two
8912 non-weak, unaliased, static _DECL nodes.
8913
8914 It is both wasteful and incorrect to call operand_equal_p
8915 to compare the two ADDR_EXPR nodes. It is wasteful in that
8916 all we need to do is test pointer equality for the arguments
8917 to the two ADDR_EXPR nodes. It is incorrect to use
8918 operand_equal_p as that function is NOT equivalent to a
8919 C equality test. It can in fact return false for two
8920 objects which would test as equal using the C equality
8921 operator. */
8922 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
8923 return constant_boolean_node (equal
8924 ? code == EQ_EXPR : code != EQ_EXPR,
8925 type);
8926 }
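      /* E.g. given "static int x, y;", "&x == &y" folds to 0 and
	 "&x == &x" folds to 1 at compile time.  */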
8927
8928 /* If this is a comparison of two exprs that look like an
8929 ARRAY_REF of the same object, then we can fold this to a
8930 comparison of the two offsets. */
8931 if (TREE_CODE_CLASS (code) == tcc_comparison)
8932 {
8933 tree base0, offset0, base1, offset1;
8934
8935 if (extract_array_ref (arg0, &base0, &offset0)
8936 && extract_array_ref (arg1, &base1, &offset1)
8937 && operand_equal_p (base0, base1, 0))
8938 {
8939 /* Handle no offsets on both sides specially. */
8940 if (offset0 == NULL_TREE
8941 && offset1 == NULL_TREE)
8942 return fold_build2 (code, type, integer_zero_node,
8943 integer_zero_node);
8944
8945 if (!offset0 || !offset1
8946 || TREE_TYPE (offset0) == TREE_TYPE (offset1))
8947 {
8948 if (offset0 == NULL_TREE)
8949 offset0 = build_int_cst (TREE_TYPE (offset1), 0);
8950 if (offset1 == NULL_TREE)
8951 offset1 = build_int_cst (TREE_TYPE (offset0), 0);
8952 return fold_build2 (code, type, offset0, offset1);
8953 }
8954 }
8955 }
8956
8957 /* Transform comparisons of the form X +- C CMP X. */
8958 if ((code != EQ_EXPR && code != NE_EXPR)
8959 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8960 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
8961 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
8962 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
8963 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8964 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
8965 && !(flag_wrapv || flag_trapv))))
8966 {
8967 tree arg01 = TREE_OPERAND (arg0, 1);
8968 enum tree_code code0 = TREE_CODE (arg0);
8969 int is_positive;
8970
8971 if (TREE_CODE (arg01) == REAL_CST)
8972 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
8973 else
8974 is_positive = tree_int_cst_sgn (arg01);
8975
8976 /* (X - c) > X becomes false. */
8977 if (code == GT_EXPR
8978 && ((code0 == MINUS_EXPR && is_positive >= 0)
8979 || (code0 == PLUS_EXPR && is_positive <= 0)))
8980 return constant_boolean_node (0, type);
8981
8982 /* Likewise (X + c) < X becomes false. */
8983 if (code == LT_EXPR
8984 && ((code0 == PLUS_EXPR && is_positive >= 0)
8985 || (code0 == MINUS_EXPR && is_positive <= 0)))
8986 return constant_boolean_node (0, type);
8987
8988 /* Convert (X - c) <= X to true. */
8989 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8990 && code == LE_EXPR
8991 && ((code0 == MINUS_EXPR && is_positive >= 0)
8992 || (code0 == PLUS_EXPR && is_positive <= 0)))
8993 return constant_boolean_node (1, type);
8994
8995 /* Convert (X + c) >= X to true. */
8996 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
8997 && code == GE_EXPR
8998 && ((code0 == PLUS_EXPR && is_positive >= 0)
8999 || (code0 == MINUS_EXPR && is_positive <= 0)))
9000 return constant_boolean_node (1, type);
9001
9002 if (TREE_CODE (arg01) == INTEGER_CST)
9003 {
9004 /* Convert X + c > X and X - c < X to true for integers. */
9005 if (code == GT_EXPR
9006 && ((code0 == PLUS_EXPR && is_positive > 0)
9007 || (code0 == MINUS_EXPR && is_positive < 0)))
9008 return constant_boolean_node (1, type);
9009
9010 if (code == LT_EXPR
9011 && ((code0 == MINUS_EXPR && is_positive > 0)
9012 || (code0 == PLUS_EXPR && is_positive < 0)))
9013 return constant_boolean_node (1, type);
9014
9015 /* Convert X + c <= X and X - c >= X to false for integers. */
9016 if (code == LE_EXPR
9017 && ((code0 == PLUS_EXPR && is_positive > 0)
9018 || (code0 == MINUS_EXPR && is_positive < 0)))
9019 return constant_boolean_node (0, type);
9020
9021 if (code == GE_EXPR
9022 && ((code0 == MINUS_EXPR && is_positive > 0)
9023 || (code0 == PLUS_EXPR && is_positive < 0)))
9024 return constant_boolean_node (0, type);
9025 }
9026 }
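      /* E.g. for signed x without -fwrapv, "x - 1 > x" folds to 0 and
	 "x + 1 > x" folds to 1, because signed overflow is assumed not
	 to occur.  */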
9027
9028 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
9029 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9030 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9031 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9032 && !TYPE_UNSIGNED (TREE_TYPE (arg1))
9033 && !(flag_wrapv || flag_trapv))
9034 && (TREE_CODE (arg1) == INTEGER_CST
9035 && !TREE_OVERFLOW (arg1)))
9036 {
9037 tree const1 = TREE_OPERAND (arg0, 1);
9038 tree const2 = arg1;
9039 tree variable = TREE_OPERAND (arg0, 0);
9040 tree lhs;
9041 int lhs_add;
9042 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9043
9044 lhs = fold_build2 (lhs_add ? PLUS_EXPR : MINUS_EXPR,
9045 TREE_TYPE (arg1), const2, const1);
9046 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9047 && (TREE_CODE (lhs) != INTEGER_CST
9048 || !TREE_OVERFLOW (lhs)))
9049 return fold_build2 (code, type, variable, lhs);
9050 }
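      /* E.g. "x + 3 < 10" folds to "x < 7" and "x - 3 < 10" folds to
	 "x < 13", provided the adjusted constant does not overflow.  */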
9051
9052 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9053 {
9054 tree targ0 = strip_float_extensions (arg0);
9055 tree targ1 = strip_float_extensions (arg1);
9056 tree newtype = TREE_TYPE (targ0);
9057
9058 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9059 newtype = TREE_TYPE (targ1);
9060
9061 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9062 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9063 return fold_build2 (code, type, fold_convert (newtype, targ0),
9064 fold_convert (newtype, targ1));
9065
9066 /* (-a) CMP (-b) -> b CMP a */
9067 if (TREE_CODE (arg0) == NEGATE_EXPR
9068 && TREE_CODE (arg1) == NEGATE_EXPR)
9069 return fold_build2 (code, type, TREE_OPERAND (arg1, 0),
9070 TREE_OPERAND (arg0, 0));
9071
9072 if (TREE_CODE (arg1) == REAL_CST)
9073 {
9074 REAL_VALUE_TYPE cst;
9075 cst = TREE_REAL_CST (arg1);
9076
9077 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9078 if (TREE_CODE (arg0) == NEGATE_EXPR)
9079 return
9080 fold_build2 (swap_tree_comparison (code), type,
9081 TREE_OPERAND (arg0, 0),
9082 build_real (TREE_TYPE (arg1),
9083 REAL_VALUE_NEGATE (cst)));
9084
9085 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9086 /* a CMP (-0) -> a CMP 0 */
9087 if (REAL_VALUE_MINUS_ZERO (cst))
9088 return fold_build2 (code, type, arg0,
9089 build_real (TREE_TYPE (arg1), dconst0));
9090
9091 /* x != NaN is always true, other ops are always false. */
9092 if (REAL_VALUE_ISNAN (cst)
9093 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9094 {
9095 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9096 return omit_one_operand (type, tem, arg0);
9097 }
9098
9099 /* Fold comparisons against infinity. */
9100 if (REAL_VALUE_ISINF (cst))
9101 {
9102 tem = fold_inf_compare (code, type, arg0, arg1);
9103 if (tem != NULL_TREE)
9104 return tem;
9105 }
9106 }
9107
9108 /* If this is a comparison of a real constant with a PLUS_EXPR
9109 or a MINUS_EXPR of a real constant, we can convert it into a
9110 comparison with a revised real constant as long as no overflow
9111 occurs when unsafe_math_optimizations are enabled. */
9112 if (flag_unsafe_math_optimizations
9113 && TREE_CODE (arg1) == REAL_CST
9114 && (TREE_CODE (arg0) == PLUS_EXPR
9115 || TREE_CODE (arg0) == MINUS_EXPR)
9116 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9117 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9118 ? MINUS_EXPR : PLUS_EXPR,
9119 arg1, TREE_OPERAND (arg0, 1), 0))
9120 && ! TREE_CONSTANT_OVERFLOW (tem))
9121 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9122
9123 /* Likewise, we can simplify a comparison of a real constant with
9124 a MINUS_EXPR whose first operand is also a real constant, i.e.
9125 (c1 - x) < c2 becomes x > c1-c2. */
9126 if (flag_unsafe_math_optimizations
9127 && TREE_CODE (arg1) == REAL_CST
9128 && TREE_CODE (arg0) == MINUS_EXPR
9129 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9130 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9131 arg1, 0))
9132 && ! TREE_CONSTANT_OVERFLOW (tem))
9133 return fold_build2 (swap_tree_comparison (code), type,
9134 TREE_OPERAND (arg0, 1), tem);
9135
9136 /* Fold comparisons against built-in math functions. */
9137 if (TREE_CODE (arg1) == REAL_CST
9138 && flag_unsafe_math_optimizations
9139 && ! flag_errno_math)
9140 {
9141 enum built_in_function fcode = builtin_mathfn_code (arg0);
9142
9143 if (fcode != END_BUILTINS)
9144 {
9145 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
9146 if (tem != NULL_TREE)
9147 return tem;
9148 }
9149 }
9150 }
9151
9152 /* Convert foo++ == CONST into ++foo == CONST + INCR. */
9153 if (TREE_CONSTANT (arg1)
9154 && (TREE_CODE (arg0) == POSTINCREMENT_EXPR
9155 || TREE_CODE (arg0) == POSTDECREMENT_EXPR)
9156 /* This optimization is invalid for ordered comparisons
9157 if CONST+INCR overflows or if foo+incr might overflow.
9158 This optimization is invalid for floating point due to rounding.
9159 For pointer types we assume overflow doesn't happen. */
9160 && (POINTER_TYPE_P (TREE_TYPE (arg0))
9161 || (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9162 && (code == EQ_EXPR || code == NE_EXPR))))
9163 {
9164 tree varop, newconst;
9165
9166 if (TREE_CODE (arg0) == POSTINCREMENT_EXPR)
9167 {
9168 newconst = fold_build2 (PLUS_EXPR, TREE_TYPE (arg0),
9169 arg1, TREE_OPERAND (arg0, 1));
9170 varop = build2 (PREINCREMENT_EXPR, TREE_TYPE (arg0),
9171 TREE_OPERAND (arg0, 0),
9172 TREE_OPERAND (arg0, 1));
9173 }
9174 else
9175 {
9176 newconst = fold_build2 (MINUS_EXPR, TREE_TYPE (arg0),
9177 arg1, TREE_OPERAND (arg0, 1));
9178 varop = build2 (PREDECREMENT_EXPR, TREE_TYPE (arg0),
9179 TREE_OPERAND (arg0, 0),
9180 TREE_OPERAND (arg0, 1));
9181 }
9182
9183
9184 /* If VAROP is a reference to a bitfield, we must mask
9185 the constant by the width of the field. */
9186 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
9187 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (varop, 0), 1))
9188 && host_integerp (DECL_SIZE (TREE_OPERAND
9189 (TREE_OPERAND (varop, 0), 1)), 1))
9190 {
9191 tree fielddecl = TREE_OPERAND (TREE_OPERAND (varop, 0), 1);
9192 HOST_WIDE_INT size = tree_low_cst (DECL_SIZE (fielddecl), 1);
9193 tree folded_compare, shift;
9194
9195 /* First check whether the comparison would always come
9196 out the same.  Without this check, the masking below
9197 could change the meaning of the comparison. */
9198 folded_compare = fold_build2 (code, type,
9199 TREE_OPERAND (varop, 0), arg1);
9200 if (integer_zerop (folded_compare)
9201 || integer_onep (folded_compare))
9202 return omit_one_operand (type, folded_compare, varop);
9203
9204 shift = build_int_cst (NULL_TREE,
9205 TYPE_PRECISION (TREE_TYPE (varop)) - size);
9206 shift = fold_convert (TREE_TYPE (varop), shift);
9207 newconst = fold_build2 (LSHIFT_EXPR, TREE_TYPE (varop),
9208 newconst, shift);
9209 newconst = fold_build2 (RSHIFT_EXPR, TREE_TYPE (varop),
9210 newconst, shift);
9211 }
9212
9213 return fold_build2 (code, type, varop, newconst);
9214 }
9215
9216 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
9217 This transformation affects the cases which are handled in later
9218 optimizations involving comparisons with non-negative constants. */
9219 if (TREE_CODE (arg1) == INTEGER_CST
9220 && TREE_CODE (arg0) != INTEGER_CST
9221 && tree_int_cst_sgn (arg1) > 0)
9222 {
9223 switch (code)
9224 {
9225 case GE_EXPR:
9226 arg1 = const_binop (MINUS_EXPR, arg1,
9227 build_int_cst (TREE_TYPE (arg1), 1), 0);
9228 return fold_build2 (GT_EXPR, type, arg0,
9229 fold_convert (TREE_TYPE (arg0), arg1));
9230
9231 case LT_EXPR:
9232 arg1 = const_binop (MINUS_EXPR, arg1,
9233 build_int_cst (TREE_TYPE (arg1), 1), 0);
9234 return fold_build2 (LE_EXPR, type, arg0,
9235 fold_convert (TREE_TYPE (arg0), arg1));
9236
9237 default:
9238 break;
9239 }
9240 }
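      /* E.g. "x >= 5" becomes "x > 4" and "x < 5" becomes "x <= 4",
	 canonicalizing on GT/LE for positive constants.  */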
9241
9242 /* Comparisons with the highest or lowest possible integer of
9243 the specified size will have known values. */
9244 {
9245 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
9246
9247 if (TREE_CODE (arg1) == INTEGER_CST
9248 && ! TREE_CONSTANT_OVERFLOW (arg1)
9249 && width <= 2 * HOST_BITS_PER_WIDE_INT
9250 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9251 || POINTER_TYPE_P (TREE_TYPE (arg1))))
9252 {
9253 HOST_WIDE_INT signed_max_hi;
9254 unsigned HOST_WIDE_INT signed_max_lo;
9255 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
9256
9257 if (width <= HOST_BITS_PER_WIDE_INT)
9258 {
9259 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9260 - 1;
9261 signed_max_hi = 0;
9262 max_hi = 0;
9263
9264 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9265 {
9266 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9267 min_lo = 0;
9268 min_hi = 0;
9269 }
9270 else
9271 {
9272 max_lo = signed_max_lo;
9273 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9274 min_hi = -1;
9275 }
9276 }
9277 else
9278 {
9279 width -= HOST_BITS_PER_WIDE_INT;
9280 signed_max_lo = -1;
9281 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
9282 - 1;
9283 max_lo = -1;
9284 min_lo = 0;
9285
9286 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
9287 {
9288 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
9289 min_hi = 0;
9290 }
9291 else
9292 {
9293 max_hi = signed_max_hi;
9294 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
9295 }
9296 }
9297
9298 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
9299 && TREE_INT_CST_LOW (arg1) == max_lo)
9300 switch (code)
9301 {
9302 case GT_EXPR:
9303 return omit_one_operand (type, integer_zero_node, arg0);
9304
9305 case GE_EXPR:
9306 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9307
9308 case LE_EXPR:
9309 return omit_one_operand (type, integer_one_node, arg0);
9310
9311 case LT_EXPR:
9312 return fold_build2 (NE_EXPR, type, arg0, arg1);
9313
9314 /* The GE_EXPR and LT_EXPR cases above are not normally
9315 reached because of previous transformations. */
9316
9317 default:
9318 break;
9319 }
9320 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9321 == max_hi
9322 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
9323 switch (code)
9324 {
9325 case GT_EXPR:
9326 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9327 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9328 case LE_EXPR:
9329 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
9330 return fold_build2 (NE_EXPR, type, arg0, arg1);
9331 default:
9332 break;
9333 }
9334 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9335 == min_hi
9336 && TREE_INT_CST_LOW (arg1) == min_lo)
9337 switch (code)
9338 {
9339 case LT_EXPR:
9340 return omit_one_operand (type, integer_zero_node, arg0);
9341
9342 case LE_EXPR:
9343 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9344
9345 case GE_EXPR:
9346 return omit_one_operand (type, integer_one_node, arg0);
9347
9348 case GT_EXPR:
9349 return fold_build2 (NE_EXPR, type, op0, op1);
9350
9351 default:
9352 break;
9353 }
9354 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
9355 == min_hi
9356 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
9357 switch (code)
9358 {
9359 case GE_EXPR:
9360 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9361 return fold_build2 (NE_EXPR, type, arg0, arg1);
9362 case LT_EXPR:
9363 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
9364 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9365 default:
9366 break;
9367 }
9368
9369 else if (!in_gimple_form
9370 && TREE_INT_CST_HIGH (arg1) == signed_max_hi
9371 && TREE_INT_CST_LOW (arg1) == signed_max_lo
9372 && TYPE_UNSIGNED (TREE_TYPE (arg1))
9373 /* signed_type does not work on pointer types. */
9374 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
9375 {
9376 /* The following case also applies to X < signed_max+1
9377 and X >= signed_max+1 because of previous transformations. */
9378 if (code == LE_EXPR || code == GT_EXPR)
9379 {
9380 tree st0, st1;
9381 st0 = lang_hooks.types.signed_type (TREE_TYPE (arg0));
9382 st1 = lang_hooks.types.signed_type (TREE_TYPE (arg1));
9383 return fold_build2 (code == LE_EXPR ? GE_EXPR: LT_EXPR,
9384 type, fold_convert (st0, arg0),
9385 build_int_cst (st1, 0));
9386 }
9387 }
9388 }
9389 }
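      /* E.g. if x and the constant share an unsigned 8-bit type,
	 "x <= 255" folds to 1, "x > 255" folds to 0, "x < 255"
	 becomes "x != 255", and "x >= 255" becomes "x == 255".  */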
9390
9391 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
9392 a MINUS_EXPR of a constant, we can convert it into a comparison with
9393 a revised constant as long as no overflow occurs. */
9394 if ((code == EQ_EXPR || code == NE_EXPR)
9395 && TREE_CODE (arg1) == INTEGER_CST
9396 && (TREE_CODE (arg0) == PLUS_EXPR
9397 || TREE_CODE (arg0) == MINUS_EXPR)
9398 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9399 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9400 ? MINUS_EXPR : PLUS_EXPR,
9401 arg1, TREE_OPERAND (arg0, 1), 0))
9402 && ! TREE_CONSTANT_OVERFLOW (tem))
9403 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9404
9405 /* Similarly for a NEGATE_EXPR. */
9406 else if ((code == EQ_EXPR || code == NE_EXPR)
9407 && TREE_CODE (arg0) == NEGATE_EXPR
9408 && TREE_CODE (arg1) == INTEGER_CST
9409 && 0 != (tem = negate_expr (arg1))
9410 && TREE_CODE (tem) == INTEGER_CST
9411 && ! TREE_CONSTANT_OVERFLOW (tem))
9412 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), tem);
9413
9414 /* If we have X - Y == 0, we can convert that to X == Y and similarly
9415 for !=. Don't do this for ordered comparisons due to overflow. */
9416 else if ((code == NE_EXPR || code == EQ_EXPR)
9417 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
9418 return fold_build2 (code, type,
9419 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
9420
9421 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9422 && (TREE_CODE (arg0) == NOP_EXPR
9423 || TREE_CODE (arg0) == CONVERT_EXPR))
9424 {
9425 /* If we are widening one operand of an integer comparison,
9426 see if the other operand is similarly being widened. Perhaps we
9427 can do the comparison in the narrower type. */
9428 tem = fold_widened_comparison (code, type, arg0, arg1);
9429 if (tem)
9430 return tem;
9431
9432 /* Or if we are changing signedness. */
9433 tem = fold_sign_changed_comparison (code, type, arg0, arg1);
9434 if (tem)
9435 return tem;
9436 }
9437
9438 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9439 constant, we can simplify it. */
9440 else if (TREE_CODE (arg1) == INTEGER_CST
9441 && (TREE_CODE (arg0) == MIN_EXPR
9442 || TREE_CODE (arg0) == MAX_EXPR)
9443 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9444 {
9445 tem = optimize_minmax_comparison (code, type, op0, op1);
9446 if (tem)
9447 return tem;
9448
9449 return NULL_TREE;
9450 }
9451
9452 /* If we are comparing an ABS_EXPR with a constant, we can
9453 convert all the cases into explicit comparisons, but they may
9454 well not be faster than doing the ABS and one comparison.
9455 But ABS (X) <= C is a range comparison, which becomes a subtraction
9456 and a comparison, and is probably faster. */
9457 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
9458 && TREE_CODE (arg0) == ABS_EXPR
9459 && ! TREE_SIDE_EFFECTS (arg0)
9460 && (0 != (tem = negate_expr (arg1)))
9461 && TREE_CODE (tem) == INTEGER_CST
9462 && ! TREE_CONSTANT_OVERFLOW (tem))
9463 return fold_build2 (TRUTH_ANDIF_EXPR, type,
9464 build2 (GE_EXPR, type,
9465 TREE_OPERAND (arg0, 0), tem),
9466 build2 (LE_EXPR, type,
9467 TREE_OPERAND (arg0, 0), arg1));
9468
9469 /* Convert ABS_EXPR<x> >= 0 to true. */
9470 else if (code == GE_EXPR
9471 && tree_expr_nonnegative_p (arg0)
9472 && (integer_zerop (arg1)
9473 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
9474 && real_zerop (arg1))))
9475 return omit_one_operand (type, integer_one_node, arg0);
9476
9477 /* Convert ABS_EXPR<x> < 0 to false. */
9478 else if (code == LT_EXPR
9479 && tree_expr_nonnegative_p (arg0)
9480 && (integer_zerop (arg1) || real_zerop (arg1)))
9481 return omit_one_operand (type, integer_zero_node, arg0);
9482
9483 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
9484 else if ((code == EQ_EXPR || code == NE_EXPR)
9485 && TREE_CODE (arg0) == ABS_EXPR
9486 && (integer_zerop (arg1) || real_zerop (arg1)))
9487 return fold_build2 (code, type, TREE_OPERAND (arg0, 0), arg1);
9488
9489 /* If this is an EQ or NE comparison with zero and ARG0 is
9490 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
9491 two operations, but the latter can be done in one less insn
9492 on machines that have only two-operand insns or on which a
9493 constant cannot be the first operand. */
9494 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
9495 && TREE_CODE (arg0) == BIT_AND_EXPR)
9496 {
9497 tree arg00 = TREE_OPERAND (arg0, 0);
9498 tree arg01 = TREE_OPERAND (arg0, 1);
9499 if (TREE_CODE (arg00) == LSHIFT_EXPR
9500 && integer_onep (TREE_OPERAND (arg00, 0)))
9501 return
9502 fold_build2 (code, type,
9503 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9504 build2 (RSHIFT_EXPR, TREE_TYPE (arg00),
9505 arg01, TREE_OPERAND (arg00, 1)),
9506 fold_convert (TREE_TYPE (arg0),
9507 integer_one_node)),
9508 arg1);
9509 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
9510 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
9511 return
9512 fold_build2 (code, type,
9513 build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9514 build2 (RSHIFT_EXPR, TREE_TYPE (arg01),
9515 arg00, TREE_OPERAND (arg01, 1)),
9516 fold_convert (TREE_TYPE (arg0),
9517 integer_one_node)),
9518 arg1);
9519 }
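      /* E.g. "((1 << n) & b) == 0" folds to "((b >> n) & 1) == 0",
	 testing the bit directly.  */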
9520
9521 /* If this is an NE or EQ comparison of zero against the result of a
9522 signed MOD operation whose second operand is a power of 2, make
9523 the MOD operation unsigned since it is simpler and equivalent. */
9524 if ((code == NE_EXPR || code == EQ_EXPR)
9525 && integer_zerop (arg1)
9526 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
9527 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
9528 || TREE_CODE (arg0) == CEIL_MOD_EXPR
9529 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
9530 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
9531 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9532 {
9533 tree newtype = lang_hooks.types.unsigned_type (TREE_TYPE (arg0));
9534 tree newmod = fold_build2 (TREE_CODE (arg0), newtype,
9535 fold_convert (newtype,
9536 TREE_OPERAND (arg0, 0)),
9537 fold_convert (newtype,
9538 TREE_OPERAND (arg0, 1)));
9539
9540 return fold_build2 (code, type, newmod,
9541 fold_convert (newtype, arg1));
9542 }
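      /* E.g. for signed x, "x % 4 == 0" becomes
	 "(unsigned) x % 4 == 0"; a test against zero makes the
	 signedness of the MOD irrelevant.  */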
9543
9544 /* If this is an NE comparison of zero with an AND of one, remove the
9545 comparison since the AND will give the correct value. */
9546 if (code == NE_EXPR && integer_zerop (arg1)
9547 && TREE_CODE (arg0) == BIT_AND_EXPR
9548 && integer_onep (TREE_OPERAND (arg0, 1)))
9549 return fold_convert (type, arg0);
9550
9551 /* If we have (A & C) == C where C is a power of 2, convert this into
9552 (A & C) != 0. Similarly for NE_EXPR. */
9553 if ((code == EQ_EXPR || code == NE_EXPR)
9554 && TREE_CODE (arg0) == BIT_AND_EXPR
9555 && integer_pow2p (TREE_OPERAND (arg0, 1))
9556 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
9557 return fold_build2 (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
9558 arg0, fold_convert (TREE_TYPE (arg0),
9559 integer_zero_node));
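      /* E.g. "(a & 8) == 8" folds to "(a & 8) != 0": with a single-bit
	 mask the AND result is either 0 or the mask itself.  */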
9560
9561 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
9562 bit, then fold the expression into A < 0 or A >= 0. */
9563 tem = fold_single_bit_test_into_sign_test (code, arg0, arg1, type);
9564 if (tem)
9565 return tem;
9566
9567 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
9568 Similarly for NE_EXPR. */
9569 if ((code == EQ_EXPR || code == NE_EXPR)
9570 && TREE_CODE (arg0) == BIT_AND_EXPR
9571 && TREE_CODE (arg1) == INTEGER_CST
9572 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9573 {
9574 tree notc = fold_build1 (BIT_NOT_EXPR,
9575 TREE_TYPE (TREE_OPERAND (arg0, 1)),
9576 TREE_OPERAND (arg0, 1));
9577 tree dandnotc = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9578 arg1, notc);
9579 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9580 if (integer_nonzerop (dandnotc))
9581 return omit_one_operand (type, rslt, arg0);
9582 }
9583
9584 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
9585 Similarly for NE_EXPR. */
9586 if ((code == EQ_EXPR || code == NE_EXPR)
9587 && TREE_CODE (arg0) == BIT_IOR_EXPR
9588 && TREE_CODE (arg1) == INTEGER_CST
9589 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9590 {
9591 tree notd = fold_build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
9592 tree candnotd = fold_build2 (BIT_AND_EXPR, TREE_TYPE (arg0),
9593 TREE_OPERAND (arg0, 1), notd);
9594 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
9595 if (integer_nonzerop (candnotd))
9596 return omit_one_operand (type, rslt, arg0);
9597 }
9598
9599 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
9600 and similarly for >= into !=. */
9601 if ((code == LT_EXPR || code == GE_EXPR)
9602 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9603 && TREE_CODE (arg1) == LSHIFT_EXPR
9604 && integer_onep (TREE_OPERAND (arg1, 0)))
9605 return build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9606 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9607 TREE_OPERAND (arg1, 1)),
9608 fold_convert (TREE_TYPE (arg0), integer_zero_node));
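      /* E.g. for unsigned x, "x < (1 << y)" folds to "(x >> y) == 0"
	 and "x >= (1 << y)" folds to "(x >> y) != 0".  */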
9609
9610 else if ((code == LT_EXPR || code == GE_EXPR)
9611 && TYPE_UNSIGNED (TREE_TYPE (arg0))
9612 && (TREE_CODE (arg1) == NOP_EXPR
9613 || TREE_CODE (arg1) == CONVERT_EXPR)
9614 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
9615 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
9616 return
9617 build2 (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
9618 fold_convert (TREE_TYPE (arg0),
9619 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
9620 TREE_OPERAND (TREE_OPERAND (arg1, 0),
9621 1))),
9622 fold_convert (TREE_TYPE (arg0), integer_zero_node));
9623
9624 /* Simplify comparison of something with itself. (For IEEE
9625 floating-point, we can only do some of these simplifications.) */
9626 if (operand_equal_p (arg0, arg1, 0))
9627 {
9628 switch (code)
9629 {
9630 case EQ_EXPR:
9631 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9632 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9633 return constant_boolean_node (1, type);
9634 break;
9635
9636 case GE_EXPR:
9637 case LE_EXPR:
9638 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9639 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9640 return constant_boolean_node (1, type);
9641 return fold_build2 (EQ_EXPR, type, arg0, arg1);
9642
9643 case NE_EXPR:
9644 /* For NE, we can only do this simplification if the type is
9645 integral or we don't honor IEEE floating point NaNs. */
9646 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9647 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9648 break;
9649 /* ... fall through ... */
9650 case GT_EXPR:
9651 case LT_EXPR:
9652 return constant_boolean_node (0, type);
9653 default:
9654 gcc_unreachable ();
9655 }
9656 }
9657
9658 /* If we are comparing an expression that just has comparisons
9659 of two integer values, arithmetic expressions of those comparisons,
9660 and constants, we can simplify it. There are only three cases
9661 to check: the two values can either be equal, the first can be
9662 greater, or the second can be greater. Fold the expression for
9663 those three values. Since each value must be 0 or 1, we have
9664 eight possibilities, each of which corresponds to the constant 0
9665 or 1 or one of the six possible comparisons.
9666
9667 This handles common cases like (a > b) == 0 but also handles
9668 expressions like ((x > y) - (y > x)) > 0, which supposedly
9669 occur in macroized code. */
9670
9671 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9672 {
9673 tree cval1 = 0, cval2 = 0;
9674 int save_p = 0;
9675
9676 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9677 /* Don't handle degenerate cases here; they should already
9678 have been handled anyway. */
9679 && cval1 != 0 && cval2 != 0
9680 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9681 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9682 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9683 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9684 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9685 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9686 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9687 {
9688 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9689 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9690
9691 /* We can't just pass T to eval_subst in case cval1 or cval2
9692 was the same as ARG1. */
9693
9694 tree high_result
9695 = fold_build2 (code, type,
9696 eval_subst (arg0, cval1, maxval,
9697 cval2, minval),
9698 arg1);
9699 tree equal_result
9700 = fold_build2 (code, type,
9701 eval_subst (arg0, cval1, maxval,
9702 cval2, maxval),
9703 arg1);
9704 tree low_result
9705 = fold_build2 (code, type,
9706 eval_subst (arg0, cval1, minval,
9707 cval2, maxval),
9708 arg1);
9709
9710 /* All three of these results should be 0 or 1. Confirm they
9711 are. Then use those values to select the proper code
9712 to use. */
9713
9714 if ((integer_zerop (high_result)
9715 || integer_onep (high_result))
9716 && (integer_zerop (equal_result)
9717 || integer_onep (equal_result))
9718 && (integer_zerop (low_result)
9719 || integer_onep (low_result)))
9720 {
9721 /* Make a 3-bit mask with the high-order bit being the
9722 value for `>', the next for `=', and the low for `<'. */
9723 switch ((integer_onep (high_result) * 4)
9724 + (integer_onep (equal_result) * 2)
9725 + integer_onep (low_result))
9726 {
9727 case 0:
9728 /* Always false. */
9729 return omit_one_operand (type, integer_zero_node, arg0);
9730 case 1:
9731 code = LT_EXPR;
9732 break;
9733 case 2:
9734 code = EQ_EXPR;
9735 break;
9736 case 3:
9737 code = LE_EXPR;
9738 break;
9739 case 4:
9740 code = GT_EXPR;
9741 break;
9742 case 5:
9743 code = NE_EXPR;
9744 break;
9745 case 6:
9746 code = GE_EXPR;
9747 break;
9748 case 7:
9749 /* Always true. */
9750 return omit_one_operand (type, integer_one_node, arg0);
9751 }
9752
9753 if (save_p)
9754 return save_expr (build2 (code, type, cval1, cval2));
9755 else
9756 return fold_build2 (code, type, cval1, cval2);
9757 }
9758 }
9759 }
9760
9761 /* If this is a comparison of a field, we may be able to simplify it. */
9762 if (((TREE_CODE (arg0) == COMPONENT_REF
9763 && lang_hooks.can_use_bit_fields_p ())
9764 || TREE_CODE (arg0) == BIT_FIELD_REF)
9765 && (code == EQ_EXPR || code == NE_EXPR)
9766 /* Handle the constant case even without -O
9767 to make sure the warnings are given. */
9768 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
9769 {
9770 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
9771 if (t1)
9772 return t1;
9773 }
9774
9775 /* Fold a comparison of the address of COMPONENT_REFs with the same
9776 type and component to a comparison of the address of the base
9777 object. In short, &x->a OP &y->a to x OP y and
9778 &x->a OP &y.a to x OP &y */
9779 if (TREE_CODE (arg0) == ADDR_EXPR
9780 && TREE_CODE (TREE_OPERAND (arg0, 0)) == COMPONENT_REF
9781 && TREE_CODE (arg1) == ADDR_EXPR
9782 && TREE_CODE (TREE_OPERAND (arg1, 0)) == COMPONENT_REF)
9783 {
9784 tree cref0 = TREE_OPERAND (arg0, 0);
9785 tree cref1 = TREE_OPERAND (arg1, 0);
9786 if (TREE_OPERAND (cref0, 1) == TREE_OPERAND (cref1, 1))
9787 {
9788 tree op0 = TREE_OPERAND (cref0, 0);
9789 tree op1 = TREE_OPERAND (cref1, 0);
9790 return fold_build2 (code, type,
9791 build_fold_addr_expr (op0),
9792 build_fold_addr_expr (op1));
9793 }
9794 }
9795
9796 /* Optimize comparisons of strlen vs zero to a compare of the
9797 first character of the string vs zero. To wit,
9798 strlen(ptr) == 0 => *ptr == 0
9799 strlen(ptr) != 0 => *ptr != 0
9800 Other cases should reduce to one of these two (or a constant)
9801 due to the return value of strlen being unsigned. */
9802 if ((code == EQ_EXPR || code == NE_EXPR)
9803 && integer_zerop (arg1)
9804 && TREE_CODE (arg0) == CALL_EXPR)
9805 {
9806 tree fndecl = get_callee_fndecl (arg0);
9807 tree arglist;
9808
9809 if (fndecl
9810 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
9811 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
9812 && (arglist = TREE_OPERAND (arg0, 1))
9813 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
9814 && ! TREE_CHAIN (arglist))
9815 {
9816 tree iref = build_fold_indirect_ref (TREE_VALUE (arglist));
9817 return fold_build2 (code, type, iref,
9818 build_int_cst (TREE_TYPE (iref), 0));
9819 }
9820 }
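      /* E.g. "strlen (s) == 0" folds to "*s == 0", avoiding the
	 library call entirely.  */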
9821
9822 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9823 into a single range test. */
9824 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9825 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9826 && TREE_CODE (arg1) == INTEGER_CST
9827 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9828 && !integer_zerop (TREE_OPERAND (arg0, 1))
9829 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9830 && !TREE_OVERFLOW (arg1))
9831 {
9832 t1 = fold_div_compare (code, type, arg0, arg1);
9833 if (t1 != NULL_TREE)
9834 return t1;
9835 }
9836
9837 if ((code == EQ_EXPR || code == NE_EXPR)
9838 && integer_zerop (arg1)
9839 && tree_expr_nonzero_p (arg0))
9840 {
9841 tree res = constant_boolean_node (code == NE_EXPR, type);
9842 return omit_one_operand (type, res, arg0);
9843 }
9844
9845 t1 = fold_relational_const (code, type, arg0, arg1);
9846 return t1 == NULL_TREE ? NULL_TREE : t1;
9847
9848 case UNORDERED_EXPR:
9849 case ORDERED_EXPR:
9850 case UNLT_EXPR:
9851 case UNLE_EXPR:
9852 case UNGT_EXPR:
9853 case UNGE_EXPR:
9854 case UNEQ_EXPR:
9855 case LTGT_EXPR:
9856 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9857 {
9858 t1 = fold_relational_const (code, type, arg0, arg1);
9859 if (t1 != NULL_TREE)
9860 return t1;
9861 }
9862
9863 /* If the first operand is NaN, the result is constant. */
9864 if (TREE_CODE (arg0) == REAL_CST
9865 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
9866 && (code != LTGT_EXPR || ! flag_trapping_math))
9867 {
9868 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9869 ? integer_zero_node
9870 : integer_one_node;
9871 return omit_one_operand (type, t1, arg1);
9872 }
9873
9874 /* If the second operand is NaN, the result is constant. */
9875 if (TREE_CODE (arg1) == REAL_CST
9876 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
9877 && (code != LTGT_EXPR || ! flag_trapping_math))
9878 {
9879 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
9880 ? integer_zero_node
9881 : integer_one_node;
9882 return omit_one_operand (type, t1, arg0);
9883 }
9884
9885 /* Simplify unordered comparison of something with itself. */
9886 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
9887 && operand_equal_p (arg0, arg1, 0))
9888 return constant_boolean_node (1, type);
9889
9890 if (code == LTGT_EXPR
9891 && !flag_trapping_math
9892 && operand_equal_p (arg0, arg1, 0))
9893 return constant_boolean_node (0, type);
9894
9895 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9896 {
9897 tree targ0 = strip_float_extensions (arg0);
9898 tree targ1 = strip_float_extensions (arg1);
9899 tree newtype = TREE_TYPE (targ0);
9900
9901 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9902 newtype = TREE_TYPE (targ1);
9903
9904 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9905 return fold_build2 (code, type, fold_convert (newtype, targ0),
9906 fold_convert (newtype, targ1));
9907 }
9908
9909 return NULL_TREE;
9910
9911 case COMPOUND_EXPR:
9912 /* When pedantic, a compound expression can be neither an lvalue
9913 nor an integer constant expression. */
9914 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
9915 return NULL_TREE;
9916 /* Don't let (0, 0) be a null pointer constant. */
9917 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
9918 : fold_convert (type, arg1);
9919 return pedantic_non_lvalue (tem);
9920
9921 case COMPLEX_EXPR:
9922 if (wins)
9923 return build_complex (type, arg0, arg1);
9924 return NULL_TREE;
9925
9926 case ASSERT_EXPR:
9927 /* An ASSERT_EXPR should never be passed to fold_binary. */
9928 gcc_unreachable ();
9929
9930 default:
9931 return NULL_TREE;
9932 } /* switch (code) */
9933 }
9934
9935 /* Callback for walk_tree, looking for a LABEL_EXPR.
9936 Returns *TP if it is a LABEL_EXPR, otherwise NULL_TREE.
9937 Does not descend into the sub-tree of a GOTO_EXPR. */
9938
9939 static tree
9940 contains_label_1 (tree *tp,
9941 int *walk_subtrees,
9942 void *data ATTRIBUTE_UNUSED)
9943 {
9944 switch (TREE_CODE (*tp))
9945 {
9946 case LABEL_EXPR:
9947 return *tp;
9948 case GOTO_EXPR:
9949 *walk_subtrees = 0;
9950 /* no break */
9951 default:
9952 return NULL_TREE;
9953 }
9954 }
9955
9956 /* Checks whether the sub-tree ST contains a label LABEL_EXPR which is
9957 accessible from outside the sub-tree.  Returns true if such a
9958 label is found, false otherwise. */
9959
9960 static bool
9961 contains_label_p (tree st)
9962 {
9963 return (walk_tree (&st, contains_label_1 , NULL, NULL) != NULL_TREE);
9964 }
9965
9966 /* Fold a ternary expression of code CODE and type TYPE with operands
9967 OP0, OP1, and OP2. Return the folded expression if folding is
9968 successful. Otherwise, return NULL_TREE. */
9969
9970 tree
9971 fold_ternary (enum tree_code code, tree type, tree op0, tree op1, tree op2)
9972 {
9973 tree tem;
9974 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
9975 enum tree_code_class kind = TREE_CODE_CLASS (code);
9976
9977 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9978 && TREE_CODE_LENGTH (code) == 3);
9979
9980 /* Strip any conversions that don't change the mode. This is safe
9981 for every expression, except for a comparison expression because
9982 its signedness is derived from its operands. So, in the latter
9983 case, only strip conversions that don't change the signedness.
9984
9985 Note that this is done as an internal manipulation within the
9986 constant folder, in order to find the simplest representation of
9987 the arguments so that their form can be studied. In any case,
9988 the appropriate type conversions should be put back in the tree
9989 that will get out of the constant folder. */
9990 if (op0)
9991 {
9992 arg0 = op0;
9993 STRIP_NOPS (arg0);
9994 }
9995
9996 if (op1)
9997 {
9998 arg1 = op1;
9999 STRIP_NOPS (arg1);
10000 }
10001
10002 switch (code)
10003 {
10004 case COMPONENT_REF:
10005 if (TREE_CODE (arg0) == CONSTRUCTOR
10006 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
10007 {
10008 unsigned HOST_WIDE_INT idx;
10009 tree field, value;
10010 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
10011 if (field == arg1)
10012 return value;
10013 }
10014 return NULL_TREE;
10015
10016 case COND_EXPR:
10017 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
10018 so all simple results must be passed through pedantic_non_lvalue. */
10019 if (TREE_CODE (arg0) == INTEGER_CST)
10020 {
10021 tree unused_op = integer_zerop (arg0) ? op1 : op2;
10022 tem = integer_zerop (arg0) ? op2 : op1;
10023 /* Only optimize constant conditions when the selected branch
10024 has the same type as the COND_EXPR. This avoids optimizing
10025 away "c ? x : throw", where the throw has a void type.
10026 Avoid discarding an unused operand that contains a label. */
10027 if ((!TREE_SIDE_EFFECTS (unused_op)
10028 || !contains_label_p (unused_op))
10029 && (! VOID_TYPE_P (TREE_TYPE (tem))
10030 || VOID_TYPE_P (type)))
10031 return pedantic_non_lvalue (tem);
10032 return NULL_TREE;
10033 }
10034 if (operand_equal_p (arg1, op2, 0))
10035 return pedantic_omit_one_operand (type, arg1, arg0);
10036
10037 /* If we have A op B ? A : C, we may be able to convert this to a
10038 simpler expression, depending on the operation and the values
10039 of B and C. Signed zeros prevent all of these transformations,
10040 for reasons given above each one.
10041
10042 Also try swapping the arguments and inverting the conditional. */
10043 if (COMPARISON_CLASS_P (arg0)
10044 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10045 arg1, TREE_OPERAND (arg0, 1))
10046 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
10047 {
10048 tem = fold_cond_expr_with_comparison (type, arg0, op1, op2);
10049 if (tem)
10050 return tem;
10051 }
10052
10053 if (COMPARISON_CLASS_P (arg0)
10054 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
10055 op2,
10056 TREE_OPERAND (arg0, 1))
10057 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
10058 {
10059 tem = invert_truthvalue (arg0);
10060 if (COMPARISON_CLASS_P (tem))
10061 {
10062 tem = fold_cond_expr_with_comparison (type, tem, op2, op1);
10063 if (tem)
10064 return tem;
10065 }
10066 }
10067
10068 /* If the second operand is simpler than the third, swap them
10069 since that produces better jump optimization results. */
10070 if (truth_value_p (TREE_CODE (arg0))
10071 && tree_swap_operands_p (op1, op2, false))
10072 {
10073 /* See if this can be inverted. If it can't, possibly because
10074 it was a floating-point inequality comparison, don't do
10075 anything. */
10076 tem = invert_truthvalue (arg0);
10077
10078 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10079 return fold_build3 (code, type, tem, op2, op1);
10080 }
10081
10082 /* Convert A ? 1 : 0 to simply A. */
10083 if (integer_onep (op1)
10084 && integer_zerop (op2)
10085 /* If we try to convert OP0 to our type, the
10086 call to fold will try to move the conversion inside
10087 a COND, which will recurse. In that case, the COND_EXPR
10088 is probably the best choice, so leave it alone. */
10089 && type == TREE_TYPE (arg0))
10090 return pedantic_non_lvalue (arg0);
10091
10092 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
10093 over COND_EXPR in cases such as floating point comparisons. */
10094 if (integer_zerop (op1)
10095 && integer_onep (op2)
10096 && truth_value_p (TREE_CODE (arg0)))
10097 return pedantic_non_lvalue (fold_convert (type,
10098 invert_truthvalue (arg0)));
10099
10100 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
10101 if (TREE_CODE (arg0) == LT_EXPR
10102 && integer_zerop (TREE_OPERAND (arg0, 1))
10103 && integer_zerop (op2)
10104 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
10105 return fold_convert (type, fold_build2 (BIT_AND_EXPR,
10106 TREE_TYPE (tem), tem, arg1));
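      /* E.g. with 32-bit int, "x < 0 ? INT_MIN : 0" folds to
	 "x & INT_MIN", extracting the sign bit without a branch.  */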
10107
10108 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
10109 already handled above. */
10110 if (TREE_CODE (arg0) == BIT_AND_EXPR
10111 && integer_onep (TREE_OPERAND (arg0, 1))
10112 && integer_zerop (op2)
10113 && integer_pow2p (arg1))
10114 {
10115 tree tem = TREE_OPERAND (arg0, 0);
10116 STRIP_NOPS (tem);
10117 if (TREE_CODE (tem) == RSHIFT_EXPR
10118 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
10119 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
10120 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
10121 return fold_build2 (BIT_AND_EXPR, type,
10122 TREE_OPERAND (tem, 0), arg1);
10123 }
10124
10125 /* A & N ? N : 0 is simply A & N if N is a power of two. This
10126 is probably obsolete because the first operand should be a
10127 truth value (that's why we have the two cases above), but let's
10128 leave it in until we can confirm this for all front-ends. */
10129 if (integer_zerop (op2)
10130 && TREE_CODE (arg0) == NE_EXPR
10131 && integer_zerop (TREE_OPERAND (arg0, 1))
10132 && integer_pow2p (arg1)
10133 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10134 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10135 arg1, OEP_ONLY_CONST))
10136 return pedantic_non_lvalue (fold_convert (type,
10137 TREE_OPERAND (arg0, 0)));
10138
10139 /* Convert A ? B : 0 into A && B if A and B are truth values. */
10140 if (integer_zerop (op2)
10141 && truth_value_p (TREE_CODE (arg0))
10142 && truth_value_p (TREE_CODE (arg1)))
10143 return fold_build2 (TRUTH_ANDIF_EXPR, type, arg0, arg1);
10144
10145 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
10146 if (integer_onep (op2)
10147 && truth_value_p (TREE_CODE (arg0))
10148 && truth_value_p (TREE_CODE (arg1)))
10149 {
10150 /* Only perform transformation if ARG0 is easily inverted. */
10151 tem = invert_truthvalue (arg0);
10152 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10153 return fold_build2 (TRUTH_ORIF_EXPR, type, tem, arg1);
10154 }
10155
10156 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
10157 if (integer_zerop (arg1)
10158 && truth_value_p (TREE_CODE (arg0))
10159 && truth_value_p (TREE_CODE (op2)))
10160 {
10161 /* Only perform transformation if ARG0 is easily inverted. */
10162 tem = invert_truthvalue (arg0);
10163 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
10164 return fold_build2 (TRUTH_ANDIF_EXPR, type, tem, op2);
10165 }
10166
10167 /* Convert A ? 1 : B into A || B if A and B are truth values. */
10168 if (integer_onep (arg1)
10169 && truth_value_p (TREE_CODE (arg0))
10170 && truth_value_p (TREE_CODE (op2)))
10171 return fold_build2 (TRUTH_ORIF_EXPR, type, arg0, op2);
10172
10173 return NULL_TREE;
10174
10175 case CALL_EXPR:
10176 /* Check for a built-in function. */
10177 if (TREE_CODE (op0) == ADDR_EXPR
10178 && TREE_CODE (TREE_OPERAND (op0, 0)) == FUNCTION_DECL
10179 && DECL_BUILT_IN (TREE_OPERAND (op0, 0)))
10180 return fold_builtin (TREE_OPERAND (op0, 0), op1, false);
10181 return NULL_TREE;
10182
10183 case BIT_FIELD_REF:
10184 if (TREE_CODE (arg0) == VECTOR_CST
10185 && type == TREE_TYPE (TREE_TYPE (arg0))
10186 && host_integerp (arg1, 1)
10187 && host_integerp (op2, 1))
10188 {
10189 unsigned HOST_WIDE_INT width = tree_low_cst (arg1, 1);
10190 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
10191
10192 if (width != 0
10193 && simple_cst_equal (arg1, TYPE_SIZE (type)) == 1
10194 && (idx % width) == 0
10195 && (idx = idx / width)
10196 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
10197 {
10198 tree elements = TREE_VECTOR_CST_ELTS (arg0);
10199 while (idx-- > 0 && elements)
10200 elements = TREE_CHAIN (elements);
10201 if (elements)
10202 return TREE_VALUE (elements);
10203 else
10204 return fold_convert (type, integer_zero_node);
10205 }
10206 }
10207 return NULL_TREE;
10208
10209 default:
10210 return NULL_TREE;
10211 } /* switch (code) */
10212 }
10213
10214 /* Perform constant folding and related simplification of EXPR.
10215 The related simplifications include x*1 => x, x*0 => 0, etc.,
10216 and application of the associative law.
10217 NOP_EXPR conversions may be removed freely (as long as we
10218 are careful not to change the type of the overall expression).
10219 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
10220 but we can constant-fold them if they have constant operands. */
10221
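/* For example, folding the tree for (x * 1) yields the tree for x,
   and folding (2 + 3) yields the INTEGER_CST 5. A minimal sketch of
   a call site:

     tree sum = fold (build2 (PLUS_EXPR, integer_type_node,
                              build_int_cst (integer_type_node, 2),
                              build_int_cst (integer_type_node, 3)));

   The fold_buildN routines below combine the build and fold steps. */
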
10222 #ifdef ENABLE_FOLD_CHECKING
10223 # define fold(x) fold_1 (x)
10224 static tree fold_1 (tree);
10225 static
10226 #endif
10227 tree
10228 fold (tree expr)
10229 {
10230 const tree t = expr;
10231 enum tree_code code = TREE_CODE (t);
10232 enum tree_code_class kind = TREE_CODE_CLASS (code);
10233 tree tem;
10234
10235 /* Return right away if a constant. */
10236 if (kind == tcc_constant)
10237 return t;
10238
10239 if (IS_EXPR_CODE_CLASS (kind))
10240 {
10241 tree type = TREE_TYPE (t);
10242 tree op0, op1, op2;
10243
10244 switch (TREE_CODE_LENGTH (code))
10245 {
10246 case 1:
10247 op0 = TREE_OPERAND (t, 0);
10248 tem = fold_unary (code, type, op0);
10249 return tem ? tem : expr;
10250 case 2:
10251 op0 = TREE_OPERAND (t, 0);
10252 op1 = TREE_OPERAND (t, 1);
10253 tem = fold_binary (code, type, op0, op1);
10254 return tem ? tem : expr;
10255 case 3:
10256 op0 = TREE_OPERAND (t, 0);
10257 op1 = TREE_OPERAND (t, 1);
10258 op2 = TREE_OPERAND (t, 2);
10259 tem = fold_ternary (code, type, op0, op1, op2);
10260 return tem ? tem : expr;
10261 default:
10262 break;
10263 }
10264 }
10265
10266 switch (code)
10267 {
10268 case CONST_DECL:
10269 return fold (DECL_INITIAL (t));
10270
10271 default:
10272 return t;
10273 } /* switch (code) */
10274 }
10275
10276 #ifdef ENABLE_FOLD_CHECKING
10277 #undef fold
10278
10279 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
10280 static void fold_check_failed (tree, tree);
10281 void print_fold_checksum (tree);
10282
10283 /* When --enable-checking=fold is used, compute a digest of EXPR before
10284 and after the actual fold call, to verify that fold did not
10285 accidentally change the original EXPR. */
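/* For instance, if a transformation incorrectly modified one of
   EXPR's operands in place instead of building a new node, the MD5
   digest of EXPR would differ before and after fold_1, and
   fold_check_failed would report an internal error. */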
10286
10287 tree
10288 fold (tree expr)
10289 {
10290 tree ret;
10291 struct md5_ctx ctx;
10292 unsigned char checksum_before[16], checksum_after[16];
10293 htab_t ht;
10294
10295 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10296 md5_init_ctx (&ctx);
10297 fold_checksum_tree (expr, &ctx, ht);
10298 md5_finish_ctx (&ctx, checksum_before);
10299 htab_empty (ht);
10300
10301 ret = fold_1 (expr);
10302
10303 md5_init_ctx (&ctx);
10304 fold_checksum_tree (expr, &ctx, ht);
10305 md5_finish_ctx (&ctx, checksum_after);
10306 htab_delete (ht);
10307
10308 if (memcmp (checksum_before, checksum_after, 16))
10309 fold_check_failed (expr, ret);
10310
10311 return ret;
10312 }
10313
10314 void
10315 print_fold_checksum (tree expr)
10316 {
10317 struct md5_ctx ctx;
10318 unsigned char checksum[16], cnt;
10319 htab_t ht;
10320
10321 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10322 md5_init_ctx (&ctx);
10323 fold_checksum_tree (expr, &ctx, ht);
10324 md5_finish_ctx (&ctx, checksum);
10325 htab_delete (ht);
10326 for (cnt = 0; cnt < 16; ++cnt)
10327 fprintf (stderr, "%02x", checksum[cnt]);
10328 putc ('\n', stderr);
10329 }
10330
10331 static void
10332 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
10333 {
10334 internal_error ("fold check: original tree changed by fold");
10335 }
10336
10337 static void
10338 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
10339 {
10340 void **slot;
10341 enum tree_code code;
10342 char buf[sizeof (struct tree_function_decl)];
10343 int i, len;
10344
10345 recursive_label:
10346
10347 gcc_assert ((sizeof (struct tree_exp) + 5 * sizeof (tree)
10348 <= sizeof (struct tree_function_decl))
10349 && sizeof (struct tree_type) <= sizeof (struct tree_function_decl));
10350 if (expr == NULL)
10351 return;
10352 slot = htab_find_slot (ht, expr, INSERT);
10353 if (*slot != NULL)
10354 return;
10355 *slot = expr;
10356 code = TREE_CODE (expr);
10357 if (TREE_CODE_CLASS (code) == tcc_declaration
10358 && DECL_ASSEMBLER_NAME_SET_P (expr))
10359 {
10360 /* Allow DECL_ASSEMBLER_NAME to be modified. */
10361 memcpy (buf, expr, tree_size (expr));
10362 expr = (tree) buf;
10363 SET_DECL_ASSEMBLER_NAME (expr, NULL);
10364 }
10365 else if (TREE_CODE_CLASS (code) == tcc_type
10366 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)
10367 || TYPE_CACHED_VALUES_P (expr)
10368 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)))
10369 {
10370 /* Allow these fields to be modified. */
10371 memcpy (buf, expr, tree_size (expr));
10372 expr = (tree) buf;
10373 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr) = 0;
10374 TYPE_POINTER_TO (expr) = NULL;
10375 TYPE_REFERENCE_TO (expr) = NULL;
10376 if (TYPE_CACHED_VALUES_P (expr))
10377 {
10378 TYPE_CACHED_VALUES_P (expr) = 0;
10379 TYPE_CACHED_VALUES (expr) = NULL;
10380 }
10381 }
10382 md5_process_bytes (expr, tree_size (expr), ctx);
10383 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
10384 if (TREE_CODE_CLASS (code) != tcc_type
10385 && TREE_CODE_CLASS (code) != tcc_declaration
10386 && code != TREE_LIST)
10387 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
10388 switch (TREE_CODE_CLASS (code))
10389 {
10390 case tcc_constant:
10391 switch (code)
10392 {
10393 case STRING_CST:
10394 md5_process_bytes (TREE_STRING_POINTER (expr),
10395 TREE_STRING_LENGTH (expr), ctx);
10396 break;
10397 case COMPLEX_CST:
10398 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
10399 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
10400 break;
10401 case VECTOR_CST:
10402 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
10403 break;
10404 default:
10405 break;
10406 }
10407 break;
10408 case tcc_exceptional:
10409 switch (code)
10410 {
10411 case TREE_LIST:
10412 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
10413 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
10414 expr = TREE_CHAIN (expr);
10415 goto recursive_label;
10416 break;
10417 case TREE_VEC:
10418 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
10419 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
10420 break;
10421 default:
10422 break;
10423 }
10424 break;
10425 case tcc_expression:
10426 case tcc_reference:
10427 case tcc_comparison:
10428 case tcc_unary:
10429 case tcc_binary:
10430 case tcc_statement:
10431 len = TREE_CODE_LENGTH (code);
10432 for (i = 0; i < len; ++i)
10433 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
10434 break;
10435 case tcc_declaration:
10436 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
10437 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
10438 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
10439 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
10440 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
10441 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
10442 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
10443 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
10444 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
10445
10446 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
10447 {
10448 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
10449 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
10450 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
10451 }
10452 break;
10453 case tcc_type:
10454 if (TREE_CODE (expr) == ENUMERAL_TYPE)
10455 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
10456 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
10457 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
10458 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
10459 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
10460 if (INTEGRAL_TYPE_P (expr)
10461 || SCALAR_FLOAT_TYPE_P (expr))
10462 {
10463 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
10464 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
10465 }
10466 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
10467 if (TREE_CODE (expr) == RECORD_TYPE
10468 || TREE_CODE (expr) == UNION_TYPE
10469 || TREE_CODE (expr) == QUAL_UNION_TYPE)
10470 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
10471 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
10472 break;
10473 default:
10474 break;
10475 }
10476 }
10477
10478 #endif
10479
10480 /* Fold a unary tree expression with code CODE of type TYPE with an
10481 operand OP0. Return a folded expression if successful. Otherwise,
10482 return a tree expression with code CODE of type TYPE with an
10483 operand OP0. */
10484
10485 tree
10486 fold_build1_stat (enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
10487 {
10488 tree tem;
10489 #ifdef ENABLE_FOLD_CHECKING
10490 unsigned char checksum_before[16], checksum_after[16];
10491 struct md5_ctx ctx;
10492 htab_t ht;
10493
10494 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10495 md5_init_ctx (&ctx);
10496 fold_checksum_tree (op0, &ctx, ht);
10497 md5_finish_ctx (&ctx, checksum_before);
10498 htab_empty (ht);
10499 #endif
10500
10501 tem = fold_unary (code, type, op0);
10502 if (!tem)
10503 tem = build1_stat (code, type, op0 PASS_MEM_STAT);
10504
10505 #ifdef ENABLE_FOLD_CHECKING
10506 md5_init_ctx (&ctx);
10507 fold_checksum_tree (op0, &ctx, ht);
10508 md5_finish_ctx (&ctx, checksum_after);
10509 htab_delete (ht);
10510
10511 if (memcmp (checksum_before, checksum_after, 16))
10512 fold_check_failed (op0, tem);
10513 #endif
10514 return tem;
10515 }
10516
10517 /* Fold a binary tree expression with code CODE of type TYPE with
10518 operands OP0 and OP1. Return a folded expression if successful.
10519 Otherwise, return a tree expression with code CODE of type TYPE
10520 with operands OP0 and OP1. */
10521
10522 tree
10523 fold_build2_stat (enum tree_code code, tree type, tree op0, tree op1
10524 MEM_STAT_DECL)
10525 {
10526 tree tem;
10527 #ifdef ENABLE_FOLD_CHECKING
10528 unsigned char checksum_before_op0[16],
10529 checksum_before_op1[16],
10530 checksum_after_op0[16],
10531 checksum_after_op1[16];
10532 struct md5_ctx ctx;
10533 htab_t ht;
10534
10535 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10536 md5_init_ctx (&ctx);
10537 fold_checksum_tree (op0, &ctx, ht);
10538 md5_finish_ctx (&ctx, checksum_before_op0);
10539 htab_empty (ht);
10540
10541 md5_init_ctx (&ctx);
10542 fold_checksum_tree (op1, &ctx, ht);
10543 md5_finish_ctx (&ctx, checksum_before_op1);
10544 htab_empty (ht);
10545 #endif
10546
10547 tem = fold_binary (code, type, op0, op1);
10548 if (!tem)
10549 tem = build2_stat (code, type, op0, op1 PASS_MEM_STAT);
10550
10551 #ifdef ENABLE_FOLD_CHECKING
10552 md5_init_ctx (&ctx);
10553 fold_checksum_tree (op0, &ctx, ht);
10554 md5_finish_ctx (&ctx, checksum_after_op0);
10555 htab_empty (ht);
10556
10557 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10558 fold_check_failed (op0, tem);
10559
10560 md5_init_ctx (&ctx);
10561 fold_checksum_tree (op1, &ctx, ht);
10562 md5_finish_ctx (&ctx, checksum_after_op1);
10563 htab_delete (ht);
10564
10565 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10566 fold_check_failed (op1, tem);
10567 #endif
10568 return tem;
10569 }
10570
10571 /* Fold a ternary tree expression with code CODE of type TYPE with
10572 operands OP0, OP1, and OP2. Return a folded expression if
10573 successful. Otherwise, return a tree expression with code CODE of
10574 type TYPE with operands OP0, OP1, and OP2. */
10575
10576 tree
10577 fold_build3_stat (enum tree_code code, tree type, tree op0, tree op1, tree op2
10578 MEM_STAT_DECL)
10579 {
10580 tree tem;
10581 #ifdef ENABLE_FOLD_CHECKING
10582 unsigned char checksum_before_op0[16],
10583 checksum_before_op1[16],
10584 checksum_before_op2[16],
10585 checksum_after_op0[16],
10586 checksum_after_op1[16],
10587 checksum_after_op2[16];
10588 struct md5_ctx ctx;
10589 htab_t ht;
10590
10591 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
10592 md5_init_ctx (&ctx);
10593 fold_checksum_tree (op0, &ctx, ht);
10594 md5_finish_ctx (&ctx, checksum_before_op0);
10595 htab_empty (ht);
10596
10597 md5_init_ctx (&ctx);
10598 fold_checksum_tree (op1, &ctx, ht);
10599 md5_finish_ctx (&ctx, checksum_before_op1);
10600 htab_empty (ht);
10601
10602 md5_init_ctx (&ctx);
10603 fold_checksum_tree (op2, &ctx, ht);
10604 md5_finish_ctx (&ctx, checksum_before_op2);
10605 htab_empty (ht);
10606 #endif
10607
10608 tem = fold_ternary (code, type, op0, op1, op2);
10609 if (!tem)
10610 tem = build3_stat (code, type, op0, op1, op2 PASS_MEM_STAT);
10611
10612 #ifdef ENABLE_FOLD_CHECKING
10613 md5_init_ctx (&ctx);
10614 fold_checksum_tree (op0, &ctx, ht);
10615 md5_finish_ctx (&ctx, checksum_after_op0);
10616 htab_empty (ht);
10617
10618 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
10619 fold_check_failed (op0, tem);
10620
10621 md5_init_ctx (&ctx);
10622 fold_checksum_tree (op1, &ctx, ht);
10623 md5_finish_ctx (&ctx, checksum_after_op1);
10624 htab_empty (ht);
10625
10626 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
10627 fold_check_failed (op1, tem);
10628
10629 md5_init_ctx (&ctx);
10630 fold_checksum_tree (op2, &ctx, ht);
10631 md5_finish_ctx (&ctx, checksum_after_op2);
10632 htab_delete (ht);
10633
10634 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
10635 fold_check_failed (op2, tem);
10636 #endif
10637 return tem;
10638 }
10639
10640 /* Perform constant folding and related simplification of initializer
10641 expressions. These functions behave identically to "fold_buildN" but
10642 ignore potential run-time traps and exceptions that fold must preserve. */
10643
10644 #define START_FOLD_INIT \
10645 int saved_signaling_nans = flag_signaling_nans;\
10646 int saved_trapping_math = flag_trapping_math;\
10647 int saved_rounding_math = flag_rounding_math;\
10648 int saved_trapv = flag_trapv;\
10649 flag_signaling_nans = 0;\
10650 flag_trapping_math = 0;\
10651 flag_rounding_math = 0;\
10652 flag_trapv = 0
10653
10654 #define END_FOLD_INIT \
10655 flag_signaling_nans = saved_signaling_nans;\
10656 flag_trapping_math = saved_trapping_math;\
10657 flag_rounding_math = saved_rounding_math;\
10658 flag_trapv = saved_trapv
10659
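/* For example, with -frounding-math in effect, fold declines to
   simplify 1.0 / 3.0 because the result is inexact and depends on the
   run-time rounding mode; fold_build2_initializer clears
   flag_rounding_math around the call, so a static initializer such as

     static const double third = 1.0 / 3.0;

   can still be folded to a constant. (An illustration; the foldings
   enabled depend on exactly the four flags saved above.) */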
10660 tree
10661 fold_build1_initializer (enum tree_code code, tree type, tree op)
10662 {
10663 tree result;
10664 START_FOLD_INIT;
10665
10666 result = fold_build1 (code, type, op);
10667
10668 END_FOLD_INIT;
10669 return result;
10670 }
10671
10672 tree
10673 fold_build2_initializer (enum tree_code code, tree type, tree op0, tree op1)
10674 {
10675 tree result;
10676 START_FOLD_INIT;
10677
10678 result = fold_build2 (code, type, op0, op1);
10679
10680 END_FOLD_INIT;
10681 return result;
10682 }
10683
10684 tree
10685 fold_build3_initializer (enum tree_code code, tree type, tree op0, tree op1,
10686 tree op2)
10687 {
10688 tree result;
10689 START_FOLD_INIT;
10690
10691 result = fold_build3 (code, type, op0, op1, op2);
10692
10693 END_FOLD_INIT;
10694 return result;
10695 }
10696
10697 #undef START_FOLD_INIT
10698 #undef END_FOLD_INIT
10699
10700 /* Determine whether the first argument is a multiple of the second.
10701 Return 0 if it is not, or if we cannot easily determine it to be.
10702
10703 An example of the sort of thing we care about (at this point; this routine
10704 could surely be made more general, and expanded to do what the *_DIV_EXPR's
10705 fold cases do now) is discovering that
10706
10707 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10708
10709 is a multiple of
10710
10711 SAVE_EXPR (J * 8)
10712
10713 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
10714
10715 This code also handles discovering that
10716
10717 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
10718
10719 is a multiple of 8 so we don't have to worry about dealing with a
10720 possible remainder.
10721
10722 Note that we *look* inside a SAVE_EXPR only to determine how it was
10723 calculated; it is not safe for fold to do much of anything else with the
10724 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
10725 at run time. For example, the latter example above *cannot* be implemented
10726 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
10727 evaluation time of the original SAVE_EXPR is not necessarily the same at
10728 the time the new expression is evaluated. The only optimization of this
10729 sort that would be valid is changing
10730
10731 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
10732
10733 divided by 8 to
10734
10735 SAVE_EXPR (I) * SAVE_EXPR (J)
10736
10737 (where the same SAVE_EXPR (J) is used in the original and the
10738 transformed version). */
10739
10740 static int
10741 multiple_of_p (tree type, tree top, tree bottom)
10742 {
10743 if (operand_equal_p (top, bottom, 0))
10744 return 1;
10745
10746 if (TREE_CODE (type) != INTEGER_TYPE)
10747 return 0;
10748
10749 switch (TREE_CODE (top))
10750 {
10751 case BIT_AND_EXPR:
10752 /* Bitwise and provides a power of two multiple. If the mask is
10753 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
10754 if (!integer_pow2p (bottom))
10755 return 0;
10756 /* FALLTHRU */
10757
10758 case MULT_EXPR:
10759 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10760 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10761
10762 case PLUS_EXPR:
10763 case MINUS_EXPR:
10764 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
10765 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
10766
10767 case LSHIFT_EXPR:
10768 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
10769 {
10770 tree op1, t1;
10771
10772 op1 = TREE_OPERAND (top, 1);
10773 /* const_binop may not detect overflow correctly,
10774 so check for it explicitly here. */
10775 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
10776 > TREE_INT_CST_LOW (op1)
10777 && TREE_INT_CST_HIGH (op1) == 0
10778 && 0 != (t1 = fold_convert (type,
10779 const_binop (LSHIFT_EXPR,
10780 size_one_node,
10781 op1, 0)))
10782 && ! TREE_OVERFLOW (t1))
10783 return multiple_of_p (type, t1, bottom);
10784 }
10785 return 0;
10786
10787 case NOP_EXPR:
10788 /* Can't handle conversions from non-integral or wider integral type. */
10789 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
10790 || (TYPE_PRECISION (type)
10791 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
10792 return 0;
10793
10794 /* ... fall through ... */
10795
10796 case SAVE_EXPR:
10797 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
10798
10799 case INTEGER_CST:
10800 if (TREE_CODE (bottom) != INTEGER_CST
10801 || (TYPE_UNSIGNED (type)
10802 && (tree_int_cst_sgn (top) < 0
10803 || tree_int_cst_sgn (bottom) < 0)))
10804 return 0;
10805 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
10806 top, bottom, 0));
10807
10808 default:
10809 return 0;
10810 }
10811 }
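
/* For example, multiple_of_p (sizetype, size_int (24), size_int (8))
   reaches the INTEGER_CST case and returns 1 because 24 % 8 == 0,
   while an expression such as J * 8 is recognized as a multiple of 8
   through the MULT_EXPR recursion above. */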
10812
10813 /* Return true if `t' is known to be non-negative. */
10814
10815 int
10816 tree_expr_nonnegative_p (tree t)
10817 {
10818 if (TYPE_UNSIGNED (TREE_TYPE (t)))
10819 return 1;
10820
10821 switch (TREE_CODE (t))
10822 {
10823 case ABS_EXPR:
10824 /* We can't return 1 if flag_wrapv is set because
10825 ABS_EXPR<INT_MIN> = INT_MIN. */
10826 if (!(flag_wrapv && INTEGRAL_TYPE_P (TREE_TYPE (t))))
10827 return 1;
10828 break;
10829
10830 case INTEGER_CST:
10831 return tree_int_cst_sgn (t) >= 0;
10832
10833 case REAL_CST:
10834 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
10835
10836 case PLUS_EXPR:
10837 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10838 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10839 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10840
10841 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
10842 both unsigned and at least 2 bits shorter than the result. */
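/* E.g. (int) (unsigned short) x + (int) (unsigned short) y needs at
   most 17 bits, so it cannot set the sign bit of a 32-bit int
   (assuming 16-bit short and 32-bit int). */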
10843 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10844 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10845 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10846 {
10847 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10848 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10849 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10850 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10851 {
10852 unsigned int prec = MAX (TYPE_PRECISION (inner1),
10853 TYPE_PRECISION (inner2)) + 1;
10854 return prec < TYPE_PRECISION (TREE_TYPE (t));
10855 }
10856 }
10857 break;
10858
10859 case MULT_EXPR:
10860 if (FLOAT_TYPE_P (TREE_TYPE (t)))
10861 {
10862 /* x * x for floating point x is always non-negative. */
10863 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
10864 return 1;
10865 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10866 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10867 }
10868
10869 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
10870 both unsigned and the sum of their precisions is less than that of the result. */
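/* E.g. (int) (unsigned char) x * (int) (unsigned char) y needs at
   most 8 + 8 == 16 bits, assuming 8-bit char and 32-bit int. */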
10871 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
10872 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
10873 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
10874 {
10875 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
10876 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
10877 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
10878 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
10879 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
10880 < TYPE_PRECISION (TREE_TYPE (t));
10881 }
10882 return 0;
10883
10884 case BIT_AND_EXPR:
10885 case MAX_EXPR:
10886 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10887 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10888
10889 case BIT_IOR_EXPR:
10890 case BIT_XOR_EXPR:
10891 case MIN_EXPR:
10892 case RDIV_EXPR:
10893 case TRUNC_DIV_EXPR:
10894 case CEIL_DIV_EXPR:
10895 case FLOOR_DIV_EXPR:
10896 case ROUND_DIV_EXPR:
10897 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
10898 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10899
10900 case TRUNC_MOD_EXPR:
10901 case CEIL_MOD_EXPR:
10902 case FLOOR_MOD_EXPR:
10903 case ROUND_MOD_EXPR:
10904 case SAVE_EXPR:
10905 case NON_LVALUE_EXPR:
10906 case FLOAT_EXPR:
10907 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10908
10909 case COMPOUND_EXPR:
10910 case MODIFY_EXPR:
10911 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10912
10913 case BIND_EXPR:
10914 return tree_expr_nonnegative_p (expr_last (TREE_OPERAND (t, 1)));
10915
10916 case COND_EXPR:
10917 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
10918 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
10919
10920 case NOP_EXPR:
10921 {
10922 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
10923 tree outer_type = TREE_TYPE (t);
10924
10925 if (TREE_CODE (outer_type) == REAL_TYPE)
10926 {
10927 if (TREE_CODE (inner_type) == REAL_TYPE)
10928 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10929 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10930 {
10931 if (TYPE_UNSIGNED (inner_type))
10932 return 1;
10933 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
10934 }
10935 }
10936 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
10937 {
10938 if (TREE_CODE (inner_type) == REAL_TYPE)
10939 return tree_expr_nonnegative_p (TREE_OPERAND (t,0));
10940 if (TREE_CODE (inner_type) == INTEGER_TYPE)
10941 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
10942 && TYPE_UNSIGNED (inner_type);
10943 }
10944 }
10945 break;
10946
10947 case TARGET_EXPR:
10948 {
10949 tree temp = TARGET_EXPR_SLOT (t);
10950 t = TARGET_EXPR_INITIAL (t);
10951
10952 /* If the initializer is non-void, then it's a normal expression
10953 that will be assigned to the slot. */
10954 if (!VOID_TYPE_P (t))
10955 return tree_expr_nonnegative_p (t);
10956
10957 /* Otherwise, the initializer sets the slot in some way. One common
10958 way is an assignment statement at the end of the initializer. */
10959 while (1)
10960 {
10961 if (TREE_CODE (t) == BIND_EXPR)
10962 t = expr_last (BIND_EXPR_BODY (t));
10963 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
10964 || TREE_CODE (t) == TRY_CATCH_EXPR)
10965 t = expr_last (TREE_OPERAND (t, 0));
10966 else if (TREE_CODE (t) == STATEMENT_LIST)
10967 t = expr_last (t);
10968 else
10969 break;
10970 }
10971 if (TREE_CODE (t) == MODIFY_EXPR
10972 && TREE_OPERAND (t, 0) == temp)
10973 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
10974
10975 return 0;
10976 }
10977
10978 case CALL_EXPR:
10979 {
10980 tree fndecl = get_callee_fndecl (t);
10981 tree arglist = TREE_OPERAND (t, 1);
10982 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
10983 switch (DECL_FUNCTION_CODE (fndecl))
10984 {
10985 #define CASE_BUILTIN_F(BUILT_IN_FN) \
10986 case BUILT_IN_FN: case BUILT_IN_FN##F: case BUILT_IN_FN##L:
10987 #define CASE_BUILTIN_I(BUILT_IN_FN) \
10988 case BUILT_IN_FN: case BUILT_IN_FN##L: case BUILT_IN_FN##LL:
10989
10990 CASE_BUILTIN_F (BUILT_IN_ACOS)
10991 CASE_BUILTIN_F (BUILT_IN_ACOSH)
10992 CASE_BUILTIN_F (BUILT_IN_CABS)
10993 CASE_BUILTIN_F (BUILT_IN_COSH)
10994 CASE_BUILTIN_F (BUILT_IN_ERFC)
10995 CASE_BUILTIN_F (BUILT_IN_EXP)
10996 CASE_BUILTIN_F (BUILT_IN_EXP10)
10997 CASE_BUILTIN_F (BUILT_IN_EXP2)
10998 CASE_BUILTIN_F (BUILT_IN_FABS)
10999 CASE_BUILTIN_F (BUILT_IN_FDIM)
11000 CASE_BUILTIN_F (BUILT_IN_HYPOT)
11001 CASE_BUILTIN_F (BUILT_IN_POW10)
11002 CASE_BUILTIN_I (BUILT_IN_FFS)
11003 CASE_BUILTIN_I (BUILT_IN_PARITY)
11004 CASE_BUILTIN_I (BUILT_IN_POPCOUNT)
11005 /* Always true. */
11006 return 1;
11007
11008 CASE_BUILTIN_F (BUILT_IN_SQRT)
11009 /* sqrt(-0.0) is -0.0. */
11010 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (t))))
11011 return 1;
11012 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11013
11014 CASE_BUILTIN_F (BUILT_IN_ASINH)
11015 CASE_BUILTIN_F (BUILT_IN_ATAN)
11016 CASE_BUILTIN_F (BUILT_IN_ATANH)
11017 CASE_BUILTIN_F (BUILT_IN_CBRT)
11018 CASE_BUILTIN_F (BUILT_IN_CEIL)
11019 CASE_BUILTIN_F (BUILT_IN_ERF)
11020 CASE_BUILTIN_F (BUILT_IN_EXPM1)
11021 CASE_BUILTIN_F (BUILT_IN_FLOOR)
11022 CASE_BUILTIN_F (BUILT_IN_FMOD)
11023 CASE_BUILTIN_F (BUILT_IN_FREXP)
11024 CASE_BUILTIN_F (BUILT_IN_LCEIL)
11025 CASE_BUILTIN_F (BUILT_IN_LDEXP)
11026 CASE_BUILTIN_F (BUILT_IN_LFLOOR)
11027 CASE_BUILTIN_F (BUILT_IN_LLCEIL)
11028 CASE_BUILTIN_F (BUILT_IN_LLFLOOR)
11029 CASE_BUILTIN_F (BUILT_IN_LLRINT)
11030 CASE_BUILTIN_F (BUILT_IN_LLROUND)
11031 CASE_BUILTIN_F (BUILT_IN_LRINT)
11032 CASE_BUILTIN_F (BUILT_IN_LROUND)
11033 CASE_BUILTIN_F (BUILT_IN_MODF)
11034 CASE_BUILTIN_F (BUILT_IN_NEARBYINT)
11035 CASE_BUILTIN_F (BUILT_IN_POW)
11036 CASE_BUILTIN_F (BUILT_IN_RINT)
11037 CASE_BUILTIN_F (BUILT_IN_ROUND)
11038 CASE_BUILTIN_F (BUILT_IN_SIGNBIT)
11039 CASE_BUILTIN_F (BUILT_IN_SINH)
11040 CASE_BUILTIN_F (BUILT_IN_TANH)
11041 CASE_BUILTIN_F (BUILT_IN_TRUNC)
11042 /* True if the 1st argument is nonnegative. */
11043 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
11044
11045 CASE_BUILTIN_F (BUILT_IN_FMAX)
11046 /* True if the 1st OR 2nd arguments are nonnegative. */
11047 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11048 || tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11049
11050 CASE_BUILTIN_F (BUILT_IN_FMIN)
11051 /* True if the 1st AND 2nd arguments are nonnegative. */
11052 return tree_expr_nonnegative_p (TREE_VALUE (arglist))
11053 && tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11054
11055 CASE_BUILTIN_F (BUILT_IN_COPYSIGN)
11056 /* True if the 2nd argument is nonnegative. */
11057 return tree_expr_nonnegative_p (TREE_VALUE (TREE_CHAIN (arglist)));
11058
11059 default:
11060 break;
11061 #undef CASE_BUILTIN_F
11062 #undef CASE_BUILTIN_I
11063 }
11064 }
11065
11066 /* ... fall through ... */
11067
11068 default:
11069 if (truth_value_p (TREE_CODE (t)))
11070 /* Truth values evaluate to 0 or 1, which is nonnegative. */
11071 return 1;
11072 }
11073
11074 /* We don't know the sign of `t', so be conservative and return false. */
11075 return 0;
11076 }
11077
11078 /* Return true when T is an address and is known to be nonzero.
11079 For floating point we further ensure that T is not denormal.
11080 Similar logic is present in nonzero_address in rtlanal.c. */
11081
11082 bool
11083 tree_expr_nonzero_p (tree t)
11084 {
11085 tree type = TREE_TYPE (t);
11086
11087 /* Doing something useful for floating point would need more work. */
11088 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
11089 return false;
11090
11091 switch (TREE_CODE (t))
11092 {
11093 case ABS_EXPR:
11094 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11095
11096 case INTEGER_CST:
11097 /* We used to test for !integer_zerop here. This does not work correctly
11098 if TREE_CONSTANT_OVERFLOW (t). */
11099 return (TREE_INT_CST_LOW (t) != 0
11100 || TREE_INT_CST_HIGH (t) != 0);
11101
11102 case PLUS_EXPR:
11103 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11104 {
11105 /* In the presence of negative values it is hard
11106 to say anything definite. */
11107 if (!tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
11108 || !tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11109 return false;
11110 /* One of the operands must be positive and the other non-negative. */
11111 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11112 || tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11113 }
11114 break;
11115
11116 case MULT_EXPR:
11117 if (!TYPE_UNSIGNED (type) && !flag_wrapv)
11118 {
11119 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11120 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11121 }
11122 break;
11123
11124 case NOP_EXPR:
11125 {
11126 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
11127 tree outer_type = TREE_TYPE (t);
11128
11129 return (TYPE_PRECISION (inner_type) >= TYPE_PRECISION (outer_type)
11130 && tree_expr_nonzero_p (TREE_OPERAND (t, 0)));
11131 }
11132 break;
11133
11134 case ADDR_EXPR:
11135 {
11136 tree base = get_base_address (TREE_OPERAND (t, 0));
11137
11138 if (!base)
11139 return false;
11140
11141 /* Weak declarations may link to NULL. */
11142 if (VAR_OR_FUNCTION_DECL_P (base))
11143 return !DECL_WEAK (base);
11144
11145 /* Constants are never weak. */
11146 if (CONSTANT_CLASS_P (base))
11147 return true;
11148
11149 return false;
11150 }
11151
11152 case COND_EXPR:
11153 return (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11154 && tree_expr_nonzero_p (TREE_OPERAND (t, 2)));
11155
11156 case MIN_EXPR:
11157 return (tree_expr_nonzero_p (TREE_OPERAND (t, 0))
11158 && tree_expr_nonzero_p (TREE_OPERAND (t, 1)));
11159
11160 case MAX_EXPR:
11161 if (tree_expr_nonzero_p (TREE_OPERAND (t, 0)))
11162 {
11163 /* When both operands are nonzero, MAX must be too. */
11164 if (tree_expr_nonzero_p (TREE_OPERAND (t, 1)))
11165 return true;
11166
11167 /* MAX where operand 0 is positive is positive. */
11168 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
11169 }
11170 /* MAX where operand 1 is positive is positive. */
11171 else if (tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11172 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1)))
11173 return true;
11174 break;
11175
11176 case COMPOUND_EXPR:
11177 case MODIFY_EXPR:
11178 case BIND_EXPR:
11179 return tree_expr_nonzero_p (TREE_OPERAND (t, 1));
11180
11181 case SAVE_EXPR:
11182 case NON_LVALUE_EXPR:
11183 return tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11184
11185 case BIT_IOR_EXPR:
11186 return tree_expr_nonzero_p (TREE_OPERAND (t, 1))
11187 || tree_expr_nonzero_p (TREE_OPERAND (t, 0));
11188
11189 case CALL_EXPR:
11190 return alloca_call_p (t);
11191
11192 default:
11193 break;
11194 }
11195 return false;
11196 }
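
/* For example, the address of a local variable is nonzero (the
   ADDR_EXPR case above: a non-weak decl cannot have address zero),
   whereas the address of a weak symbol is not known to be nonzero,
   since a weak reference may resolve to NULL. */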
11197
11198 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
11199 attempt to fold the expression to a constant without modifying TYPE,
11200 OP0 or OP1.
11201
11202 If the expression could be simplified to a constant, then return
11203 the constant. If the expression would not be simplified to a
11204 constant, then return NULL_TREE. */
11205
11206 tree
11207 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
11208 {
11209 tree tem = fold_binary (code, type, op0, op1);
11210 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11211 }
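
/* For example, fold_binary_to_constant (PLUS_EXPR, integer_type_node,
   ...) applied to the constants 2 and 3 yields the INTEGER_CST 5,
   while the same call with a variable operand yields NULL_TREE
   because the result does not simplify to a constant. */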
11212
11213 /* Given the components of a unary expression CODE, TYPE and OP0,
11214 attempt to fold the expression to a constant without modifying
11215 TYPE or OP0.
11216
11217 If the expression could be simplified to a constant, then return
11218 the constant. If the expression would not be simplified to a
11219 constant, then return NULL_TREE. */
11220
11221 tree
11222 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
11223 {
11224 tree tem = fold_unary (code, type, op0);
11225 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
11226 }
11227
11228 /* If EXP represents referencing an element in a constant string
11229 (either via pointer arithmetic or array indexing), return the
11230 tree representing the value accessed, otherwise return NULL. */
11231
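/* For example, given the ARRAY_REF "abc"[1], or the INDIRECT_REF
   *("abc" + 2), this returns the integer constant for 'b' or 'c'
   respectively. */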
11232 tree
11233 fold_read_from_constant_string (tree exp)
11234 {
11235 if (TREE_CODE (exp) == INDIRECT_REF || TREE_CODE (exp) == ARRAY_REF)
11236 {
11237 tree exp1 = TREE_OPERAND (exp, 0);
11238 tree index;
11239 tree string;
11240
11241 if (TREE_CODE (exp) == INDIRECT_REF)
11242 string = string_constant (exp1, &index);
11243 else
11244 {
11245 tree low_bound = array_ref_low_bound (exp);
11246 index = fold_convert (sizetype, TREE_OPERAND (exp, 1));
11247
11248 /* Optimize the special-case of a zero lower bound.
11249
11250 We convert the low_bound to sizetype to avoid some problems
11251 with constant folding. (E.g. suppose the lower bound is 1,
11252 and its mode is QI. Without the conversion, (ARRAY
11253 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
11254 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
11255 if (! integer_zerop (low_bound))
11256 index = size_diffop (index, fold_convert (sizetype, low_bound));
11257
11258 string = exp1;
11259 }
11260
11261 if (string
11262 && TREE_TYPE (exp) == TREE_TYPE (TREE_TYPE (string))
11263 && TREE_CODE (string) == STRING_CST
11264 && TREE_CODE (index) == INTEGER_CST
11265 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
11266 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
11267 == MODE_INT)
11268 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
11269 return fold_convert (TREE_TYPE (exp),
11270 build_int_cst (NULL_TREE,
11271 (TREE_STRING_POINTER (string)
11272 [TREE_INT_CST_LOW (index)])));
11273 }
11274 return NULL;
11275 }
11276
11277 /* Return the tree for neg (ARG0) when ARG0 is known to be either
11278 an integer constant or real constant.
11279
11280 TYPE is the type of the result. */
11281
11282 static tree
11283 fold_negate_const (tree arg0, tree type)
11284 {
11285 tree t = NULL_TREE;
11286
11287 switch (TREE_CODE (arg0))
11288 {
11289 case INTEGER_CST:
11290 {
11291 unsigned HOST_WIDE_INT low;
11292 HOST_WIDE_INT high;
11293 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11294 TREE_INT_CST_HIGH (arg0),
11295 &low, &high);
11296 t = build_int_cst_wide (type, low, high);
11297 t = force_fit_type (t, 1,
11298 (overflow | TREE_OVERFLOW (arg0))
11299 && !TYPE_UNSIGNED (type),
11300 TREE_CONSTANT_OVERFLOW (arg0));
11301 break;
11302 }
11303
11304 case REAL_CST:
11305 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11306 break;
11307
11308 default:
11309 gcc_unreachable ();
11310 }
11311
11312 return t;
11313 }
11314
11315 /* Return the tree for abs (ARG0) when ARG0 is known to be either
11316 an integer constant or real constant.
11317
11318 TYPE is the type of the result. */
11319
11320 tree
11321 fold_abs_const (tree arg0, tree type)
11322 {
11323 tree t = NULL_TREE;
11324
11325 switch (TREE_CODE (arg0))
11326 {
11327 case INTEGER_CST:
11328 /* If the value is unsigned, then the absolute value is
11329 the same as the ordinary value. */
11330 if (TYPE_UNSIGNED (type))
11331 t = arg0;
11332 /* Similarly, if the value is non-negative. */
11333 else if (INT_CST_LT (integer_minus_one_node, arg0))
11334 t = arg0;
11335 /* If the value is negative, then the absolute value is
11336 its negation. */
11337 else
11338 {
11339 unsigned HOST_WIDE_INT low;
11340 HOST_WIDE_INT high;
11341 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
11342 TREE_INT_CST_HIGH (arg0),
11343 &low, &high);
11344 t = build_int_cst_wide (type, low, high);
11345 t = force_fit_type (t, -1, overflow | TREE_OVERFLOW (arg0),
11346 TREE_CONSTANT_OVERFLOW (arg0));
11347 }
11348 break;
11349
11350 case REAL_CST:
11351 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
11352 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
11353 else
11354 t = arg0;
11355 break;
11356
11357 default:
11358 gcc_unreachable ();
11359 }
11360
11361 return t;
11362 }
11363
11364 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
11365 constant. TYPE is the type of the result. */
11366
11367 static tree
11368 fold_not_const (tree arg0, tree type)
11369 {
11370 tree t = NULL_TREE;
11371
11372 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
11373
11374 t = build_int_cst_wide (type,
11375 ~ TREE_INT_CST_LOW (arg0),
11376 ~ TREE_INT_CST_HIGH (arg0));
11377 t = force_fit_type (t, 0, TREE_OVERFLOW (arg0),
11378 TREE_CONSTANT_OVERFLOW (arg0));
11379
11380 return t;
11381 }
11382
11383 /* Given CODE, a relational operator, the target type, TYPE and two
11384 constant operands OP0 and OP1, return the result of the
11385 relational operation. If the result is not a compile time
11386 constant, then return NULL_TREE. */
11387
11388 static tree
11389 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
11390 {
11391 int result, invert;
11392
11393 /* From here on, the only cases we handle are when the result is
11394 known to be a constant. */
11395
11396 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
11397 {
11398 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
11399 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
11400
11401 /* Handle the cases where either operand is a NaN. */
11402 if (real_isnan (c0) || real_isnan (c1))
11403 {
11404 switch (code)
11405 {
11406 case EQ_EXPR:
11407 case ORDERED_EXPR:
11408 result = 0;
11409 break;
11410
11411 case NE_EXPR:
11412 case UNORDERED_EXPR:
11413 case UNLT_EXPR:
11414 case UNLE_EXPR:
11415 case UNGT_EXPR:
11416 case UNGE_EXPR:
11417 case UNEQ_EXPR:
11418 result = 1;
11419 break;
11420
11421 case LT_EXPR:
11422 case LE_EXPR:
11423 case GT_EXPR:
11424 case GE_EXPR:
11425 case LTGT_EXPR:
11426 if (flag_trapping_math)
11427 return NULL_TREE;
11428 result = 0;
11429 break;
11430
11431 default:
11432 gcc_unreachable ();
11433 }
11434
11435 return constant_boolean_node (result, type);
11436 }
11437
11438 return constant_boolean_node (real_compare (code, c0, c1), type);
11439 }
11440
11441 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
11442
11443 To compute GT, swap the arguments and do LT.
11444 To compute GE, do LT and invert the result.
11445 To compute LE, swap the arguments, do LT and invert the result.
11446 To compute NE, do EQ and invert the result.
11447
11448 Therefore, the code below must handle only EQ and LT. */
11449
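/* For example, 3 >= 2 is computed as 3 < 2 (false) and then
   inverted, giving true. */
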
11450 if (code == LE_EXPR || code == GT_EXPR)
11451 {
11452 tree tem = op0;
11453 op0 = op1;
11454 op1 = tem;
11455 code = swap_tree_comparison (code);
11456 }
11457
11458 /* Note that it is safe to invert for real values here because we
11459 have already handled the one case where it matters. */
11460
11461 invert = 0;
11462 if (code == NE_EXPR || code == GE_EXPR)
11463 {
11464 invert = 1;
11465 code = invert_tree_comparison (code, false);
11466 }
11467
11468 /* Compute a result for LT or EQ if args permit;
11469 otherwise return NULL_TREE. */
11470 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
11471 {
11472 if (code == EQ_EXPR)
11473 result = tree_int_cst_equal (op0, op1);
11474 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
11475 result = INT_CST_LT_UNSIGNED (op0, op1);
11476 else
11477 result = INT_CST_LT (op0, op1);
11478 }
11479 else
11480 return NULL_TREE;
11481
11482 if (invert)
11483 result ^= 1;
11484 return constant_boolean_node (result, type);
11485 }
11486
11487 /* Build an expression for a cleanup point containing EXPR, with type TYPE.
11488 Don't build a cleanup point expression for an EXPR which doesn't have
11489 side effects. */
11490
11491 tree
11492 fold_build_cleanup_point_expr (tree type, tree expr)
11493 {
11494 /* If the expression does not have side effects then we don't have to wrap
11495 it with a cleanup point expression. */
11496 if (!TREE_SIDE_EFFECTS (expr))
11497 return expr;
11498
11499 /* If the expression is a return, check whether the expression inside the
11500 return, or the right hand side of the modify expression inside the
11501 return, has no side effects. If either has none, we don't need to
11502 wrap the expression in a cleanup point expression. Note we don't check
11503 the left hand side of the modify because it should always be a return decl. */
11504 if (TREE_CODE (expr) == RETURN_EXPR)
11505 {
11506 tree op = TREE_OPERAND (expr, 0);
11507 if (!op || !TREE_SIDE_EFFECTS (op))
11508 return expr;
11509 op = TREE_OPERAND (op, 1);
11510 if (!TREE_SIDE_EFFECTS (op))
11511 return expr;
11512 }
11513
11514 return build1 (CLEANUP_POINT_EXPR, type, expr);
11515 }
11516
11517 /* Build an expression for the address of T. Folds away INDIRECT_REF to
11518 avoid confusing the gimplify process. */
11519
11520 tree
11521 build_fold_addr_expr_with_type (tree t, tree ptrtype)
11522 {
11523 /* The size of the object is not relevant when talking about its address. */
11524 if (TREE_CODE (t) == WITH_SIZE_EXPR)
11525 t = TREE_OPERAND (t, 0);
11526
11527 /* Note: doesn't apply to ALIGN_INDIRECT_REF. */
11528 if (TREE_CODE (t) == INDIRECT_REF
11529 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
11530 {
11531 t = TREE_OPERAND (t, 0);
11532 if (TREE_TYPE (t) != ptrtype)
11533 t = build1 (NOP_EXPR, ptrtype, t);
11534 }
11535 else
11536 {
11537 tree base = t;
11538
11539 while (handled_component_p (base))
11540 base = TREE_OPERAND (base, 0);
11541 if (DECL_P (base))
11542 TREE_ADDRESSABLE (base) = 1;
11543
11544 t = build1 (ADDR_EXPR, ptrtype, t);
11545 }
11546
11547 return t;
11548 }
11549
11550 tree
11551 build_fold_addr_expr (tree t)
11552 {
11553 return build_fold_addr_expr_with_type (t, build_pointer_type (TREE_TYPE (t)));
11554 }
11555
11556 /* Given a pointer value OP0 and a type TYPE, return a simplified version
11557 of an indirection through OP0, or NULL_TREE if no simplification is
11558 possible. */
11559
11560 tree
11561 fold_indirect_ref_1 (tree type, tree op0)
11562 {
11563 tree sub = op0;
11564 tree subtype;
11565
11566 STRIP_NOPS (sub);
11567 subtype = TREE_TYPE (sub);
11568 if (!POINTER_TYPE_P (subtype))
11569 return NULL_TREE;
11570
11571 if (TREE_CODE (sub) == ADDR_EXPR)
11572 {
11573 tree op = TREE_OPERAND (sub, 0);
11574 tree optype = TREE_TYPE (op);
11575 /* *&p => p; make sure to handle *&"str"[cst] here. */
11576 if (type == optype)
11577 {
11578 tree fop = fold_read_from_constant_string (op);
11579 if (fop)
11580 return fop;
11581 else
11582 return op;
11583 }
11584 /* *(foo *)&fooarray => fooarray[0] */
11585 else if (TREE_CODE (optype) == ARRAY_TYPE
11586 && type == TREE_TYPE (optype))
11587 {
11588 tree type_domain = TYPE_DOMAIN (optype);
11589 tree min_val = size_zero_node;
11590 if (type_domain && TYPE_MIN_VALUE (type_domain))
11591 min_val = TYPE_MIN_VALUE (type_domain);
11592 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
11593 }
11594 }
11595
11596 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
11597 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
11598 && type == TREE_TYPE (TREE_TYPE (subtype)))
11599 {
11600 tree type_domain;
11601 tree min_val = size_zero_node;
11602 sub = build_fold_indirect_ref (sub);
11603 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
11604 if (type_domain && TYPE_MIN_VALUE (type_domain))
11605 min_val = TYPE_MIN_VALUE (type_domain);
11606 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
11607 }
11608
11609 return NULL_TREE;
11610 }
11611
11612 /* Builds an expression for an indirection through T, simplifying some
11613 cases. */
11614
11615 tree
11616 build_fold_indirect_ref (tree t)
11617 {
11618 tree type = TREE_TYPE (TREE_TYPE (t));
11619 tree sub = fold_indirect_ref_1 (type, t);
11620
11621 if (sub)
11622 return sub;
11623 else
11624 return build1 (INDIRECT_REF, type, t);
11625 }
11626
11627 /* Given an INDIRECT_REF T, return either T or a simplified version. */
11628
11629 tree
11630 fold_indirect_ref (tree t)
11631 {
11632 tree sub = fold_indirect_ref_1 (TREE_TYPE (t), TREE_OPERAND (t, 0));
11633
11634 if (sub)
11635 return sub;
11636 else
11637 return t;
11638 }
11639
11640 /* Strip non-trapping, non-side-effecting tree nodes from an expression
11641 whose result is ignored. The type of the returned tree need not be
11642 the same as the original expression. */
11643
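/* For example, fold_ignored_result of (x + f ()) reduces to f (),
   since the addition itself has no side effects; an expression with
   no side effects at all reduces to integer_zero_node. */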
11644 tree
11645 fold_ignored_result (tree t)
11646 {
11647 if (!TREE_SIDE_EFFECTS (t))
11648 return integer_zero_node;
11649
11650 for (;;)
11651 switch (TREE_CODE_CLASS (TREE_CODE (t)))
11652 {
11653 case tcc_unary:
11654 t = TREE_OPERAND (t, 0);
11655 break;
11656
11657 case tcc_binary:
11658 case tcc_comparison:
11659 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11660 t = TREE_OPERAND (t, 0);
11661 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
11662 t = TREE_OPERAND (t, 1);
11663 else
11664 return t;
11665 break;
11666
11667 case tcc_expression:
11668 switch (TREE_CODE (t))
11669 {
11670 case COMPOUND_EXPR:
11671 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
11672 return t;
11673 t = TREE_OPERAND (t, 0);
11674 break;
11675
11676 case COND_EXPR:
11677 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
11678 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
11679 return t;
11680 t = TREE_OPERAND (t, 0);
11681 break;
11682
11683 default:
11684 return t;
11685 }
11686 break;
11687
11688 default:
11689 return t;
11690 }
11691 }
11692
11693 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
11694 This can only be applied to objects of a sizetype. */
11695
11696 tree
11697 round_up (tree value, int divisor)
11698 {
11699 tree div = NULL_TREE;
11700
11701 gcc_assert (divisor > 0);
11702 if (divisor == 1)
11703 return value;
11704
11705 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11706 have to do anything. Only do this when we are not given a const,
11707 because in that case, this check is more expensive than just
11708 doing it. */
11709 if (TREE_CODE (value) != INTEGER_CST)
11710 {
11711 div = build_int_cst (TREE_TYPE (value), divisor);
11712
11713 if (multiple_of_p (TREE_TYPE (value), value, div))
11714 return value;
11715 }
11716
11717 /* If divisor is a power of two, simplify this to bit manipulation. */
11718 if (divisor == (divisor & -divisor))
11719 {
11720 tree t;
11721
11722 t = build_int_cst (TREE_TYPE (value), divisor - 1);
11723 value = size_binop (PLUS_EXPR, value, t);
11724 t = build_int_cst (TREE_TYPE (value), -divisor);
11725 value = size_binop (BIT_AND_EXPR, value, t);
11726 }
11727 else
11728 {
11729 if (!div)
11730 div = build_int_cst (TREE_TYPE (value), divisor);
11731 value = size_binop (CEIL_DIV_EXPR, value, div);
11732 value = size_binop (MULT_EXPR, value, div);
11733 }
11734
11735 return value;
11736 }
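
/* For example, rounding 13 up to a multiple of 8 takes the
   power-of-two path above: (13 + 7) & -8 == 16. For divisor 12, the
   CEIL_DIV_EXPR path computes ceil (13 / 12) * 12 == 24. */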
11737
11738 /* Likewise, but round down. */
11739
11740 tree
11741 round_down (tree value, int divisor)
11742 {
11743 tree div = NULL_TREE;
11744
11745 gcc_assert (divisor > 0);
11746 if (divisor == 1)
11747 return value;
11748
11749 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
11750 have to do anything. Only do this when we are not given a const,
11751 because in that case, this check is more expensive than just
11752 doing it. */
11753 if (TREE_CODE (value) != INTEGER_CST)
11754 {
11755 div = build_int_cst (TREE_TYPE (value), divisor);
11756
11757 if (multiple_of_p (TREE_TYPE (value), value, div))
11758 return value;
11759 }
11760
11761 /* If divisor is a power of two, simplify this to bit manipulation. */
11762 if (divisor == (divisor & -divisor))
11763 {
11764 tree t;
11765
11766 t = build_int_cst (TREE_TYPE (value), -divisor);
11767 value = size_binop (BIT_AND_EXPR, value, t);
11768 }
11769 else
11770 {
11771 if (!div)
11772 div = build_int_cst (TREE_TYPE (value), divisor);
11773 value = size_binop (FLOOR_DIV_EXPR, value, div);
11774 value = size_binop (MULT_EXPR, value, div);
11775 }
11776
11777 return value;
11778 }
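
/* For example, rounding 13 down to a multiple of 8 is simply
   13 & -8 == 8; for divisor 12, the FLOOR_DIV_EXPR path computes
   floor (13 / 12) * 12 == 12. */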
11779
11780 /* Returns a pointer to the base of the object addressed by EXP and
11781 extracts information about the offset of the access, storing it
11782 in *PBITPOS and *POFFSET. */
11783
11784 static tree
11785 split_address_to_core_and_offset (tree exp,
11786 HOST_WIDE_INT *pbitpos, tree *poffset)
11787 {
11788 tree core;
11789 enum machine_mode mode;
11790 int unsignedp, volatilep;
11791 HOST_WIDE_INT bitsize;
11792
11793 if (TREE_CODE (exp) == ADDR_EXPR)
11794 {
11795 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
11796 poffset, &mode, &unsignedp, &volatilep,
11797 false);
11798 core = build_fold_addr_expr (core);
11799 }
11800 else
11801 {
11802 core = exp;
11803 *pbitpos = 0;
11804 *poffset = NULL_TREE;
11805 }
11806
11807 return core;
11808 }
11809
11810 /* Returns true if addresses of E1 and E2 differ by a constant, false
11811 otherwise. If they do, E1 - E2 is stored in *DIFF. */
11812
11813 bool
11814 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
11815 {
11816 tree core1, core2;
11817 HOST_WIDE_INT bitpos1, bitpos2;
11818 tree toffset1, toffset2, tdiff, type;
11819
11820 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
11821 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
11822
11823 if (bitpos1 % BITS_PER_UNIT != 0
11824 || bitpos2 % BITS_PER_UNIT != 0
11825 || !operand_equal_p (core1, core2, 0))
11826 return false;
11827
11828 if (toffset1 && toffset2)
11829 {
11830 type = TREE_TYPE (toffset1);
11831 if (type != TREE_TYPE (toffset2))
11832 toffset2 = fold_convert (type, toffset2);
11833
11834 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
11835 if (!cst_and_fits_in_hwi (tdiff))
11836 return false;
11837
11838 *diff = int_cst_value (tdiff);
11839 }
11840 else if (toffset1 || toffset2)
11841 {
11842 /* If only one of the offsets is non-constant, the difference cannot
11843 be a constant. */
11844 return false;
11845 }
11846 else
11847 *diff = 0;
11848
11849 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
11850 return true;
11851 }
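
/* For example, given int a[10], the addresses &a[3] and &a[1] share
   the core &a and differ by a constant bit position of 2 * 32 bits
   on a target with 32-bit int, so 8 is stored in *DIFF and true is
   returned. */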
11852
11853 /* Simplify the floating point expression EXP when the sign of the
11854 result is not significant. Return NULL_TREE if no simplification
11855 is possible. */
11856
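/* For example, a caller that only cares about the magnitude of the
   result (say, when folding fabs of an expression) can strip
   -x * y down to x * y: the NEGATE_EXPR is discarded because it only
   affects the sign. */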
11857 tree
11858 fold_strip_sign_ops (tree exp)
11859 {
11860 tree arg0, arg1;
11861
11862 switch (TREE_CODE (exp))
11863 {
11864 case ABS_EXPR:
11865 case NEGATE_EXPR:
11866 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11867 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
11868
11869 case MULT_EXPR:
11870 case RDIV_EXPR:
11871 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
11872 return NULL_TREE;
11873 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
11874 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
11875 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
11876 return fold_build2 (TREE_CODE (exp), TREE_TYPE (exp),
11877 arg0 ? arg0 : TREE_OPERAND (exp, 0),
11878 arg1 ? arg1 : TREE_OPERAND (exp, 1));
11879 break;
11880
11881 default:
11882 break;
11883 }
11884 return NULL_TREE;
11885 }
11886