/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision etc. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide, size_binop
   and force_fit_type.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   force_fit_type takes a constant and a prior overflow indicator, and
   forces the value to fit the type.  It returns an overflow indicator.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "real.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "toplev.h"
#include "ggc.h"
#include "hashtab.h"
#include "langhooks.h"
#include "md5.h"

static void encode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT, HOST_WIDE_INT);
static void decode (HOST_WIDE_INT *, unsigned HOST_WIDE_INT *, HOST_WIDE_INT *);
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (tree, tree, enum tree_code, tree);
static tree int_const_binop (enum tree_code, tree, tree, int);
static tree const_binop (enum tree_code, tree, tree, int);
static hashval_t size_htab_hash (const void *);
static int size_htab_eq (const void *, const void *);
static tree fold_convert_const (enum tree_code, tree, tree);
static enum tree_code invert_tree_comparison (enum tree_code);
static enum tree_code swap_tree_comparison (enum tree_code);
static int comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (int);
static int truth_value_p (enum tree_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand (tree, tree, tree);
static tree distribute_bit_expr (enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (tree, tree, int, int, int);
static tree optimize_bit_field_compare (enum tree_code, tree, tree, tree);
static tree decode_field_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (tree, int);
static tree sign_bit_p (tree, tree);
static int simple_operand_p (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree make_range (tree, int *, tree *, tree *);
static tree build_range_check (tree, tree, int, tree, tree);
static int merge_ranges (int *, tree *, tree *, int, tree, tree, int, tree,
                         tree);
static tree fold_range_test (tree);
static tree unextend (tree, int, int, tree);
static tree fold_truthop (enum tree_code, tree, tree, tree);
static tree optimize_minmax_comparison (tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree);
static tree strip_compound_expr (tree, tree);
static int multiple_of_p (tree, tree, tree);
static tree constant_boolean_node (int, tree);
static int count_cond (tree, int);
static tree fold_binary_op_with_conditional_arg (enum tree_code, tree, tree,
                                                 tree, int);
static bool fold_real_zero_addition_p (tree, tree, int);
static tree fold_mathfn_compare (enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (enum tree_code, tree, tree, tree);
static bool reorder_operands_p (tree, tree);
static bool tree_swap_operands_p (tree, tree, bool);

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
#define COMPCODE_FALSE   0
#define COMPCODE_LT      1
#define COMPCODE_EQ      2
#define COMPCODE_LE      3
#define COMPCODE_GT      4
#define COMPCODE_NE      5
#define COMPCODE_GE      6
#define COMPCODE_TRUE    7
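
/* A minimal illustration, not part of the original file: with LT, EQ and
   GT each assigned their own bit, ANDing or ORing two compcodes yields the
   compcode of the combined predicate.  For instance, (a <= b) && (a >= b)
   reduces to COMPCODE_LE & COMPCODE_GE == COMPCODE_EQ, i.e. a == b, while
   (a < b) || (a == b) is COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE.  The
   hypothetical sketch below just spells out those two identities.  */
#if 0
static int
compcode_combination_demo (void)
{
  int and_code = COMPCODE_LE & COMPCODE_GE;	/* 3 & 6 == 2 == COMPCODE_EQ */
  int or_code = COMPCODE_LT | COMPCODE_EQ;	/* 1 | 2 == 3 == COMPCODE_LE */
  return and_code == COMPCODE_EQ && or_code == COMPCODE_LE;
}
#endif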

/* We know that A1 + B1 = SUM1, using 2's complement arithmetic and ignoring
   overflow.  Suppose A, B and SUM have the same respective signs as A1, B1,
   and SUM1.  Then this yields nonzero if overflow occurred during the
   addition.

   Overflow occurs if A and B have the same sign, but A and SUM differ in
   sign.  Use `^' to test whether signs differ, and `< 0' to isolate the
   sign.  */
#define OVERFLOW_SUM_SIGN(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
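
/* A small self-test sketch, not part of the original file: adding 1 to the
   largest host word wraps past the sign bit and trips the macro, while a
   mixed-sign addition never can.  Like the rest of this file, it assumes
   two's complement wraparound on the host (the sum is computed through
   unsigned arithmetic here to keep the sketch itself well defined).  */
#if 0
static int
overflow_sum_sign_demo (void)
{
  HOST_WIDE_INT hmax = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) -1 >> 1);
  HOST_WIDE_INT wrapped = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) hmax + 1);

  return OVERFLOW_SUM_SIGN (hmax, 1, wrapped)		/* nonzero: overflow */
	 && ! OVERFLOW_SUM_SIGN (hmax, -1, hmax - 1);	/* zero: no overflow */
}
#endif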
\f
/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT / 2 bits stored in each word, as a positive
   number.  The value of the word is LOWPART + HIGHPART * BASE.  */

#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2)) - 1))
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT / 2)
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT / 2)

/* Unpack a two-word integer into 4 words.
   LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
   WORDS points to the array of HOST_WIDE_INTs.  */

static void
encode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT low, HOST_WIDE_INT hi)
{
  words[0] = LOWPART (low);
  words[1] = HIGHPART (low);
  words[2] = LOWPART (hi);
  words[3] = HIGHPART (hi);
}

/* Pack an array of 4 words into a two-word integer.
   WORDS points to the array of words.
   The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces.  */

static void
decode (HOST_WIDE_INT *words, unsigned HOST_WIDE_INT *low,
        HOST_WIDE_INT *hi)
{
  *low = words[0] + words[1] * BASE;
  *hi = words[2] + words[3] * BASE;
}
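
/* A round-trip sketch, not part of the original file: ENCODE splits a
   doubleword value into four half-word digits and DECODE reassembles them
   (modulo 2 to the doubleword width), so decoding an encoded value gives
   back the original, including negative high parts.  */
#if 0
static int
encode_decode_demo (void)
{
  HOST_WIDE_INT digits[4];
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;

  encode (digits, (unsigned HOST_WIDE_INT) 0x12345678, -42);
  decode (digits, &low, &high);
  return low == 0x12345678 && high == -42;
}
#endif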
\f
/* Make the integer constant T valid for its type by setting to 0 or 1 all
   the bits in the constant that don't belong in the type.

   Return 1 if a signed overflow occurs, 0 otherwise.  If OVERFLOW is
   nonzero, a signed overflow has already occurred in calculating T, so
   propagate it.  */

int
force_fit_type (tree t, int overflow)
{
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT high;
  unsigned int prec;

  if (TREE_CODE (t) == REAL_CST)
    {
      /* ??? Used to check for overflow here via CHECK_FLOAT_TYPE.
         Consider doing it via real_convert now.  */
      return overflow;
    }

  else if (TREE_CODE (t) != INTEGER_CST)
    return overflow;

  low = TREE_INT_CST_LOW (t);
  high = TREE_INT_CST_HIGH (t);

  if (POINTER_TYPE_P (TREE_TYPE (t))
      || TREE_CODE (TREE_TYPE (t)) == OFFSET_TYPE)
    prec = POINTER_SIZE;
  else
    prec = TYPE_PRECISION (TREE_TYPE (t));

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec > HOST_BITS_PER_WIDE_INT)
    TREE_INT_CST_HIGH (t)
      &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
  else
    {
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_LOW (t) &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
    }

  /* Unsigned types do not suffer sign extension or overflow unless they
     are a sizetype.  */
  if (TREE_UNSIGNED (TREE_TYPE (t))
      && ! (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
            && TYPE_IS_SIZETYPE (TREE_TYPE (t))))
    return overflow;

  /* If the value's sign bit is set, extend the sign.  */
  if (prec != 2 * HOST_BITS_PER_WIDE_INT
      && (prec > HOST_BITS_PER_WIDE_INT
          ? 0 != (TREE_INT_CST_HIGH (t)
                  & ((HOST_WIDE_INT) 1
                     << (prec - HOST_BITS_PER_WIDE_INT - 1)))
          : 0 != (TREE_INT_CST_LOW (t)
                  & ((unsigned HOST_WIDE_INT) 1 << (prec - 1)))))
    {
      /* Value is negative:
         set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_WIDE_INT)
        TREE_INT_CST_HIGH (t)
          |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      else
        {
          TREE_INT_CST_HIGH (t) = -1;
          if (prec < HOST_BITS_PER_WIDE_INT)
            TREE_INT_CST_LOW (t) |= ((unsigned HOST_WIDE_INT) (-1) << prec);
        }
    }

  /* Return nonzero if signed overflow occurred.  */
  return
    ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
     != 0);
}
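
/* A usage sketch, not part of the original file: build_int_2 creates a raw
   two-word constant; force_fit_type then masks or sign-extends it to its
   type's precision and reports whether the stored value changed.  The
   SIGNED_CHAR_TYPE parameter is a hypothetical stand-in for any 8-bit
   signed integer type node.  */
#if 0
static int
force_fit_type_demo (tree signed_char_type)
{
  tree t = build_int_2 (256, 0);	/* 256 does not fit in 8 bits */
  TREE_TYPE (t) = signed_char_type;
  return force_fit_type (t, 0);		/* nonzero: 256 was truncated to 0 */
}
#endif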
\f
/* Add two doubleword integers with doubleword result.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
add_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT l;
  HOST_WIDE_INT h;

  l = l1 + l2;
  h = h1 + h2 + (l < l1);

  *lv = l;
  *hv = h;
  return OVERFLOW_SUM_SIGN (h1, h2, h);
}

/* Negate a doubleword integer with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
neg_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  if (l1 == 0)
    {
      *lv = 0;
      *hv = - h1;
      return (*hv & h1) < 0;
    }
  else
    {
      *lv = -l1;
      *hv = ~h1;
      return 0;
    }
}
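
/* A sketch, not part of the original file, of the overflow conventions:
   add_double reports signed overflow via OVERFLOW_SUM_SIGN on the high
   words, and neg_double overflows only for the most negative doubleword
   value, whose negation is itself.  */
#if 0
static int
doubleword_overflow_demo (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;
  HOST_WIDE_INT hmax = (HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) -1 >> 1);
  HOST_WIDE_INT hmin = (HOST_WIDE_INT) ~((unsigned HOST_WIDE_INT) -1 >> 1);

  /* Doubleword maximum plus one wraps to the doubleword minimum.  */
  int add_ovf = add_double (~(unsigned HOST_WIDE_INT) 0, hmax, 1, 0, &lv, &hv);
  /* The doubleword minimum has low word 0 and only the high sign bit set.  */
  int neg_ovf = neg_double (0, hmin, &lv, &hv);
  return add_ovf && neg_ovf;
}
#endif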
\f
/* Multiply two doubleword integers with doubleword result.
   Return nonzero if the operation overflows, assuming it's signed.
   Each argument is given as two `HOST_WIDE_INT' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

int
mul_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
            unsigned HOST_WIDE_INT l2, HOST_WIDE_INT h2,
            unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  HOST_WIDE_INT arg1[4];
  HOST_WIDE_INT arg2[4];
  HOST_WIDE_INT prod[4 * 2];
  unsigned HOST_WIDE_INT carry;
  int i, j, k;
  unsigned HOST_WIDE_INT toplow, neglow;
  HOST_WIDE_INT tophigh, neghigh;

  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  memset (prod, 0, sizeof prod);

  for (i = 0; i < 4; i++)
    {
      carry = 0;
      for (j = 0; j < 4; j++)
        {
          k = i + j;
          /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000.  */
          carry += arg1[i] * arg2[j];
          /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF.  */
          carry += prod[k];
          prod[k] = LOWPART (carry);
          carry = HIGHPART (carry);
        }
      prod[i + 4] = carry;
    }

  decode (prod, lv, hv);	/* This ignores prod[4] through prod[4*2-1].  */

  /* Check for overflow by calculating the top half of the answer in full;
     it should agree with the low half's sign bit.  */
  decode (prod + 4, &toplow, &tophigh);
  if (h1 < 0)
    {
      neg_double (l2, h2, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  if (h2 < 0)
    {
      neg_double (l1, h1, &neglow, &neghigh);
      add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
    }
  return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
}
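
/* A behavioral sketch, not part of the original file: the digit loop above
   is ordinary schoolbook multiplication in base 2^(HOST_BITS_PER_WIDE_INT/2);
   the four upper product digits are kept only for the overflow test.  A
   small product fits and reports no overflow, while multiplying two values
   of 2^HOST_BITS_PER_WIDE_INT each lands entirely in the upper digits.  */
#if 0
static int
mul_double_demo (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  int small = mul_double (1000, 0, 1000, 0, &lv, &hv);	/* 10^6 fits */
  int big = mul_double (0, 1, 0, 1, &lv, &hv);		/* 2^(2W) overflows */
  return ! small && lv == 1000000 && big;
}
#endif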
\f
/* Shift the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv, int arith)
{
  unsigned HOST_WIDE_INT signmask;

  if (count < 0)
    {
      rshift_double (l1, h1, -count, prec, lv, hv, arith);
      return;
    }

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = l1 << (count - HOST_BITS_PER_WIDE_INT);
      *lv = 0;
    }
  else
    {
      *hv = (((unsigned HOST_WIDE_INT) h1 << count)
             | (l1 >> (HOST_BITS_PER_WIDE_INT - count - 1) >> 1));
      *lv = l1 << count;
    }

  /* Sign extend all bits that are beyond the precision.  */

  signmask = -((prec > HOST_BITS_PER_WIDE_INT
                ? ((unsigned HOST_WIDE_INT) *hv
                   >> (prec - HOST_BITS_PER_WIDE_INT - 1))
                : (*lv >> (prec - 1))) & 1);

  if (prec >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if (prec >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << prec);
      *lv |= signmask << prec;
    }
}

/* Shift the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rshift_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
               HOST_WIDE_INT count, unsigned int prec,
               unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv,
               int arith)
{
  unsigned HOST_WIDE_INT signmask;

  signmask = (arith
              ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
              : 0);

#ifdef SHIFT_COUNT_TRUNCATED
  if (SHIFT_COUNT_TRUNCATED)
    count %= prec;
#endif

  if (count >= 2 * HOST_BITS_PER_WIDE_INT)
    {
      /* Shifting by the host word size is undefined according to the
         ANSI standard, so we must handle this as a special case.  */
      *hv = 0;
      *lv = 0;
    }
  else if (count >= HOST_BITS_PER_WIDE_INT)
    {
      *hv = 0;
      *lv = (unsigned HOST_WIDE_INT) h1 >> (count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = (unsigned HOST_WIDE_INT) h1 >> count;
      *lv = ((l1 >> count)
             | ((unsigned HOST_WIDE_INT) h1
                << (HOST_BITS_PER_WIDE_INT - count - 1) << 1));
    }

  /* Zero / sign extend all bits that are beyond the precision.  */

  if (count >= (HOST_WIDE_INT) prec)
    {
      *hv = signmask;
      *lv = signmask;
    }
  else if ((prec - count) >= 2 * HOST_BITS_PER_WIDE_INT)
    ;
  else if ((prec - count) >= HOST_BITS_PER_WIDE_INT)
    {
      *hv &= ~((HOST_WIDE_INT) (-1) << (prec - count - HOST_BITS_PER_WIDE_INT));
      *hv |= signmask << (prec - count - HOST_BITS_PER_WIDE_INT);
    }
  else
    {
      *hv = signmask;
      *lv &= ~((unsigned HOST_WIDE_INT) (-1) << (prec - count));
      *lv |= signmask << (prec - count);
    }
}
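
/* A sketch, not part of the original file: with PREC bits kept, an
   arithmetic right shift drags copies of bit PREC-1 down, while a logical
   shift fills with zeros.  The example assumes PREC == 64, which works for
   either a 32-bit or a 64-bit HOST_WIDE_INT.  */
#if 0
static int
rshift_double_demo (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  /* -8 >> 1 arithmetically is -4 ...  */
  rshift_double ((unsigned HOST_WIDE_INT) -8, -1, 1, 64, &lv, &hv, 1);
  if ((HOST_WIDE_INT) lv != -4)
    return 0;
  /* ... but logically the sign bits are replaced by zeros.  */
  rshift_double ((unsigned HOST_WIDE_INT) -8, -1, 1, 64, &lv, &hv, 0);
  return hv == 0;
}
#endif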
\f
/* Rotate the doubleword integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
lrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}

/* Rotate the doubleword integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV.  */

void
rrotate_double (unsigned HOST_WIDE_INT l1, HOST_WIDE_INT h1,
                HOST_WIDE_INT count, unsigned int prec,
                unsigned HOST_WIDE_INT *lv, HOST_WIDE_INT *hv)
{
  unsigned HOST_WIDE_INT s1l, s2l;
  HOST_WIDE_INT s1h, s2h;

  count %= prec;
  if (count < 0)
    count += prec;

  rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
  lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
  *lv = s1l | s2l;
  *hv = s1h | s2h;
}
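
/* A sketch, not part of the original file: a rotate is just the OR of two
   complementary logical shifts within PREC bits, so the bits shifted out
   one end reappear at the other.  */
#if 0
static int
lrotate_double_demo (void)
{
  unsigned HOST_WIDE_INT lv;
  HOST_WIDE_INT hv;

  /* Rotate the 8-bit pattern 1000'0001 left by one within 8 bits:
     the high bit wraps around to the bottom, giving 0000'0011.  */
  lrotate_double (0x81, 0, 1, 8, &lv, &hv);
  return lv == 0x03 && hv == 0;
}
#endif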
\f
/* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR.
   It controls how the quotient is rounded to an integer.
   Return nonzero if the operation overflows.
   UNS nonzero says do unsigned division.  */

int
div_and_round_double (enum tree_code code, int uns,
                      unsigned HOST_WIDE_INT lnum_orig, /* num == numerator == dividend */
                      HOST_WIDE_INT hnum_orig,
                      unsigned HOST_WIDE_INT lden_orig, /* den == denominator == divisor */
                      HOST_WIDE_INT hden_orig,
                      unsigned HOST_WIDE_INT *lquo,
                      HOST_WIDE_INT *hquo, unsigned HOST_WIDE_INT *lrem,
                      HOST_WIDE_INT *hrem)
{
  int quo_neg = 0;
  HOST_WIDE_INT num[4 + 1];	/* extra element for scaling.  */
  HOST_WIDE_INT den[4], quo[4];
  int i, j;
  unsigned HOST_WIDE_INT work;
  unsigned HOST_WIDE_INT carry = 0;
  unsigned HOST_WIDE_INT lnum = lnum_orig;
  HOST_WIDE_INT hnum = hnum_orig;
  unsigned HOST_WIDE_INT lden = lden_orig;
  HOST_WIDE_INT hden = hden_orig;
  int overflow = 0;

  if (hden == 0 && lden == 0)
    overflow = 1, lden = 1;

  /* Calculate quotient sign and convert operands to unsigned.  */
  if (!uns)
    {
      if (hnum < 0)
        {
          quo_neg = ~ quo_neg;
          /* (minimum integer) / (-1) is the only overflow case.  */
          if (neg_double (lnum, hnum, &lnum, &hnum)
              && ((HOST_WIDE_INT) lden & hden) == -1)
            overflow = 1;
        }
      if (hden < 0)
        {
          quo_neg = ~ quo_neg;
          neg_double (lden, hden, &lden, &hden);
        }
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      /* This unsigned division rounds toward zero.  */
      *lquo = lnum / lden;
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked.  */
      *hquo = *lquo = 0;
      *hrem = hnum;
      *lrem = lnum;
      goto finish_up;
    }

  memset (quo, 0, sizeof quo);

  memset (num, 0, sizeof num);	/* to zero 9th element */
  memset (den, 0, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* Special code for when the divisor < BASE.  */
  if (hden == 0 && lden < (unsigned HOST_WIDE_INT) BASE)
    {
      /* hnum != 0 already checked.  */
      for (i = 4 - 1; i >= 0; i--)
        {
          work = num[i] + carry * BASE;
          quo[i] = work / lden;
          carry = work % lden;
        }
    }
  else
    {
      /* Full double precision division,
         with thanks to Don Knuth's "Seminumerical Algorithms".  */
      int num_hi_sig, den_hi_sig;
      unsigned HOST_WIDE_INT quo_est, scale;

      /* Find the highest nonzero divisor digit.  */
      for (i = 4 - 1;; i--)
        if (den[i] != 0)
          {
            den_hi_sig = i;
            break;
          }

      /* Ensure that the first digit of the divisor is at least BASE/2.
         This is required by the quotient digit estimation algorithm.  */

      scale = BASE / (den[den_hi_sig] + 1);
      if (scale > 1)
        {			/* scale divisor and dividend */
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (num[i] * scale) + carry;
              num[i] = LOWPART (work);
              carry = HIGHPART (work);
            }

          num[4] = carry;
          carry = 0;
          for (i = 0; i <= 4 - 1; i++)
            {
              work = (den[i] * scale) + carry;
              den[i] = LOWPART (work);
              carry = HIGHPART (work);
              if (den[i] != 0)
                den_hi_sig = i;
            }
        }

      num_hi_sig = 4;

      /* Main loop */
      for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--)
        {
          /* Guess the next quotient digit, quo_est, by dividing the first
             two remaining dividend digits by the high order quotient digit.
             quo_est is never low and is at most 2 high.  */
          unsigned HOST_WIDE_INT tmp;

          num_hi_sig = i + den_hi_sig + 1;
          work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
          if (num[num_hi_sig] != den[den_hi_sig])
            quo_est = work / den[den_hi_sig];
          else
            quo_est = BASE - 1;

          /* Refine quo_est so it's usually correct, and at most one high.  */
          tmp = work - quo_est * den[den_hi_sig];
          if (tmp < BASE
              && (den[den_hi_sig - 1] * quo_est
                  > (tmp * BASE + num[num_hi_sig - 2])))
            quo_est--;

          /* Try QUO_EST as the quotient digit, by multiplying the
             divisor by QUO_EST and subtracting from the remaining dividend.
             Keep in mind that QUO_EST is the I - 1st digit.  */

          carry = 0;
          for (j = 0; j <= den_hi_sig; j++)
            {
              work = quo_est * den[j] + carry;
              carry = HIGHPART (work);
              work = num[i + j] - LOWPART (work);
              num[i + j] = LOWPART (work);
              carry += HIGHPART (work) != 0;
            }

          /* If quo_est was high by one, then num[i] went negative and
             we need to correct things.  */
          if (num[num_hi_sig] < (HOST_WIDE_INT) carry)
            {
              quo_est--;
              carry = 0;	/* add divisor back in */
              for (j = 0; j <= den_hi_sig; j++)
                {
                  work = num[i + j] + den[j] + carry;
                  carry = HIGHPART (work);
                  num[i + j] = LOWPART (work);
                }

              num[num_hi_sig] += carry;
            }

          /* Store the quotient digit.  */
          quo[i] = quo_est;
        }
    }

  decode (quo, lquo, hquo);

 finish_up:
  /* If result is negative, make it so.  */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* Compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return overflow;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio < 0 && rem != 0 */
        {
          /* quo = quo - 1;  */
          add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
        {
          add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                      lquo, hquo);
        }
      else
        return overflow;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
        unsigned HOST_WIDE_INT labs_rem = *lrem;
        HOST_WIDE_INT habs_rem = *hrem;
        unsigned HOST_WIDE_INT labs_den = lden, ltwice;
        HOST_WIDE_INT habs_den = hden, htwice;

        /* Get absolute values.  */
        if (*hrem < 0)
          neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
        if (hden < 0)
          neg_double (lden, hden, &labs_den, &habs_den);

        /* If (2 * abs (lrem) >= abs (lden)) */
        mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
                    labs_rem, habs_rem, &ltwice, &htwice);

        if (((unsigned HOST_WIDE_INT) habs_den
             < (unsigned HOST_WIDE_INT) htwice)
            || (((unsigned HOST_WIDE_INT) habs_den
                 == (unsigned HOST_WIDE_INT) htwice)
                && (labs_den < ltwice)))
          {
            if (*hquo < 0)
              /* quo = quo - 1;  */
              add_double (*lquo, *hquo,
                          (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
            else
              /* quo = quo + 1;  */
              add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
                          lquo, hquo);
          }
        else
          return overflow;
      }
      break;

    default:
      abort ();
    }

  /* Compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
  return overflow;
}
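
/* A sketch, not part of the original file, contrasting two rounding styles
   on -7 / 2: truncation toward zero gives quotient -3 and remainder -1,
   while flooring toward negative infinity gives quotient -4 and
   remainder 1.  */
#if 0
static int
div_and_round_double_demo (void)
{
  unsigned HOST_WIDE_INT lq, lr;
  HOST_WIDE_INT hq, hr;

  div_and_round_double (TRUNC_DIV_EXPR, 0,
                        (unsigned HOST_WIDE_INT) -7, -1, 2, 0,
                        &lq, &hq, &lr, &hr);
  if ((HOST_WIDE_INT) lq != -3 || (HOST_WIDE_INT) lr != -1)
    return 0;

  div_and_round_double (FLOOR_DIV_EXPR, 0,
                        (unsigned HOST_WIDE_INT) -7, -1, 2, 0,
                        &lq, &hq, &lr, &hr);
  return (HOST_WIDE_INT) lq == -4 && lr == 1;
}
#endif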
\f
/* Return true if the built-in mathematical function specified by CODE
   preserves the sign of its argument, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    case BUILT_IN_ASIN:
    case BUILT_IN_ASINF:
    case BUILT_IN_ASINL:
    case BUILT_IN_ATAN:
    case BUILT_IN_ATANF:
    case BUILT_IN_ATANL:
    case BUILT_IN_SIN:
    case BUILT_IN_SINF:
    case BUILT_IN_SINL:
    case BUILT_IN_TAN:
    case BUILT_IN_TANF:
    case BUILT_IN_TANL:
      return true;

    default:
      break;
    }
  return false;
}

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr.  */

static bool
negate_expr_p (tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TREE_UNSIGNED (type) || ! flag_trapv)
        return true;

      /* Check that -CST will not overflow type.  */
      prec = TYPE_PRECISION (type);
      if (prec > HOST_BITS_PER_WIDE_INT)
        {
          if (TREE_INT_CST_LOW (t) != 0)
            return true;
          prec -= HOST_BITS_PER_WIDE_INT;
          val = TREE_INT_CST_HIGH (t);
        }
      else
        val = TREE_INT_CST_LOW (t);
      if (prec < HOST_BITS_PER_WIDE_INT)
        val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
      return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));

    case REAL_CST:
    case NEGATE_EXPR:
      return true;

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case PLUS_EXPR:
      if (FLOAT_TYPE_P (type) && !flag_unsafe_math_optimizations)
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1)));
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return the negation of T.  Allow for T to be
   null, in which case return null.  */

static tree
negate_expr (tree t)
{
  tree type;
  tree tem;

  if (t == 0)
    return 0;

  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT low;
        HOST_WIDE_INT high;
        int overflow = neg_double (TREE_INT_CST_LOW (t),
                                   TREE_INT_CST_HIGH (t),
                                   &low, &high);
        tem = build_int_2 (low, high);
        TREE_TYPE (tem) = type;
        TREE_OVERFLOW (tem)
          = (TREE_OVERFLOW (t)
             | force_fit_type (tem, overflow && !TREE_UNSIGNED (type)));
        TREE_CONSTANT_OVERFLOW (tem)
          = TREE_OVERFLOW (tem) | TREE_CONSTANT_OVERFLOW (t);
      }
      if (! TREE_OVERFLOW (tem)
          || TREE_UNSIGNED (type)
          || ! flag_trapv)
        return tem;
      break;

    case REAL_CST:
      tem = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (t)));
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (! TREE_OVERFLOW (tem) || ! flag_trapping_math)
        return convert (type, tem);
      break;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case NEGATE_EXPR:
      return convert (type, TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            return convert (type,
                            fold (build (MINUS_EXPR, TREE_TYPE (t),
                                         negate_expr (TREE_OPERAND (t, 1)),
                                         TREE_OPERAND (t, 0))));
          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            return convert (type,
                            fold (build (MINUS_EXPR, TREE_TYPE (t),
                                         negate_expr (TREE_OPERAND (t, 0)),
                                         TREE_OPERAND (t, 1))));
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return convert (type,
                        fold (build (MINUS_EXPR, TREE_TYPE (t),
                                     TREE_OPERAND (t, 1),
                                     TREE_OPERAND (t, 0))));
      break;

    case MULT_EXPR:
      if (TREE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return convert (type,
                            fold (build (TREE_CODE (t), TREE_TYPE (t),
                                         TREE_OPERAND (t, 0),
                                         negate_expr (tem))));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return convert (type,
                            fold (build (TREE_CODE (t), TREE_TYPE (t),
                                         negate_expr (tem),
                                         TREE_OPERAND (t, 1))));
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return convert (type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (TREE_VALUE (TREE_OPERAND (t, 1))))
        {
          tree fndecl, arg, arglist;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (TREE_VALUE (TREE_OPERAND (t, 1)));
          arglist = build_tree_list (NULL_TREE, arg);
          return build_function_call_expr (fndecl, arglist);
        }
      break;

    default:
      break;
    }

  return convert (type, fold (build1 (NEGATE_EXPR, TREE_TYPE (t), t)));
}
\f
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted.  Except if it is a
   literal for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || (! FLOAT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
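
/* A worked example, not part of the original file: splitting IN = x - 3
   (an integer MINUS_EXPR) with CODE == PLUS_EXPR and NEGATE_P == 0 finds
   the literal 3 in the subtracted operand, so it is reported through
   *MINUS_LITP rather than negated; the call yields *LITP == 0, *CONP == 0,
   *MINUS_LITP == 3, and returns the variable part x.  Re-associating the
   pieces with associate_trees below reconstructs a tree equal to IN.  */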

/* Re-associate trees split by the above function.  T1 and T2 are either
   expressions to associate or null.  Return the new expression, if any.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, convert (type, t2),
                          convert (type, TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build (MINUS_EXPR, type, convert (type, t1),
                          convert (type, TREE_OPERAND (t2, 0)));
        }
      return build (code, type, convert (type, t1), convert (type, t2));
    }

  return fold (build (code, type, convert (type, t1), convert (type, t2)));
}
\f
/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
int_const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  unsigned HOST_WIDE_INT int1l, int2l;
  HOST_WIDE_INT int1h, int2h;
  unsigned HOST_WIDE_INT low;
  HOST_WIDE_INT hi;
  unsigned HOST_WIDE_INT garbagel;
  HOST_WIDE_INT garbageh;
  tree t;
  tree type = TREE_TYPE (arg1);
  int uns = TREE_UNSIGNED (type);
  int is_sizetype
    = (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type));
  int overflow = 0;
  int no_overflow = 0;

  int1l = TREE_INT_CST_LOW (arg1);
  int1h = TREE_INT_CST_HIGH (arg1);
  int2l = TREE_INT_CST_LOW (arg2);
  int2h = TREE_INT_CST_HIGH (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      low = int1l | int2l, hi = int1h | int2h;
      break;

    case BIT_XOR_EXPR:
      low = int1l ^ int2l, hi = int1h ^ int2h;
      break;

    case BIT_AND_EXPR:
      low = int1l & int2l, hi = int1h & int2h;
      break;

    case RSHIFT_EXPR:
      int2l = -int2l;
      /* Fall through: a right shift is a left shift by a negated count.  */
    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      lshift_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                     &low, &hi, !uns);
      no_overflow = 1;
      break;

    case RROTATE_EXPR:
      int2l = - int2l;
      /* Fall through.  */
    case LROTATE_EXPR:
      lrotate_double (int1l, int1h, int2l, TYPE_PRECISION (type),
                      &low, &hi);
      break;

    case PLUS_EXPR:
      overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case MINUS_EXPR:
      neg_double (int2l, int2h, &low, &hi);
      add_double (int1l, int1h, low, hi, &low, &hi);
      overflow = OVERFLOW_SUM_SIGN (hi, int2h, int1h);
      break;

    case MULT_EXPR:
      overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            int1l += int2l - 1;

          low = int1l / int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (int2h == 0 && int2l == 1)
        {
          low = int1l, hi = int1h;
          break;
        }
      if (int1l == int2l && int1h == int2h
          && ! (int1l == 0 && int1h == 0))
        {
          low = 1, hi = 0;
          break;
        }
      overflow = div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
                                       &low, &hi, &garbagel, &garbageh);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (int2h == 0 && (HOST_WIDE_INT) int2l > 0
          && ! TREE_CONSTANT_OVERFLOW (arg1)
          && ! TREE_CONSTANT_OVERFLOW (arg2)
          && int1h == 0 && (HOST_WIDE_INT) int1l >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            int1l += int2l - 1;
          low = int1l % int2l, hi = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      overflow = div_and_round_double (code, uns,
                                       int1l, int1h, int2l, int2h,
                                       &garbagel, &garbageh, &low, &hi);
      break;

    case MIN_EXPR:
    case MAX_EXPR:
      if (uns)
        low = (((unsigned HOST_WIDE_INT) int1h
                < (unsigned HOST_WIDE_INT) int2h)
               || (((unsigned HOST_WIDE_INT) int1h
                    == (unsigned HOST_WIDE_INT) int2h)
                   && int1l < int2l));
      else
        low = (int1h < int2h
               || (int1h == int2h && int1l < int2l));

      if (low == (code == MIN_EXPR))
        low = int1l, hi = int1h;
      else
        low = int2l, hi = int2h;
      break;

    default:
      abort ();
    }

  /* If this is for a sizetype, can be represented as one (signed)
     HOST_WIDE_INT word, and doesn't overflow, use size_int since it caches
     constants.  */
  if (is_sizetype
      && ((hi == 0 && (HOST_WIDE_INT) low >= 0)
          || (hi == -1 && (HOST_WIDE_INT) low < 0))
      && overflow == 0 && ! TREE_OVERFLOW (arg1) && ! TREE_OVERFLOW (arg2))
    return size_int_type_wide (low, type);
  else
    {
      t = build_int_2 (low, hi);
      TREE_TYPE (t) = TREE_TYPE (arg1);
    }

  TREE_OVERFLOW (t)
    = ((notrunc
        ? (!uns || is_sizetype) && overflow
        : (force_fit_type (t, (!uns || is_sizetype) && overflow)
           && ! no_overflow))
       | TREE_OVERFLOW (arg1)
       | TREE_OVERFLOW (arg2));

  /* If we're doing a size calculation, unsigned arithmetic does overflow.
     So check if force_fit_type truncated the value.  */
  if (is_sizetype
      && ! TREE_OVERFLOW (t)
      && (TREE_INT_CST_HIGH (t) != hi
          || TREE_INT_CST_LOW (t) != low))
    TREE_OVERFLOW (t) = 1;

  TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
                                | TREE_CONSTANT_OVERFLOW (arg1)
                                | TREE_CONSTANT_OVERFLOW (arg2));
  return t;
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.

   If NOTRUNC is nonzero, do not truncate the result to fit the data type.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2, int notrunc)
{
  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2, notrunc);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      tree t, type;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      REAL_ARITHMETIC (value, code, d1, d2);

      t = build_real (type, real_value_truncate (mode, value));

      TREE_OVERFLOW (t)
        = (force_fit_type (t, 0)
           | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
      TREE_CONSTANT_OVERFLOW (t)
        = TREE_OVERFLOW (t)
          | TREE_CONSTANT_OVERFLOW (arg1)
          | TREE_CONSTANT_OVERFLOW (arg2);
      return t;
    }
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree t;

      switch (code)
        {
        case PLUS_EXPR:
          t = build_complex (type,
                             const_binop (PLUS_EXPR, r1, r2, notrunc),
                             const_binop (PLUS_EXPR, i1, i2, notrunc));
          break;

        case MINUS_EXPR:
          t = build_complex (type,
                             const_binop (MINUS_EXPR, r1, r2, notrunc),
                             const_binop (MINUS_EXPR, i1, i2, notrunc));
          break;

        case MULT_EXPR:
          /* (r1 + i1*i) * (r2 + i2*i)
             = (r1*r2 - i1*i2) + (r1*i2 + i1*r2)*i.  */
          t = build_complex (type,
                             const_binop (MINUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, r2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, i2, notrunc),
                                          notrunc),
                             const_binop (PLUS_EXPR,
                                          const_binop (MULT_EXPR,
                                                       r1, i2, notrunc),
                                          const_binop (MULT_EXPR,
                                                       i1, r2, notrunc),
                                          notrunc));
          break;

        case RDIV_EXPR:
          {
            /* Divide by multiplying through with the conjugate of the
               divisor: the denominator becomes r2*r2 + i2*i2.  */
            tree magsquared
              = const_binop (PLUS_EXPR,
                             const_binop (MULT_EXPR, r2, r2, notrunc),
                             const_binop (MULT_EXPR, i2, i2, notrunc),
                             notrunc);

            t = build_complex (type,
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (PLUS_EXPR,
                                             const_binop (MULT_EXPR, r1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, i1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc),
                               const_binop
                               (INTEGRAL_TYPE_P (TREE_TYPE (r1))
                                ? TRUNC_DIV_EXPR : RDIV_EXPR,
                                const_binop (MINUS_EXPR,
                                             const_binop (MULT_EXPR, i1, r2,
                                                          notrunc),
                                             const_binop (MULT_EXPR, r1, i2,
                                                          notrunc),
                                             notrunc),
                                magsquared, notrunc));
          }
          break;

        default:
          abort ();
        }
      return t;
    }
  return 0;
}

/* These are the hash table functions for the hash table of INTEGER_CST
   nodes of a sizetype.  */

/* Return the hash code of X, an INTEGER_CST.  */

static hashval_t
size_htab_hash (const void *x)
{
  tree t = (tree) x;

  return (TREE_INT_CST_HIGH (t) ^ TREE_INT_CST_LOW (t)
          ^ htab_hash_pointer (TREE_TYPE (t))
          ^ (TREE_OVERFLOW (t) << 20));
}

/* Return nonzero if the value represented by *X (an INTEGER_CST tree node)
   is the same as that given by *Y, which is also an INTEGER_CST tree node.  */

static int
size_htab_eq (const void *x, const void *y)
{
  tree xt = (tree) x;
  tree yt = (tree) y;

  return (TREE_INT_CST_HIGH (xt) == TREE_INT_CST_HIGH (yt)
          && TREE_INT_CST_LOW (xt) == TREE_INT_CST_LOW (yt)
          && TREE_TYPE (xt) == TREE_TYPE (yt)
          && TREE_OVERFLOW (xt) == TREE_OVERFLOW (yt));
}
\f
/* Return an INTEGER_CST with value whose low-order HOST_BITS_PER_WIDE_INT
   bits are given by NUMBER and of the sizetype represented by KIND.  */

tree
size_int_wide (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return size_int_type_wide (number, sizetype_tab[(int) kind]);
}

/* Likewise, but the desired type is specified explicitly.  */

static GTY (()) tree new_const;
static GTY ((if_marked ("ggc_marked_p"), param_is (union tree_node)))
     htab_t size_htab;

tree
size_int_type_wide (HOST_WIDE_INT number, tree type)
{
  void **slot;

  if (size_htab == 0)
    {
      size_htab = htab_create_ggc (1024, size_htab_hash, size_htab_eq, NULL);
      new_const = make_node (INTEGER_CST);
    }

  /* Adjust NEW_CONST to be the constant we want.  If it's already in the
     hash table, we return the value from the hash table.  Otherwise, we
     place that in the hash table and make a new node for the next time.  */
  TREE_INT_CST_LOW (new_const) = number;
  TREE_INT_CST_HIGH (new_const) = number < 0 ? -1 : 0;
  TREE_TYPE (new_const) = type;
  TREE_OVERFLOW (new_const) = TREE_CONSTANT_OVERFLOW (new_const)
    = force_fit_type (new_const, 0);

  slot = htab_find_slot (size_htab, new_const, INSERT);
  if (*slot == 0)
    {
      tree t = new_const;

      *slot = new_const;
      new_const = make_node (INTEGER_CST);
      return t;
    }
  else
    return (tree) *slot;
}
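
/* A usage sketch, not part of the original file: because size_int_type_wide
   hash-conses its results (it keeps a preallocated NEW_CONST node, probes
   the table with it, and only surrenders it on a miss), asking for the same
   value and type twice yields pointer-identical nodes.  The sketch assumes
   the global `sizetype' has already been initialized.  */
#if 0
static int
size_int_cache_demo (void)
{
  tree a = size_int_type_wide (8, sizetype);
  tree b = size_int_type_wide (8, sizetype);
  return a == b;	/* same node both times */
}
#endif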

/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be the same integer type, and it must be a sizetype.
   If the operands are constant, so is the result.  */

tree
size_binop (enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR && integer_zerop (arg0))
        return arg1;
      else if ((code == MINUS_EXPR || code == PLUS_EXPR)
               && integer_zerop (arg1))
        return arg0;
      else if (code == MULT_EXPR && integer_onep (arg0))
        return arg1;

      /* Handle general case of two integer constants.  */
      return int_const_binop (code, arg0, arg1, 0);
    }

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  return fold (build (code, type, arg0, arg1));
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop (tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  if (TREE_CODE (type) != INTEGER_TYPE || ! TYPE_IS_SIZETYPE (type)
      || type != TREE_TYPE (arg1))
    abort ();

  /* If the type is already signed, just do the simple thing.  */
  if (! TREE_UNSIGNED (type))
    return size_binop (MINUS_EXPR, arg0, arg1);

  ctype = (type == bitsizetype || type == ubitsizetype
           ? sbitsizetype : ssizetype);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop (MINUS_EXPR, convert (ctype, arg0),
                       convert (ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return convert (ctype, integer_zero_node);
  else if (tree_int_cst_lt (arg1, arg0))
    return convert (ctype, size_binop (MINUS_EXPR, arg0, arg1));
  else
    return size_binop (MINUS_EXPR, convert (ctype, integer_zero_node),
                       convert (ctype, size_binop (MINUS_EXPR, arg1, arg0)));
}
\f

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  int overflow = 0;
  tree t;

  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type))
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        {
          /* If we would build a constant wider than GCC supports,
             leave the conversion unfolded.  */
          if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;

          /* If we are trying to make a sizetype for a small integer, use
             size_int to pick up cached types to reduce duplicate nodes.  */
          if (TREE_CODE (type) == INTEGER_TYPE && TYPE_IS_SIZETYPE (type)
              && !TREE_CONSTANT_OVERFLOW (arg1)
              && compare_tree_int (arg1, 10000) < 0)
            return size_int_type_wide (TREE_INT_CST_LOW (arg1), type);

          /* Given an integer constant, make new constant with new type,
             appropriately sign-extended or truncated.  */
          t = build_int_2 (TREE_INT_CST_LOW (arg1),
                           TREE_INT_CST_HIGH (arg1));
          TREE_TYPE (t) = type;
          /* Indicate an overflow if (1) ARG1 already overflowed,
             or (2) force_fit_type indicates an overflow.
             Tell force_fit_type that an overflow has already occurred
             if ARG1 is a too-large unsigned value and T is signed.
             But don't indicate an overflow if converting a pointer.  */
          TREE_OVERFLOW (t)
            = ((force_fit_type (t,
                                (TREE_INT_CST_HIGH (arg1) < 0
                                 && (TREE_UNSIGNED (type)
                                     < TREE_UNSIGNED (TREE_TYPE (arg1)))))
                && ! POINTER_TYPE_P (TREE_TYPE (arg1)))
               || TREE_OVERFLOW (arg1));
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
      else if (TREE_CODE (arg1) == REAL_CST)
        {
          /* The following code implements the floating point to integer
             conversion rules required by the Java Language Specification,
             that IEEE NaNs are mapped to zero and values that overflow
             the target precision saturate, i.e. values greater than
             INT_MAX are mapped to INT_MAX, and values less than INT_MIN
             are mapped to INT_MIN.  These semantics are allowed by the
             C and C++ standards that simply state that the behavior of
             FP-to-integer conversion is unspecified upon overflow.  */

          HOST_WIDE_INT high, low;

          REAL_VALUE_TYPE r;
          REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

          switch (code)
            {
            case FIX_TRUNC_EXPR:
              real_trunc (&r, VOIDmode, &x);
              break;

            case FIX_CEIL_EXPR:
              real_ceil (&r, VOIDmode, &x);
              break;

            case FIX_FLOOR_EXPR:
              real_floor (&r, VOIDmode, &x);
              break;

            default:
              abort ();
            }

          /* If R is NaN, return zero and show we have an overflow.  */
          if (REAL_VALUE_ISNAN (r))
            {
              overflow = 1;
              high = 0;
              low = 0;
            }

          /* See if R is less than the lower bound or greater than the
             upper bound.  */

          if (! overflow)
            {
              tree lt = TYPE_MIN_VALUE (type);
              REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
              if (REAL_VALUES_LESS (r, l))
                {
                  overflow = 1;
                  high = TREE_INT_CST_HIGH (lt);
                  low = TREE_INT_CST_LOW (lt);
                }
            }

          if (! overflow)
            {
              tree ut = TYPE_MAX_VALUE (type);
              if (ut)
                {
                  REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
                  if (REAL_VALUES_LESS (u, r))
                    {
                      overflow = 1;
                      high = TREE_INT_CST_HIGH (ut);
                      low = TREE_INT_CST_LOW (ut);
                    }
                }
            }

          if (! overflow)
            REAL_VALUE_TO_INT (&low, &high, r);

          t = build_int_2 (low, high);
          TREE_TYPE (t) = type;
          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      if (TREE_CODE (arg1) == REAL_CST)
        {
          if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
            {
              /* We make a copy of ARG1 so that we don't modify an
                 existing constant tree.  */
              t = copy_node (arg1);
              TREE_TYPE (t) = type;
              return t;
            }

          t = build_real (type,
                          real_value_truncate (TYPE_MODE (type),
                                               TREE_REAL_CST (arg1)));

          TREE_OVERFLOW (t)
            = TREE_OVERFLOW (arg1) | force_fit_type (t, 0);
          TREE_CONSTANT_OVERFLOW (t)
            = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
          return t;
        }
    }
  return NULL_TREE;
}
1851 \f
1852 /* Return an expr equal to X but certainly not valid as an lvalue. */
1853
1854 tree
1855 non_lvalue (tree x)
1856 {
1857 tree result;
1858
1859 /* These things are certainly not lvalues. */
1860 if (TREE_CODE (x) == NON_LVALUE_EXPR
1861 || TREE_CODE (x) == INTEGER_CST
1862 || TREE_CODE (x) == REAL_CST
1863 || TREE_CODE (x) == STRING_CST
1864 || TREE_CODE (x) == ADDR_EXPR)
1865 return x;
1866
1867 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1868 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1869 return result;
1870 }
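
/* Example (illustrative): the NON_LVALUE_EXPR wrapper keeps folding
   from manufacturing new lvalues.  Given "int a;", the invalid
   assignment

       (a + 0) = 1;

   must still be rejected after "a + 0" simplifies to "a", so the
   simplified operand is returned as non_lvalue (a), not as "a".  */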
1871
1872 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1873 Zero means allow extended lvalues. */
1874
1875 int pedantic_lvalues;
1876
1877 /* When pedantic, return an expr equal to X but certainly not valid as a
1878 pedantic lvalue. Otherwise, return X. */
1879
1880 tree
1881 pedantic_non_lvalue (tree x)
1882 {
1883 if (pedantic_lvalues)
1884 return non_lvalue (x);
1885 else
1886 return x;
1887 }
1888 \f
1889 /* Given a tree comparison code, return the code that is the logical inverse
1890 of the given code. It is not safe to do this for floating-point
1891 comparisons, except for NE_EXPR and EQ_EXPR. */
1892
1893 static enum tree_code
1894 invert_tree_comparison (enum tree_code code)
1895 {
1896 switch (code)
1897 {
1898 case EQ_EXPR:
1899 return NE_EXPR;
1900 case NE_EXPR:
1901 return EQ_EXPR;
1902 case GT_EXPR:
1903 return LE_EXPR;
1904 case GE_EXPR:
1905 return LT_EXPR;
1906 case LT_EXPR:
1907 return GE_EXPR;
1908 case LE_EXPR:
1909 return GT_EXPR;
1910 default:
1911 abort ();
1912 }
1913 }
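
/* Example (illustrative): the floating-point restriction above exists
   because of IEEE NaNs.  With "double x = 0.0, y = 0.0 / 0.0;" both
   "x < y" and "x >= y" are false, so rewriting "!(x < y)" as "x >= y"
   would turn a true expression into a false one.  Only EQ_EXPR and
   NE_EXPR are exact complements of each other for floating point.  */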
1914
1915 /* Similar, but return the comparison that results if the operands are
1916 swapped. This is safe for floating-point. */
1917
1918 static enum tree_code
1919 swap_tree_comparison (enum tree_code code)
1920 {
1921 switch (code)
1922 {
1923 case EQ_EXPR:
1924 case NE_EXPR:
1925 return code;
1926 case GT_EXPR:
1927 return LT_EXPR;
1928 case GE_EXPR:
1929 return LE_EXPR;
1930 case LT_EXPR:
1931 return GT_EXPR;
1932 case LE_EXPR:
1933 return GE_EXPR;
1934 default:
1935 abort ();
1936 }
1937 }
1938
1939
1940 /* Convert a comparison tree code from an enum tree_code representation
1941 into a compcode bit-based encoding. This function is the inverse of
1942 compcode_to_comparison. */
1943
1944 static int
1945 comparison_to_compcode (enum tree_code code)
1946 {
1947 switch (code)
1948 {
1949 case LT_EXPR:
1950 return COMPCODE_LT;
1951 case EQ_EXPR:
1952 return COMPCODE_EQ;
1953 case LE_EXPR:
1954 return COMPCODE_LE;
1955 case GT_EXPR:
1956 return COMPCODE_GT;
1957 case NE_EXPR:
1958 return COMPCODE_NE;
1959 case GE_EXPR:
1960 return COMPCODE_GE;
1961 default:
1962 abort ();
1963 }
1964 }
1965
1966 /* Convert a compcode bit-based encoding of a comparison operator back
1967 to GCC's enum tree_code representation. This function is the
1968 inverse of comparison_to_compcode. */
1969
1970 static enum tree_code
1971 compcode_to_comparison (int code)
1972 {
1973 switch (code)
1974 {
1975 case COMPCODE_LT:
1976 return LT_EXPR;
1977 case COMPCODE_EQ:
1978 return EQ_EXPR;
1979 case COMPCODE_LE:
1980 return LE_EXPR;
1981 case COMPCODE_GT:
1982 return GT_EXPR;
1983 case COMPCODE_NE:
1984 return NE_EXPR;
1985 case COMPCODE_GE:
1986 return GE_EXPR;
1987 default:
1988 abort ();
1989 }
1990 }
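
/* Example (illustrative; assumes the COMPCODE values defined earlier
   in this file, where "less", "equal" and "greater" each occupy one
   bit, e.g. COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)): the encoding
   turns logic on comparisons into bit arithmetic.  Combining "x < y"
   with "x == y" by OR gives

       compcode_to_comparison (COMPCODE_LT | COMPCODE_EQ)  ==>  LE_EXPR

   i.e. "x <= y", with no case analysis needed.  */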
1991
1992 /* Return nonzero if CODE is a tree code that represents a truth value. */
1993
1994 static int
1995 truth_value_p (enum tree_code code)
1996 {
1997 return (TREE_CODE_CLASS (code) == '<'
1998 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1999 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
2000 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
2001 }
2002 \f
2003 /* Return nonzero if two operands (typically of the same tree node)
2004 are necessarily equal. If either argument has side-effects this
2005 function returns zero.
2006
2007 If ONLY_CONST is nonzero, only return nonzero for constants.
2008 This function tests whether the operands are indistinguishable;
2009 it does not test whether they are equal using C's == operation.
2010 The distinction is important for IEEE floating point, because
2011 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2012 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2013
2014 If ONLY_CONST is zero, a VAR_DECL is considered equal to itself
2015 even though it may hold multiple values during a function.
2016 This is because a GCC tree node guarantees that nothing else is
2017 executed between the evaluation of its "operands" (which may often
2018 be evaluated in arbitrary order). Hence if the operands themselves
2019 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2020 same value in each operand/subexpression. Hence a zero value for
2021 ONLY_CONST assumes isochronic (or instantaneous) tree equivalence.
2022 If comparing arbitrary expression trees, such as from different
2023 statements, ONLY_CONST must usually be nonzero. */
2024
2025 int
2026 operand_equal_p (tree arg0, tree arg1, int only_const)
2027 {
2028 tree fndecl;
2029
2030 /* If both types don't have the same signedness, then we can't consider
2031 them equal. We must check this before the STRIP_NOPS calls
2032 because they may change the signedness of the arguments. */
2033 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
2034 return 0;
2035
2036 STRIP_NOPS (arg0);
2037 STRIP_NOPS (arg1);
2038
2039 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2040 /* This is needed for conversions and for COMPONENT_REF.
2041 Might as well play it safe and always test this. */
2042 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2043 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2044 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2045 return 0;
2046
2047 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2048 We don't care about side effects in that case because the SAVE_EXPR
2049 takes care of that for us. In all other cases, two expressions are
2050 equal if they have no side effects. If we have two identical
2051 expressions with side effects that should be treated the same due
2052 to the only side effects being identical SAVE_EXPR's, that will
2053 be detected in the recursive calls below. */
2054 if (arg0 == arg1 && ! only_const
2055 && (TREE_CODE (arg0) == SAVE_EXPR
2056 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2057 return 1;
2058
2059 /* Next handle constant cases, those for which we can return 1 even
2060 if ONLY_CONST is set. */
2061 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2062 switch (TREE_CODE (arg0))
2063 {
2064 case INTEGER_CST:
2065 return (! TREE_CONSTANT_OVERFLOW (arg0)
2066 && ! TREE_CONSTANT_OVERFLOW (arg1)
2067 && tree_int_cst_equal (arg0, arg1));
2068
2069 case REAL_CST:
2070 return (! TREE_CONSTANT_OVERFLOW (arg0)
2071 && ! TREE_CONSTANT_OVERFLOW (arg1)
2072 && REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2073 TREE_REAL_CST (arg1)));
2074
2075 case VECTOR_CST:
2076 {
2077 tree v1, v2;
2078
2079 if (TREE_CONSTANT_OVERFLOW (arg0)
2080 || TREE_CONSTANT_OVERFLOW (arg1))
2081 return 0;
2082
2083 v1 = TREE_VECTOR_CST_ELTS (arg0);
2084 v2 = TREE_VECTOR_CST_ELTS (arg1);
2085 while (v1 && v2)
2086 {
2087 if (!operand_equal_p (v1, v2, only_const))
2088 return 0;
2089 v1 = TREE_CHAIN (v1);
2090 v2 = TREE_CHAIN (v2);
2091 }
2092
2093 return 1;
2094 }
2095
2096 case COMPLEX_CST:
2097 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2098 only_const)
2099 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2100 only_const));
2101
2102 case STRING_CST:
2103 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2104 && ! memcmp (TREE_STRING_POINTER (arg0),
2105 TREE_STRING_POINTER (arg1),
2106 TREE_STRING_LENGTH (arg0)));
2107
2108 case ADDR_EXPR:
2109 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2110 0);
2111 default:
2112 break;
2113 }
2114
2115 if (only_const)
2116 return 0;
2117
2118 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2119 {
2120 case '1':
2121 /* Two conversions are equal only if signedness and modes match. */
2122 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
2123 && (TREE_UNSIGNED (TREE_TYPE (arg0))
2124 != TREE_UNSIGNED (TREE_TYPE (arg1))))
2125 return 0;
2126
2127 return operand_equal_p (TREE_OPERAND (arg0, 0),
2128 TREE_OPERAND (arg1, 0), 0);
2129
2130 case '<':
2131 case '2':
2132 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0)
2133 && operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1),
2134 0))
2135 return 1;
2136
2137 /* For commutative ops, allow the other order. */
2138 return (commutative_tree_code (TREE_CODE (arg0))
2139 && operand_equal_p (TREE_OPERAND (arg0, 0),
2140 TREE_OPERAND (arg1, 1), 0)
2141 && operand_equal_p (TREE_OPERAND (arg0, 1),
2142 TREE_OPERAND (arg1, 0), 0));
2143
2144 case 'r':
2145 /* If either of the pointer (or reference) expressions we are
2146 dereferencing contains a side effect, these cannot be equal. */
2147 if (TREE_SIDE_EFFECTS (arg0)
2148 || TREE_SIDE_EFFECTS (arg1))
2149 return 0;
2150
2151 switch (TREE_CODE (arg0))
2152 {
2153 case INDIRECT_REF:
2154 return operand_equal_p (TREE_OPERAND (arg0, 0),
2155 TREE_OPERAND (arg1, 0), 0);
2156
2157 case COMPONENT_REF:
2158 case ARRAY_REF:
2159 case ARRAY_RANGE_REF:
2160 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2161 TREE_OPERAND (arg1, 0), 0)
2162 && operand_equal_p (TREE_OPERAND (arg0, 1),
2163 TREE_OPERAND (arg1, 1), 0));
2164
2165 case BIT_FIELD_REF:
2166 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2167 TREE_OPERAND (arg1, 0), 0)
2168 && operand_equal_p (TREE_OPERAND (arg0, 1),
2169 TREE_OPERAND (arg1, 1), 0)
2170 && operand_equal_p (TREE_OPERAND (arg0, 2),
2171 TREE_OPERAND (arg1, 2), 0));
2172 default:
2173 return 0;
2174 }
2175
2176 case 'e':
2177 switch (TREE_CODE (arg0))
2178 {
2179 case ADDR_EXPR:
2180 case TRUTH_NOT_EXPR:
2181 return operand_equal_p (TREE_OPERAND (arg0, 0),
2182 TREE_OPERAND (arg1, 0), 0);
2183
2184 case RTL_EXPR:
2185 return rtx_equal_p (RTL_EXPR_RTL (arg0), RTL_EXPR_RTL (arg1));
2186
2187 case CALL_EXPR:
2188 /* If the CALL_EXPRs call different functions, then they
2189 clearly cannot be equal. */
2190 if (! operand_equal_p (TREE_OPERAND (arg0, 0),
2191 TREE_OPERAND (arg1, 0), 0))
2192 return 0;
2193
2194 /* Only consider const functions equivalent. */
2195 fndecl = get_callee_fndecl (arg0);
2196 if (fndecl == NULL_TREE
2197 || ! (flags_from_decl_or_type (fndecl) & ECF_CONST))
2198 return 0;
2199
2200 /* Now see if all the arguments are the same. operand_equal_p
2201 does not handle TREE_LIST, so we walk the operands here
2202 feeding them to operand_equal_p. */
2203 arg0 = TREE_OPERAND (arg0, 1);
2204 arg1 = TREE_OPERAND (arg1, 1);
2205 while (arg0 && arg1)
2206 {
2207 if (! operand_equal_p (TREE_VALUE (arg0), TREE_VALUE (arg1), 0))
2208 return 0;
2209
2210 arg0 = TREE_CHAIN (arg0);
2211 arg1 = TREE_CHAIN (arg1);
2212 }
2213
2214 /* If we get here and both argument lists are exhausted
2215 then the CALL_EXPRs are equal. */
2216 return ! (arg0 || arg1);
2217
2218 default:
2219 return 0;
2220 }
2221
2222 case 'd':
2223 /* Consider __builtin_sqrt equal to sqrt. */
2224 return TREE_CODE (arg0) == FUNCTION_DECL
2225 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2226 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2227 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1);
2228
2229 default:
2230 return 0;
2231 }
2232 }
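
/* Example (illustrative): the IEEE distinction described above, in
   source terms.  For the constants 0.0 and -0.0, operand_equal_p
   returns 0 even though "0.0 == -0.0" holds in C (the two differ
   under "1.0 / x", for instance).  Conversely, two NaN constants with
   identical bits satisfy REAL_VALUES_IDENTICAL and so compare equal
   here, even though "NaN == NaN" is false in C.  */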
2233 \f
2234 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2235 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2236
2237 When in doubt, return 0. */
2238
2239 static int
2240 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2241 {
2242 int unsignedp1, unsignedpo;
2243 tree primarg0, primarg1, primother;
2244 unsigned int correct_width;
2245
2246 if (operand_equal_p (arg0, arg1, 0))
2247 return 1;
2248
2249 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2250 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2251 return 0;
2252
2253 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2254 and see if the inner values are the same. This removes any
2255 signedness comparison, which doesn't matter here. */
2256 primarg0 = arg0, primarg1 = arg1;
2257 STRIP_NOPS (primarg0);
2258 STRIP_NOPS (primarg1);
2259 if (operand_equal_p (primarg0, primarg1, 0))
2260 return 1;
2261
2262 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2263 actual comparison operand, ARG0.
2264
2265 First throw away any conversions to wider types
2266 already present in the operands. */
2267
2268 primarg1 = get_narrower (arg1, &unsignedp1);
2269 primother = get_narrower (other, &unsignedpo);
2270
2271 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2272 if (unsignedp1 == unsignedpo
2273 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2274 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2275 {
2276 tree type = TREE_TYPE (arg0);
2277
2278 /* Make sure shorter operand is extended the right way
2279 to match the longer operand. */
2280 primarg1 = convert ((*lang_hooks.types.signed_or_unsigned_type)
2281 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2282
2283 if (operand_equal_p (arg0, convert (type, primarg1), 0))
2284 return 1;
2285 }
2286
2287 return 0;
2288 }
2289 \f
2290 /* See if ARG is an expression that is either a comparison or is performing
2291 arithmetic on comparisons. The comparisons must only be comparing
2292 two different values, which will be stored in *CVAL1 and *CVAL2; if
2293 they are nonzero it means that some operands have already been found.
2294 No variables may be used anywhere else in the expression except in the
2295 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2296 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2297
2298 If this is true, return 1. Otherwise, return zero. */
2299
2300 static int
2301 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2302 {
2303 enum tree_code code = TREE_CODE (arg);
2304 char class = TREE_CODE_CLASS (code);
2305
2306 /* We can handle some of the 'e' cases here. */
2307 if (class == 'e' && code == TRUTH_NOT_EXPR)
2308 class = '1';
2309 else if (class == 'e'
2310 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2311 || code == COMPOUND_EXPR))
2312 class = '2';
2313
2314 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0
2315 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2316 {
2317 /* If we've already found a CVAL1 or CVAL2, this expression is
2318 too complex to handle. */
2319 if (*cval1 || *cval2)
2320 return 0;
2321
2322 class = '1';
2323 *save_p = 1;
2324 }
2325
2326 switch (class)
2327 {
2328 case '1':
2329 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2330
2331 case '2':
2332 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2333 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2334 cval1, cval2, save_p));
2335
2336 case 'c':
2337 return 1;
2338
2339 case 'e':
2340 if (code == COND_EXPR)
2341 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2342 cval1, cval2, save_p)
2343 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2344 cval1, cval2, save_p)
2345 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2346 cval1, cval2, save_p));
2347 return 0;
2348
2349 case '<':
2350 /* First see if we can handle the first operand, then the second. For
2351 the second operand, we know *CVAL1 can't be zero. It must be that
2352 one side of the comparison is each of the values; test for the
2353 case where this isn't true by failing if the two operands
2354 are the same. */
2355
2356 if (operand_equal_p (TREE_OPERAND (arg, 0),
2357 TREE_OPERAND (arg, 1), 0))
2358 return 0;
2359
2360 if (*cval1 == 0)
2361 *cval1 = TREE_OPERAND (arg, 0);
2362 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2363 ;
2364 else if (*cval2 == 0)
2365 *cval2 = TREE_OPERAND (arg, 0);
2366 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2367 ;
2368 else
2369 return 0;
2370
2371 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2372 ;
2373 else if (*cval2 == 0)
2374 *cval2 = TREE_OPERAND (arg, 1);
2375 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2376 ;
2377 else
2378 return 0;
2379
2380 return 1;
2381
2382 default:
2383 return 0;
2384 }
2385 }
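
/* Example (illustrative): for ARG = "a < b || a == b" the function
   records *CVAL1 = a and *CVAL2 = b and returns 1, since every
   comparison in ARG uses only those two values.  For
   "a < b || c < d" it returns 0: four distinct values appear.
   "a < b || b < a" also succeeds, because order within a comparison
   does not matter, only the set of values used.  */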
2386 \f
2387 /* ARG is a tree that is known to contain just arithmetic operations and
2388 comparisons. Evaluate the operations in the tree substituting NEW0 for
2389 any occurrence of OLD0 as an operand of a comparison and likewise for
2390 NEW1 and OLD1. */
2391
2392 static tree
2393 eval_subst (tree arg, tree old0, tree new0, tree old1, tree new1)
2394 {
2395 tree type = TREE_TYPE (arg);
2396 enum tree_code code = TREE_CODE (arg);
2397 char class = TREE_CODE_CLASS (code);
2398
2399 /* We can handle some of the 'e' cases here. */
2400 if (class == 'e' && code == TRUTH_NOT_EXPR)
2401 class = '1';
2402 else if (class == 'e'
2403 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2404 class = '2';
2405
2406 switch (class)
2407 {
2408 case '1':
2409 return fold (build1 (code, type,
2410 eval_subst (TREE_OPERAND (arg, 0),
2411 old0, new0, old1, new1)));
2412
2413 case '2':
2414 return fold (build (code, type,
2415 eval_subst (TREE_OPERAND (arg, 0),
2416 old0, new0, old1, new1),
2417 eval_subst (TREE_OPERAND (arg, 1),
2418 old0, new0, old1, new1)));
2419
2420 case 'e':
2421 switch (code)
2422 {
2423 case SAVE_EXPR:
2424 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
2425
2426 case COMPOUND_EXPR:
2427 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
2428
2429 case COND_EXPR:
2430 return fold (build (code, type,
2431 eval_subst (TREE_OPERAND (arg, 0),
2432 old0, new0, old1, new1),
2433 eval_subst (TREE_OPERAND (arg, 1),
2434 old0, new0, old1, new1),
2435 eval_subst (TREE_OPERAND (arg, 2),
2436 old0, new0, old1, new1)));
2437 default:
2438 break;
2439 }
2440 /* Fall through - ??? */
2441
2442 case '<':
2443 {
2444 tree arg0 = TREE_OPERAND (arg, 0);
2445 tree arg1 = TREE_OPERAND (arg, 1);
2446
2447 /* We need to check both for exact equality and tree equality. The
2448 former will be true if the operand has a side-effect. In that
2449 case, we know the operand occurred exactly once. */
2450
2451 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2452 arg0 = new0;
2453 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
2454 arg0 = new1;
2455
2456 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
2457 arg1 = new0;
2458 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
2459 arg1 = new1;
2460
2461 return fold (build (code, type, arg0, arg1));
2462 }
2463
2464 default:
2465 return arg;
2466 }
2467 }
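
/* Example (illustrative sketch of how fold can use the two routines
   above): for an expression such as

       (a < b) | (a >= b)

   twoval_comparison_p records *CVAL1 = a and *CVAL2 = b.  eval_subst
   can then substitute constants representing each possible ordering
   of a and b (for instance the type's extreme values) for the two
   operands; if every substituted copy folds to the same constant, the
   whole expression is that constant -- here 1, since exactly one of
   the two comparisons holds for every ordering.  */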
2468 \f
2469 /* Return a tree for the case when the result of an expression is RESULT
2470 converted to TYPE and OMITTED was previously an operand of the expression
2471 but is now not needed (e.g., we folded OMITTED * 0).
2472
2473 If OMITTED has side effects, we must evaluate it. Otherwise, just do
2474 the conversion of RESULT to TYPE. */
2475
2476 tree
2477 omit_one_operand (tree type, tree result, tree omitted)
2478 {
2479 tree t = convert (type, result);
2480
2481 if (TREE_SIDE_EFFECTS (omitted))
2482 return build (COMPOUND_EXPR, type, omitted, t);
2483
2484 return non_lvalue (t);
2485 }
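
/* Example (illustrative): folding "f () * 0" must still evaluate the
   call, so omit_one_operand produces the equivalent of

       (f (), 0)

   using a COMPOUND_EXPR, whereas "x * 0" for a side-effect-free x
   simply becomes the (non-lvalue) constant 0.  */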
2486
2487 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
2488
2489 static tree
2490 pedantic_omit_one_operand (tree type, tree result, tree omitted)
2491 {
2492 tree t = convert (type, result);
2493
2494 if (TREE_SIDE_EFFECTS (omitted))
2495 return build (COMPOUND_EXPR, type, omitted, t);
2496
2497 return pedantic_non_lvalue (t);
2498 }
2499 \f
2500 /* Return a simplified tree node for the truth-negation of ARG. This
2501 never alters ARG itself. We assume that ARG is an operation that
2502 returns a truth value (0 or 1). */
2503
2504 tree
2505 invert_truthvalue (tree arg)
2506 {
2507 tree type = TREE_TYPE (arg);
2508 enum tree_code code = TREE_CODE (arg);
2509
2510 if (code == ERROR_MARK)
2511 return arg;
2512
2513 /* If this is a comparison, we can simply invert it, except for
2514 floating-point non-equality comparisons, in which case we just
2515 enclose a TRUTH_NOT_EXPR around what we have. */
2516
2517 if (TREE_CODE_CLASS (code) == '<')
2518 {
2519 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2520 && !flag_unsafe_math_optimizations
2521 && code != NE_EXPR
2522 && code != EQ_EXPR)
2523 return build1 (TRUTH_NOT_EXPR, type, arg);
2524 else
2525 return build (invert_tree_comparison (code), type,
2526 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2527 }
2528
2529 switch (code)
2530 {
2531 case INTEGER_CST:
2532 return convert (type, build_int_2 (integer_zerop (arg), 0));
2533
2534 case TRUTH_AND_EXPR:
2535 return build (TRUTH_OR_EXPR, type,
2536 invert_truthvalue (TREE_OPERAND (arg, 0)),
2537 invert_truthvalue (TREE_OPERAND (arg, 1)));
2538
2539 case TRUTH_OR_EXPR:
2540 return build (TRUTH_AND_EXPR, type,
2541 invert_truthvalue (TREE_OPERAND (arg, 0)),
2542 invert_truthvalue (TREE_OPERAND (arg, 1)));
2543
2544 case TRUTH_XOR_EXPR:
2545 /* Here we can invert either operand. We invert the first operand
2546 unless the second operand is a TRUTH_NOT_EXPR in which case our
2547 result is the XOR of the first operand with the inside of the
2548 negation of the second operand. */
2549
2550 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2551 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2552 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2553 else
2554 return build (TRUTH_XOR_EXPR, type,
2555 invert_truthvalue (TREE_OPERAND (arg, 0)),
2556 TREE_OPERAND (arg, 1));
2557
2558 case TRUTH_ANDIF_EXPR:
2559 return build (TRUTH_ORIF_EXPR, type,
2560 invert_truthvalue (TREE_OPERAND (arg, 0)),
2561 invert_truthvalue (TREE_OPERAND (arg, 1)));
2562
2563 case TRUTH_ORIF_EXPR:
2564 return build (TRUTH_ANDIF_EXPR, type,
2565 invert_truthvalue (TREE_OPERAND (arg, 0)),
2566 invert_truthvalue (TREE_OPERAND (arg, 1)));
2567
2568 case TRUTH_NOT_EXPR:
2569 return TREE_OPERAND (arg, 0);
2570
2571 case COND_EXPR:
2572 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2573 invert_truthvalue (TREE_OPERAND (arg, 1)),
2574 invert_truthvalue (TREE_OPERAND (arg, 2)));
2575
2576 case COMPOUND_EXPR:
2577 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2578 invert_truthvalue (TREE_OPERAND (arg, 1)));
2579
2580 case WITH_RECORD_EXPR:
2581 return build (WITH_RECORD_EXPR, type,
2582 invert_truthvalue (TREE_OPERAND (arg, 0)),
2583 TREE_OPERAND (arg, 1));
2584
2585 case NON_LVALUE_EXPR:
2586 return invert_truthvalue (TREE_OPERAND (arg, 0));
2587
2588 case NOP_EXPR:
2589 case CONVERT_EXPR:
2590 case FLOAT_EXPR:
2591 return build1 (TREE_CODE (arg), type,
2592 invert_truthvalue (TREE_OPERAND (arg, 0)));
2593
2594 case BIT_AND_EXPR:
2595 if (!integer_onep (TREE_OPERAND (arg, 1)))
2596 break;
2597 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2598
2599 case SAVE_EXPR:
2600 return build1 (TRUTH_NOT_EXPR, type, arg);
2601
2602 case CLEANUP_POINT_EXPR:
2603 return build1 (CLEANUP_POINT_EXPR, type,
2604 invert_truthvalue (TREE_OPERAND (arg, 0)));
2605
2606 default:
2607 break;
2608 }
2609 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2610 abort ();
2611 return build1 (TRUTH_NOT_EXPR, type, arg);
2612 }
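
/* Example (illustrative): the recursive cases above apply De Morgan's
   laws and their relatives at the tree level:

       !(a && b)     ==>  !a || !b
       !(a ? b : c)  ==>  a ? !b : !c
       !(a, b)       ==>  (a, !b)

   A floating-point "x < y", by contrast, keeps an explicit
   TRUTH_NOT_EXPR, as explained before the comparison case above.  */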
2613
2614 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2615 operands are another bit-wise operation with a common input. If so,
2616 distribute the bit operations to save an operation and possibly two if
2617 constants are involved. For example, convert
2618 (A | B) & (A | C) into A | (B & C)
2619 Further simplification will occur if B and C are constants.
2620
2621 If this optimization cannot be done, 0 will be returned. */
2622
2623 static tree
2624 distribute_bit_expr (enum tree_code code, tree type, tree arg0, tree arg1)
2625 {
2626 tree common;
2627 tree left, right;
2628
2629 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2630 || TREE_CODE (arg0) == code
2631 || (TREE_CODE (arg0) != BIT_AND_EXPR
2632 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2633 return 0;
2634
2635 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2636 {
2637 common = TREE_OPERAND (arg0, 0);
2638 left = TREE_OPERAND (arg0, 1);
2639 right = TREE_OPERAND (arg1, 1);
2640 }
2641 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2642 {
2643 common = TREE_OPERAND (arg0, 0);
2644 left = TREE_OPERAND (arg0, 1);
2645 right = TREE_OPERAND (arg1, 0);
2646 }
2647 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2648 {
2649 common = TREE_OPERAND (arg0, 1);
2650 left = TREE_OPERAND (arg0, 0);
2651 right = TREE_OPERAND (arg1, 1);
2652 }
2653 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2654 {
2655 common = TREE_OPERAND (arg0, 1);
2656 left = TREE_OPERAND (arg0, 0);
2657 right = TREE_OPERAND (arg1, 0);
2658 }
2659 else
2660 return 0;
2661
2662 return fold (build (TREE_CODE (arg0), type, common,
2663 fold (build (code, type, left, right))));
2664 }
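
/* Example (illustrative): with constants involved the distribution can
   save two operations.  For unsigned x,

       (x | 4) & (x | 1)  ==>  x | (4 & 1)  ==>  x | 0  ==>  x

   while without a common operand no transformation is attempted.  */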
2665 \f
2666 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2667 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
2668
2669 static tree
2670 make_bit_field_ref (tree inner, tree type, int bitsize, int bitpos,
2671 int unsignedp)
2672 {
2673 tree result = build (BIT_FIELD_REF, type, inner,
2674 size_int (bitsize), bitsize_int (bitpos));
2675
2676 TREE_UNSIGNED (result) = unsignedp;
2677
2678 return result;
2679 }
2680
2681 /* Optimize a bit-field compare.
2682
2683 There are two cases: First is a compare against a constant and the
2684 second is a comparison of two items where the fields are at the same
2685 bit position relative to the start of a chunk (byte, halfword, word)
2686 large enough to contain it. In these cases we can avoid the shift
2687 implicit in bitfield extractions.
2688
2689 For constants, we emit a compare of the shifted constant with the
2690 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2691 compared. For two fields at the same position, we do the ANDs with the
2692 similar mask and compare the result of the ANDs.
2693
2694 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2695 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2696 are the left and right operands of the comparison, respectively.
2697
2698 If the optimization described above can be done, we return the resulting
2699 tree. Otherwise we return zero. */
2700
2701 static tree
2702 optimize_bit_field_compare (enum tree_code code, tree compare_type,
2703 tree lhs, tree rhs)
2704 {
2705 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
2706 tree type = TREE_TYPE (lhs);
2707 tree signed_type, unsigned_type;
2708 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2709 enum machine_mode lmode, rmode, nmode;
2710 int lunsignedp, runsignedp;
2711 int lvolatilep = 0, rvolatilep = 0;
2712 tree linner, rinner = NULL_TREE;
2713 tree mask;
2714 tree offset;
2715
2716 /* Get all the information about the extractions being done. If the bit size
2717 is the same as the size of the underlying object, we aren't doing an
2718 extraction at all and so can do nothing. We also don't want to
2719 do anything if the inner expression is a PLACEHOLDER_EXPR since we
2720 then will no longer be able to replace it. */
2721 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2722 &lunsignedp, &lvolatilep);
2723 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2724 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
2725 return 0;
2726
2727 if (!const_p)
2728 {
2729 /* If this is not a constant, we can only do something if bit positions,
2730 sizes, and signedness are the same. */
2731 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
2732 &runsignedp, &rvolatilep);
2733
2734 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2735 || lunsignedp != runsignedp || offset != 0
2736 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
2737 return 0;
2738 }
2739
2740 /* See if we can find a mode to refer to this field. We should be able to,
2741 but fail if we can't. */
2742 nmode = get_best_mode (lbitsize, lbitpos,
2743 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
2744 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
2745 TYPE_ALIGN (TREE_TYPE (rinner))),
2746 word_mode, lvolatilep || rvolatilep);
2747 if (nmode == VOIDmode)
2748 return 0;
2749
2750 /* Set signed and unsigned types of the precision of this mode for the
2751 shifts below. */
2752 signed_type = (*lang_hooks.types.type_for_mode) (nmode, 0);
2753 unsigned_type = (*lang_hooks.types.type_for_mode) (nmode, 1);
2754
2755 /* Compute the bit position and size for the new reference and our offset
2756 within it. If the new reference is the same size as the original, we
2757 won't optimize anything, so return zero. */
2758 nbitsize = GET_MODE_BITSIZE (nmode);
2759 nbitpos = lbitpos & ~ (nbitsize - 1);
2760 lbitpos -= nbitpos;
2761 if (nbitsize == lbitsize)
2762 return 0;
2763
2764 if (BYTES_BIG_ENDIAN)
2765 lbitpos = nbitsize - lbitsize - lbitpos;
2766
2767 /* Make the mask to be used against the extracted field. */
2768 mask = build_int_2 (~0, ~0);
2769 TREE_TYPE (mask) = unsigned_type;
2770 force_fit_type (mask, 0);
2771 mask = convert (unsigned_type, mask);
2772 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize), 0);
2773 mask = const_binop (RSHIFT_EXPR, mask,
2774 size_int (nbitsize - lbitsize - lbitpos), 0);
2775
2776 if (! const_p)
2777 /* If not comparing with constant, just rework the comparison
2778 and return. */
2779 return build (code, compare_type,
2780 build (BIT_AND_EXPR, unsigned_type,
2781 make_bit_field_ref (linner, unsigned_type,
2782 nbitsize, nbitpos, 1),
2783 mask),
2784 build (BIT_AND_EXPR, unsigned_type,
2785 make_bit_field_ref (rinner, unsigned_type,
2786 nbitsize, nbitpos, 1),
2787 mask));
2788
2789 /* Otherwise, we are handling the constant case. See if the constant is too
2790 big for the field. Warn and return a tree for 0 (false) if so. We do
2791 this not only for its own sake, but to avoid having to test for this
2792 error case below. If we didn't, we might generate wrong code.
2793
2794 For unsigned fields, the constant shifted right by the field length should
2795 be all zero. For signed fields, the high-order bits should agree with
2796 the sign bit. */
2797
2798 if (lunsignedp)
2799 {
2800 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2801 convert (unsigned_type, rhs),
2802 size_int (lbitsize), 0)))
2803 {
2804 warning ("comparison is always %d due to width of bit-field",
2805 code == NE_EXPR);
2806 return convert (compare_type,
2807 (code == NE_EXPR
2808 ? integer_one_node : integer_zero_node));
2809 }
2810 }
2811 else
2812 {
2813 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2814 size_int (lbitsize - 1), 0);
2815 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2816 {
2817 warning ("comparison is always %d due to width of bit-field",
2818 code == NE_EXPR);
2819 return convert (compare_type,
2820 (code == NE_EXPR
2821 ? integer_one_node : integer_zero_node));
2822 }
2823 }
2824
2825 /* Single-bit compares should always be against zero. */
2826 if (lbitsize == 1 && ! integer_zerop (rhs))
2827 {
2828 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2829 rhs = convert (type, integer_zero_node);
2830 }
2831
2832 /* Make a new bitfield reference, shift the constant over the
2833 appropriate number of bits and mask it with the computed mask
2834 (in case this was a signed field). If we changed it, make a new one. */
2835 lhs = make_bit_field_ref (linner, unsigned_type, nbitsize, nbitpos, 1);
2836 if (lvolatilep)
2837 {
2838 TREE_SIDE_EFFECTS (lhs) = 1;
2839 TREE_THIS_VOLATILE (lhs) = 1;
2840 }
2841
2842 rhs = fold (const_binop (BIT_AND_EXPR,
2843 const_binop (LSHIFT_EXPR,
2844 convert (unsigned_type, rhs),
2845 size_int (lbitpos), 0),
2846 mask, 0));
2847
2848 return build (code, compare_type,
2849 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2850 rhs);
2851 }
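
/* Example (illustrative and layout-dependent; assumes a little-endian
   target with 8-bit bytes): given

       struct s { unsigned a : 3; unsigned b : 5; } x;

   the test "x.b == 7" would normally shift B out of its containing
   byte.  This function instead folds it to the equivalent of

       (*(unsigned char *) &x & 0xf8) == (7 << 3)

   masking the containing unit and comparing against the shifted
   constant, so no extraction shift remains at run time.  */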
2852 \f
2853 /* Subroutine for fold_truthop: decode a field reference.
2854
2855 If EXP is a comparison reference, we return the innermost reference.
2856
2857 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2858 set to the starting bit number.
2859
2860 If the innermost field can be completely contained in a mode-sized
2861 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2862
2863 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2864 otherwise it is not changed.
2865
2866 *PUNSIGNEDP is set to the signedness of the field.
2867
2868 *PMASK is set to the mask used. This is either contained in a
2869 BIT_AND_EXPR or derived from the width of the field.
2870
2871 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2872
2873 Return 0 if this is not a component reference or is one that we can't
2874 do anything with. */
2875
2876 static tree
2877 decode_field_reference (tree exp, HOST_WIDE_INT *pbitsize,
2878 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
2879 int *punsignedp, int *pvolatilep,
2880 tree *pmask, tree *pand_mask)
2881 {
2882 tree outer_type = 0;
2883 tree and_mask = 0;
2884 tree mask, inner, offset;
2885 tree unsigned_type;
2886 unsigned int precision;
2887
2888 /* All the optimizations using this function assume integer fields.
2889 There are problems with FP fields since the type_for_size call
2890 below can fail for, e.g., XFmode. */
2891 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2892 return 0;
2893
2894 /* We are interested in the bare arrangement of bits, so strip everything
2895 that doesn't affect the machine mode. However, record the type of the
2896 outermost expression if it may matter below. */
2897 if (TREE_CODE (exp) == NOP_EXPR
2898 || TREE_CODE (exp) == CONVERT_EXPR
2899 || TREE_CODE (exp) == NON_LVALUE_EXPR)
2900 outer_type = TREE_TYPE (exp);
2901 STRIP_NOPS (exp);
2902
2903 if (TREE_CODE (exp) == BIT_AND_EXPR)
2904 {
2905 and_mask = TREE_OPERAND (exp, 1);
2906 exp = TREE_OPERAND (exp, 0);
2907 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2908 if (TREE_CODE (and_mask) != INTEGER_CST)
2909 return 0;
2910 }
2911
2912 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2913 punsignedp, pvolatilep);
2914 if ((inner == exp && and_mask == 0)
2915 || *pbitsize < 0 || offset != 0
2916 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
2917 return 0;
2918
2919 /* If the number of bits in the reference is the same as the bitsize of
2920 the outer type, then the outer type gives the signedness. Otherwise
2921 (in case of a small bitfield) the signedness is unchanged. */
2922 if (outer_type && *pbitsize == tree_low_cst (TYPE_SIZE (outer_type), 1))
2923 *punsignedp = TREE_UNSIGNED (outer_type);
2924
2925 /* Compute the mask to access the bitfield. */
2926 unsigned_type = (*lang_hooks.types.type_for_size) (*pbitsize, 1);
2927 precision = TYPE_PRECISION (unsigned_type);
2928
2929 mask = build_int_2 (~0, ~0);
2930 TREE_TYPE (mask) = unsigned_type;
2931 force_fit_type (mask, 0);
2932 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2933 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2934
2935 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2936 if (and_mask != 0)
2937 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2938 convert (unsigned_type, and_mask), mask));
2939
2940 *pmask = mask;
2941 *pand_mask = and_mask;
2942 return inner;
2943 }
2944
2945 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
2946 bit positions. */
2947
2948 static int
2949 all_ones_mask_p (tree mask, int size)
2950 {
2951 tree type = TREE_TYPE (mask);
2952 unsigned int precision = TYPE_PRECISION (type);
2953 tree tmask;
2954
2955 tmask = build_int_2 (~0, ~0);
2956 TREE_TYPE (tmask) = (*lang_hooks.types.signed_type) (type);
2957 force_fit_type (tmask, 0);
2958 return
2959 tree_int_cst_equal (mask,
2960 const_binop (RSHIFT_EXPR,
2961 const_binop (LSHIFT_EXPR, tmask,
2962 size_int (precision - size),
2963 0),
2964 size_int (precision - size), 0));
2965 }
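
/* Example (illustrative): for SIZE == 8 a mask whose value is 0xff --
   ones in exactly the eight low-order positions -- satisfies this
   predicate, while 0x7f (too few ones) and 0x1ff (a stray ninth one)
   do not.  A caller such as fold_truthop can use this to tell whether
   a mask covers an entire extracted field.  */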
2966
2967 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
2968 represents the sign bit of EXP's type. If EXP represents a sign
2969 or zero extension, also test VAL against the unextended type.
2970 The return value is the (sub)expression whose sign bit is VAL,
2971 or NULL_TREE otherwise. */
2972
2973 static tree
2974 sign_bit_p (tree exp, tree val)
2975 {
2976 unsigned HOST_WIDE_INT mask_lo, lo;
2977 HOST_WIDE_INT mask_hi, hi;
2978 int width;
2979 tree t;
2980
2981 /* Tree EXP must have an integral type. */
2982 t = TREE_TYPE (exp);
2983 if (! INTEGRAL_TYPE_P (t))
2984 return NULL_TREE;
2985
2986 /* Tree VAL must be an integer constant. */
2987 if (TREE_CODE (val) != INTEGER_CST
2988 || TREE_CONSTANT_OVERFLOW (val))
2989 return NULL_TREE;
2990
2991 width = TYPE_PRECISION (t);
2992 if (width > HOST_BITS_PER_WIDE_INT)
2993 {
2994 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
2995 lo = 0;
2996
2997 mask_hi = ((unsigned HOST_WIDE_INT) -1
2998 >> (2 * HOST_BITS_PER_WIDE_INT - width));
2999 mask_lo = -1;
3000 }
3001 else
3002 {
3003 hi = 0;
3004 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3005
3006 mask_hi = 0;
3007 mask_lo = ((unsigned HOST_WIDE_INT) -1
3008 >> (HOST_BITS_PER_WIDE_INT - width));
3009 }
3010
3011 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3012 treat VAL as if it were unsigned. */
3013 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3014 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3015 return exp;
3016
3017 /* Handle extension from a narrower type. */
3018 if (TREE_CODE (exp) == NOP_EXPR
3019 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3020 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3021
3022 return NULL_TREE;
3023 }
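
/* Example (illustrative, for a 32-bit int x): sign_bit_p (x,
   0x80000000) returns x, which lets fold rewrite sign-bit tests as
   comparisons against zero:

       (x & 0x80000000) != 0  ==>  x < 0
       (x & 0x80000000) == 0  ==>  x >= 0

   The narrower-type case covers e.g. a signed char widened to int,
   where 0x80 is the sign bit of the unextended operand.  */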
3024
3025 /* Subroutine for fold_truthop: determine if an operand is simple enough
3026 to be evaluated unconditionally. */
3027
3028 static int
3029 simple_operand_p (tree exp)
3030 {
3031 /* Strip any conversions that don't change the machine mode. */
3032 while ((TREE_CODE (exp) == NOP_EXPR
3033 || TREE_CODE (exp) == CONVERT_EXPR)
3034 && (TYPE_MODE (TREE_TYPE (exp))
3035 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
3036 exp = TREE_OPERAND (exp, 0);
3037
3038 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
3039 || (DECL_P (exp)
3040 && ! TREE_ADDRESSABLE (exp)
3041 && ! TREE_THIS_VOLATILE (exp)
3042 && ! DECL_NONLOCAL (exp)
3043 /* Don't regard global variables as simple. They may be
3044 allocated in ways unknown to the compiler (shared memory,
3045 #pragma weak, etc). */
3046 && ! TREE_PUBLIC (exp)
3047 && ! DECL_EXTERNAL (exp)
3048 /* Loading a static variable is unduly expensive, but global
3049 registers aren't expensive. */
3050 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3051 }
3052 \f
3053 /* The following functions are subroutines to fold_range_test and allow it to
3054 try to change a logical combination of comparisons into a range test.
3055
3056 For example, both
3057 X == 2 || X == 3 || X == 4 || X == 5
3058 and
3059 X >= 2 && X <= 5
3060 are converted to
3061 (unsigned) (X - 2) <= 3
3062
3063 We describe each set of comparisons as being either inside or outside
3064 a range, using a variable named like IN_P, and then describe the
3065 range with a lower and upper bound. If one of the bounds is omitted,
3066 it represents either the highest or lowest value of the type.
3067
3068 In the comments below, we represent a range by two numbers in brackets
3069 preceded by a "+" to designate being inside that range, or a "-" to
3070 designate being outside that range, so the condition can be inverted by
3071 flipping the prefix. An omitted bound is represented by a "-". For
3072 example, "- [-, 10]" means being outside the range starting at the lowest
3073 possible value and ending at 10, in other words, being greater than 10.
3074 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3075 always false.
3076
3077 We set up things so that the missing bounds are handled in a consistent
3078 manner so neither a missing bound nor "true" and "false" need to be
3079 handled using a special case. */
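
/* Worked example (illustrative, assuming 32-bit unsigned arithmetic):
   the single comparison works because unsigned subtraction wraps.
   For "(unsigned) (X - 2) <= 3":

       X == 1:  (unsigned) (1 - 2) == 0xffffffff  >  3   false
       X == 2:  (unsigned) (2 - 2) == 0           <= 3   true
       X == 5:  (unsigned) (5 - 2) == 3           <= 3   true
       X == 6:  (unsigned) (6 - 2) == 4           >  3   false

   Values below the low bound wrap to huge unsigned values and fail
   the test together with values above the high bound.  */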
3080
3081 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3082 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3083 and UPPER1_P are nonzero if the respective argument is an upper bound
3084 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3085 must be specified for a comparison. ARG1 will be converted to ARG0's
3086 type if both are specified. */
3087
3088 static tree
3089 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3090 tree arg1, int upper1_p)
3091 {
3092 tree tem;
3093 int result;
3094 int sgn0, sgn1;
3095
3096 /* If neither arg represents infinity, do the normal operation.
3097 Else, if not a comparison, return infinity. Else handle the special
3098 comparison rules. Note that most of the cases below won't occur, but
3099 are handled for consistency. */
3100
3101 if (arg0 != 0 && arg1 != 0)
3102 {
3103 tem = fold (build (code, type != 0 ? type : TREE_TYPE (arg0),
3104 arg0, convert (TREE_TYPE (arg0), arg1)));
3105 STRIP_NOPS (tem);
3106 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3107 }
3108
3109 if (TREE_CODE_CLASS (code) != '<')
3110 return 0;
3111
3112 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3113 for neither. In real mathematics, we could not assume two open-ended
3114 ranges are the same. But this is computer arithmetic, where numbers
3115 are finite, so we can model any unbounded bound with a value Z greater
3116 than any representable number. This permits us to treat unbounded
3117 ranges as equal. */
3118 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3119 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3120 switch (code)
3121 {
3122 case EQ_EXPR:
3123 result = sgn0 == sgn1;
3124 break;
3125 case NE_EXPR:
3126 result = sgn0 != sgn1;
3127 break;
3128 case LT_EXPR:
3129 result = sgn0 < sgn1;
3130 break;
3131 case LE_EXPR:
3132 result = sgn0 <= sgn1;
3133 break;
3134 case GT_EXPR:
3135 result = sgn0 > sgn1;
3136 break;
3137 case GE_EXPR:
3138 result = sgn0 >= sgn1;
3139 break;
3140 default:
3141 abort ();
3142 }
3143
3144 return convert (type, result ? integer_one_node : integer_zero_node);
3145 }
3146 \f
3147 /* Given EXP, a logical expression, set the range it is testing into
3148 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
3149 actually being tested. *PLOW and *PHIGH will be made of the same type
3150 as the returned expression. If EXP is not a comparison, we will most
3151 likely not be returning a useful value and range. */
3152
3153 static tree
3154 make_range (tree exp, int *pin_p, tree *plow, tree *phigh)
3155 {
3156 enum tree_code code;
3157 tree arg0 = NULL_TREE, arg1 = NULL_TREE, type = NULL_TREE;
3158 tree orig_type = NULL_TREE;
3159 int in_p, n_in_p;
3160 tree low, high, n_low, n_high;
3161
3162 /* Start with simply saying "EXP != 0" and then look at the code of EXP
3163 and see if we can refine the range. Some of the cases below may not
3164 happen, but it doesn't seem worth worrying about this. We "continue"
3165 the outer loop when we've changed something; otherwise we "break"
3166 the switch, which will "break" the while. */
3167
3168 in_p = 0, low = high = convert (TREE_TYPE (exp), integer_zero_node);
3169
3170 while (1)
3171 {
3172 code = TREE_CODE (exp);
3173
3174 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
3175 {
3176 if (first_rtl_op (code) > 0)
3177 arg0 = TREE_OPERAND (exp, 0);
3178 if (TREE_CODE_CLASS (code) == '<'
3179 || TREE_CODE_CLASS (code) == '1'
3180 || TREE_CODE_CLASS (code) == '2')
3181 type = TREE_TYPE (arg0);
3182 if (TREE_CODE_CLASS (code) == '2'
3183 || TREE_CODE_CLASS (code) == '<'
3184 || (TREE_CODE_CLASS (code) == 'e'
3185 && TREE_CODE_LENGTH (code) > 1))
3186 arg1 = TREE_OPERAND (exp, 1);
3187 }
3188
3189 /* Set ORIG_TYPE as soon as TYPE is non-null so that we do not
3190 lose a cast by accident. */
3191 if (type != NULL_TREE && orig_type == NULL_TREE)
3192 orig_type = type;
3193
3194 switch (code)
3195 {
3196 case TRUTH_NOT_EXPR:
3197 in_p = ! in_p, exp = arg0;
3198 continue;
3199
3200 case EQ_EXPR: case NE_EXPR:
3201 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3202 /* We can only do something if the range is testing for zero
3203 and if the second operand is an integer constant. Note that
3204 saying something is "in" the range we make is done by
3205 complementing IN_P, since it is set up for the initial case of
3206 being not equal to zero; "out" means leaving it alone.
3207 if (low == 0 || high == 0
3208 || ! integer_zerop (low) || ! integer_zerop (high)
3209 || TREE_CODE (arg1) != INTEGER_CST)
3210 break;
3211
3212 switch (code)
3213 {
3214 case NE_EXPR: /* - [c, c] */
3215 low = high = arg1;
3216 break;
3217 case EQ_EXPR: /* + [c, c] */
3218 in_p = ! in_p, low = high = arg1;
3219 break;
3220 case GT_EXPR: /* - [-, c] */
3221 low = 0, high = arg1;
3222 break;
3223 case GE_EXPR: /* + [c, -] */
3224 in_p = ! in_p, low = arg1, high = 0;
3225 break;
3226 case LT_EXPR: /* - [c, -] */
3227 low = arg1, high = 0;
3228 break;
3229 case LE_EXPR: /* + [-, c] */
3230 in_p = ! in_p, low = 0, high = arg1;
3231 break;
3232 default:
3233 abort ();
3234 }
3235
3236 exp = arg0;
3237
3238 /* If this is an unsigned comparison, we also know that EXP is
3239 greater than or equal to zero. We base the range tests we make
3240 on that fact, so we record it here so we can parse existing
3241 range tests. */
3242 if (TREE_UNSIGNED (type) && (low == 0 || high == 0))
3243 {
3244 if (! merge_ranges (&n_in_p, &n_low, &n_high, in_p, low, high,
3245 1, convert (type, integer_zero_node),
3246 NULL_TREE))
3247 break;
3248
3249 in_p = n_in_p, low = n_low, high = n_high;
3250
3251 /* If the high bound is missing, but we have a nonzero low
3252 bound, reverse the range so it goes from zero to the low bound
3253 minus 1. */
3254 if (high == 0 && low && ! integer_zerop (low))
3255 {
3256 in_p = ! in_p;
3257 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3258 integer_one_node, 0);
3259 low = convert (type, integer_zero_node);
3260 }
3261 }
3262 continue;
3263
3264 case NEGATE_EXPR:
3265 /* (-x) IN [a,b] -> x in [-b, -a] */
3266 n_low = range_binop (MINUS_EXPR, type,
3267 convert (type, integer_zero_node), 0, high, 1);
3268 n_high = range_binop (MINUS_EXPR, type,
3269 convert (type, integer_zero_node), 0, low, 0);
3270 low = n_low, high = n_high;
3271 exp = arg0;
3272 continue;
3273
3274 case BIT_NOT_EXPR:
3275 /* ~ X -> -X - 1 */
3276 exp = build (MINUS_EXPR, type, negate_expr (arg0),
3277 convert (type, integer_one_node));
3278 continue;
3279
3280 case PLUS_EXPR: case MINUS_EXPR:
3281 if (TREE_CODE (arg1) != INTEGER_CST)
3282 break;
3283
3284 /* If EXP is signed, any overflow in the computation is undefined,
3285 so we don't worry about it so long as our computations on
3286 the bounds don't overflow. For unsigned, overflow is defined
3287 and this is exactly the right thing. */
3288 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3289 type, low, 0, arg1, 0);
3290 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
3291 type, high, 1, arg1, 0);
3292 if ((n_low != 0 && TREE_OVERFLOW (n_low))
3293 || (n_high != 0 && TREE_OVERFLOW (n_high)))
3294 break;
3295
3296 /* Check for an unsigned range which has wrapped around the maximum
3297 value thus making n_high < n_low, and normalize it. */
3298 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
3299 {
3300 low = range_binop (PLUS_EXPR, type, n_high, 0,
3301 integer_one_node, 0);
3302 high = range_binop (MINUS_EXPR, type, n_low, 0,
3303 integer_one_node, 0);
3304
3305 /* If the range is of the form +/- [ x+1, x ], we won't
3306 be able to normalize it. But then, it represents the
3307 whole range or the empty set, so make it
3308 +/- [ -, - ]. */
3309 if (tree_int_cst_equal (n_low, low)
3310 && tree_int_cst_equal (n_high, high))
3311 low = high = 0;
3312 else
3313 in_p = ! in_p;
3314 }
3315 else
3316 low = n_low, high = n_high;
3317
3318 exp = arg0;
3319 continue;
3320
3321 case NOP_EXPR: case NON_LVALUE_EXPR: case CONVERT_EXPR:
3322 if (TYPE_PRECISION (type) > TYPE_PRECISION (orig_type))
3323 break;
3324
3325 if (! INTEGRAL_TYPE_P (type)
3326 || (low != 0 && ! int_fits_type_p (low, type))
3327 || (high != 0 && ! int_fits_type_p (high, type)))
3328 break;
3329
3330 n_low = low, n_high = high;
3331
3332 if (n_low != 0)
3333 n_low = convert (type, n_low);
3334
3335 if (n_high != 0)
3336 n_high = convert (type, n_high);
3337
3338 /* If we're converting from an unsigned to a signed type,
3339 we will be doing the comparison as unsigned. The tests above
3340 have already verified that LOW and HIGH are both positive.
3341
3342 So we have to make sure that the original unsigned value will
3343 be interpreted as positive. */
3344 if (TREE_UNSIGNED (type) && ! TREE_UNSIGNED (TREE_TYPE (exp)))
3345 {
3346 tree equiv_type = (*lang_hooks.types.type_for_mode)
3347 (TYPE_MODE (type), 1);
3348 tree high_positive;
3349
3350 /* A range without an upper bound is, naturally, unbounded.
3351 Since convert would have cropped a very large value, use
3352 the max value for the destination type. */
3353 high_positive
3354 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
3355 : TYPE_MAX_VALUE (type);
3356
3357 if (TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (exp)))
3358 high_positive = fold (build (RSHIFT_EXPR, type,
3359 convert (type, high_positive),
3360 convert (type, integer_one_node)));
3361
3362 /* If the low bound is specified, "and" the range with the
3363 range for which the original unsigned value will be
3364 positive. */
3365 if (low != 0)
3366 {
3367 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3368 1, n_low, n_high,
3369 1, convert (type, integer_zero_node),
3370 high_positive))
3371 break;
3372
3373 in_p = (n_in_p == in_p);
3374 }
3375 else
3376 {
3377 /* Otherwise, "or" the range with the range of the input
3378 that will be interpreted as negative. */
3379 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3380 0, n_low, n_high,
3381 1, convert (type, integer_zero_node),
3382 high_positive))
3383 break;
3384
3385 in_p = (in_p != n_in_p);
3386 }
3387 }
3388
3389 exp = arg0;
3390 low = n_low, high = n_high;
3391 continue;
3392
3393 default:
3394 break;
3395 }
3396
3397 break;
3398 }
3399
3400 /* If EXP is a constant, we can evaluate whether this is true or false. */
3401 if (TREE_CODE (exp) == INTEGER_CST)
3402 {
3403 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
3404 exp, 0, low, 0))
3405 && integer_onep (range_binop (LE_EXPR, integer_type_node,
3406 exp, 1, high, 1)));
3407 low = high = 0;
3408 exp = 0;
3409 }
3410
3411 *pin_p = in_p, *plow = low, *phigh = high;
3412 return exp;
3413 }
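
/* Worked example (illustrative): for EXP = "x + 10 > 14" with int x,
   the GT_EXPR case first yields the outside-range "- [-, 14]" for
   "x + 10"; the PLUS_EXPR case then subtracts 10 from the bounds,
   giving "- [-, 4]" for x itself.  The function returns x with
   *PIN_P = 0, *PLOW unbounded and *PHIGH = 4, which is exactly
   "x > 4" (assuming, as the code does, that the signed addition
   cannot overflow).  */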
3414 \f
3415 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
3416 type, TYPE, return an expression to test if EXP is in (or out of, depending
3417 on IN_P) the range. */
3418
3419 static tree
3420 build_range_check (tree type, tree exp, int in_p, tree low, tree high)
3421 {
3422 tree etype = TREE_TYPE (exp);
3423 tree value;
3424
3425 if (! in_p
3426 && (0 != (value = build_range_check (type, exp, 1, low, high))))
3427 return invert_truthvalue (value);
3428
3429 if (low == 0 && high == 0)
3430 return convert (type, integer_one_node);
3431
3432 if (low == 0)
3433 return fold (build (LE_EXPR, type, exp, high));
3434
3435 if (high == 0)
3436 return fold (build (GE_EXPR, type, exp, low));
3437
3438 if (operand_equal_p (low, high, 0))
3439 return fold (build (EQ_EXPR, type, exp, low));
3440
3441 if (integer_zerop (low))
3442 {
3443 if (! TREE_UNSIGNED (etype))
3444 {
3445 etype = (*lang_hooks.types.unsigned_type) (etype);
3446 high = convert (etype, high);
3447 exp = convert (etype, exp);
3448 }
3449 return build_range_check (type, exp, 1, 0, high);
3450 }
3451
3452 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
3453 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
3454 {
3455 unsigned HOST_WIDE_INT lo;
3456 HOST_WIDE_INT hi;
3457 int prec;
3458
3459 prec = TYPE_PRECISION (etype);
3460 if (prec <= HOST_BITS_PER_WIDE_INT)
3461 {
3462 hi = 0;
3463 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
3464 }
3465 else
3466 {
3467 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
3468 lo = (unsigned HOST_WIDE_INT) -1;
3469 }
3470
3471 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
3472 {
3473 if (TREE_UNSIGNED (etype))
3474 {
3475 etype = (*lang_hooks.types.signed_type) (etype);
3476 exp = convert (etype, exp);
3477 }
3478 return fold (build (GT_EXPR, type, exp,
3479 convert (etype, integer_zero_node)));
3480 }
3481 }
3482
3483 if (0 != (value = const_binop (MINUS_EXPR, high, low, 0))
3484 && ! TREE_OVERFLOW (value))
3485 return build_range_check (type,
3486 fold (build (MINUS_EXPR, etype, exp, low)),
3487 1, convert (etype, integer_zero_node), value);
3488
3489 return 0;
3490 }
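
/* Example (illustrative): the final case above reduces a two-sided
   range to one comparison by subtracting the low bound.  A range
   check for "x in [2, 5]" becomes the equivalent of

       (unsigned) (x - 2) <= 3

   via the recursive call whose low bound is zero, which switches to
   the unsigned type.  The special case before it instead produces
   "(signed char) c > 0" for "c >= 1 && c <= 127" when c has 8-bit
   precision.  */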
3491 \f
3492 /* Given two ranges, see if we can merge them into one. Return 1 if we
3493 can, 0 if we can't. Set the output range into the specified parameters. */
3494
3495 static int
3496 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
3497 tree high0, int in1_p, tree low1, tree high1)
3498 {
3499 int no_overlap;
3500 int subset;
3501 int temp;
3502 tree tem;
3503 int in_p;
3504 tree low, high;
3505 int lowequal = ((low0 == 0 && low1 == 0)
3506 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3507 low0, 0, low1, 0)));
3508 int highequal = ((high0 == 0 && high1 == 0)
3509 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
3510 high0, 1, high1, 1)));
3511
3512 /* Make range 0 be the range that starts first, or ends last if they
3513 start at the same value. Swap them if that isn't already the case. */
3514 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
3515 low0, 0, low1, 0))
3516 || (lowequal
3517 && integer_onep (range_binop (GT_EXPR, integer_type_node,
3518 high1, 1, high0, 1))))
3519 {
3520 temp = in0_p, in0_p = in1_p, in1_p = temp;
3521 tem = low0, low0 = low1, low1 = tem;
3522 tem = high0, high0 = high1, high1 = tem;
3523 }
3524
3525 /* Now flag two cases, whether the ranges are disjoint or whether the
3526 second range is totally subsumed in the first. Note that the tests
3527 below are simplified by the ones above. */
3528 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
3529 high0, 1, low1, 0));
3530 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
3531 high1, 1, high0, 1));
3532
3533 /* We now have four cases, depending on whether we are including or
3534 excluding the two ranges. */
3535 if (in0_p && in1_p)
3536 {
3537 /* If they don't overlap, the result is false. If the second range
3538 is a subset it is the result. Otherwise, the range is from the start
3539 of the second to the end of the first. */
3540 if (no_overlap)
3541 in_p = 0, low = high = 0;
3542 else if (subset)
3543 in_p = 1, low = low1, high = high1;
3544 else
3545 in_p = 1, low = low1, high = high0;
3546 }
3547
3548 else if (in0_p && ! in1_p)
3549 {
3550 /* If they don't overlap, the result is the first range. If they are
3551 equal, the result is false. If the second range is a subset of the
3552 first, and the ranges begin at the same place, we go from just after
3553 the end of the second range to the end of the first. If the second
3554 range is not a subset of the first, or if it is a subset and both
3555 ranges end at the same place, the range starts at the start of the
3556 first range and ends just before the second range.
3557 Otherwise, we can't describe this as a single range. */
3558 if (no_overlap)
3559 in_p = 1, low = low0, high = high0;
3560 else if (lowequal && highequal)
3561 in_p = 0, low = high = 0;
3562 else if (subset && lowequal)
3563 {
3564 in_p = 1, high = high0;
3565 low = range_binop (PLUS_EXPR, NULL_TREE, high1, 0,
3566 integer_one_node, 0);
3567 }
3568 else if (! subset || highequal)
3569 {
3570 in_p = 1, low = low0;
3571 high = range_binop (MINUS_EXPR, NULL_TREE, low1, 0,
3572 integer_one_node, 0);
3573 }
3574 else
3575 return 0;
3576 }
3577
3578 else if (! in0_p && in1_p)
3579 {
3580 /* If they don't overlap, the result is the second range. If the second
3581 is a subset of the first, the result is false. Otherwise,
3582 the range starts just after the first range and ends at the
3583 end of the second. */
3584 if (no_overlap)
3585 in_p = 1, low = low1, high = high1;
3586 else if (subset || highequal)
3587 in_p = 0, low = high = 0;
3588 else
3589 {
3590 in_p = 1, high = high1;
3591 low = range_binop (PLUS_EXPR, NULL_TREE, high0, 1,
3592 integer_one_node, 0);
3593 }
3594 }
3595
3596 else
3597 {
3598 /* The case where we are excluding both ranges. Here the complex case
3599 is if they don't overlap. In that case, the only time we have a
3600 range is if they are adjacent. If the second is a subset of the
3601 first, the result is the first. Otherwise, the range to exclude
3602 starts at the beginning of the first range and ends at the end of the
3603 second. */
3604 if (no_overlap)
3605 {
3606 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
3607 range_binop (PLUS_EXPR, NULL_TREE,
3608 high0, 1,
3609 integer_one_node, 1),
3610 1, low1, 0)))
3611 in_p = 0, low = low0, high = high1;
3612 else
3613 return 0;
3614 }
3615 else if (subset)
3616 in_p = 0, low = low0, high = high0;
3617 else
3618 in_p = 0, low = low0, high = high1;
3619 }
3620
3621 *pin_p = in_p, *plow = low, *phigh = high;
3622 return 1;
3623 }
3624 \f
3625 #ifndef RANGE_TEST_NON_SHORT_CIRCUIT
3626 #define RANGE_TEST_NON_SHORT_CIRCUIT (BRANCH_COST >= 2)
3627 #endif
3628
3629 /* EXP is some logical combination of boolean tests. See if we can
3630 merge it into some range test. Return the new tree if so. */
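/* For example, "c >= '0' && c <= '9'" can be merged into one range
test, which build_range_check typically renders as something like
"(unsigned char) (c - '0') <= 9". */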
3631
3632 static tree
3633 fold_range_test (tree exp)
3634 {
3635 int or_op = (TREE_CODE (exp) == TRUTH_ORIF_EXPR
3636 || TREE_CODE (exp) == TRUTH_OR_EXPR);
3637 int in0_p, in1_p, in_p;
3638 tree low0, low1, low, high0, high1, high;
3639 tree lhs = make_range (TREE_OPERAND (exp, 0), &in0_p, &low0, &high0);
3640 tree rhs = make_range (TREE_OPERAND (exp, 1), &in1_p, &low1, &high1);
3641 tree tem;
3642
3643 /* If this is an OR operation, invert both sides; we will invert
3644 again at the end. */
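/* (This is just De Morgan's law: "a || b" is "!(!a && !b)".) */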
3645 if (or_op)
3646 in0_p = ! in0_p, in1_p = ! in1_p;
3647
3648 /* If both expressions are the same, if we can merge the ranges, and we
3649 can build the range test, return it or its inversion. If one of the
3650 ranges is always true or always false, consider it to be the same
3651 expression as the other. */
3652 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
3653 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
3654 in1_p, low1, high1)
3655 && 0 != (tem = (build_range_check (TREE_TYPE (exp),
3656 lhs != 0 ? lhs
3657 : rhs != 0 ? rhs : integer_zero_node,
3658 in_p, low, high))))
3659 return or_op ? invert_truthvalue (tem) : tem;
3660
3661 /* On machines where branches are expensive, if this is a
3662 short-circuited branch and the underlying object on both sides
3663 is the same, make a non-short-circuit operation. */
3664 else if (RANGE_TEST_NON_SHORT_CIRCUIT
3665 && lhs != 0 && rhs != 0
3666 && (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3667 || TREE_CODE (exp) == TRUTH_ORIF_EXPR)
3668 && operand_equal_p (lhs, rhs, 0))
3669 {
3670 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
3671 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
3672 which cases we can't do this. */
3673 if (simple_operand_p (lhs))
3674 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3675 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3676 TREE_TYPE (exp), TREE_OPERAND (exp, 0),
3677 TREE_OPERAND (exp, 1));
3678
3679 else if ((*lang_hooks.decls.global_bindings_p) () == 0
3680 && ! CONTAINS_PLACEHOLDER_P (lhs))
3681 {
3682 tree common = save_expr (lhs);
3683
3684 if (0 != (lhs = build_range_check (TREE_TYPE (exp), common,
3685 or_op ? ! in0_p : in0_p,
3686 low0, high0))
3687 && (0 != (rhs = build_range_check (TREE_TYPE (exp), common,
3688 or_op ? ! in1_p : in1_p,
3689 low1, high1))))
3690 return build (TREE_CODE (exp) == TRUTH_ANDIF_EXPR
3691 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
3692 TREE_TYPE (exp), lhs, rhs);
3693 }
3694 }
3695
3696 return 0;
3697 }
3698 \f
3699 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
3700 bit value. Arrange things so the extra bits will be set to zero if and
3701 only if C is sign-extended to its full width. If MASK is nonzero,
3702 it is an INTEGER_CST that should be AND'ed with the extra bits. */
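/* For example, with P == 4 in an 8-bit mode and MASK zero, C == 0x0a
has its P-bit sign bit set; TEMP below becomes 0xf0 and the final XOR
yields 0xfa, i.e. C sign-extended from 4 bits to the full 8. */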
3703
3704 static tree
3705 unextend (tree c, int p, int unsignedp, tree mask)
3706 {
3707 tree type = TREE_TYPE (c);
3708 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
3709 tree temp;
3710
3711 if (p == modesize || unsignedp)
3712 return c;
3713
3714 /* We work by getting just the sign bit into the low-order bit, then
3715 into the high-order bit, then sign-extend. We then XOR that value
3716 with C. */
3717 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
3718 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
3719
3720 /* We must use a signed type in order to get an arithmetic right shift.
3721 However, we must also avoid introducing accidental overflows, so that
3722 a subsequent call to integer_zerop will work. Hence we must
3723 do the type conversion here. At this point, the constant is either
3724 zero or one, and the conversion to a signed type can never overflow.
3725 We could get an overflow if this conversion is done anywhere else. */
3726 if (TREE_UNSIGNED (type))
3727 temp = convert ((*lang_hooks.types.signed_type) (type), temp);
3728
3729 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
3730 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
3731 if (mask != 0)
3732 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
3733 /* If necessary, convert the type back to match the type of C. */
3734 if (TREE_UNSIGNED (type))
3735 temp = convert (type, temp);
3736
3737 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
3738 }
3739 \f
3740 /* Find ways of folding logical expressions of LHS and RHS:
3741 Try to merge two comparisons to the same innermost item.
3742 Look for range tests like "ch >= '0' && ch <= '9'".
3743 Look for combinations of simple terms on machines with expensive branches
3744 and evaluate the RHS unconditionally.
3745
3746 For example, if we have p->a == 2 && p->b == 4 and we can make an
3747 object large enough to span both A and B, we can do this with a comparison
3748 against the object ANDed with the a mask.
3749
3750 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
3751 operations to do this with one comparison.
3752
3753 We check for both normal comparisons and the BIT_AND_EXPRs made by this
3754 function and the one above.
3755
3756 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
3757 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
3758
3759 TRUTH_TYPE is the type of the logical operation and LHS and RHS are its
3760 two operands.
3761
3762 We return the simplified tree or 0 if no optimization is possible. */
3763
3764 static tree
3765 fold_truthop (enum tree_code code, tree truth_type, tree lhs, tree rhs)
3766 {
3767 /* If this is the "or" of two comparisons, we can do something if
3768 the comparisons are NE_EXPR. If this is the "and", we can do something
3769 if the comparisons are EQ_EXPR. I.e.,
3770 (a->b == 2 && a->c == 4) can become (a->new == NEW).
3771
3772 WANTED_CODE is this operation code. For single bit fields, we can
3773 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
3774 comparison for one-bit fields. */
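/* For example, under a logical AND the wanted code is EQ_EXPR; a
one-bit test such as "(x & 8) != 0" is recast as "(x & 8) == 8"
by the l_const handling below. */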
3775
3776 enum tree_code wanted_code;
3777 enum tree_code lcode, rcode;
3778 tree ll_arg, lr_arg, rl_arg, rr_arg;
3779 tree ll_inner, lr_inner, rl_inner, rr_inner;
3780 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
3781 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
3782 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
3783 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
3784 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
3785 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
3786 enum machine_mode lnmode, rnmode;
3787 tree ll_mask, lr_mask, rl_mask, rr_mask;
3788 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
3789 tree l_const, r_const;
3790 tree lntype, rntype, result;
3791 int first_bit, end_bit;
3792 int volatilep;
3793
3794 /* Start by getting the comparison codes. Fail if anything is volatile.
3795 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
3796 it were surrounded with a NE_EXPR. */
3797
3798 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
3799 return 0;
3800
3801 lcode = TREE_CODE (lhs);
3802 rcode = TREE_CODE (rhs);
3803
3804 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
3805 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
3806
3807 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
3808 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
3809
3810 if (TREE_CODE_CLASS (lcode) != '<' || TREE_CODE_CLASS (rcode) != '<')
3811 return 0;
3812
3813 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
3814 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
3815
3816 ll_arg = TREE_OPERAND (lhs, 0);
3817 lr_arg = TREE_OPERAND (lhs, 1);
3818 rl_arg = TREE_OPERAND (rhs, 0);
3819 rr_arg = TREE_OPERAND (rhs, 1);
3820
3821 /* Simplify (x<y) || (x==y) into (x<=y) and related optimizations. */
3822 if (simple_operand_p (ll_arg)
3823 && simple_operand_p (lr_arg)
3824 && !FLOAT_TYPE_P (TREE_TYPE (ll_arg)))
3825 {
3826 int compcode;
3827
3828 if (operand_equal_p (ll_arg, rl_arg, 0)
3829 && operand_equal_p (lr_arg, rr_arg, 0))
3830 {
3831 int lcompcode, rcompcode;
3832
3833 lcompcode = comparison_to_compcode (lcode);
3834 rcompcode = comparison_to_compcode (rcode);
3835 compcode = (code == TRUTH_AND_EXPR)
3836 ? lcompcode & rcompcode
3837 : lcompcode | rcompcode;
3838 }
3839 else if (operand_equal_p (ll_arg, rr_arg, 0)
3840 && operand_equal_p (lr_arg, rl_arg, 0))
3841 {
3842 int lcompcode, rcompcode;
3843
3844 rcode = swap_tree_comparison (rcode);
3845 lcompcode = comparison_to_compcode (lcode);
3846 rcompcode = comparison_to_compcode (rcode);
3847 compcode = (code == TRUTH_AND_EXPR)
3848 ? lcompcode & rcompcode
3849 : lcompcode | rcompcode;
3850 }
3851 else
3852 compcode = -1;
3853
3854 if (compcode == COMPCODE_TRUE)
3855 return convert (truth_type, integer_one_node);
3856 else if (compcode == COMPCODE_FALSE)
3857 return convert (truth_type, integer_zero_node);
3858 else if (compcode != -1)
3859 return build (compcode_to_comparison (compcode),
3860 truth_type, ll_arg, lr_arg);
3861 }
3862
3863 /* If the RHS can be evaluated unconditionally and its operands are
3864 simple, it wins to evaluate the RHS unconditionally on machines
3865 with expensive branches. In this case, this isn't a comparison
3866 that can be merged. Avoid doing this if the RHS is a floating-point
3867 comparison since those can trap. */
3868
3869 if (BRANCH_COST >= 2
3870 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
3871 && simple_operand_p (rl_arg)
3872 && simple_operand_p (rr_arg))
3873 {
3874 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
3875 if (code == TRUTH_OR_EXPR
3876 && lcode == NE_EXPR && integer_zerop (lr_arg)
3877 && rcode == NE_EXPR && integer_zerop (rr_arg)
3878 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3879 return build (NE_EXPR, truth_type,
3880 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3881 ll_arg, rl_arg),
3882 integer_zero_node);
3883
3884 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
3885 if (code == TRUTH_AND_EXPR
3886 && lcode == EQ_EXPR && integer_zerop (lr_arg)
3887 && rcode == EQ_EXPR && integer_zerop (rr_arg)
3888 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg))
3889 return build (EQ_EXPR, truth_type,
3890 build (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
3891 ll_arg, rl_arg),
3892 integer_zero_node);
3893
3894 return build (code, truth_type, lhs, rhs);
3895 }
3896
3897 /* See if the comparisons can be merged. Then get all the parameters for
3898 each side. */
3899
3900 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
3901 || (rcode != EQ_EXPR && rcode != NE_EXPR))
3902 return 0;
3903
3904 volatilep = 0;
3905 ll_inner = decode_field_reference (ll_arg,
3906 &ll_bitsize, &ll_bitpos, &ll_mode,
3907 &ll_unsignedp, &volatilep, &ll_mask,
3908 &ll_and_mask);
3909 lr_inner = decode_field_reference (lr_arg,
3910 &lr_bitsize, &lr_bitpos, &lr_mode,
3911 &lr_unsignedp, &volatilep, &lr_mask,
3912 &lr_and_mask);
3913 rl_inner = decode_field_reference (rl_arg,
3914 &rl_bitsize, &rl_bitpos, &rl_mode,
3915 &rl_unsignedp, &volatilep, &rl_mask,
3916 &rl_and_mask);
3917 rr_inner = decode_field_reference (rr_arg,
3918 &rr_bitsize, &rr_bitpos, &rr_mode,
3919 &rr_unsignedp, &volatilep, &rr_mask,
3920 &rr_and_mask);
3921
3922 /* It must be true that the inner operation on the lhs of each
3923 comparison must be the same if we are to be able to do anything.
3924 Then see if we have constants. If not, the same must be true for
3925 the rhs's. */
3926 if (volatilep || ll_inner == 0 || rl_inner == 0
3927 || ! operand_equal_p (ll_inner, rl_inner, 0))
3928 return 0;
3929
3930 if (TREE_CODE (lr_arg) == INTEGER_CST
3931 && TREE_CODE (rr_arg) == INTEGER_CST)
3932 l_const = lr_arg, r_const = rr_arg;
3933 else if (lr_inner == 0 || rr_inner == 0
3934 || ! operand_equal_p (lr_inner, rr_inner, 0))
3935 return 0;
3936 else
3937 l_const = r_const = 0;
3938
3939 /* If either comparison code is not correct for our logical operation,
3940 fail. However, we can convert a one-bit comparison against zero into
3941 the opposite comparison against that bit being set in the field. */
3942
3943 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
3944 if (lcode != wanted_code)
3945 {
3946 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
3947 {
3948 /* Make the left operand unsigned, since we are only interested
3949 in the value of one bit. Otherwise we are doing the wrong
3950 thing below. */
3951 ll_unsignedp = 1;
3952 l_const = ll_mask;
3953 }
3954 else
3955 return 0;
3956 }
3957
3958 /* This is analogous to the code for l_const above. */
3959 if (rcode != wanted_code)
3960 {
3961 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
3962 {
3963 rl_unsignedp = 1;
3964 r_const = rl_mask;
3965 }
3966 else
3967 return 0;
3968 }
3969
3970 /* After this point all optimizations will generate bit-field
3971 references, which we might not want. */
3972 if (! (*lang_hooks.can_use_bit_fields_p) ())
3973 return 0;
3974
3975 /* See if we can find a mode that contains both fields being compared on
3976 the left. If we can't, fail. Otherwise, update all constants and masks
3977 to be relative to a field of that size. */
3978 first_bit = MIN (ll_bitpos, rl_bitpos);
3979 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
3980 lnmode = get_best_mode (end_bit - first_bit, first_bit,
3981 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
3982 volatilep);
3983 if (lnmode == VOIDmode)
3984 return 0;
3985
3986 lnbitsize = GET_MODE_BITSIZE (lnmode);
3987 lnbitpos = first_bit & ~ (lnbitsize - 1);
3988 lntype = (*lang_hooks.types.type_for_size) (lnbitsize, 1);
3989 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
3990
3991 if (BYTES_BIG_ENDIAN)
3992 {
3993 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
3994 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
3995 }
3996
3997 ll_mask = const_binop (LSHIFT_EXPR, convert (lntype, ll_mask),
3998 size_int (xll_bitpos), 0);
3999 rl_mask = const_binop (LSHIFT_EXPR, convert (lntype, rl_mask),
4000 size_int (xrl_bitpos), 0);
4001
4002 if (l_const)
4003 {
4004 l_const = convert (lntype, l_const);
4005 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
4006 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
4007 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
4008 fold (build1 (BIT_NOT_EXPR,
4009 lntype, ll_mask)),
4010 0)))
4011 {
4012 warning ("comparison is always %d", wanted_code == NE_EXPR);
4013
4014 return convert (truth_type,
4015 wanted_code == NE_EXPR
4016 ? integer_one_node : integer_zero_node);
4017 }
4018 }
4019 if (r_const)
4020 {
4021 r_const = convert (lntype, r_const);
4022 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
4023 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
4024 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
4025 fold (build1 (BIT_NOT_EXPR,
4026 lntype, rl_mask)),
4027 0)))
4028 {
4029 warning ("comparison is always %d", wanted_code == NE_EXPR);
4030
4031 return convert (truth_type,
4032 wanted_code == NE_EXPR
4033 ? integer_one_node : integer_zero_node);
4034 }
4035 }
4036
4037 /* If the right sides are not constant, do the same for them. Also,
4038 disallow this optimization if a size or signedness mismatch occurs
4039 between the left and right sides. */
4040 if (l_const == 0)
4041 {
4042 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
4043 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
4044 /* Make sure the two fields on the right
4045 correspond to the left without being swapped. */
4046 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
4047 return 0;
4048
4049 first_bit = MIN (lr_bitpos, rr_bitpos);
4050 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
4051 rnmode = get_best_mode (end_bit - first_bit, first_bit,
4052 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
4053 volatilep);
4054 if (rnmode == VOIDmode)
4055 return 0;
4056
4057 rnbitsize = GET_MODE_BITSIZE (rnmode);
4058 rnbitpos = first_bit & ~ (rnbitsize - 1);
4059 rntype = (*lang_hooks.types.type_for_size) (rnbitsize, 1);
4060 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
4061
4062 if (BYTES_BIG_ENDIAN)
4063 {
4064 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
4065 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
4066 }
4067
4068 lr_mask = const_binop (LSHIFT_EXPR, convert (rntype, lr_mask),
4069 size_int (xlr_bitpos), 0);
4070 rr_mask = const_binop (LSHIFT_EXPR, convert (rntype, rr_mask),
4071 size_int (xrr_bitpos), 0);
4072
4073 /* Make a mask that corresponds to both fields being compared.
4074 Do this for both items being compared. If the operands are the
4075 same size and the bits being compared are in the same position
4076 then we can do this by masking both and comparing the masked
4077 results. */
4078 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4079 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
4080 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
4081 {
4082 lhs = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4083 ll_unsignedp || rl_unsignedp);
4084 if (! all_ones_mask_p (ll_mask, lnbitsize))
4085 lhs = build (BIT_AND_EXPR, lntype, lhs, ll_mask);
4086
4087 rhs = make_bit_field_ref (lr_inner, rntype, rnbitsize, rnbitpos,
4088 lr_unsignedp || rr_unsignedp);
4089 if (! all_ones_mask_p (lr_mask, rnbitsize))
4090 rhs = build (BIT_AND_EXPR, rntype, rhs, lr_mask);
4091
4092 return build (wanted_code, truth_type, lhs, rhs);
4093 }
4094
4095 /* There is still another way we can do something: If both pairs of
4096 fields being compared are adjacent, we may be able to make a wider
4097 field containing them both.
4098
4099 Note that we still must mask the lhs/rhs expressions. Furthermore,
4100 the mask must be shifted to account for the shift done by
4101 make_bit_field_ref. */
4102 if ((ll_bitsize + ll_bitpos == rl_bitpos
4103 && lr_bitsize + lr_bitpos == rr_bitpos)
4104 || (ll_bitpos == rl_bitpos + rl_bitsize
4105 && lr_bitpos == rr_bitpos + rr_bitsize))
4106 {
4107 tree type;
4108
4109 lhs = make_bit_field_ref (ll_inner, lntype, ll_bitsize + rl_bitsize,
4110 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
4111 rhs = make_bit_field_ref (lr_inner, rntype, lr_bitsize + rr_bitsize,
4112 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
4113
4114 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
4115 size_int (MIN (xll_bitpos, xrl_bitpos)), 0);
4116 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
4117 size_int (MIN (xlr_bitpos, xrr_bitpos)), 0);
4118
4119 /* Convert to the smaller type before masking out unwanted bits. */
4120 type = lntype;
4121 if (lntype != rntype)
4122 {
4123 if (lnbitsize > rnbitsize)
4124 {
4125 lhs = convert (rntype, lhs);
4126 ll_mask = convert (rntype, ll_mask);
4127 type = rntype;
4128 }
4129 else if (lnbitsize < rnbitsize)
4130 {
4131 rhs = convert (lntype, rhs);
4132 lr_mask = convert (lntype, lr_mask);
4133 type = lntype;
4134 }
4135 }
4136
4137 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
4138 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
4139
4140 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
4141 rhs = build (BIT_AND_EXPR, type, rhs, lr_mask);
4142
4143 return build (wanted_code, truth_type, lhs, rhs);
4144 }
4145
4146 return 0;
4147 }
4148
4149 /* Handle the case of comparisons with constants. If there is something in
4150 common between the masks, those bits of the constants must be the same.
4151 If not, the condition is always false. Test for this to avoid generating
4152 incorrect code below. */
4153 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
4154 if (! integer_zerop (result)
4155 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
4156 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
4157 {
4158 if (wanted_code == NE_EXPR)
4159 {
4160 warning ("`or' of unmatched not-equal tests is always 1");
4161 return convert (truth_type, integer_one_node);
4162 }
4163 else
4164 {
4165 warning ("`and' of mutually exclusive equal-tests is always 0");
4166 return convert (truth_type, integer_zero_node);
4167 }
4168 }
4169
4170 /* Construct the expression we will return. First get the component
4171 reference we will make. Unless the mask is all ones for the width of
4172 that field, perform the mask operation. Then compare with the
4173 merged constant. */
4174 result = make_bit_field_ref (ll_inner, lntype, lnbitsize, lnbitpos,
4175 ll_unsignedp || rl_unsignedp);
4176
4177 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
4178 if (! all_ones_mask_p (ll_mask, lnbitsize))
4179 result = build (BIT_AND_EXPR, lntype, result, ll_mask);
4180
4181 return build (wanted_code, truth_type, result,
4182 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
4183 }
4184 \f
4185 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
4186 constant. */
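/* For example, "MAX (x, 4) > 9" folds to "x > 9", while
"MIN (x, 4) > 9" folds to constant false (see the GT_EXPR case
below). */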
4187
4188 static tree
4189 optimize_minmax_comparison (tree t)
4190 {
4191 tree type = TREE_TYPE (t);
4192 tree arg0 = TREE_OPERAND (t, 0);
4193 enum tree_code op_code;
4194 tree comp_const = TREE_OPERAND (t, 1);
4195 tree minmax_const;
4196 int consts_equal, consts_lt;
4197 tree inner;
4198
4199 STRIP_SIGN_NOPS (arg0);
4200
4201 op_code = TREE_CODE (arg0);
4202 minmax_const = TREE_OPERAND (arg0, 1);
4203 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
4204 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
4205 inner = TREE_OPERAND (arg0, 0);
4206
4207 /* If something does not permit us to optimize, return the original tree. */
4208 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
4209 || TREE_CODE (comp_const) != INTEGER_CST
4210 || TREE_CONSTANT_OVERFLOW (comp_const)
4211 || TREE_CODE (minmax_const) != INTEGER_CST
4212 || TREE_CONSTANT_OVERFLOW (minmax_const))
4213 return t;
4214
4215 /* Now handle all the various comparison codes. We only handle EQ_EXPR
4216 and GT_EXPR, doing the rest with recursive calls using logical
4217 simplifications. */
4218 switch (TREE_CODE (t))
4219 {
4220 case NE_EXPR: case LT_EXPR: case LE_EXPR:
4221 return
4222 invert_truthvalue (optimize_minmax_comparison (invert_truthvalue (t)));
4223
4224 case GE_EXPR:
4225 return
4226 fold (build (TRUTH_ORIF_EXPR, type,
4227 optimize_minmax_comparison
4228 (build (EQ_EXPR, type, arg0, comp_const)),
4229 optimize_minmax_comparison
4230 (build (GT_EXPR, type, arg0, comp_const))));
4231
4232 case EQ_EXPR:
4233 if (op_code == MAX_EXPR && consts_equal)
4234 /* MAX (X, 0) == 0 -> X <= 0 */
4235 return fold (build (LE_EXPR, type, inner, comp_const));
4236
4237 else if (op_code == MAX_EXPR && consts_lt)
4238 /* MAX (X, 0) == 5 -> X == 5 */
4239 return fold (build (EQ_EXPR, type, inner, comp_const));
4240
4241 else if (op_code == MAX_EXPR)
4242 /* MAX (X, 0) == -1 -> false */
4243 return omit_one_operand (type, integer_zero_node, inner);
4244
4245 else if (consts_equal)
4246 /* MIN (X, 0) == 0 -> X >= 0 */
4247 return fold (build (GE_EXPR, type, inner, comp_const));
4248
4249 else if (consts_lt)
4250 /* MIN (X, 0) == 5 -> false */
4251 return omit_one_operand (type, integer_zero_node, inner);
4252
4253 else
4254 /* MIN (X, 0) == -1 -> X == -1 */
4255 return fold (build (EQ_EXPR, type, inner, comp_const));
4256
4257 case GT_EXPR:
4258 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
4259 /* MAX (X, 0) > 0 -> X > 0
4260 MAX (X, 0) > 5 -> X > 5 */
4261 return fold (build (GT_EXPR, type, inner, comp_const));
4262
4263 else if (op_code == MAX_EXPR)
4264 /* MAX (X, 0) > -1 -> true */
4265 return omit_one_operand (type, integer_one_node, inner);
4266
4267 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
4268 /* MIN (X, 0) > 0 -> false
4269 MIN (X, 0) > 5 -> false */
4270 return omit_one_operand (type, integer_zero_node, inner);
4271
4272 else
4273 /* MIN (X, 0) > -1 -> X > -1 */
4274 return fold (build (GT_EXPR, type, inner, comp_const));
4275
4276 default:
4277 return t;
4278 }
4279 }
4280 \f
4281 /* T is an integer expression that is being multiplied or divided by, or
4282 taken modulo, a constant C (CODE says which, and what kind of divide
4283 or modulus). See if we can eliminate that operation by folding it with
4284 other operations already in T. WIDE_TYPE, if non-null, is a type that
4285 should be used for the computation if wider than our type.
4286
4287 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
4288 (X * 2) + (Y * 4). We must, however, be assured that either the original
4289 expression would not overflow or that overflow is undefined for the type
4290 in the language in question.
4291
4292 We also canonicalize (X + 7) * 4 into X * 4 + 28 in the hope that either
4293 the machine has a multiply-accumulate insn or that this is part of an
4294 addressing calculation.
4295
4296 If we return a non-null expression, it is an equivalent form of the
4297 original computation, but need not be in the original type. */
4298
4299 static tree
4300 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type)
4301 {
4302 /* To avoid exponential search depth, refuse to allow recursion past
4303 three levels. Beyond that (1) it's highly unlikely that we'll find
4304 something interesting and (2) we've probably processed it before
4305 when we built the inner expression. */
4306
4307 static int depth;
4308 tree ret;
4309
4310 if (depth > 3)
4311 return NULL;
4312
4313 depth++;
4314 ret = extract_muldiv_1 (t, c, code, wide_type);
4315 depth--;
4316
4317 return ret;
4318 }
4319
4320 static tree
4321 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type)
4322 {
4323 tree type = TREE_TYPE (t);
4324 enum tree_code tcode = TREE_CODE (t);
4325 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
4326 > GET_MODE_SIZE (TYPE_MODE (type)))
4327 ? wide_type : type);
4328 tree t1, t2;
4329 int same_p = tcode == code;
4330 tree op0 = NULL_TREE, op1 = NULL_TREE;
4331
4332 /* Don't deal with constants of zero here; they confuse the code below. */
4333 if (integer_zerop (c))
4334 return NULL_TREE;
4335
4336 if (TREE_CODE_CLASS (tcode) == '1')
4337 op0 = TREE_OPERAND (t, 0);
4338
4339 if (TREE_CODE_CLASS (tcode) == '2')
4340 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
4341
4342 /* Note that we need not handle conditional operations here since fold
4343 already handles those cases. So just do arithmetic here. */
4344 switch (tcode)
4345 {
4346 case INTEGER_CST:
4347 /* For a constant, we can always simplify if we are a multiply
4348 or (for divide and modulus) if it is a multiple of our constant. */
4349 if (code == MULT_EXPR
4350 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c, 0)))
4351 return const_binop (code, convert (ctype, t), convert (ctype, c), 0);
4352 break;
4353
4354 case CONVERT_EXPR: case NON_LVALUE_EXPR: case NOP_EXPR:
4355 /* If op0 is an expression ... */
4356 if ((TREE_CODE_CLASS (TREE_CODE (op0)) == '<'
4357 || TREE_CODE_CLASS (TREE_CODE (op0)) == '1'
4358 || TREE_CODE_CLASS (TREE_CODE (op0)) == '2'
4359 || TREE_CODE_CLASS (TREE_CODE (op0)) == 'e')
4360 /* ... and is unsigned, and its type is smaller than ctype,
4361 then we cannot pass through this widening.
4362 && ((TREE_UNSIGNED (TREE_TYPE (op0))
4363 && ! (TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4364 && TYPE_IS_SIZETYPE (TREE_TYPE (op0)))
4365 && (GET_MODE_SIZE (TYPE_MODE (ctype))
4366 > GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0)))))
4367 /* ... or its type is larger than ctype,
4368 then we cannot pass through this truncation. */
4369 || (GET_MODE_SIZE (TYPE_MODE (ctype))
4370 < GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (op0))))
4371 /* ... or signedness changes for division or modulus,
4372 then we cannot pass through this conversion. */
4373 || (code != MULT_EXPR
4374 && (TREE_UNSIGNED (ctype)
4375 != TREE_UNSIGNED (TREE_TYPE (op0))))))
4376 break;
4377
4378 /* Pass the constant down and see if we can make a simplification. If
4379 we can, replace this expression with the inner simplification for
4380 possible later conversion to our or some other type. */
4381 if ((t2 = convert (TREE_TYPE (op0), c)) != 0
4382 && TREE_CODE (t2) == INTEGER_CST
4383 && ! TREE_CONSTANT_OVERFLOW (t2)
4384 && (0 != (t1 = extract_muldiv (op0, t2, code,
4385 code == MULT_EXPR
4386 ? ctype : NULL_TREE))))
4387 return t1;
4388 break;
4389
4390 case NEGATE_EXPR: case ABS_EXPR:
4391 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4392 return fold (build1 (tcode, ctype, convert (ctype, t1)));
4393 break;
4394
4395 case MIN_EXPR: case MAX_EXPR:
4396 /* If widening the type changes the signedness, then we can't perform
4397 this optimization as that changes the result. */
4398 if (TREE_UNSIGNED (ctype) != TREE_UNSIGNED (type))
4399 break;
4400
4401 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
4402 if ((t1 = extract_muldiv (op0, c, code, wide_type)) != 0
4403 && (t2 = extract_muldiv (op1, c, code, wide_type)) != 0)
4404 {
4405 if (tree_int_cst_sgn (c) < 0)
4406 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
4407
4408 return fold (build (tcode, ctype, convert (ctype, t1),
4409 convert (ctype, t2)));
4410 }
4411 break;
4412
4413 case WITH_RECORD_EXPR:
4414 if ((t1 = extract_muldiv (TREE_OPERAND (t, 0), c, code, wide_type)) != 0)
4415 return build (WITH_RECORD_EXPR, TREE_TYPE (t1), t1,
4416 TREE_OPERAND (t, 1));
4417 break;
4418
4419 case LSHIFT_EXPR: case RSHIFT_EXPR:
4420 /* If the second operand is constant, this is a multiplication
4421 or floor division by a power of two, so we can treat it that
4422 way unless the multiplier or divisor overflows. */
4423 if (TREE_CODE (op1) == INTEGER_CST
4424 /* const_binop may not detect overflow correctly,
4425 so check for it explicitly here. */
4426 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
4427 && TREE_INT_CST_HIGH (op1) == 0
4428 && 0 != (t1 = convert (ctype,
4429 const_binop (LSHIFT_EXPR, size_one_node,
4430 op1, 0)))
4431 && ! TREE_OVERFLOW (t1))
4432 return extract_muldiv (build (tcode == LSHIFT_EXPR
4433 ? MULT_EXPR : FLOOR_DIV_EXPR,
4434 ctype, convert (ctype, op0), t1),
4435 c, code, wide_type);
4436 break;
4437
4438 case PLUS_EXPR: case MINUS_EXPR:
4439 /* See if we can eliminate the operation on both sides. If we can, we
4440 can return a new PLUS or MINUS. If we can't, the only remaining
4441 cases where we can do anything are if the second operand is a
4442 constant. */
4443 t1 = extract_muldiv (op0, c, code, wide_type);
4444 t2 = extract_muldiv (op1, c, code, wide_type);
4445 if (t1 != 0 && t2 != 0
4446 && (code == MULT_EXPR
4447 /* If not multiplication, we can only do this if both operands
4448 are divisible by c. */
4449 || (multiple_of_p (ctype, op0, c)
4450 && multiple_of_p (ctype, op1, c))))
4451 return fold (build (tcode, ctype, convert (ctype, t1),
4452 convert (ctype, t2)));
4453
4454 /* If this was a subtraction, negate OP1 and set it to be an addition.
4455 This simplifies the logic below. */
4456 if (tcode == MINUS_EXPR)
4457 tcode = PLUS_EXPR, op1 = negate_expr (op1);
4458
4459 if (TREE_CODE (op1) != INTEGER_CST)
4460 break;
4461
4462 /* If either OP1 or C are negative, this optimization is not safe for
4463 some of the division and remainder types while for others we need
4464 to change the code. */
4465 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
4466 {
4467 if (code == CEIL_DIV_EXPR)
4468 code = FLOOR_DIV_EXPR;
4469 else if (code == FLOOR_DIV_EXPR)
4470 code = CEIL_DIV_EXPR;
4471 else if (code != MULT_EXPR
4472 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
4473 break;
4474 }
4475
4476 /* If it's a multiply or a division/modulus operation of a multiple
4477 of our constant, do the operation and verify it doesn't overflow. */
4478 if (code == MULT_EXPR
4479 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4480 {
4481 op1 = const_binop (code, convert (ctype, op1),
4482 convert (ctype, c), 0);
4483 /* We allow the constant to overflow with wrapping semantics. */
4484 if (op1 == 0
4485 || (TREE_OVERFLOW (op1) && ! flag_wrapv))
4486 break;
4487 }
4488 else
4489 break;
4490
4491 /* If we have an unsigned type that is not a sizetype, we cannot widen
4492 the operation since it will change the result if the original
4493 computation overflowed. */
4494 if (TREE_UNSIGNED (ctype)
4495 && ! (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype))
4496 && ctype != type)
4497 break;
4498
4499 /* If we were able to eliminate our operation from the first side,
4500 apply our operation to the second side and reform the PLUS. */
4501 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
4502 return fold (build (tcode, ctype, convert (ctype, t1), op1));
4503
4504 /* The last case is if we are a multiply. In that case, we can
4505 apply the distributive law to commute the multiply and addition
4506 if the multiplication of the constants doesn't overflow. */
4507 if (code == MULT_EXPR)
4508 return fold (build (tcode, ctype, fold (build (code, ctype,
4509 convert (ctype, op0),
4510 convert (ctype, c))),
4511 op1));
4512
4513 break;
4514
4515 case MULT_EXPR:
4516 /* We have a special case here if we are doing something like
4517 (C * 8) % 4 since we know that's zero. */
4518 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
4519 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
4520 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
4521 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4522 return omit_one_operand (type, integer_zero_node, op0);
4523
4524 /* ... fall through ... */
4525
4526 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
4527 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
4528 /* If we can extract our operation from the LHS, do so and return a
4529 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
4530 do something only if the second operand is a constant. */
4531 if (same_p
4532 && (t1 = extract_muldiv (op0, c, code, wide_type)) != 0)
4533 return fold (build (tcode, ctype, convert (ctype, t1),
4534 convert (ctype, op1)));
4535 else if (tcode == MULT_EXPR && code == MULT_EXPR
4536 && (t1 = extract_muldiv (op1, c, code, wide_type)) != 0)
4537 return fold (build (tcode, ctype, convert (ctype, op0),
4538 convert (ctype, t1)));
4539 else if (TREE_CODE (op1) != INTEGER_CST)
4540 return 0;
4541
4542 /* If these are the same operation types, we can associate them
4543 assuming no overflow. */
4544 if (tcode == code
4545 && 0 != (t1 = const_binop (MULT_EXPR, convert (ctype, op1),
4546 convert (ctype, c), 0))
4547 && ! TREE_OVERFLOW (t1))
4548 return fold (build (tcode, ctype, convert (ctype, op0), t1));
4549
4550 /* If these operations "cancel" each other, we have the main
4551 optimizations of this pass, which occur when either constant is a
4552 multiple of the other, in which case we replace this with an
4553 operation of either CODE or TCODE.
4554
4555 If we have an unsigned type that is not a sizetype, we cannot do
4556 this since it will change the result if the original computation
4557 overflowed. */
4558 if ((! TREE_UNSIGNED (ctype)
4559 || (TREE_CODE (ctype) == INTEGER_TYPE && TYPE_IS_SIZETYPE (ctype)))
4560 && ! flag_wrapv
4561 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
4562 || (tcode == MULT_EXPR
4563 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
4564 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR)))
4565 {
4566 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c, 0)))
4567 return fold (build (tcode, ctype, convert (ctype, op0),
4568 convert (ctype,
4569 const_binop (TRUNC_DIV_EXPR,
4570 op1, c, 0))));
4571 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1, 0)))
4572 return fold (build (code, ctype, convert (ctype, op0),
4573 convert (ctype,
4574 const_binop (TRUNC_DIV_EXPR,
4575 c, op1, 0))));
4576 }
4577 break;
4578
4579 default:
4580 break;
4581 }
4582
4583 return 0;
4584 }
4585 \f
4586 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
4587 S, a SAVE_EXPR, return the expression actually being evaluated. Note
4588 that we may sometimes modify the tree. */
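/* For example, if T is the tree for "((void) s, e)" built to evaluate
the SAVE_EXPR s, the result is just "e". */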
4589
4590 static tree
4591 strip_compound_expr (tree t, tree s)
4592 {
4593 enum tree_code code = TREE_CODE (t);
4594
4595 /* See if this is the COMPOUND_EXPR we want to eliminate. */
4596 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
4597 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
4598 return TREE_OPERAND (t, 1);
4599
4600 /* See if this is a COND_EXPR or a simple arithmetic operator. We
4601 don't bother handling any other types. */
4602 else if (code == COND_EXPR)
4603 {
4604 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4605 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4606 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
4607 }
4608 else if (TREE_CODE_CLASS (code) == '1')
4609 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4610 else if (TREE_CODE_CLASS (code) == '<'
4611 || TREE_CODE_CLASS (code) == '2')
4612 {
4613 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
4614 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
4615 }
4616
4617 return t;
4618 }
4619 \f
4620 /* Return a node which has the indicated constant VALUE (either 0 or
4621 1), and is of the indicated TYPE. */
4622
4623 static tree
4624 constant_boolean_node (int value, tree type)
4625 {
4626 if (type == integer_type_node)
4627 return value ? integer_one_node : integer_zero_node;
4628 else if (TREE_CODE (type) == BOOLEAN_TYPE)
4629 return (*lang_hooks.truthvalue_conversion) (value ? integer_one_node :
4630 integer_zero_node);
4631 else
4632 {
4633 tree t = build_int_2 (value, 0);
4634
4635 TREE_TYPE (t) = type;
4636 return t;
4637 }
4638 }
4639
4640 /* Utility function for the following routine, to see how complex a nesting of
4641 COND_EXPRs can be. EXPR is the expression and LIM is a count beyond which
4642 we don't care (to avoid spending too much time on complex expressions). */
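/* For example, on "a ? b : (c ? d : e)" with LIM == 10 this returns 2,
one for each COND_EXPR in the nest. */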
4643
4644 static int
4645 count_cond (tree expr, int lim)
4646 {
4647 int ctrue, cfalse;
4648
4649 if (TREE_CODE (expr) != COND_EXPR)
4650 return 0;
4651 else if (lim <= 0)
4652 return 0;
4653
4654 ctrue = count_cond (TREE_OPERAND (expr, 1), lim - 1);
4655 cfalse = count_cond (TREE_OPERAND (expr, 2), lim - 1 - ctrue);
4656 return MIN (lim, 1 + ctrue + cfalse);
4657 }
4658
4659 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
4660 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
4661 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
4662 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
4663 COND is the first argument to CODE; otherwise (as in the example
4664 given here), it is the second argument. TYPE is the type of the
4665 original expression. */
4666
4667 static tree
4668 fold_binary_op_with_conditional_arg (enum tree_code code, tree type,
4669 tree cond, tree arg, int cond_first_p)
4670 {
4671 tree test, true_value, false_value;
4672 tree lhs = NULL_TREE;
4673 tree rhs = NULL_TREE;
4674 /* In the end, we'll produce a COND_EXPR. Both arms of the
4675 conditional expression will be binary operations. The left-hand
4676 side of the expression to be executed if the condition is true
4677 will be pointed to by TRUE_LHS. Similarly, the right-hand side
4678 of the expression to be executed if the condition is true will be
4679 pointed to by TRUE_RHS. FALSE_LHS and FALSE_RHS are analogous --
4680 but apply to the expression to be executed if the conditional is
4681 false. */
4682 tree *true_lhs;
4683 tree *true_rhs;
4684 tree *false_lhs;
4685 tree *false_rhs;
4686 /* These are the codes to use for the left-hand side and right-hand
4687 side of the COND_EXPR. Normally, they are the same as CODE. */
4688 enum tree_code lhs_code = code;
4689 enum tree_code rhs_code = code;
4690 /* And these are the types of the expressions. */
4691 tree lhs_type = type;
4692 tree rhs_type = type;
4693 int save = 0;
4694
4695 if (cond_first_p)
4696 {
4697 true_rhs = false_rhs = &arg;
4698 true_lhs = &true_value;
4699 false_lhs = &false_value;
4700 }
4701 else
4702 {
4703 true_lhs = false_lhs = &arg;
4704 true_rhs = &true_value;
4705 false_rhs = &false_value;
4706 }
4707
4708 if (TREE_CODE (cond) == COND_EXPR)
4709 {
4710 test = TREE_OPERAND (cond, 0);
4711 true_value = TREE_OPERAND (cond, 1);
4712 false_value = TREE_OPERAND (cond, 2);
4713 /* If this operand throws an exception, then it does not make
4714 sense to try to perform a logical or arithmetic operation
4715 involving it. Instead of building `a + throw 3' for example,
4716 we simply build `a, throw 3'. */
4717 if (VOID_TYPE_P (TREE_TYPE (true_value)))
4718 {
4719 if (! cond_first_p)
4720 {
4721 lhs_code = COMPOUND_EXPR;
4722 lhs_type = void_type_node;
4723 }
4724 else
4725 lhs = true_value;
4726 }
4727 if (VOID_TYPE_P (TREE_TYPE (false_value)))
4728 {
4729 if (! cond_first_p)
4730 {
4731 rhs_code = COMPOUND_EXPR;
4732 rhs_type = void_type_node;
4733 }
4734 else
4735 rhs = false_value;
4736 }
4737 }
4738 else
4739 {
4740 tree testtype = TREE_TYPE (cond);
4741 test = cond;
4742 true_value = convert (testtype, integer_one_node);
4743 false_value = convert (testtype, integer_zero_node);
4744 }
4745
4746 /* If ARG is complex we want to make sure we only evaluate it once. Though
4747 this is only required if it is volatile, it might be more efficient even
4748 if it is not. However, if we succeed in folding one part to a constant,
4749 we do not need to make this SAVE_EXPR. Since we do this optimization
4750 primarily to see if we do end up with a constant and this SAVE_EXPR
4751 interferes with later optimizations, suppressing it when we can is
4752 important.
4753
4754 If we are not in a function, we can't make a SAVE_EXPR, so don't try to
4755 do so. Don't try to see if the result is a constant if an arm is a
4756 COND_EXPR since we get exponential behavior in that case. */
4757
4758 if (saved_expr_p (arg))
4759 save = 1;
4760 else if (lhs == 0 && rhs == 0
4761 && !TREE_CONSTANT (arg)
4762 && (*lang_hooks.decls.global_bindings_p) () == 0
4763 && ((TREE_CODE (arg) != VAR_DECL && TREE_CODE (arg) != PARM_DECL)
4764 || TREE_SIDE_EFFECTS (arg)))
4765 {
4766 if (TREE_CODE (true_value) != COND_EXPR)
4767 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4768
4769 if (TREE_CODE (false_value) != COND_EXPR)
4770 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4771
4772 if ((lhs == 0 || ! TREE_CONSTANT (lhs))
4773 && (rhs == 0 || !TREE_CONSTANT (rhs)))
4774 {
4775 arg = save_expr (arg);
4776 lhs = rhs = 0;
4777 save = saved_expr_p (arg);
4778 }
4779 }
4780
4781 if (lhs == 0)
4782 lhs = fold (build (lhs_code, lhs_type, *true_lhs, *true_rhs));
4783 if (rhs == 0)
4784 rhs = fold (build (rhs_code, rhs_type, *false_lhs, *false_rhs));
4785
4786 test = fold (build (COND_EXPR, type, test, lhs, rhs));
4787
4788 /* If ARG involves a SAVE_EXPR, we need to ensure it is evaluated
4789 ahead of the COND_EXPR we made. Otherwise we would have it only
4790 evaluated in one branch, with the other branch using the result
4791 but missing the evaluation code. Beware that the save_expr call
4792 above might not return a SAVE_EXPR, so testing the TREE_CODE
4793 of ARG is not enough to decide here. */
4794 if (save)
4795 return build (COMPOUND_EXPR, type,
4796 convert (void_type_node, arg),
4797 strip_compound_expr (test, arg));
4798 else
4799 return convert (type, test);
4800 }
4801
4802 \f
4803 /* Subroutine of fold() that checks for the addition of +/- 0.0.
4804
4805 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
4806 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
4807 ADDEND is the same as X.
4808
4809 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
4810 and finite. The problematic cases are when X is zero, and its mode
4811 has signed zeros. In the case of rounding towards -infinity,
4812 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
4813 modes, X + 0 is not the same as X because -0 + 0 is 0. */
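/* So, for example, "x - 0.0" can still fold to "x" when signed zeros
are honored, provided sign-dependent rounding is not. */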
4814
4815 static bool
4816 fold_real_zero_addition_p (tree type, tree addend, int negate)
4817 {
4818 if (!real_zerop (addend))
4819 return false;
4820
4821 /* Don't allow the fold with -fsignaling-nans. */
4822 if (HONOR_SNANS (TYPE_MODE (type)))
4823 return false;
4824
4825 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
4826 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
4827 return true;
4828
4829 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
4830 if (TREE_CODE (addend) == REAL_CST
4831 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
4832 negate = !negate;
4833
4834 /* The mode has signed zeros, and we have to honor their sign.
4835 In this situation, there is only one case we can return true for.
4836 X - 0 is the same as X unless rounding towards -infinity is
4837 supported. */
4838 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
4839 }
4840
4841 /* Subroutine of fold() that checks comparisons of built-in math
4842 functions against real constants.
4843
4844 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
4845 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
4846 is the type of the result and ARG0 and ARG1 are the operands of the
4847 comparison. ARG1 must be a TREE_REAL_CST.
4848
4849 The function returns the constant folded tree if a simplification
4850 can be made, and NULL_TREE otherwise. */
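/* For example, "sqrt (x) > 2.0" becomes "x > 4.0" when squaring the
constant does not overflow (the GT_EXPR case below). */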
4851
4852 static tree
4853 fold_mathfn_compare (enum built_in_function fcode, enum tree_code code,
4854 tree type, tree arg0, tree arg1)
4855 {
4856 REAL_VALUE_TYPE c;
4857
4858 if (fcode == BUILT_IN_SQRT
4859 || fcode == BUILT_IN_SQRTF
4860 || fcode == BUILT_IN_SQRTL)
4861 {
4862 tree arg = TREE_VALUE (TREE_OPERAND (arg0, 1));
4863 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
4864
4865 c = TREE_REAL_CST (arg1);
4866 if (REAL_VALUE_NEGATIVE (c))
4867 {
4868 /* sqrt(x) < y is always false, if y is negative. */
4869 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
4870 return omit_one_operand (type,
4871 convert (type, integer_zero_node),
4872 arg);
4873
4874 /* sqrt(x) > y is always true, if y is negative and we
4875 don't care about NaNs, i.e. negative values of x. */
4876 if (code == NE_EXPR || !HONOR_NANS (mode))
4877 return omit_one_operand (type,
4878 convert (type, integer_one_node),
4879 arg);
4880
4881 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
4882 return fold (build (GE_EXPR, type, arg,
4883 build_real (TREE_TYPE (arg), dconst0)));
4884 }
4885 else if (code == GT_EXPR || code == GE_EXPR)
4886 {
4887 REAL_VALUE_TYPE c2;
4888
4889 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4890 real_convert (&c2, mode, &c2);
4891
4892 if (REAL_VALUE_ISINF (c2))
4893 {
4894 /* sqrt(x) > y is x == +Inf, when y is very large. */
4895 if (HONOR_INFINITIES (mode))
4896 return fold (build (EQ_EXPR, type, arg,
4897 build_real (TREE_TYPE (arg), c2)));
4898
4899 /* sqrt(x) > y is always false, when y is very large
4900 and we don't care about infinities. */
4901 return omit_one_operand (type,
4902 convert (type, integer_zero_node),
4903 arg);
4904 }
4905
4906 /* sqrt(x) > c is the same as x > c*c. */
4907 return fold (build (code, type, arg,
4908 build_real (TREE_TYPE (arg), c2)));
4909 }
4910 else if (code == LT_EXPR || code == LE_EXPR)
4911 {
4912 REAL_VALUE_TYPE c2;
4913
4914 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
4915 real_convert (&c2, mode, &c2);
4916
4917 if (REAL_VALUE_ISINF (c2))
4918 {
4919 /* sqrt(x) < y is always true, when y is a very large
4920 value and we don't care about NaNs or Infinities. */
4921 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
4922 return omit_one_operand (type,
4923 convert (type, integer_one_node),
4924 arg);
4925
4926 /* sqrt(x) < y is x != +Inf when y is very large and we
4927 don't care about NaNs. */
4928 if (! HONOR_NANS (mode))
4929 return fold (build (NE_EXPR, type, arg,
4930 build_real (TREE_TYPE (arg), c2)));
4931
4932 /* sqrt(x) < y is x >= 0 when y is very large and we
4933 don't care about Infinities. */
4934 if (! HONOR_INFINITIES (mode))
4935 return fold (build (GE_EXPR, type, arg,
4936 build_real (TREE_TYPE (arg), dconst0)));
4937
4938 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
4939 if ((*lang_hooks.decls.global_bindings_p) () != 0
4940 || CONTAINS_PLACEHOLDER_P (arg))
4941 return NULL_TREE;
4942
4943 arg = save_expr (arg);
4944 return fold (build (TRUTH_ANDIF_EXPR, type,
4945 fold (build (GE_EXPR, type, arg,
4946 build_real (TREE_TYPE (arg),
4947 dconst0))),
4948 fold (build (NE_EXPR, type, arg,
4949 build_real (TREE_TYPE (arg),
4950 c2)))));
4951 }
4952
4953 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
4954 if (! HONOR_NANS (mode))
4955 return fold (build (code, type, arg,
4956 build_real (TREE_TYPE (arg), c2)));
4957
4958 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
4959 if ((*lang_hooks.decls.global_bindings_p) () == 0
4960 && ! CONTAINS_PLACEHOLDER_P (arg))
4961 {
4962 arg = save_expr (arg);
4963 return fold (build (TRUTH_ANDIF_EXPR, type,
4964 fold (build (GE_EXPR, type, arg,
4965 build_real (TREE_TYPE (arg),
4966 dconst0))),
4967 fold (build (code, type, arg,
4968 build_real (TREE_TYPE (arg),
4969 c2)))));
4970 }
4971 }
4972 }
4973
4974 return NULL_TREE;
4975 }
4976
4977 /* Subroutine of fold() that optimizes comparisons against Infinities,
4978 either +Inf or -Inf.
4979
4980 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
4981 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
4982 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
4983
4984 The function returns the constant folded tree if a simplification
4985 can be made, and NULL_TREE otherwise. */
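/* For example, "x < +Inf" becomes "x <= DBL_MAX" (using the mode's
actual maximum value), and "x >= +Inf" becomes "x > DBL_MAX". */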
4986
4987 static tree
4988 fold_inf_compare (enum tree_code code, tree type, tree arg0, tree arg1)
4989 {
4990 enum machine_mode mode;
4991 REAL_VALUE_TYPE max;
4992 tree temp;
4993 bool neg;
4994
4995 mode = TYPE_MODE (TREE_TYPE (arg0));
4996
4997 /* For negative infinity swap the sense of the comparison. */
4998 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
4999 if (neg)
5000 code = swap_tree_comparison (code);
5001
5002 switch (code)
5003 {
5004 case GT_EXPR:
5005 /* x > +Inf is always false, if we ignore sNaNs. */
5006 if (HONOR_SNANS (mode))
5007 return NULL_TREE;
5008 return omit_one_operand (type,
5009 convert (type, integer_zero_node),
5010 arg0);
5011
5012 case LE_EXPR:
5013 /* x <= +Inf is always true, if we don't care about NaNs. */
5014 if (! HONOR_NANS (mode))
5015 return omit_one_operand (type,
5016 convert (type, integer_one_node),
5017 arg0);
5018
5019 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
5020 if ((*lang_hooks.decls.global_bindings_p) () == 0
5021 && ! CONTAINS_PLACEHOLDER_P (arg0))
5022 {
5023 arg0 = save_expr (arg0);
5024 return fold (build (EQ_EXPR, type, arg0, arg0));
5025 }
5026 break;
5027
5028 case EQ_EXPR:
5029 case GE_EXPR:
5030 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
5031 real_maxval (&max, neg, mode);
5032 return fold (build (neg ? LT_EXPR : GT_EXPR, type,
5033 arg0, build_real (TREE_TYPE (arg0), max)));
5034
5035 case LT_EXPR:
5036 /* x < +Inf is always equal to x <= DBL_MAX. */
5037 real_maxval (&max, neg, mode);
5038 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5039 arg0, build_real (TREE_TYPE (arg0), max)));
5040
5041 case NE_EXPR:
5042 /* x != +Inf is always equal to !(x > DBL_MAX). */
5043 real_maxval (&max, neg, mode);
5044 if (! HONOR_NANS (mode))
5045 return fold (build (neg ? GE_EXPR : LE_EXPR, type,
5046 arg0, build_real (TREE_TYPE (arg0), max)));
5047 temp = fold (build (neg ? LT_EXPR : GT_EXPR, type,
5048 arg0, build_real (TREE_TYPE (arg0), max)));
5049 return fold (build1 (TRUTH_NOT_EXPR, type, temp));
5050
5051 default:
5052 break;
5053 }
5054
5055 return NULL_TREE;
5056 }
5057
5058 /* If CODE with arguments ARG0 and ARG1 represents a single bit
5059 equality/inequality test, then return a simplified form of
5060 the test using shifts and logical operations. Otherwise return
5061 NULL. TYPE is the desired result type. */
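/* For example, "(x & 8) != 0" can become "(x >> 3) & 1", and
"(x & 8) == 0" can become "((x >> 3) ^ 1) & 1", with the shift and
mask done in a type suited to the target. */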
5062
5063 tree
5064 fold_single_bit_test (enum tree_code code, tree arg0, tree arg1,
5065 tree result_type)
5066 {
5067 /* If this is a TRUTH_NOT_EXPR, it may have a single bit test inside
5068 operand 0. */
5069 if (code == TRUTH_NOT_EXPR)
5070 {
5071 code = TREE_CODE (arg0);
5072 if (code != NE_EXPR && code != EQ_EXPR)
5073 return NULL_TREE;
5074
5075 /* Extract the arguments of the EQ/NE. */
5076 arg1 = TREE_OPERAND (arg0, 1);
5077 arg0 = TREE_OPERAND (arg0, 0);
5078
5079 /* This requires us to invert the code. */
5080 code = (code == EQ_EXPR ? NE_EXPR : EQ_EXPR);
5081 }
5082
5083 /* If this is testing a single bit, we can optimize the test. */
5084 if ((code == NE_EXPR || code == EQ_EXPR)
5085 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
5086 && integer_pow2p (TREE_OPERAND (arg0, 1)))
5087 {
5088 tree inner = TREE_OPERAND (arg0, 0);
5089 tree type = TREE_TYPE (arg0);
5090 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
5091 enum machine_mode operand_mode = TYPE_MODE (type);
5092 int ops_unsigned;
5093 tree signed_type, unsigned_type, intermediate_type;
5094 tree arg00;
5095
5096 /* If we have (A & C) != 0 where C is the sign bit of A, convert
5097 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
5098 arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
5099 if (arg00 != NULL_TREE)
5100 {
5101 tree stype = (*lang_hooks.types.signed_type) (TREE_TYPE (arg00));
5102 return fold (build (code == EQ_EXPR ? GE_EXPR : LT_EXPR, result_type,
5103 convert (stype, arg00),
5104 convert (stype, integer_zero_node)));
5105 }
5106
5107 /* At this point, we know that arg0 is not testing the sign bit. */
5108 if (TYPE_PRECISION (type) - 1 == bitnum)
5109 abort ();
5110
5111 /* Otherwise we have (A & C) != 0 where C is a single bit,
5112 convert that into ((A >> C2) & 1), where C2 = log2(C).
5113 Similarly for (A & C) == 0. */
5114
5115 /* If INNER is a right shift by a constant and it plus BITNUM does
5116 not overflow, adjust BITNUM and INNER. */
5117 if (TREE_CODE (inner) == RSHIFT_EXPR
5118 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
5119 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
5120 && bitnum < TYPE_PRECISION (type)
5121 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
5122 bitnum - TYPE_PRECISION (type)))
5123 {
5124 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
5125 inner = TREE_OPERAND (inner, 0);
5126 }
5127
5128 /* If we are going to be able to omit the AND below, we must do our
5129 operations as unsigned. If we must use the AND, we have a choice.
5130 Normally unsigned is faster, but for some machines signed is. */
5131 #ifdef LOAD_EXTEND_OP
5132 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1);
5133 #else
5134 ops_unsigned = 1;
5135 #endif
5136
5137 signed_type = (*lang_hooks.types.type_for_mode) (operand_mode, 0);
5138 unsigned_type = (*lang_hooks.types.type_for_mode) (operand_mode, 1);
5139 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
5140 inner = convert (intermediate_type, inner);
5141
5142 if (bitnum != 0)
5143 inner = build (RSHIFT_EXPR, intermediate_type,
5144 inner, size_int (bitnum));
5145
5146 if (code == EQ_EXPR)
5147 inner = build (BIT_XOR_EXPR, intermediate_type,
5148 inner, integer_one_node);
5149
5150 /* Put the AND last so it can combine with more things. */
5151 inner = build (BIT_AND_EXPR, intermediate_type,
5152 inner, integer_one_node);
5153
5154 /* Make sure to return the proper type. */
5155 inner = convert (result_type, inner);
5156
5157 return inner;
5158 }
5159 return NULL_TREE;
5160 }
5161
5162 /* Check whether we are allowed to reorder operands arg0 and arg1,
5163 such that the evaluation of arg1 occurs before arg0. */
5164
5165 static bool
5166 reorder_operands_p (tree arg0, tree arg1)
5167 {
5168 if (! flag_evaluation_order)
5169 return true;
5170 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
5171 return true;
5172 return ! TREE_SIDE_EFFECTS (arg0)
5173 && ! TREE_SIDE_EFFECTS (arg1);
5174 }
5175
5176 /* Test whether it is preferable to swap two operands, ARG0 and
5177 ARG1, for example because ARG0 is an integer constant and ARG1
5178 isn't. If REORDER is true, only recommend swapping if we can
5179 evaluate the operands in reverse order. */
5180
5181 static bool
5182 tree_swap_operands_p (tree arg0, tree arg1, bool reorder)
5183 {
5184 STRIP_SIGN_NOPS (arg0);
5185 STRIP_SIGN_NOPS (arg1);
5186
5187 if (TREE_CODE (arg1) == INTEGER_CST)
5188 return 0;
5189 if (TREE_CODE (arg0) == INTEGER_CST)
5190 return 1;
5191
5192 if (TREE_CODE (arg1) == REAL_CST)
5193 return 0;
5194 if (TREE_CODE (arg0) == REAL_CST)
5195 return 1;
5196
5197 if (TREE_CODE (arg1) == COMPLEX_CST)
5198 return 0;
5199 if (TREE_CODE (arg0) == COMPLEX_CST)
5200 return 1;
5201
5202 if (TREE_CONSTANT (arg1))
5203 return 0;
5204 if (TREE_CONSTANT (arg0))
5205 return 1;
5206
5207 if (optimize_size)
5208 return 0;
5209
5210 if (reorder && flag_evaluation_order
5211 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
5212 return 0;
5213
5214 if (DECL_P (arg1))
5215 return 0;
5216 if (DECL_P (arg0))
5217 return 1;
5218
5219 return 0;
5220 }
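/* For example, this is what lets fold canonicalize "5 + x" into
   "x + 5" via the commutative swap in fold below, so the cases in
   the big switch need only look for constants in ARG1.  */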
5221
5222 /* Perform constant folding and related simplification of EXPR.
5223 The related simplifications include x*1 => x, x*0 => 0, etc.,
5224 and application of the associative law.
5225 NOP_EXPR conversions may be removed freely (as long as we
5226 are careful not to change the C type of the overall expression).
5227 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
5228 but we can constant-fold them if they have constant operands. */
5229
5230 #ifdef ENABLE_FOLD_CHECKING
5231 # define fold(x) fold_1 (x)
5232 static tree fold_1 (tree);
5233 static
5234 #endif
5235 tree
5236 fold (tree expr)
5237 {
5238 tree t = expr, orig_t;
5239 tree t1 = NULL_TREE;
5240 tree tem;
5241 tree type = TREE_TYPE (expr);
5242 tree arg0 = NULL_TREE, arg1 = NULL_TREE;
5243 enum tree_code code = TREE_CODE (t);
5244 int kind = TREE_CODE_CLASS (code);
5245 int invert;
5246 /* WINS will be nonzero when the switch is done
5247 if all operands are constant. */
5248 int wins = 1;
5249
5250 /* Don't try to process an RTL_EXPR since its operands aren't trees.
5251 Likewise for a SAVE_EXPR that's already been evaluated. */
5252 if (code == RTL_EXPR || (code == SAVE_EXPR && SAVE_EXPR_RTL (t) != 0))
5253 return t;
5254
5255 /* Return right away if a constant. */
5256 if (kind == 'c')
5257 return t;
5258
5259 orig_t = t;
5260
5261 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
5262 {
5263 tree subop;
5264
5265 /* Special case for conversion ops that can have fixed point args. */
5266 arg0 = TREE_OPERAND (t, 0);
5267
5268 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
5269 if (arg0 != 0)
5270 STRIP_SIGN_NOPS (arg0);
5271
5272 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
5273 subop = TREE_REALPART (arg0);
5274 else
5275 subop = arg0;
5276
5277 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
5278 && TREE_CODE (subop) != REAL_CST)
5279 /* Note that TREE_CONSTANT isn't enough:
5280 static var addresses are constant but we can't
5281 do arithmetic on them. */
5282 wins = 0;
5283 }
5284 else if (IS_EXPR_CODE_CLASS (kind))
5285 {
5286 int len = first_rtl_op (code);
5287 int i;
5288 for (i = 0; i < len; i++)
5289 {
5290 tree op = TREE_OPERAND (t, i);
5291 tree subop;
5292
5293 if (op == 0)
5294 continue; /* Valid for CALL_EXPR, at least. */
5295
5296 if (kind == '<' || code == RSHIFT_EXPR)
5297 {
5298 /* Signedness matters here. Perhaps we can refine this
5299 later. */
5300 STRIP_SIGN_NOPS (op);
5301 }
5302 else
5303 /* Strip any conversions that don't change the mode. */
5304 STRIP_NOPS (op);
5305
5306 if (TREE_CODE (op) == COMPLEX_CST)
5307 subop = TREE_REALPART (op);
5308 else
5309 subop = op;
5310
5311 if (TREE_CODE (subop) != INTEGER_CST
5312 && TREE_CODE (subop) != REAL_CST)
5313 /* Note that TREE_CONSTANT isn't enough:
5314 static var addresses are constant but we can't
5315 do arithmetic on them. */
5316 wins = 0;
5317
5318 if (i == 0)
5319 arg0 = op;
5320 else if (i == 1)
5321 arg1 = op;
5322 }
5323 }
5324
5325 /* If this is a commutative operation, and ARG0 is a constant, move it
5326 to ARG1 to reduce the number of tests below. */
5327 if (commutative_tree_code (code)
5328 && tree_swap_operands_p (arg0, arg1, true))
5329 return fold (build (code, type, arg1, arg0));
5330
5331 /* Now WINS is set as described above,
5332 ARG0 is the first operand of EXPR,
5333 and ARG1 is the second operand (if EXPR has more than one operand).
5334
5335 First check for cases where an arithmetic operation is applied to a
5336 compound, conditional, or comparison operation. Push the arithmetic
5337 operation inside the compound or conditional to see if any folding
5338 can then be done. Convert comparison to conditional for this purpose.
5339 This also optimizes non-constant cases that used to be done in
5340 expand_expr.
5341
5342 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
5343 where one of the operands is a truth value and the other is a truth
5344 value or a BIT_AND_EXPR with the constant 1. In that case, the
5345 code below would make the expression more complex. Change it to a
5346 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
5347 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
5348
5349 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
5350 || code == EQ_EXPR || code == NE_EXPR)
5351 && ((truth_value_p (TREE_CODE (arg0))
5352 && (truth_value_p (TREE_CODE (arg1))
5353 || (TREE_CODE (arg1) == BIT_AND_EXPR
5354 && integer_onep (TREE_OPERAND (arg1, 1)))))
5355 || (truth_value_p (TREE_CODE (arg1))
5356 && (truth_value_p (TREE_CODE (arg0))
5357 || (TREE_CODE (arg0) == BIT_AND_EXPR
5358 && integer_onep (TREE_OPERAND (arg0, 1)))))))
5359 {
5360 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
5361 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
5362 : TRUTH_XOR_EXPR,
5363 type, arg0, arg1));
5364
5365 if (code == EQ_EXPR)
5366 t = invert_truthvalue (t);
5367
5368 return t;
5369 }
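/* For example, "(a < b) & (c < d)" becomes the TRUTH_AND_EXPR
   "(a < b) && (c < d)", and "(a < b) == (c < d)" becomes the
   inverted TRUTH_XOR_EXPR "!((a < b) ^ (c < d))".  */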
5370
5371 if (TREE_CODE_CLASS (code) == '1')
5372 {
5373 if (TREE_CODE (arg0) == COMPOUND_EXPR)
5374 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5375 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
5376 else if (TREE_CODE (arg0) == COND_EXPR)
5377 {
5378 tree arg01 = TREE_OPERAND (arg0, 1);
5379 tree arg02 = TREE_OPERAND (arg0, 2);
5380 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
5381 arg01 = fold (build1 (code, type, arg01));
5382 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
5383 arg02 = fold (build1 (code, type, arg02));
5384 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
5385 arg01, arg02));
5386
5387 /* If this was a conversion, and all we did was to move it
5388 inside the COND_EXPR, bring it back out. But leave it if
5389 it is a conversion from integer to integer and the
5390 result precision is no wider than a word since such a
5391 conversion is cheap and may be optimized away by combine,
5392 while it couldn't if it were outside the COND_EXPR. Then return
5393 so we don't get into an infinite recursion loop taking the
5394 conversion out and then back in. */
5395
5396 if ((code == NOP_EXPR || code == CONVERT_EXPR
5397 || code == NON_LVALUE_EXPR)
5398 && TREE_CODE (t) == COND_EXPR
5399 && TREE_CODE (TREE_OPERAND (t, 1)) == code
5400 && TREE_CODE (TREE_OPERAND (t, 2)) == code
5401 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1)))
5402 && ! VOID_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 2)))
5403 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
5404 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
5405 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
5406 && (INTEGRAL_TYPE_P
5407 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))))
5408 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
5409 t = build1 (code, type,
5410 build (COND_EXPR,
5411 TREE_TYPE (TREE_OPERAND
5412 (TREE_OPERAND (t, 1), 0)),
5413 TREE_OPERAND (t, 0),
5414 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
5415 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
5416 return t;
5417 }
5418 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
5419 return fold (build (COND_EXPR, type, arg0,
5420 fold (build1 (code, type, integer_one_node)),
5421 fold (build1 (code, type, integer_zero_node))));
5422 }
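/* For example, the unary distribution above rewrites "-(a ? b : c)"
   as "a ? -b : -c" and "(float) (x, y)" as "(x, (float) y)", giving
   the pushed-down operation another chance to fold.  */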
5423 else if (TREE_CODE_CLASS (code) == '<'
5424 && TREE_CODE (arg0) == COMPOUND_EXPR)
5425 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5426 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5427 else if (TREE_CODE_CLASS (code) == '<'
5428 && TREE_CODE (arg1) == COMPOUND_EXPR)
5429 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5430 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
5431 else if (TREE_CODE_CLASS (code) == '2'
5432 || TREE_CODE_CLASS (code) == '<')
5433 {
5434 if (TREE_CODE (arg1) == COMPOUND_EXPR
5435 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg1, 0))
5436 && ! TREE_SIDE_EFFECTS (arg0))
5437 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
5438 fold (build (code, type,
5439 arg0, TREE_OPERAND (arg1, 1))));
5440 else if ((TREE_CODE (arg1) == COND_EXPR
5441 || (TREE_CODE_CLASS (TREE_CODE (arg1)) == '<'
5442 && TREE_CODE_CLASS (code) != '<'))
5443 && (TREE_CODE (arg0) != COND_EXPR
5444 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5445 && (! TREE_SIDE_EFFECTS (arg0)
5446 || ((*lang_hooks.decls.global_bindings_p) () == 0
5447 && ! CONTAINS_PLACEHOLDER_P (arg0))))
5448 return
5449 fold_binary_op_with_conditional_arg (code, type, arg1, arg0,
5450 /*cond_first_p=*/0);
5451 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
5452 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
5453 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
5454 else if ((TREE_CODE (arg0) == COND_EXPR
5455 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
5456 && TREE_CODE_CLASS (code) != '<'))
5457 && (TREE_CODE (arg1) != COND_EXPR
5458 || count_cond (arg0, 25) + count_cond (arg1, 25) <= 25)
5459 && (! TREE_SIDE_EFFECTS (arg1)
5460 || ((*lang_hooks.decls.global_bindings_p) () == 0
5461 && ! CONTAINS_PLACEHOLDER_P (arg1))))
5462 return
5463 fold_binary_op_with_conditional_arg (code, type, arg0, arg1,
5464 /*cond_first_p=*/1);
5465 }
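/* For example, provided d has no side effects, "(a ? b : c) + d" is
   rewritten by fold_binary_op_with_conditional_arg as
   "a ? b + d : c + d", and "(x, y) + z" becomes "(x, y + z)".  */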
5466
5467 switch (code)
5468 {
5469 case INTEGER_CST:
5470 case REAL_CST:
5471 case VECTOR_CST:
5472 case STRING_CST:
5473 case COMPLEX_CST:
5474 case CONSTRUCTOR:
5475 return t;
5476
5477 case CONST_DECL:
5478 return fold (DECL_INITIAL (t));
5479
5480 case NOP_EXPR:
5481 case FLOAT_EXPR:
5482 case CONVERT_EXPR:
5483 case FIX_TRUNC_EXPR:
5484 case FIX_CEIL_EXPR:
5485 case FIX_FLOOR_EXPR:
5486 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
5487 return TREE_OPERAND (t, 0);
5488
5489 /* Handle cases of two conversions in a row. */
5490 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
5491 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
5492 {
5493 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5494 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
5495 tree final_type = TREE_TYPE (t);
5496 int inside_int = INTEGRAL_TYPE_P (inside_type);
5497 int inside_ptr = POINTER_TYPE_P (inside_type);
5498 int inside_float = FLOAT_TYPE_P (inside_type);
5499 unsigned int inside_prec = TYPE_PRECISION (inside_type);
5500 int inside_unsignedp = TREE_UNSIGNED (inside_type);
5501 int inter_int = INTEGRAL_TYPE_P (inter_type);
5502 int inter_ptr = POINTER_TYPE_P (inter_type);
5503 int inter_float = FLOAT_TYPE_P (inter_type);
5504 unsigned int inter_prec = TYPE_PRECISION (inter_type);
5505 int inter_unsignedp = TREE_UNSIGNED (inter_type);
5506 int final_int = INTEGRAL_TYPE_P (final_type);
5507 int final_ptr = POINTER_TYPE_P (final_type);
5508 int final_float = FLOAT_TYPE_P (final_type);
5509 unsigned int final_prec = TYPE_PRECISION (final_type);
5510 int final_unsignedp = TREE_UNSIGNED (final_type);
5511
5512 /* In addition to the cases of two conversions in a row
5513 handled below, if we are converting something to its own
5514 type via an object of identical or wider precision, neither
5515 conversion is needed. */
5516 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (final_type)
5517 && ((inter_int && final_int) || (inter_float && final_float))
5518 && inter_prec >= final_prec)
5519 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5520
5521 /* Likewise, if the intermediate and final types are either both
5522 float or both integer, we don't need the middle conversion if
5523 it is wider than the final type and doesn't change the signedness
5524 (for integers). Avoid this if the final type is a pointer
5525 since then we sometimes need the inner conversion. Likewise if
5526 the outer has a precision not equal to the size of its mode. */
5527 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
5528 || (inter_float && inside_float))
5529 && inter_prec >= inside_prec
5530 && (inter_float || inter_unsignedp == inside_unsignedp)
5531 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5532 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5533 && ! final_ptr)
5534 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5535
5536 /* If we have a sign-extension of a zero-extended value, we can
5537 replace that by a single zero-extension. */
5538 if (inside_int && inter_int && final_int
5539 && inside_prec < inter_prec && inter_prec < final_prec
5540 && inside_unsignedp && !inter_unsignedp)
5541 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5542
5543 /* Two conversions in a row are not needed unless:
5544 - some conversion is floating-point (overstrict for now), or
5545 - the intermediate type is narrower than both initial and
5546 final, or
5547 - the intermediate type and innermost type differ in signedness,
5548 and the outermost type is wider than the intermediate, or
5549 - the initial type is a pointer type and the precisions of the
5550 intermediate and final types differ, or
5551 - the final type is a pointer type and the precisions of the
5552 initial and intermediate types differ. */
5553 if (! inside_float && ! inter_float && ! final_float
5554 && (inter_prec > inside_prec || inter_prec > final_prec)
5555 && ! (inside_int && inter_int
5556 && inter_unsignedp != inside_unsignedp
5557 && inter_prec < final_prec)
5558 && ((inter_unsignedp && inter_prec > inside_prec)
5559 == (final_unsignedp && final_prec > inter_prec))
5560 && ! (inside_ptr && inter_prec != final_prec)
5561 && ! (final_ptr && inside_prec != inter_prec)
5562 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
5563 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
5564 && ! final_ptr)
5565 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5566 }
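/* Worked examples (illustrative): for a short s, "(short) (int) s"
   needs neither conversion, since s only passes through a wider
   integral type on the way back to its own; and a sign-extension of
   a zero-extension, e.g. "(long long) (int) (unsigned short) u",
   collapses to a single zero-extension of u to long long.  */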
5567
5568 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
5569 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
5570 /* Detect assigning a bitfield. */
5571 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
5572 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
5573 {
5574 /* Don't leave an assignment inside a conversion
5575 unless assigning a bitfield. */
5576 tree prev = TREE_OPERAND (t, 0);
5577 if (t == orig_t)
5578 t = copy_node (t);
5579 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
5580 /* First do the assignment, then return converted constant. */
5581 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
5582 TREE_USED (t) = 1;
5583 return t;
5584 }
5585
5586 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
5587 constant (if x has signed type, the sign bit cannot be set
5588 in c). This folds extension into the BIT_AND_EXPR. */
5589 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
5590 && TREE_CODE (TREE_TYPE (t)) != BOOLEAN_TYPE
5591 && TREE_CODE (TREE_OPERAND (t, 0)) == BIT_AND_EXPR
5592 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST)
5593 {
5594 tree and = TREE_OPERAND (t, 0);
5595 tree and0 = TREE_OPERAND (and, 0), and1 = TREE_OPERAND (and, 1);
5596 int change = 0;
5597
5598 if (TREE_UNSIGNED (TREE_TYPE (and))
5599 || (TYPE_PRECISION (TREE_TYPE (t))
5600 <= TYPE_PRECISION (TREE_TYPE (and))))
5601 change = 1;
5602 else if (TYPE_PRECISION (TREE_TYPE (and1))
5603 <= HOST_BITS_PER_WIDE_INT
5604 && host_integerp (and1, 1))
5605 {
5606 unsigned HOST_WIDE_INT cst;
5607
5608 cst = tree_low_cst (and1, 1);
5609 cst &= (HOST_WIDE_INT) -1
5610 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
5611 change = (cst == 0);
5612 #ifdef LOAD_EXTEND_OP
5613 if (change
5614 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
5615 == ZERO_EXTEND))
5616 {
5617 tree uns = (*lang_hooks.types.unsigned_type) (TREE_TYPE (and0));
5618 and0 = convert (uns, and0);
5619 and1 = convert (uns, and1);
5620 }
5621 #endif
5622 }
5623 if (change)
5624 return fold (build (BIT_AND_EXPR, TREE_TYPE (t),
5625 convert (TREE_TYPE (t), and0),
5626 convert (TREE_TYPE (t), and1)));
5627 }
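/* For example, with an unsigned char X, "(int) (X & 0x3f)" becomes
   "(int) X & 0x3f"; for a signed X the fold is done only when,
   roughly, the mask has no bits at or above the sign-bit position
   of X's type.  */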
5628
5629 tem = fold_convert_const (code, TREE_TYPE (t), arg0);
5630 return tem ? tem : t;
5631
5632 case VIEW_CONVERT_EXPR:
5633 if (TREE_CODE (TREE_OPERAND (t, 0)) == VIEW_CONVERT_EXPR)
5634 return build1 (VIEW_CONVERT_EXPR, type,
5635 TREE_OPERAND (TREE_OPERAND (t, 0), 0));
5636 return t;
5637
5638 case COMPONENT_REF:
5639 if (TREE_CODE (arg0) == CONSTRUCTOR
5640 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
5641 {
5642 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
5643 if (m)
5644 t = TREE_VALUE (m);
5645 }
5646 return t;
5647
5648 case RANGE_EXPR:
5649 if (TREE_CONSTANT (t) != wins)
5650 {
5651 if (t == orig_t)
5652 t = copy_node (t);
5653 TREE_CONSTANT (t) = wins;
5654 }
5655 return t;
5656
5657 case NEGATE_EXPR:
5658 if (negate_expr_p (arg0))
5659 return negate_expr (arg0);
5660 return t;
5661
5662 case ABS_EXPR:
5663 if (wins)
5664 {
5665 if (TREE_CODE (arg0) == INTEGER_CST)
5666 {
5667 /* If the value is unsigned, then the absolute value is
5668 the same as the ordinary value. */
5669 if (TREE_UNSIGNED (type))
5670 return arg0;
5671 /* Similarly, if the value is non-negative. */
5672 else if (INT_CST_LT (integer_minus_one_node, arg0))
5673 return arg0;
5674 /* If the value is negative, then the absolute value is
5675 its negation. */
5676 else
5677 {
5678 unsigned HOST_WIDE_INT low;
5679 HOST_WIDE_INT high;
5680 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
5681 TREE_INT_CST_HIGH (arg0),
5682 &low, &high);
5683 t = build_int_2 (low, high);
5684 TREE_TYPE (t) = type;
5685 TREE_OVERFLOW (t)
5686 = (TREE_OVERFLOW (arg0)
5687 | force_fit_type (t, overflow));
5688 TREE_CONSTANT_OVERFLOW (t)
5689 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
5690 }
5691 }
5692 else if (TREE_CODE (arg0) == REAL_CST)
5693 {
5694 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
5695 t = build_real (type,
5696 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
5697 }
5698 }
5699 else if (TREE_CODE (arg0) == NEGATE_EXPR)
5700 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0)));
5701 /* Convert fabs((double)float) into (double)fabsf(float). */
5702 else if (TREE_CODE (arg0) == NOP_EXPR
5703 && TREE_CODE (type) == REAL_TYPE)
5704 {
5705 tree targ0 = strip_float_extensions (arg0);
5706 if (targ0 != arg0)
5707 return convert (type, fold (build1 (ABS_EXPR, TREE_TYPE (targ0),
5708 targ0)));
5709 }
5710 else if (tree_expr_nonnegative_p (arg0))
5711 return arg0;
5712 return t;
5713
5714 case CONJ_EXPR:
5715 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5716 return convert (type, arg0);
5717 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5718 return build (COMPLEX_EXPR, type,
5719 TREE_OPERAND (arg0, 0),
5720 negate_expr (TREE_OPERAND (arg0, 1)));
5721 else if (TREE_CODE (arg0) == COMPLEX_CST)
5722 return build_complex (type, TREE_REALPART (arg0),
5723 negate_expr (TREE_IMAGPART (arg0)));
5724 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5725 return fold (build (TREE_CODE (arg0), type,
5726 fold (build1 (CONJ_EXPR, type,
5727 TREE_OPERAND (arg0, 0))),
5728 fold (build1 (CONJ_EXPR,
5729 type, TREE_OPERAND (arg0, 1)))));
5730 else if (TREE_CODE (arg0) == CONJ_EXPR)
5731 return TREE_OPERAND (arg0, 0);
5732 return t;
5733
5734 case BIT_NOT_EXPR:
5735 if (wins)
5736 {
5737 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
5738 ~ TREE_INT_CST_HIGH (arg0));
5739 TREE_TYPE (t) = type;
5740 force_fit_type (t, 0);
5741 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
5742 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
5743 }
5744 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
5745 return TREE_OPERAND (arg0, 0);
5746 return t;
5747
5748 case PLUS_EXPR:
5749 /* A + (-B) -> A - B */
5750 if (TREE_CODE (arg1) == NEGATE_EXPR)
5751 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
5752 /* (-A) + B -> B - A */
5753 if (TREE_CODE (arg0) == NEGATE_EXPR)
5754 return fold (build (MINUS_EXPR, type, arg1, TREE_OPERAND (arg0, 0)));
5755 else if (! FLOAT_TYPE_P (type))
5756 {
5757 if (integer_zerop (arg1))
5758 return non_lvalue (convert (type, arg0));
5759
5760 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
5761 with a constant, and the two constants have no bits in common,
5762 we should treat this as a BIT_IOR_EXPR since this may produce more
5763 simplifications. */
5764 if (TREE_CODE (arg0) == BIT_AND_EXPR
5765 && TREE_CODE (arg1) == BIT_AND_EXPR
5766 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
5767 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
5768 && integer_zerop (const_binop (BIT_AND_EXPR,
5769 TREE_OPERAND (arg0, 1),
5770 TREE_OPERAND (arg1, 1), 0)))
5771 {
5772 code = BIT_IOR_EXPR;
5773 goto bit_ior;
5774 }
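/* For example, "(x & 0xf0) + (y & 0x0f)" cannot carry between the
   two halves, so it is handled as "(x & 0xf0) | (y & 0x0f)" by the
   BIT_IOR_EXPR code.  */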
5775
5776 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
5777 (plus (plus (mult) (mult)) (foo)) so that we can
5778 take advantage of the factoring cases below. */
5779 if ((TREE_CODE (arg0) == PLUS_EXPR
5780 && TREE_CODE (arg1) == MULT_EXPR)
5781 || (TREE_CODE (arg1) == PLUS_EXPR
5782 && TREE_CODE (arg0) == MULT_EXPR))
5783 {
5784 tree parg0, parg1, parg, marg;
5785
5786 if (TREE_CODE (arg0) == PLUS_EXPR)
5787 parg = arg0, marg = arg1;
5788 else
5789 parg = arg1, marg = arg0;
5790 parg0 = TREE_OPERAND (parg, 0);
5791 parg1 = TREE_OPERAND (parg, 1);
5792 STRIP_NOPS (parg0);
5793 STRIP_NOPS (parg1);
5794
5795 if (TREE_CODE (parg0) == MULT_EXPR
5796 && TREE_CODE (parg1) != MULT_EXPR)
5797 return fold (build (PLUS_EXPR, type,
5798 fold (build (PLUS_EXPR, type,
5799 convert (type, parg0),
5800 convert (type, marg))),
5801 convert (type, parg1)));
5802 if (TREE_CODE (parg0) != MULT_EXPR
5803 && TREE_CODE (parg1) == MULT_EXPR)
5804 return fold (build (PLUS_EXPR, type,
5805 fold (build (PLUS_EXPR, type,
5806 convert (type, parg1),
5807 convert (type, marg))),
5808 convert (type, parg0)));
5809 }
5810
5811 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR)
5812 {
5813 tree arg00, arg01, arg10, arg11;
5814 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
5815
5816 /* (A * C) + (B * C) -> (A+B) * C.
5817 We are most concerned about the case where C is a constant,
5818 but other combinations show up during loop reduction. Since
5819 it is not difficult, try all four possibilities. */
5820
5821 arg00 = TREE_OPERAND (arg0, 0);
5822 arg01 = TREE_OPERAND (arg0, 1);
5823 arg10 = TREE_OPERAND (arg1, 0);
5824 arg11 = TREE_OPERAND (arg1, 1);
5825 same = NULL_TREE;
5826
5827 if (operand_equal_p (arg01, arg11, 0))
5828 same = arg01, alt0 = arg00, alt1 = arg10;
5829 else if (operand_equal_p (arg00, arg10, 0))
5830 same = arg00, alt0 = arg01, alt1 = arg11;
5831 else if (operand_equal_p (arg00, arg11, 0))
5832 same = arg00, alt0 = arg01, alt1 = arg10;
5833 else if (operand_equal_p (arg01, arg10, 0))
5834 same = arg01, alt0 = arg00, alt1 = arg11;
5835
5836 /* No identical multiplicands; see if we can find a common
5837 power-of-two factor in non-power-of-two multiplies. This
5838 can help in multi-dimensional array access. */
5839 else if (TREE_CODE (arg01) == INTEGER_CST
5840 && TREE_CODE (arg11) == INTEGER_CST
5841 && TREE_INT_CST_HIGH (arg01) == 0
5842 && TREE_INT_CST_HIGH (arg11) == 0)
5843 {
5844 HOST_WIDE_INT int01, int11, tmp;
5845 int01 = TREE_INT_CST_LOW (arg01);
5846 int11 = TREE_INT_CST_LOW (arg11);
5847
5848 /* Move min of absolute values to int11. */
5849 if ((int01 >= 0 ? int01 : -int01)
5850 < (int11 >= 0 ? int11 : -int11))
5851 {
5852 tmp = int01, int01 = int11, int11 = tmp;
5853 alt0 = arg00, arg00 = arg10, arg10 = alt0;
5854 alt0 = arg01, arg01 = arg11, arg11 = alt0;
5855 }
5856
5857 if (exact_log2 (int11) > 0 && int01 % int11 == 0)
5858 {
5859 alt0 = fold (build (MULT_EXPR, type, arg00,
5860 build_int_2 (int01 / int11, 0)));
5861 alt1 = arg10;
5862 same = arg11;
5863 }
5864 }
5865
5866 if (same)
5867 return fold (build (MULT_EXPR, type,
5868 fold (build (PLUS_EXPR, type, alt0, alt1)),
5869 same));
5870 }
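/* For example, "a*4 + b*4" factors into "(a + b) * 4", and the
   power-of-two case turns "i*12 + j*4" into "(i*3 + j) * 4", which
   helps fold multi-dimensional array index arithmetic.  */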
5871 }
5872 else
5873 {
5874 /* See if ARG1 is zero and X + ARG1 reduces to X. */
5875 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
5876 return non_lvalue (convert (type, arg0));
5877
5878 /* Likewise if the operands are reversed. */
5879 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
5880 return non_lvalue (convert (type, arg1));
5881
5882 /* Convert x+x into x*2.0. */
5883 if (operand_equal_p (arg0, arg1, 0)
5884 && SCALAR_FLOAT_TYPE_P (type))
5885 return fold (build (MULT_EXPR, type, arg0,
5886 build_real (type, dconst2)));
5887
5888 /* Convert x*c+x into x*(c+1). */
5889 if (flag_unsafe_math_optimizations
5890 && TREE_CODE (arg0) == MULT_EXPR
5891 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5892 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
5893 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
5894 {
5895 REAL_VALUE_TYPE c;
5896
5897 c = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
5898 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
5899 return fold (build (MULT_EXPR, type, arg1,
5900 build_real (type, c)));
5901 }
5902
5903 /* Convert x+x*c into x*(c+1). */
5904 if (flag_unsafe_math_optimizations
5905 && TREE_CODE (arg1) == MULT_EXPR
5906 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
5907 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
5908 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
5909 {
5910 REAL_VALUE_TYPE c;
5911
5912 c = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
5913 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
5914 return fold (build (MULT_EXPR, type, arg0,
5915 build_real (type, c)));
5916 }
5917
5918 /* Convert x*c1+x*c2 into x*(c1+c2). */
5919 if (flag_unsafe_math_optimizations
5920 && TREE_CODE (arg0) == MULT_EXPR
5921 && TREE_CODE (arg1) == MULT_EXPR
5922 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
5923 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg0, 1))
5924 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST
5925 && ! TREE_CONSTANT_OVERFLOW (TREE_OPERAND (arg1, 1))
5926 && operand_equal_p (TREE_OPERAND (arg0, 0),
5927 TREE_OPERAND (arg1, 0), 0))
5928 {
5929 REAL_VALUE_TYPE c1, c2;
5930
5931 c1 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
5932 c2 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
5933 real_arithmetic (&c1, PLUS_EXPR, &c1, &c2);
5934 return fold (build (MULT_EXPR, type,
5935 TREE_OPERAND (arg0, 0),
5936 build_real (type, c1)));
5937 }
5938 }
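/* For example, "x + x" becomes "x * 2.0" unconditionally for scalar
   floats, while with -funsafe-math-optimizations "x*2.0 + x" becomes
   "x * 3.0" and "x*2.0 + x*3.0" becomes "x * 5.0".  */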
5939
5940 bit_rotate:
5941 /* (A << C1) + (A >> C2), where A is unsigned and C1+C2 is the size
5942 of A, is a rotate of A by C1 bits. */
5943 /* (A << B) + (A >> (Z - B)), where A is unsigned and Z is the size
5944 of A, is a rotate of A by B bits. */
5945 {
5946 enum tree_code code0, code1;
5947 code0 = TREE_CODE (arg0);
5948 code1 = TREE_CODE (arg1);
5949 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
5950 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
5951 && operand_equal_p (TREE_OPERAND (arg0, 0),
5952 TREE_OPERAND (arg1, 0), 0)
5953 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
5954 {
5955 tree tree01, tree11;
5956 enum tree_code code01, code11;
5957
5958 tree01 = TREE_OPERAND (arg0, 1);
5959 tree11 = TREE_OPERAND (arg1, 1);
5960 STRIP_NOPS (tree01);
5961 STRIP_NOPS (tree11);
5962 code01 = TREE_CODE (tree01);
5963 code11 = TREE_CODE (tree11);
5964 if (code01 == INTEGER_CST
5965 && code11 == INTEGER_CST
5966 && TREE_INT_CST_HIGH (tree01) == 0
5967 && TREE_INT_CST_HIGH (tree11) == 0
5968 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
5969 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
5970 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
5971 code0 == LSHIFT_EXPR ? tree01 : tree11);
5972 else if (code11 == MINUS_EXPR)
5973 {
5974 tree tree110, tree111;
5975 tree110 = TREE_OPERAND (tree11, 0);
5976 tree111 = TREE_OPERAND (tree11, 1);
5977 STRIP_NOPS (tree110);
5978 STRIP_NOPS (tree111);
5979 if (TREE_CODE (tree110) == INTEGER_CST
5980 && 0 == compare_tree_int (tree110,
5981 TYPE_PRECISION
5982 (TREE_TYPE (TREE_OPERAND
5983 (arg0, 0))))
5984 && operand_equal_p (tree01, tree111, 0))
5985 return build ((code0 == LSHIFT_EXPR
5986 ? LROTATE_EXPR
5987 : RROTATE_EXPR),
5988 type, TREE_OPERAND (arg0, 0), tree01);
5989 }
5990 else if (code01 == MINUS_EXPR)
5991 {
5992 tree tree010, tree011;
5993 tree010 = TREE_OPERAND (tree01, 0);
5994 tree011 = TREE_OPERAND (tree01, 1);
5995 STRIP_NOPS (tree010);
5996 STRIP_NOPS (tree011);
5997 if (TREE_CODE (tree010) == INTEGER_CST
5998 && 0 == compare_tree_int (tree010,
5999 TYPE_PRECISION
6000 (TREE_TYPE (TREE_OPERAND
6001 (arg0, 0))))
6002 && operand_equal_p (tree11, tree011, 0))
6003 return build ((code0 != LSHIFT_EXPR
6004 ? LROTATE_EXPR
6005 : RROTATE_EXPR),
6006 type, TREE_OPERAND (arg0, 0), tree11);
6007 }
6008 }
6009 }
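/* For example, with a 32-bit unsigned x, both "(x << 3) + (x >> 29)"
   and "(x << n) + (x >> (32 - n))" are recognized here as a left
   rotate of x (LROTATE_EXPR).  */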
6010
6011 associate:
6012 /* In most languages, we can't associate operations on floats through
6013 parentheses. Rather than remember where the parentheses were, we
6014 don't associate floats at all, unless the user has specified
6015 -funsafe-math-optimizations. */
6016
6017 if (! wins
6018 && (! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations))
6019 {
6020 tree var0, con0, lit0, minus_lit0;
6021 tree var1, con1, lit1, minus_lit1;
6022
6023 /* Split both trees into variables, constants, and literals. Then
6024 associate each group together, the constants with literals,
6025 then the result with variables. This increases the chances of
6026 literals being recombined later and of generating relocatable
6027 expressions for the sum of a constant and literal. */
6028 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
6029 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
6030 code == MINUS_EXPR);
6031
6032 /* Only do something if we found more than two objects. Otherwise,
6033 nothing has changed and we risk infinite recursion. */
6034 if (2 < ((var0 != 0) + (var1 != 0)
6035 + (con0 != 0) + (con1 != 0)
6036 + (lit0 != 0) + (lit1 != 0)
6037 + (minus_lit0 != 0) + (minus_lit1 != 0)))
6038 {
6039 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
6040 if (code == MINUS_EXPR)
6041 code = PLUS_EXPR;
6042
6043 var0 = associate_trees (var0, var1, code, type);
6044 con0 = associate_trees (con0, con1, code, type);
6045 lit0 = associate_trees (lit0, lit1, code, type);
6046 minus_lit0 = associate_trees (minus_lit0, minus_lit1, code, type);
6047
6048 /* Preserve the MINUS_EXPR if the negative part of the literal is
6049 greater than the positive part. Otherwise, the multiplicative
6050 folding code (i.e. extract_muldiv) may be fooled when
6051 unsigned constants are subtracted, as in the following
6052 example: ((X*2 + 4) - 8U)/2. */
6053 if (minus_lit0 && lit0)
6054 {
6055 if (TREE_CODE (lit0) == INTEGER_CST
6056 && TREE_CODE (minus_lit0) == INTEGER_CST
6057 && tree_int_cst_lt (lit0, minus_lit0))
6058 {
6059 minus_lit0 = associate_trees (minus_lit0, lit0,
6060 MINUS_EXPR, type);
6061 lit0 = 0;
6062 }
6063 else
6064 {
6065 lit0 = associate_trees (lit0, minus_lit0,
6066 MINUS_EXPR, type);
6067 minus_lit0 = 0;
6068 }
6069 }
6070 if (minus_lit0)
6071 {
6072 if (con0 == 0)
6073 return convert (type, associate_trees (var0, minus_lit0,
6074 MINUS_EXPR, type));
6075 else
6076 {
6077 con0 = associate_trees (con0, minus_lit0,
6078 MINUS_EXPR, type);
6079 return convert (type, associate_trees (var0, con0,
6080 PLUS_EXPR, type));
6081 }
6082 }
6083
6084 con0 = associate_trees (con0, lit0, code, type);
6085 return convert (type, associate_trees (var0, con0, code, type));
6086 }
6087 }
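/* For example, "(x + 1) + (y + 2)" splits into the variables x, y
   and the literals 1, 2, and reassociates into "(x + y) + 3".  */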
6088
6089 binary:
6090 if (wins)
6091 t1 = const_binop (code, arg0, arg1, 0);
6092 if (t1 != NULL_TREE)
6093 {
6094 /* The return value should always have
6095 the same type as the original expression. */
6096 if (TREE_TYPE (t1) != TREE_TYPE (t))
6097 t1 = convert (TREE_TYPE (t), t1);
6098
6099 return t1;
6100 }
6101 return t;
6102
6103 case MINUS_EXPR:
6104 /* A - (-B) -> A + B */
6105 if (TREE_CODE (arg1) == NEGATE_EXPR)
6106 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
6107 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
6108 if (TREE_CODE (arg0) == NEGATE_EXPR
6109 && (FLOAT_TYPE_P (type)
6110 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv))
6111 && negate_expr_p (arg1)
6112 && reorder_operands_p (arg0, arg1))
6113 return fold (build (MINUS_EXPR, type, negate_expr (arg1),
6114 TREE_OPERAND (arg0, 0)));
6115
6116 if (! FLOAT_TYPE_P (type))
6117 {
6118 if (! wins && integer_zerop (arg0))
6119 return negate_expr (convert (type, arg1));
6120 if (integer_zerop (arg1))
6121 return non_lvalue (convert (type, arg0));
6122
6123 /* Fold A - (A & B) into ~B & A. */
6124 if (!TREE_SIDE_EFFECTS (arg0)
6125 && TREE_CODE (arg1) == BIT_AND_EXPR)
6126 {
6127 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
6128 return fold (build (BIT_AND_EXPR, type,
6129 fold (build1 (BIT_NOT_EXPR, type,
6130 TREE_OPERAND (arg1, 0))),
6131 arg0));
6132 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
6133 return fold (build (BIT_AND_EXPR, type,
6134 fold (build1 (BIT_NOT_EXPR, type,
6135 TREE_OPERAND (arg1, 1))),
6136 arg0));
6137 }
6138
6139 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
6140 any power of 2 minus 1. */
6141 if (TREE_CODE (arg0) == BIT_AND_EXPR
6142 && TREE_CODE (arg1) == BIT_AND_EXPR
6143 && operand_equal_p (TREE_OPERAND (arg0, 0),
6144 TREE_OPERAND (arg1, 0), 0))
6145 {
6146 tree mask0 = TREE_OPERAND (arg0, 1);
6147 tree mask1 = TREE_OPERAND (arg1, 1);
6148 tree tem = fold (build1 (BIT_NOT_EXPR, type, mask0));
6149
6150 if (operand_equal_p (tem, mask1, 0))
6151 {
6152 tem = fold (build (BIT_XOR_EXPR, type,
6153 TREE_OPERAND (arg0, 0), mask1));
6154 return fold (build (MINUS_EXPR, type, tem, mask1));
6155 }
6156 }
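/* For example, "(x & ~7) - (x & 7)" becomes "(x ^ 7) - 7", since 7
   is a power of 2 minus 1 and the two masks are complements.  */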
6157 }
6158
6159 /* See if ARG1 is zero and X - ARG1 reduces to X. */
6160 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
6161 return non_lvalue (convert (type, arg0));
6162
6163 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
6164 ARG0 is zero and X + ARG0 reduces to X, since that would mean
6165 (-ARG1 + ARG0) reduces to -ARG1. */
6166 else if (!wins && fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
6167 return negate_expr (convert (type, arg1));
6168
6169 /* Fold &x - &x. This can happen from &x.foo - &x.
6170 This is unsafe for certain floats even in non-IEEE formats.
6171 In IEEE, it is unsafe because it does wrong for NaNs.
6172 Also note that operand_equal_p is always false if an operand
6173 is volatile. */
6174
6175 if ((! FLOAT_TYPE_P (type) || flag_unsafe_math_optimizations)
6176 && operand_equal_p (arg0, arg1, 0))
6177 return convert (type, integer_zero_node);
6178
6179 /* A - B -> A + (-B) if B is easily negatable. */
6180 if (!wins && negate_expr_p (arg1)
6181 && (FLOAT_TYPE_P (type)
6182 || (INTEGRAL_TYPE_P (type) && flag_wrapv && !flag_trapv)))
6183 return fold (build (PLUS_EXPR, type, arg0, negate_expr (arg1)));
6184
6185 if (TREE_CODE (arg0) == MULT_EXPR
6186 && TREE_CODE (arg1) == MULT_EXPR
6187 && (INTEGRAL_TYPE_P (type) || flag_unsafe_math_optimizations))
6188 {
6189 /* (A * C) - (B * C) -> (A-B) * C. */
6190 if (operand_equal_p (TREE_OPERAND (arg0, 1),
6191 TREE_OPERAND (arg1, 1), 0))
6192 return fold (build (MULT_EXPR, type,
6193 fold (build (MINUS_EXPR, type,
6194 TREE_OPERAND (arg0, 0),
6195 TREE_OPERAND (arg1, 0))),
6196 TREE_OPERAND (arg0, 1)));
6197 /* (A * C1) - (A * C2) -> A * (C1-C2). */
6198 if (operand_equal_p (TREE_OPERAND (arg0, 0),
6199 TREE_OPERAND (arg1, 0), 0))
6200 return fold (build (MULT_EXPR, type,
6201 TREE_OPERAND (arg0, 0),
6202 fold (build (MINUS_EXPR, type,
6203 TREE_OPERAND (arg0, 1),
6204 TREE_OPERAND (arg1, 1)))));
6205 }
6206
6207 goto associate;
6208
6209 case MULT_EXPR:
6210 /* (-A) * (-B) -> A * B */
6211 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6212 return fold (build (MULT_EXPR, type,
6213 TREE_OPERAND (arg0, 0),
6214 negate_expr (arg1)));
6215 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6216 return fold (build (MULT_EXPR, type,
6217 negate_expr (arg0),
6218 TREE_OPERAND (arg1, 0)));
6219
6220 if (! FLOAT_TYPE_P (type))
6221 {
6222 if (integer_zerop (arg1))
6223 return omit_one_operand (type, arg1, arg0);
6224 if (integer_onep (arg1))
6225 return non_lvalue (convert (type, arg0));
6226
6227 /* (a * (1 << b)) is (a << b) */
6228 if (TREE_CODE (arg1) == LSHIFT_EXPR
6229 && integer_onep (TREE_OPERAND (arg1, 0)))
6230 return fold (build (LSHIFT_EXPR, type, arg0,
6231 TREE_OPERAND (arg1, 1)));
6232 if (TREE_CODE (arg0) == LSHIFT_EXPR
6233 && integer_onep (TREE_OPERAND (arg0, 0)))
6234 return fold (build (LSHIFT_EXPR, type, arg1,
6235 TREE_OPERAND (arg0, 1)));
6236
6237 if (TREE_CODE (arg1) == INTEGER_CST
6238 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0),
6239 convert (type, arg1),
6240 code, NULL_TREE)))
6241 return convert (type, tem);
6242
6243 }
6244 else
6245 {
6246 /* Maybe fold x * 0 to 0. The expressions aren't the same
6247 when x is NaN, since x * 0 is also NaN. Nor are they the
6248 same in modes with signed zeros, since multiplying a
6249 negative value by 0 gives -0, not +0. */
6250 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
6251 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
6252 && real_zerop (arg1))
6253 return omit_one_operand (type, arg1, arg0);
6254 /* In IEEE floating point, x*1 is not equivalent to x for snans. */
6255 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6256 && real_onep (arg1))
6257 return non_lvalue (convert (type, arg0));
6258
6259 /* Transform x * -1.0 into -x. */
6260 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6261 && real_minus_onep (arg1))
6262 return fold (build1 (NEGATE_EXPR, type, arg0));
6263
6264 /* Convert (C1/X)*C2 into (C1*C2)/X. */
6265 if (flag_unsafe_math_optimizations
6266 && TREE_CODE (arg0) == RDIV_EXPR
6267 && TREE_CODE (arg1) == REAL_CST
6268 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
6269 {
6270 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
6271 arg1, 0);
6272 if (tem)
6273 return fold (build (RDIV_EXPR, type, tem,
6274 TREE_OPERAND (arg0, 1)));
6275 }
6276
6277 if (flag_unsafe_math_optimizations)
6278 {
6279 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6280 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6281
6282 /* Optimizations of sqrt(...)*sqrt(...). */
6283 if ((fcode0 == BUILT_IN_SQRT && fcode1 == BUILT_IN_SQRT)
6284 || (fcode0 == BUILT_IN_SQRTF && fcode1 == BUILT_IN_SQRTF)
6285 || (fcode0 == BUILT_IN_SQRTL && fcode1 == BUILT_IN_SQRTL))
6286 {
6287 tree sqrtfn, arg, arglist;
6288 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6289 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6290
6291 /* Optimize sqrt(x)*sqrt(x) as x. */
6292 if (operand_equal_p (arg00, arg10, 0)
6293 && ! HONOR_SNANS (TYPE_MODE (type)))
6294 return arg00;
6295
6296 /* Optimize sqrt(x)*sqrt(y) as sqrt(x*y). */
6297 sqrtfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6298 arg = fold (build (MULT_EXPR, type, arg00, arg10));
6299 arglist = build_tree_list (NULL_TREE, arg);
6300 return build_function_call_expr (sqrtfn, arglist);
6301 }
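/* For example, with -funsafe-math-optimizations, "sqrt(x)*sqrt(x)"
   becomes plain "x" (unless signaling NaNs must be honored), and
   "sqrt(x)*sqrt(y)" becomes "sqrt(x*y)".  */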
6302
6303 /* Optimize expN(x)*expN(y) as expN(x+y). */
6304 if (fcode0 == fcode1
6305 && (fcode0 == BUILT_IN_EXP
6306 || fcode0 == BUILT_IN_EXPF
6307 || fcode0 == BUILT_IN_EXPL
6308 || fcode0 == BUILT_IN_EXP2
6309 || fcode0 == BUILT_IN_EXP2F
6310 || fcode0 == BUILT_IN_EXP2L
6311 || fcode0 == BUILT_IN_EXP10
6312 || fcode0 == BUILT_IN_EXP10F
6313 || fcode0 == BUILT_IN_EXP10L
6314 || fcode0 == BUILT_IN_POW10
6315 || fcode0 == BUILT_IN_POW10F
6316 || fcode0 == BUILT_IN_POW10L))
6317 {
6318 tree expfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6319 tree arg = build (PLUS_EXPR, type,
6320 TREE_VALUE (TREE_OPERAND (arg0, 1)),
6321 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6322 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6323 return build_function_call_expr (expfn, arglist);
6324 }
6325
6326 /* Optimizations of pow(...)*pow(...). */
6327 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
6328 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
6329 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
6330 {
6331 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6332 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6333 1)));
6334 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6335 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6336 1)));
6337
6338 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
6339 if (operand_equal_p (arg01, arg11, 0))
6340 {
6341 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6342 tree arg = build (MULT_EXPR, type, arg00, arg10);
6343 tree arglist = tree_cons (NULL_TREE, fold (arg),
6344 build_tree_list (NULL_TREE,
6345 arg01));
6346 return build_function_call_expr (powfn, arglist);
6347 }
6348
6349 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
6350 if (operand_equal_p (arg00, arg10, 0))
6351 {
6352 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6353 tree arg = fold (build (PLUS_EXPR, type, arg01, arg11));
6354 tree arglist = tree_cons (NULL_TREE, arg00,
6355 build_tree_list (NULL_TREE,
6356 arg));
6357 return build_function_call_expr (powfn, arglist);
6358 }
6359 }
6360
6361 /* Optimize tan(x)*cos(x) as sin(x). */
6362 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
6363 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
6364 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
6365 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
6366 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
6367 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
6368 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6369 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6370 {
6371 tree sinfn;
6372
6373 switch (fcode0)
6374 {
6375 case BUILT_IN_TAN:
6376 case BUILT_IN_COS:
6377 sinfn = implicit_built_in_decls[BUILT_IN_SIN];
6378 break;
6379 case BUILT_IN_TANF:
6380 case BUILT_IN_COSF:
6381 sinfn = implicit_built_in_decls[BUILT_IN_SINF];
6382 break;
6383 case BUILT_IN_TANL:
6384 case BUILT_IN_COSL:
6385 sinfn = implicit_built_in_decls[BUILT_IN_SINL];
6386 break;
6387 default:
6388 sinfn = NULL_TREE;
6389 }
6390
6391 if (sinfn != NULL_TREE)
6392 return build_function_call_expr (sinfn,
6393 TREE_OPERAND (arg0, 1));
6394 }
6395
6396 /* Optimize x*pow(x,c) as pow(x,c+1). */
6397 if (fcode1 == BUILT_IN_POW
6398 || fcode1 == BUILT_IN_POWF
6399 || fcode1 == BUILT_IN_POWL)
6400 {
6401 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6402 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1,
6403 1)));
6404 if (TREE_CODE (arg11) == REAL_CST
6405 && ! TREE_CONSTANT_OVERFLOW (arg11)
6406 && operand_equal_p (arg0, arg10, 0))
6407 {
6408 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6409 REAL_VALUE_TYPE c;
6410 tree arg, arglist;
6411
6412 c = TREE_REAL_CST (arg11);
6413 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6414 arg = build_real (type, c);
6415 arglist = build_tree_list (NULL_TREE, arg);
6416 arglist = tree_cons (NULL_TREE, arg0, arglist);
6417 return build_function_call_expr (powfn, arglist);
6418 }
6419 }
6420
6421 /* Optimize pow(x,c)*x as pow(x,c+1). */
6422 if (fcode0 == BUILT_IN_POW
6423 || fcode0 == BUILT_IN_POWF
6424 || fcode0 == BUILT_IN_POWL)
6425 {
6426 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6427 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0,
6428 1)));
6429 if (TREE_CODE (arg01) == REAL_CST
6430 && ! TREE_CONSTANT_OVERFLOW (arg01)
6431 && operand_equal_p (arg1, arg00, 0))
6432 {
6433 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6434 REAL_VALUE_TYPE c;
6435 tree arg, arglist;
6436
6437 c = TREE_REAL_CST (arg01);
6438 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
6439 arg = build_real (type, c);
6440 arglist = build_tree_list (NULL_TREE, arg);
6441 arglist = tree_cons (NULL_TREE, arg1, arglist);
6442 return build_function_call_expr (powfn, arglist);
6443 }
6444 }
6445
6446 /* Optimize x*x as pow(x,2.0), which is expanded as x*x. */
6447 if (! optimize_size
6448 && operand_equal_p (arg0, arg1, 0))
6449 {
6450 tree powfn;
6451
6452 if (type == double_type_node)
6453 powfn = implicit_built_in_decls[BUILT_IN_POW];
6454 else if (type == float_type_node)
6455 powfn = implicit_built_in_decls[BUILT_IN_POWF];
6456 else if (type == long_double_type_node)
6457 powfn = implicit_built_in_decls[BUILT_IN_POWL];
6458 else
6459 powfn = NULL_TREE;
6460
6461 if (powfn)
6462 {
6463 tree arg = build_real (type, dconst2);
6464 tree arglist = build_tree_list (NULL_TREE, arg);
6465 arglist = tree_cons (NULL_TREE, arg0, arglist);
6466 return build_function_call_expr (powfn, arglist);
6467 }
6468 }
6469 }
6470 }
6471 goto associate;
6472
6473 case BIT_IOR_EXPR:
6474 bit_ior:
6475 if (integer_all_onesp (arg1))
6476 return omit_one_operand (type, arg1, arg0);
6477 if (integer_zerop (arg1))
6478 return non_lvalue (convert (type, arg0));
6479 t1 = distribute_bit_expr (code, type, arg0, arg1);
6480 if (t1 != NULL_TREE)
6481 return t1;
6482
6483 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
6484
6485 This results in more efficient code for machines without a NAND
6486 instruction. Combine will canonicalize to the first form
6487 which will allow use of NAND instructions provided by the
6488 backend if they exist. */
6489 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6490 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6491 {
6492 return fold (build1 (BIT_NOT_EXPR, type,
6493 build (BIT_AND_EXPR, type,
6494 TREE_OPERAND (arg0, 0),
6495 TREE_OPERAND (arg1, 0))));
6496 }
6497
6498 /* See if this can be simplified into a rotate first. If that
6499 is unsuccessful, continue in the association code. */
6500 goto bit_rotate;
6501
6502 case BIT_XOR_EXPR:
6503 if (integer_zerop (arg1))
6504 return non_lvalue (convert (type, arg0));
6505 if (integer_all_onesp (arg1))
6506 return fold (build1 (BIT_NOT_EXPR, type, arg0));
6507
6508 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
6509 with a constant, and the two constants have no bits in common,
6510 we should treat this as a BIT_IOR_EXPR since this may produce more
6511 simplifications. */
6512 if (TREE_CODE (arg0) == BIT_AND_EXPR
6513 && TREE_CODE (arg1) == BIT_AND_EXPR
6514 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6515 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
6516 && integer_zerop (const_binop (BIT_AND_EXPR,
6517 TREE_OPERAND (arg0, 1),
6518 TREE_OPERAND (arg1, 1), 0)))
6519 {
6520 code = BIT_IOR_EXPR;
6521 goto bit_ior;
6522 }
6523
6524 /* See if this can be simplified into a rotate first. If that
6525 is unsuccessful, continue in the association code. */
6526 goto bit_rotate;
6527
6528 case BIT_AND_EXPR:
6529 if (integer_all_onesp (arg1))
6530 return non_lvalue (convert (type, arg0));
6531 if (integer_zerop (arg1))
6532 return omit_one_operand (type, arg1, arg0);
6533 t1 = distribute_bit_expr (code, type, arg0, arg1);
6534 if (t1 != NULL_TREE)
6535 return t1;
6536 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
6537 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
6538 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
6539 {
6540 unsigned int prec
6541 = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
6542
6543 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
6544 && (~TREE_INT_CST_LOW (arg1)
6545 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
6546 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
6547 }
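/* For example, with an unsigned char c, "(int) c & 0377" becomes
   just "(int) c", since the zero-extension already clears every bit
   outside the low eight.  */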
6548
6549 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
6550
6551 This results in more efficient code for machines without a NOR
6552 instruction. Combine will canonicalize to the first form
6553 which will allow use of NOR instructions provided by the
6554 backend if they exist. */
6555 if (TREE_CODE (arg0) == BIT_NOT_EXPR
6556 && TREE_CODE (arg1) == BIT_NOT_EXPR)
6557 {
6558 return fold (build1 (BIT_NOT_EXPR, type,
6559 build (BIT_IOR_EXPR, type,
6560 TREE_OPERAND (arg0, 0),
6561 TREE_OPERAND (arg1, 0))));
6562 }
6563
6564 goto associate;
6565
6566 case RDIV_EXPR:
6567 /* Don't touch a floating-point divide by zero unless the mode
6568 of the constant can represent infinity. */
6569 if (TREE_CODE (arg1) == REAL_CST
6570 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
6571 && real_zerop (arg1))
6572 return t;
6573
6574 /* (-A) / (-B) -> A / B */
6575 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
6576 return fold (build (RDIV_EXPR, type,
6577 TREE_OPERAND (arg0, 0),
6578 negate_expr (arg1)));
6579 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
6580 return fold (build (RDIV_EXPR, type,
6581 negate_expr (arg0),
6582 TREE_OPERAND (arg1, 0)));
6583
6584 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
6585 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6586 && real_onep (arg1))
6587 return non_lvalue (convert (type, arg0));
6588
6589 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
6590 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
6591 && real_minus_onep (arg1))
6592 return non_lvalue (convert (type, negate_expr (arg0)));
6593
6594 /* If ARG1 is a constant, we can convert this to a multiply by the
6595 reciprocal. This does not have the same rounding properties,
6596 so only do this if -funsafe-math-optimizations. We can actually
6597 always safely do it if ARG1 is a power of two, but it's hard to
6598 tell if it is or not in a portable manner. */
6599 if (TREE_CODE (arg1) == REAL_CST)
6600 {
6601 if (flag_unsafe_math_optimizations
6602 && 0 != (tem = const_binop (code, build_real (type, dconst1),
6603 arg1, 0)))
6604 return fold (build (MULT_EXPR, type, arg0, tem));
6605 /* Find the reciprocal if optimizing and the result is exact. */
6606 if (optimize)
6607 {
6608 REAL_VALUE_TYPE r;
6609 r = TREE_REAL_CST (arg1);
6610 if (exact_real_inverse (TYPE_MODE(TREE_TYPE(arg0)), &r))
6611 {
6612 tem = build_real (type, r);
6613 return fold (build (MULT_EXPR, type, arg0, tem));
6614 }
6615 }
6616 }
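/* For example, "x / 2.0" becomes "x * 0.5" whenever we are
   optimizing, because 0.5 is exactly representable, while "x / 3.0"
   becomes "x * (1.0/3.0)" only under -funsafe-math-optimizations.  */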
6617 /* Convert A/B/C to A/(B*C). */
6618 if (flag_unsafe_math_optimizations
6619 && TREE_CODE (arg0) == RDIV_EXPR)
6620 return fold (build (RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
6621 fold (build (MULT_EXPR, type,
6622 TREE_OPERAND (arg0, 1), arg1))));
6623
6624 /* Convert A/(B/C) to (A/B)*C. */
6625 if (flag_unsafe_math_optimizations
6626 && TREE_CODE (arg1) == RDIV_EXPR)
6627 return fold (build (MULT_EXPR, type,
6628 fold (build (RDIV_EXPR, type, arg0,
6629 TREE_OPERAND (arg1, 0))),
6630 TREE_OPERAND (arg1, 1)));
6631
6632 /* Convert C1/(X*C2) into (C1/C2)/X. */
6633 if (flag_unsafe_math_optimizations
6634 && TREE_CODE (arg1) == MULT_EXPR
6635 && TREE_CODE (arg0) == REAL_CST
6636 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
6637 {
6638 tree tem = const_binop (RDIV_EXPR, arg0,
6639 TREE_OPERAND (arg1, 1), 0);
6640 if (tem)
6641 return fold (build (RDIV_EXPR, type, tem,
6642 TREE_OPERAND (arg1, 0)));
6643 }
6644
6645 if (flag_unsafe_math_optimizations)
6646 {
6647 enum built_in_function fcode = builtin_mathfn_code (arg1);
6648 /* Optimize x/expN(y) into x*expN(-y). */
6649 if (fcode == BUILT_IN_EXP
6650 || fcode == BUILT_IN_EXPF
6651 || fcode == BUILT_IN_EXPL
6652 || fcode == BUILT_IN_EXP2
6653 || fcode == BUILT_IN_EXP2F
6654 || fcode == BUILT_IN_EXP2L
6655 || fcode == BUILT_IN_EXP10
6656 || fcode == BUILT_IN_EXP10F
6657 || fcode == BUILT_IN_EXP10L
6658 || fcode == BUILT_IN_POW10
6659 || fcode == BUILT_IN_POW10F
6660 || fcode == BUILT_IN_POW10L)
6661 {
6662 tree expfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6663 tree arg = build1 (NEGATE_EXPR, type,
6664 TREE_VALUE (TREE_OPERAND (arg1, 1)));
6665 tree arglist = build_tree_list (NULL_TREE, fold (arg));
6666 arg1 = build_function_call_expr (expfn, arglist);
6667 return fold (build (MULT_EXPR, type, arg0, arg1));
6668 }
6669
6670 /* Optimize x/pow(y,z) into x*pow(y,-z). */
6671 if (fcode == BUILT_IN_POW
6672 || fcode == BUILT_IN_POWF
6673 || fcode == BUILT_IN_POWL)
6674 {
6675 tree powfn = TREE_OPERAND (TREE_OPERAND (arg1, 0), 0);
6676 tree arg10 = TREE_VALUE (TREE_OPERAND (arg1, 1));
6677 tree arg11 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg1, 1)));
6678 tree neg11 = fold (build1 (NEGATE_EXPR, type, arg11));
6679 tree arglist = tree_cons(NULL_TREE, arg10,
6680 build_tree_list (NULL_TREE, neg11));
6681 arg1 = build_function_call_expr (powfn, arglist);
6682 return fold (build (MULT_EXPR, type, arg0, arg1));
6683 }
6684 }
6685
6686 if (flag_unsafe_math_optimizations)
6687 {
6688 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
6689 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
6690
6691 /* Optimize sin(x)/cos(x) as tan(x). */
6692 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
6693 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
6694 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
6695 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6696 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6697 {
6698 tree tanfn;
6699
6700 if (fcode0 == BUILT_IN_SIN)
6701 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6702 else if (fcode0 == BUILT_IN_SINF)
6703 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6704 else if (fcode0 == BUILT_IN_SINL)
6705 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6706 else
6707 tanfn = NULL_TREE;
6708
6709 if (tanfn != NULL_TREE)
6710 return build_function_call_expr (tanfn,
6711 TREE_OPERAND (arg0, 1));
6712 }
6713
6714 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
6715 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
6716 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
6717 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
6718 && operand_equal_p (TREE_VALUE (TREE_OPERAND (arg0, 1)),
6719 TREE_VALUE (TREE_OPERAND (arg1, 1)), 0))
6720 {
6721 tree tanfn;
6722
6723 if (fcode0 == BUILT_IN_COS)
6724 tanfn = implicit_built_in_decls[BUILT_IN_TAN];
6725 else if (fcode0 == BUILT_IN_COSF)
6726 tanfn = implicit_built_in_decls[BUILT_IN_TANF];
6727 else if (fcode0 == BUILT_IN_COSL)
6728 tanfn = implicit_built_in_decls[BUILT_IN_TANL];
6729 else
6730 tanfn = NULL_TREE;
6731
6732 if (tanfn != NULL_TREE)
6733 {
6734 tree tmp = TREE_OPERAND (arg0, 1);
6735 tmp = build_function_call_expr (tanfn, tmp);
6736 return fold (build (RDIV_EXPR, type,
6737 build_real (type, dconst1),
6738 tmp));
6739 }
6740 }
6741
6742 /* Optimize pow(x,c)/x as pow(x,c-1). */
6743 if (fcode0 == BUILT_IN_POW
6744 || fcode0 == BUILT_IN_POWF
6745 || fcode0 == BUILT_IN_POWL)
6746 {
6747 tree arg00 = TREE_VALUE (TREE_OPERAND (arg0, 1));
6748 tree arg01 = TREE_VALUE (TREE_CHAIN (TREE_OPERAND (arg0, 1)));
6749 if (TREE_CODE (arg01) == REAL_CST
6750 && ! TREE_CONSTANT_OVERFLOW (arg01)
6751 && operand_equal_p (arg1, arg00, 0))
6752 {
6753 tree powfn = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
6754 REAL_VALUE_TYPE c;
6755 tree arg, arglist;
6756
6757 c = TREE_REAL_CST (arg01);
6758 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
6759 arg = build_real (type, c);
6760 arglist = build_tree_list (NULL_TREE, arg);
6761 arglist = tree_cons (NULL_TREE, arg1, arglist);
6762 return build_function_call_expr (powfn, arglist);
6763 }
6764 }
6765 }
6766 goto binary;
6767
6768 case TRUNC_DIV_EXPR:
6769 case ROUND_DIV_EXPR:
6770 case FLOOR_DIV_EXPR:
6771 case CEIL_DIV_EXPR:
6772 case EXACT_DIV_EXPR:
6773 if (integer_onep (arg1))
6774 return non_lvalue (convert (type, arg0));
6775 if (integer_zerop (arg1))
6776 return t;
6777
6778 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
6779 operation, EXACT_DIV_EXPR.
6780
6781 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
6782 At one time others generated faster code; it's not clear if they do
6783 after the last round of changes to the DIV code in expmed.c. */
6784 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
6785 && multiple_of_p (type, arg0, arg1))
6786 return fold (build (EXACT_DIV_EXPR, type, arg0, arg1));
6787
6788 if (TREE_CODE (arg1) == INTEGER_CST
6789 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6790 code, NULL_TREE)))
6791 return convert (type, tem);
6792
6793 goto binary;
6794
6795 case CEIL_MOD_EXPR:
6796 case FLOOR_MOD_EXPR:
6797 case ROUND_MOD_EXPR:
6798 case TRUNC_MOD_EXPR:
6799 if (integer_onep (arg1))
6800 return omit_one_operand (type, integer_zero_node, arg0);
6801 if (integer_zerop (arg1))
6802 return t;
6803
6804 if (TREE_CODE (arg1) == INTEGER_CST
6805 && 0 != (tem = extract_muldiv (TREE_OPERAND (t, 0), arg1,
6806 code, NULL_TREE)))
6807 return convert (type, tem);
6808
6809 goto binary;
6810
6811 case LROTATE_EXPR:
6812 case RROTATE_EXPR:
6813 if (integer_all_onesp (arg0))
6814 return omit_one_operand (type, arg0, arg1);
6815 goto shift;
6816
6817 case RSHIFT_EXPR:
6818 /* Optimize -1 >> x for arithmetic right shifts. */
6819 if (integer_all_onesp (arg0) && ! TREE_UNSIGNED (type))
6820 return omit_one_operand (type, arg0, arg1);
6821 /* ... fall through ... */
6822
6823 case LSHIFT_EXPR:
6824 shift:
6825 if (integer_zerop (arg1))
6826 return non_lvalue (convert (type, arg0));
6827 if (integer_zerop (arg0))
6828 return omit_one_operand (type, arg0, arg1);
6829
6830 /* Since a negative shift count is not well-defined,
6831 don't try to compute it in the compiler. */
6832 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
6833 return t;
6834 /* Rewrite an LROTATE_EXPR by a constant into an
6835 RROTATE_EXPR by a new constant. */
6836 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
6837 {
6838 tree tem = build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0);
6839 tem = convert (TREE_TYPE (arg1), tem);
6840 tem = const_binop (MINUS_EXPR, tem, arg1, 0);
6841 return fold (build (RROTATE_EXPR, type, arg0, tem));
6842 }
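/* Worked example (illustrative): on a 32-bit type, a left rotate by 5
becomes a right rotate by 32 - 5 = 27, so the remaining rules only
have to recognize one rotate direction. */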
6843
6844 /* If we have a rotate of a bit operation with the rotate count and
6845 the second operand of the bit operation both constant,
6846 permute the two operations. */
6847 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6848 && (TREE_CODE (arg0) == BIT_AND_EXPR
6849 || TREE_CODE (arg0) == BIT_IOR_EXPR
6850 || TREE_CODE (arg0) == BIT_XOR_EXPR)
6851 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
6852 return fold (build (TREE_CODE (arg0), type,
6853 fold (build (code, type,
6854 TREE_OPERAND (arg0, 0), arg1)),
6855 fold (build (code, type,
6856 TREE_OPERAND (arg0, 1), arg1))));
6857
6858 /* Two consecutive rotates adding up to the width of the mode can
6859 be ignored. */
6860 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
6861 && TREE_CODE (arg0) == RROTATE_EXPR
6862 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
6863 && TREE_INT_CST_HIGH (arg1) == 0
6864 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
6865 && ((TREE_INT_CST_LOW (arg1)
6866 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
6867 == (unsigned int) GET_MODE_BITSIZE (TYPE_MODE (type))))
6868 return TREE_OPERAND (arg0, 0);
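/* Illustrative case: on a 32-bit type, (x r>> 10) r>> 22 rotates by
32 bits in total, which is the identity, so the whole expression folds
back to x. */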
6869
6870 goto binary;
6871
6872 case MIN_EXPR:
6873 if (operand_equal_p (arg0, arg1, 0))
6874 return omit_one_operand (type, arg0, arg1);
6875 if (INTEGRAL_TYPE_P (type)
6876 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
6877 return omit_one_operand (type, arg1, arg0);
6878 goto associate;
6879
6880 case MAX_EXPR:
6881 if (operand_equal_p (arg0, arg1, 0))
6882 return omit_one_operand (type, arg0, arg1);
6883 if (INTEGRAL_TYPE_P (type)
6884 && TYPE_MAX_VALUE (type)
6885 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
6886 return omit_one_operand (type, arg1, arg0);
6887 goto associate;
6888
6889 case TRUTH_NOT_EXPR:
6890 /* Note that the operand of this must be an int
6891 and its value must be 0 or 1.
6892 ("true" is a fixed value perhaps depending on the language,
6893 but we don't handle values other than 1 correctly yet.) */
6894 tem = invert_truthvalue (arg0);
6895 /* Avoid infinite recursion. */
6896 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
6897 {
6898 tem = fold_single_bit_test (code, arg0, arg1, type);
6899 if (tem)
6900 return tem;
6901 return t;
6902 }
6903 return convert (type, tem);
6904
6905 case TRUTH_ANDIF_EXPR:
6906 /* Note that the operands of this must be ints
6907 and their values must be 0 or 1.
6908 ("true" is a fixed value perhaps depending on the language.) */
6909 /* If first arg is constant zero, return it. */
6910 if (integer_zerop (arg0))
6911 return convert (type, arg0);
6912 case TRUTH_AND_EXPR:
6913 /* If either arg is constant true, drop it. */
6914 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6915 return non_lvalue (convert (type, arg1));
6916 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
6917 /* Preserve sequence points. */
6918 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
6919 return non_lvalue (convert (type, arg0));
6920 /* If second arg is constant zero, result is zero, but first arg
6921 must be evaluated. */
6922 if (integer_zerop (arg1))
6923 return omit_one_operand (type, arg1, arg0);
6924 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
6925 case will be handled here. */
6926 if (integer_zerop (arg0))
6927 return omit_one_operand (type, arg0, arg1);
6928
6929 truth_andor:
6930 /* We only do these simplifications if we are optimizing. */
6931 if (!optimize)
6932 return t;
6933
6934 /* Check for things like (A || B) && (A || C). We can convert this
6935 to A || (B && C). Note that either operator can be any of the four
6936 truth and/or operations and the transformation will still be
6937 valid. Also note that we only care about order for the
6938 ANDIF and ORIF operators: if B contains side effects,
6939 evaluating it can change the truth-value of A. */
6940 if (TREE_CODE (arg0) == TREE_CODE (arg1)
6941 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
6942 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
6943 || TREE_CODE (arg0) == TRUTH_AND_EXPR
6944 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
6945 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
6946 {
6947 tree a00 = TREE_OPERAND (arg0, 0);
6948 tree a01 = TREE_OPERAND (arg0, 1);
6949 tree a10 = TREE_OPERAND (arg1, 0);
6950 tree a11 = TREE_OPERAND (arg1, 1);
6951 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
6952 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
6953 && (code == TRUTH_AND_EXPR
6954 || code == TRUTH_OR_EXPR));
6955
6956 if (operand_equal_p (a00, a10, 0))
6957 return fold (build (TREE_CODE (arg0), type, a00,
6958 fold (build (code, type, a01, a11))));
6959 else if (commutative && operand_equal_p (a00, a11, 0))
6960 return fold (build (TREE_CODE (arg0), type, a00,
6961 fold (build (code, type, a01, a10))));
6962 else if (commutative && operand_equal_p (a01, a10, 0))
6963 return fold (build (TREE_CODE (arg0), type, a01,
6964 fold (build (code, type, a00, a11))));
6965
6966 /* This case is tricky because we must either have commutative
6967 operators or else A10 must not have side-effects. */
6968
6969 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
6970 && operand_equal_p (a01, a11, 0))
6971 return fold (build (TREE_CODE (arg0), type,
6972 fold (build (code, type, a00, a10)),
6973 a01));
6974 }
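/* Example of the distribution above (added): (a || b) && (a || c)
folds to a || (b && c), evaluating A only once; it is attempted only
when the inner operand B has no side effects. */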
6975
6976 /* See if we can build a range comparison. */
6977 if (0 != (tem = fold_range_test (t)))
6978 return tem;
6979
6980 /* Check for the possibility of merging component references. If our
6981 lhs is another similar operation, try to merge its rhs with our
6982 rhs. Then try to merge our lhs and rhs. */
6983 if (TREE_CODE (arg0) == code
6984 && 0 != (tem = fold_truthop (code, type,
6985 TREE_OPERAND (arg0, 1), arg1)))
6986 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
6987
6988 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
6989 return tem;
6990
6991 return t;
6992
6993 case TRUTH_ORIF_EXPR:
6994 /* Note that the operands of this must be ints
6995 and their values must be 0 or 1.
6996 ("true" is a fixed value perhaps depending on the language.) */
6997 /* If first arg is constant true, return it. */
6998 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
6999 return convert (type, arg0);
7000 case TRUTH_OR_EXPR:
7001 /* If either arg is constant zero, drop it. */
7002 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
7003 return non_lvalue (convert (type, arg1));
7004 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
7005 /* Preserve sequence points. */
7006 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
7007 return non_lvalue (convert (type, arg0));
7008 /* If second arg is constant true, result is true, but we must
7009 evaluate first arg. */
7010 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
7011 return omit_one_operand (type, arg1, arg0);
7012 /* Likewise for first arg, but note this only occurs here for
7013 TRUTH_OR_EXPR. */
7014 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
7015 return omit_one_operand (type, arg0, arg1);
7016 goto truth_andor;
7017
7018 case TRUTH_XOR_EXPR:
7019 /* If either arg is constant zero, drop it. */
7020 if (integer_zerop (arg0))
7021 return non_lvalue (convert (type, arg1));
7022 if (integer_zerop (arg1))
7023 return non_lvalue (convert (type, arg0));
7024 /* If either arg is constant true, this is a logical inversion. */
7025 if (integer_onep (arg0))
7026 return non_lvalue (convert (type, invert_truthvalue (arg1)));
7027 if (integer_onep (arg1))
7028 return non_lvalue (convert (type, invert_truthvalue (arg0)));
7029 return t;
7030
7031 case EQ_EXPR:
7032 case NE_EXPR:
7033 case LT_EXPR:
7034 case GT_EXPR:
7035 case LE_EXPR:
7036 case GE_EXPR:
7037 /* If one arg is a real or integer constant, put it last. */
7038 if (tree_swap_operands_p (arg0, arg1, true))
7039 return fold (build (swap_tree_comparison (code), type, arg1, arg0));
7040
7041 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
7042 {
7043 tree targ0 = strip_float_extensions (arg0);
7044 tree targ1 = strip_float_extensions (arg1);
7045 tree newtype = TREE_TYPE (targ0);
7046
7047 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
7048 newtype = TREE_TYPE (targ1);
7049
7050 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
7051 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
7052 return fold (build (code, type, convert (newtype, targ0),
7053 convert (newtype, targ1)));
7054
7055 /* (-a) CMP (-b) -> b CMP a */
7056 if (TREE_CODE (arg0) == NEGATE_EXPR
7057 && TREE_CODE (arg1) == NEGATE_EXPR)
7058 return fold (build (code, type, TREE_OPERAND (arg1, 0),
7059 TREE_OPERAND (arg0, 0)));
7060
7061 if (TREE_CODE (arg1) == REAL_CST)
7062 {
7063 REAL_VALUE_TYPE cst;
7064 cst = TREE_REAL_CST (arg1);
7065
7066 /* (-a) CMP CST -> a swap(CMP) (-CST) */
7067 if (TREE_CODE (arg0) == NEGATE_EXPR)
7068 return
7069 fold (build (swap_tree_comparison (code), type,
7070 TREE_OPERAND (arg0, 0),
7071 build_real (TREE_TYPE (arg1),
7072 REAL_VALUE_NEGATE (cst))));
7073
7074 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
7075 /* a CMP (-0) -> a CMP 0 */
7076 if (REAL_VALUE_MINUS_ZERO (cst))
7077 return fold (build (code, type, arg0,
7078 build_real (TREE_TYPE (arg1), dconst0)));
7079
7080 /* x != NaN is always true, other ops are always false. */
7081 if (REAL_VALUE_ISNAN (cst)
7082 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
7083 {
7084 t = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
7085 return omit_one_operand (type, convert (type, t), arg0);
7086 }
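/* Concrete instance (illustrative): if arg1 is a quiet NaN constant,
x == arg1, x < arg1, etc. fold to 0 and x != arg1 folds to 1, while
omit_one_operand still evaluates x for its side effects. */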
7087
7088 /* Fold comparisons against infinity. */
7089 if (REAL_VALUE_ISINF (cst))
7090 {
7091 tem = fold_inf_compare (code, type, arg0, arg1);
7092 if (tem != NULL_TREE)
7093 return tem;
7094 }
7095 }
7096
7097 /* If this is a comparison of a real constant with a PLUS_EXPR
7098 or a MINUS_EXPR of a real constant, we can convert it into a
7099 comparison with a revised real constant, provided that
7100 unsafe_math_optimizations are enabled and no overflow occurs. */
7101 if (flag_unsafe_math_optimizations
7102 && TREE_CODE (arg1) == REAL_CST
7103 && (TREE_CODE (arg0) == PLUS_EXPR
7104 || TREE_CODE (arg0) == MINUS_EXPR)
7105 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
7106 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7107 ? MINUS_EXPR : PLUS_EXPR,
7108 arg1, TREE_OPERAND (arg0, 1), 0))
7109 && ! TREE_CONSTANT_OVERFLOW (tem))
7110 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
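/* Hedged example: with -funsafe-math-optimizations,
x + 2.0 < 10.0 becomes x < 8.0
exact in this instance, but guarded because 10.0 - 2.0 could round or
overflow for other constants. */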
7111
7112 /* Likewise, we can simplify a comparison of a real constant with
7113 a MINUS_EXPR whose first operand is also a real constant, i.e.
7114 (c1 - x) < c2 becomes x > c1-c2. */
7115 if (flag_unsafe_math_optimizations
7116 && TREE_CODE (arg1) == REAL_CST
7117 && TREE_CODE (arg0) == MINUS_EXPR
7118 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
7119 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
7120 arg1, 0))
7121 && ! TREE_CONSTANT_OVERFLOW (tem))
7122 return fold (build (swap_tree_comparison (code), type,
7123 TREE_OPERAND (arg0, 1), tem));
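/* Likewise, illustration only:
10.0 - x < 2.0 becomes x > 8.0
with the comparison direction swapped because x is effectively negated
on the left-hand side. */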
7124
7125 /* Fold comparisons against built-in math functions. */
7126 if (TREE_CODE (arg1) == REAL_CST
7127 && flag_unsafe_math_optimizations
7128 && ! flag_errno_math)
7129 {
7130 enum built_in_function fcode = builtin_mathfn_code (arg0);
7131
7132 if (fcode != END_BUILTINS)
7133 {
7134 tem = fold_mathfn_compare (fcode, code, type, arg0, arg1);
7135 if (tem != NULL_TREE)
7136 return tem;
7137 }
7138 }
7139 }
7140
7141 /* Convert foo++ == CONST into ++foo == CONST + INCR.
7142 First, see if one arg is constant; find the constant arg
7143 and the other one. */
7144 {
7145 tree constop = 0, varop = NULL_TREE;
7146 int constopnum = -1;
7147
7148 if (TREE_CONSTANT (arg1))
7149 constopnum = 1, constop = arg1, varop = arg0;
7150 if (TREE_CONSTANT (arg0))
7151 constopnum = 0, constop = arg0, varop = arg1;
7152
7153 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
7154 {
7155 /* This optimization is invalid for ordered comparisons
7156 if CONST+INCR overflows or if foo+incr might overflow.
7157 It is likewise invalid for floating point due to rounding.
7158 For pointer types we assume overflow doesn't happen. */
7159 if (POINTER_TYPE_P (TREE_TYPE (varop))
7160 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
7161 && (code == EQ_EXPR || code == NE_EXPR)))
7162 {
7163 tree newconst
7164 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
7165 constop, TREE_OPERAND (varop, 1)));
7166
7167 /* Do not overwrite the current varop to be a preincrement,
7168 create a new node so that we won't confuse our caller who
7169 might create trees and throw them away, reusing the
7170 arguments that they passed to build. This shows up in
7171 the THEN or ELSE parts of ?: being postincrements. */
7172 varop = build (PREINCREMENT_EXPR, TREE_TYPE (varop),
7173 TREE_OPERAND (varop, 0),
7174 TREE_OPERAND (varop, 1));
7175
7176 /* If VAROP is a reference to a bitfield, we must mask
7177 the constant by the width of the field. */
7178 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7179 && DECL_BIT_FIELD (TREE_OPERAND
7180 (TREE_OPERAND (varop, 0), 1)))
7181 {
7182 int size
7183 = TREE_INT_CST_LOW (DECL_SIZE
7184 (TREE_OPERAND
7185 (TREE_OPERAND (varop, 0), 1)));
7186 tree mask, unsigned_type;
7187 unsigned int precision;
7188 tree folded_compare;
7189
7190 /* First check whether the comparison would come out
7191 always the same. If we don't do that, we would
7192 change the meaning with the masking. */
7193 if (constopnum == 0)
7194 folded_compare = fold (build (code, type, constop,
7195 TREE_OPERAND (varop, 0)));
7196 else
7197 folded_compare = fold (build (code, type,
7198 TREE_OPERAND (varop, 0),
7199 constop));
7200 if (integer_zerop (folded_compare)
7201 || integer_onep (folded_compare))
7202 return omit_one_operand (type, folded_compare, varop);
7203
7204 unsigned_type = (*lang_hooks.types.type_for_size) (size, 1);
7205 precision = TYPE_PRECISION (unsigned_type);
7206 mask = build_int_2 (~0, ~0);
7207 TREE_TYPE (mask) = unsigned_type;
7208 force_fit_type (mask, 0);
7209 mask = const_binop (RSHIFT_EXPR, mask,
7210 size_int (precision - size), 0);
7211 newconst = fold (build (BIT_AND_EXPR,
7212 TREE_TYPE (varop), newconst,
7213 convert (TREE_TYPE (varop),
7214 mask)));
7215 }
7216
7217 t = build (code, type,
7218 (constopnum == 0) ? newconst : varop,
7219 (constopnum == 1) ? newconst : varop);
7220 return t;
7221 }
7222 }
7223 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
7224 {
7225 if (POINTER_TYPE_P (TREE_TYPE (varop))
7226 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
7227 && (code == EQ_EXPR || code == NE_EXPR)))
7228 {
7229 tree newconst
7230 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
7231 constop, TREE_OPERAND (varop, 1)));
7232
7233 /* Do not overwrite the current varop to be a predecrement,
7234 create a new node so that we won't confuse our caller who
7235 might create trees and throw them away, reusing the
7236 arguments that they passed to build. This shows up in
7237 the THEN or ELSE parts of ?: being postdecrements. */
7238 varop = build (PREDECREMENT_EXPR, TREE_TYPE (varop),
7239 TREE_OPERAND (varop, 0),
7240 TREE_OPERAND (varop, 1));
7241
7242 if (TREE_CODE (TREE_OPERAND (varop, 0)) == COMPONENT_REF
7243 && DECL_BIT_FIELD (TREE_OPERAND
7244 (TREE_OPERAND (varop, 0), 1)))
7245 {
7246 int size
7247 = TREE_INT_CST_LOW (DECL_SIZE
7248 (TREE_OPERAND
7249 (TREE_OPERAND (varop, 0), 1)));
7250 tree mask, unsigned_type;
7251 unsigned int precision;
7252 tree folded_compare;
7253
7254 if (constopnum == 0)
7255 folded_compare = fold (build (code, type, constop,
7256 TREE_OPERAND (varop, 0)));
7257 else
7258 folded_compare = fold (build (code, type,
7259 TREE_OPERAND (varop, 0),
7260 constop));
7261 if (integer_zerop (folded_compare)
7262 || integer_onep (folded_compare))
7263 return omit_one_operand (type, folded_compare, varop);
7264
7265 unsigned_type = (*lang_hooks.types.type_for_size) (size, 1);
7266 precision = TYPE_PRECISION (unsigned_type);
7267 mask = build_int_2 (~0, ~0);
7268 TREE_TYPE (mask) = unsigned_type;
7269 force_fit_type (mask, 0);
7270 mask = const_binop (RSHIFT_EXPR, mask,
7271 size_int (precision - size), 0);
7272 newconst = fold (build (BIT_AND_EXPR,
7273 TREE_TYPE (varop), newconst,
7274 convert (TREE_TYPE (varop),
7275 mask)));
7276 }
7277
7278 t = build (code, type,
7279 (constopnum == 0) ? newconst : varop,
7280 (constopnum == 1) ? newconst : varop);
7281 return t;
7282 }
7283 }
7284 }
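/* Example of the rewrite above (illustrative): for integral i,
if (i++ == 9) ...
becomes
if (++i == 10) ...
so the old value of i need not be kept live just for the comparison. */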
7285
7286 /* Change X >= C to X > (C - 1) and X < C to X <= (C - 1) if C > 0.
7287 This transformation affects the cases which are handled in later
7288 optimizations involving comparisons with non-negative constants. */
7289 if (TREE_CODE (arg1) == INTEGER_CST
7290 && TREE_CODE (arg0) != INTEGER_CST
7291 && tree_int_cst_sgn (arg1) > 0)
7292 {
7293 switch (code)
7294 {
7295 case GE_EXPR:
7296 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7297 return fold (build (GT_EXPR, type, arg0, arg1));
7298
7299 case LT_EXPR:
7300 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7301 return fold (build (LE_EXPR, type, arg0, arg1));
7302
7303 default:
7304 break;
7305 }
7306 }
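/* For instance (added example): with C > 0,
x >= 5 becomes x > 4
x < 5 becomes x <= 4
canonicalizing toward GT/LE so the boundary checks below have fewer
forms to recognize. */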
7307
7308 /* Comparisons with the highest or lowest possible integer of
7309 the specified size will have known values. */
7310 {
7311 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg1)));
7312
7313 if (TREE_CODE (arg1) == INTEGER_CST
7314 && ! TREE_CONSTANT_OVERFLOW (arg1)
7315 && width <= HOST_BITS_PER_WIDE_INT
7316 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
7317 || POINTER_TYPE_P (TREE_TYPE (arg1))))
7318 {
7319 unsigned HOST_WIDE_INT signed_max;
7320 unsigned HOST_WIDE_INT max, min;
7321
7322 signed_max = ((unsigned HOST_WIDE_INT) 1 << (width - 1)) - 1;
7323
7324 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7325 {
7326 max = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
7327 min = 0;
7328 }
7329 else
7330 {
7331 max = signed_max;
7332 min = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
7333 }
7334
7335 if (TREE_INT_CST_HIGH (arg1) == 0
7336 && TREE_INT_CST_LOW (arg1) == max)
7337 switch (code)
7338 {
7339 case GT_EXPR:
7340 return omit_one_operand (type,
7341 convert (type, integer_zero_node),
7342 arg0);
7343 case GE_EXPR:
7344 return fold (build (EQ_EXPR, type, arg0, arg1));
7345
7346 case LE_EXPR:
7347 return omit_one_operand (type,
7348 convert (type, integer_one_node),
7349 arg0);
7350 case LT_EXPR:
7351 return fold (build (NE_EXPR, type, arg0, arg1));
7352
7353 /* The GE_EXPR and LT_EXPR cases above are not normally
7354 reached because of previous transformations. */
7355
7356 default:
7357 break;
7358 }
7359 else if (TREE_INT_CST_HIGH (arg1) == 0
7360 && TREE_INT_CST_LOW (arg1) == max - 1)
7361 switch (code)
7362 {
7363 case GT_EXPR:
7364 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7365 return fold (build (EQ_EXPR, type, arg0, arg1));
7366 case LE_EXPR:
7367 arg1 = const_binop (PLUS_EXPR, arg1, integer_one_node, 0);
7368 return fold (build (NE_EXPR, type, arg0, arg1));
7369 default:
7370 break;
7371 }
7372 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7373 && TREE_INT_CST_LOW (arg1) == min)
7374 switch (code)
7375 {
7376 case LT_EXPR:
7377 return omit_one_operand (type,
7378 convert (type, integer_zero_node),
7379 arg0);
7380 case LE_EXPR:
7381 return fold (build (EQ_EXPR, type, arg0, arg1));
7382
7383 case GE_EXPR:
7384 return omit_one_operand (type,
7385 convert (type, integer_one_node),
7386 arg0);
7387 case GT_EXPR:
7388 return fold (build (NE_EXPR, type, arg0, arg1));
7389
7390 default:
7391 break;
7392 }
7393 else if (TREE_INT_CST_HIGH (arg1) == (min ? -1 : 0)
7394 && TREE_INT_CST_LOW (arg1) == min + 1)
7395 switch (code)
7396 {
7397 case GE_EXPR:
7398 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7399 return fold (build (NE_EXPR, type, arg0, arg1));
7400 case LT_EXPR:
7401 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
7402 return fold (build (EQ_EXPR, type, arg0, arg1));
7403 default:
7404 break;
7405 }
7406
7407 else if (TREE_INT_CST_HIGH (arg1) == 0
7408 && TREE_INT_CST_LOW (arg1) == signed_max
7409 && TREE_UNSIGNED (TREE_TYPE (arg1))
7410 /* signed_type does not work on pointer types. */
7411 && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
7412 {
7413 /* The following case also applies to X < signed_max+1
7414 and X >= signed_max+1 because of previous transformations. */
7415 if (code == LE_EXPR || code == GT_EXPR)
7416 {
7417 tree st0, st1;
7418 st0 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg0));
7419 st1 = (*lang_hooks.types.signed_type) (TREE_TYPE (arg1));
7420 return fold
7421 (build (code == LE_EXPR ? GE_EXPR: LT_EXPR,
7422 type, convert (st0, arg0),
7423 convert (st1, integer_zero_node)));
7424 }
7425 }
7426 }
7427 }
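/* Concrete cases of the above (illustrative, for an 8-bit unsigned
type): x > 255 is always 0, x <= 255 is always 1, x > 254 becomes
x == 255, and x < 1 becomes x == 0. */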
7428
7429 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
7430 a MINUS_EXPR of a constant, we can convert it into a comparison with
7431 a revised constant as long as no overflow occurs. */
7432 if ((code == EQ_EXPR || code == NE_EXPR)
7433 && TREE_CODE (arg1) == INTEGER_CST
7434 && (TREE_CODE (arg0) == PLUS_EXPR
7435 || TREE_CODE (arg0) == MINUS_EXPR)
7436 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
7437 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
7438 ? MINUS_EXPR : PLUS_EXPR,
7439 arg1, TREE_OPERAND (arg0, 1), 0))
7440 && ! TREE_CONSTANT_OVERFLOW (tem))
7441 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7442
7443 /* Similarly for a NEGATE_EXPR. */
7444 else if ((code == EQ_EXPR || code == NE_EXPR)
7445 && TREE_CODE (arg0) == NEGATE_EXPR
7446 && TREE_CODE (arg1) == INTEGER_CST
7447 && 0 != (tem = negate_expr (arg1))
7448 && TREE_CODE (tem) == INTEGER_CST
7449 && ! TREE_CONSTANT_OVERFLOW (tem))
7450 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
7451
7452 /* If we have X - Y == 0, we can convert that to X == Y and similarly
7453 for !=. Don't do this for ordered comparisons due to overflow. */
7454 else if ((code == NE_EXPR || code == EQ_EXPR)
7455 && integer_zerop (arg1) && TREE_CODE (arg0) == MINUS_EXPR)
7456 return fold (build (code, type,
7457 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1)));
7458
7459 /* If we are widening one operand of an integer comparison,
7460 see if the other operand is similarly being widened. Perhaps we
7461 can do the comparison in the narrower type. */
7462 else if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
7463 && TREE_CODE (arg0) == NOP_EXPR
7464 && (tem = get_unwidened (arg0, NULL_TREE)) != arg0
7465 && (t1 = get_unwidened (arg1, TREE_TYPE (tem))) != 0
7466 && (TREE_TYPE (t1) == TREE_TYPE (tem)
7467 || (TREE_CODE (t1) == INTEGER_CST
7468 && int_fits_type_p (t1, TREE_TYPE (tem)))))
7469 return fold (build (code, type, tem, convert (TREE_TYPE (tem), t1)));
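/* Illustration (added): if c is a char widened only for the
comparison, (int) c == 97 can be done as c == (char) 97 in the
narrower type, provided the constant fits in that type. */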
7470
7471 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
7472 constant, we can simplify it. */
7473 else if (TREE_CODE (arg1) == INTEGER_CST
7474 && (TREE_CODE (arg0) == MIN_EXPR
7475 || TREE_CODE (arg0) == MAX_EXPR)
7476 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7477 return optimize_minmax_comparison (t);
7478
7479 /* If we are comparing an ABS_EXPR with a constant, we can
7480 convert all the cases into explicit comparisons, but they may
7481 well not be faster than doing the ABS and one comparison.
7482 But ABS (X) <= C is a range comparison, which becomes a subtraction
7483 and a comparison, and is probably faster. */
7484 else if (code == LE_EXPR && TREE_CODE (arg1) == INTEGER_CST
7485 && TREE_CODE (arg0) == ABS_EXPR
7486 && ! TREE_SIDE_EFFECTS (arg0)
7487 && (0 != (tem = negate_expr (arg1)))
7488 && TREE_CODE (tem) == INTEGER_CST
7489 && ! TREE_CONSTANT_OVERFLOW (tem))
7490 return fold (build (TRUTH_ANDIF_EXPR, type,
7491 build (GE_EXPR, type, TREE_OPERAND (arg0, 0), tem),
7492 build (LE_EXPR, type,
7493 TREE_OPERAND (arg0, 0), arg1)));
7494
7495 /* If this is an EQ or NE comparison with zero and ARG0 is
7496 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
7497 two operations, but the latter can be done in one less insn
7498 on machines that have only two-operand insns or on which a
7499 constant cannot be the first operand. */
7500 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
7501 && TREE_CODE (arg0) == BIT_AND_EXPR)
7502 {
7503 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
7504 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
7505 return
7506 fold (build (code, type,
7507 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7508 build (RSHIFT_EXPR,
7509 TREE_TYPE (TREE_OPERAND (arg0, 0)),
7510 TREE_OPERAND (arg0, 1),
7511 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
7512 convert (TREE_TYPE (arg0),
7513 integer_one_node)),
7514 arg1));
7515 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
7516 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
7517 return
7518 fold (build (code, type,
7519 build (BIT_AND_EXPR, TREE_TYPE (arg0),
7520 build (RSHIFT_EXPR,
7521 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7522 TREE_OPERAND (arg0, 0),
7523 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
7524 convert (TREE_TYPE (arg0),
7525 integer_one_node)),
7526 arg1));
7527 }
7528
7529 /* If this is an NE or EQ comparison of zero against the result of a
7530 signed MOD operation whose second operand is a power of 2, make
7531 the MOD operation unsigned since it is simpler and equivalent. */
7532 if ((code == NE_EXPR || code == EQ_EXPR)
7533 && integer_zerop (arg1)
7534 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
7535 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
7536 || TREE_CODE (arg0) == CEIL_MOD_EXPR
7537 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
7538 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
7539 && integer_pow2p (TREE_OPERAND (arg0, 1)))
7540 {
7541 tree newtype = (*lang_hooks.types.unsigned_type) (TREE_TYPE (arg0));
7542 tree newmod = build (TREE_CODE (arg0), newtype,
7543 convert (newtype, TREE_OPERAND (arg0, 0)),
7544 convert (newtype, TREE_OPERAND (arg0, 1)));
7545
7546 return build (code, type, newmod, convert (newtype, arg1));
7547 }
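/* Sketch (illustrative): for signed x,
(x % 4) == 0
is rewritten as the unsigned
((unsigned) x % 4U) == 0
which is equivalent for a power-of-2 divisor compared against zero and
lets the backend use a simple mask. */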
7548
7549 /* If this is an NE comparison of zero with an AND of one, remove the
7550 comparison since the AND will give the correct value. */
7551 if (code == NE_EXPR && integer_zerop (arg1)
7552 && TREE_CODE (arg0) == BIT_AND_EXPR
7553 && integer_onep (TREE_OPERAND (arg0, 1)))
7554 return convert (type, arg0);
7555
7556 /* If we have (A & C) == C where C is a power of 2, convert this into
7557 (A & C) != 0. Similarly for NE_EXPR. */
7558 if ((code == EQ_EXPR || code == NE_EXPR)
7559 && TREE_CODE (arg0) == BIT_AND_EXPR
7560 && integer_pow2p (TREE_OPERAND (arg0, 1))
7561 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
7562 return fold (build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
7563 arg0, integer_zero_node));
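/* Example (added): with C the power of two 8,
(a & 8) == 8 becomes (a & 8) != 0
since the masked value can only be 0 or 8, and the != 0 form maps onto
a single bit test. */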
7564
7565 /* If we have (A & C) != 0 or (A & C) == 0 and C is a power of
7566 2, then fold the expression into shifts and logical operations. */
7567 tem = fold_single_bit_test (code, arg0, arg1, type);
7568 if (tem)
7569 return tem;
7570
7571 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
7572 Similarly for NE_EXPR. */
7573 if ((code == EQ_EXPR || code == NE_EXPR)
7574 && TREE_CODE (arg0) == BIT_AND_EXPR
7575 && TREE_CODE (arg1) == INTEGER_CST
7576 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7577 {
7578 tree dandnotc
7579 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7580 arg1, build1 (BIT_NOT_EXPR,
7581 TREE_TYPE (TREE_OPERAND (arg0, 1)),
7582 TREE_OPERAND (arg0, 1))));
7583 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7584 if (integer_nonzerop (dandnotc))
7585 return omit_one_operand (type, rslt, arg0);
7586 }
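/* For instance (illustrative): (a & 3) == 4 can never hold, because 4
has a bit set outside the mask 3, so the comparison folds to 0 (or to
1 for !=). */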
7587
7588 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
7589 Similarly for NE_EXPR. */
7590 if ((code == EQ_EXPR || code == NE_EXPR)
7591 && TREE_CODE (arg0) == BIT_IOR_EXPR
7592 && TREE_CODE (arg1) == INTEGER_CST
7593 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
7594 {
7595 tree candnotd
7596 = fold (build (BIT_AND_EXPR, TREE_TYPE (arg0),
7597 TREE_OPERAND (arg0, 1),
7598 build1 (BIT_NOT_EXPR, TREE_TYPE (arg1), arg1)));
7599 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
7600 if (integer_nonzerop (candnotd))
7601 return omit_one_operand (type, rslt, arg0);
7602 }
7603
7604 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
7605 and similarly for >= into !=. */
7606 if ((code == LT_EXPR || code == GE_EXPR)
7607 && TREE_UNSIGNED (TREE_TYPE (arg0))
7608 && TREE_CODE (arg1) == LSHIFT_EXPR
7609 && integer_onep (TREE_OPERAND (arg1, 0)))
7610 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7611 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7612 TREE_OPERAND (arg1, 1)),
7613 convert (TREE_TYPE (arg0), integer_zero_node));
7614
7615 else if ((code == LT_EXPR || code == GE_EXPR)
7616 && TREE_UNSIGNED (TREE_TYPE (arg0))
7617 && (TREE_CODE (arg1) == NOP_EXPR
7618 || TREE_CODE (arg1) == CONVERT_EXPR)
7619 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
7620 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
7621 return
7622 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
7623 convert (TREE_TYPE (arg0),
7624 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
7625 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
7626 convert (TREE_TYPE (arg0), integer_zero_node));
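/* Example of the shift rewrite above (illustrative): for unsigned x,
x < (1 << y) becomes (x >> y) == 0
x >= (1 << y) becomes (x >> y) != 0
trading the shift of the constant 1 for a shift of x plus a test
against zero. */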
7627
7628 /* Simplify comparison of something with itself. (For IEEE
7629 floating-point, we can only do some of these simplifications.) */
7630 if (operand_equal_p (arg0, arg1, 0))
7631 {
7632 switch (code)
7633 {
7634 case EQ_EXPR:
7635 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7636 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7637 return constant_boolean_node (1, type);
7638 break;
7639
7640 case GE_EXPR:
7641 case LE_EXPR:
7642 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
7643 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7644 return constant_boolean_node (1, type);
7645 return fold (build (EQ_EXPR, type, arg0, arg1));
7646
7647 case NE_EXPR:
7648 /* For NE, we can only do this simplification if integer
7649 or we don't honor IEEE floating point NaNs. */
7650 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
7651 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
7652 break;
7653 /* ... fall through ... */
7654 case GT_EXPR:
7655 case LT_EXPR:
7656 return constant_boolean_node (0, type);
7657 default:
7658 abort ();
7659 }
7660 }
7661
7662 /* If we are comparing an expression that just has comparisons
7663 of two integer values, arithmetic expressions of those comparisons,
7664 and constants, we can simplify it. There are only three cases
7665 to check: the two values can either be equal, the first can be
7666 greater, or the second can be greater. Fold the expression for
7667 those three values. Since each value must be 0 or 1, we have
7668 eight possibilities, each of which corresponds to the constant 0
7669 or 1 or one of the six possible comparisons.
7670
7671 This handles common cases like (a > b) == 0 but also handles
7672 expressions like ((x > y) - (y > x)) > 0, which supposedly
7673 occur in macroized code. */
7674
7675 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
7676 {
7677 tree cval1 = 0, cval2 = 0;
7678 int save_p = 0;
7679
7680 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
7681 /* Don't handle degenerate cases here; they should already
7682 have been handled anyway. */
7683 && cval1 != 0 && cval2 != 0
7684 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
7685 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
7686 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
7687 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
7688 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
7689 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
7690 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
7691 {
7692 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
7693 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
7694
7695 /* We can't just pass T to eval_subst in case cval1 or cval2
7696 was the same as ARG1. */
7697
7698 tree high_result
7699 = fold (build (code, type,
7700 eval_subst (arg0, cval1, maxval, cval2, minval),
7701 arg1));
7702 tree equal_result
7703 = fold (build (code, type,
7704 eval_subst (arg0, cval1, maxval, cval2, maxval),
7705 arg1));
7706 tree low_result
7707 = fold (build (code, type,
7708 eval_subst (arg0, cval1, minval, cval2, maxval),
7709 arg1));
7710
7711 /* All three of these results should be 0 or 1. Confirm they
7712 are. Then use those values to select the proper code
7713 to use. */
7714
7715 if ((integer_zerop (high_result)
7716 || integer_onep (high_result))
7717 && (integer_zerop (equal_result)
7718 || integer_onep (equal_result))
7719 && (integer_zerop (low_result)
7720 || integer_onep (low_result)))
7721 {
7722 /* Make a 3-bit mask with the high-order bit being the
7723 value for `>', the next for '=', and the low for '<'. */
7724 switch ((integer_onep (high_result) * 4)
7725 + (integer_onep (equal_result) * 2)
7726 + integer_onep (low_result))
7727 {
7728 case 0:
7729 /* Always false. */
7730 return omit_one_operand (type, integer_zero_node, arg0);
7731 case 1:
7732 code = LT_EXPR;
7733 break;
7734 case 2:
7735 code = EQ_EXPR;
7736 break;
7737 case 3:
7738 code = LE_EXPR;
7739 break;
7740 case 4:
7741 code = GT_EXPR;
7742 break;
7743 case 5:
7744 code = NE_EXPR;
7745 break;
7746 case 6:
7747 code = GE_EXPR;
7748 break;
7749 case 7:
7750 /* Always true. */
7751 return omit_one_operand (type, integer_one_node, arg0);
7752 }
7753
7754 t = build (code, type, cval1, cval2);
7755 if (save_p)
7756 return save_expr (t);
7757 else
7758 return fold (t);
7759 }
7760 }
7761 }
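/* Worked instance (added): for (a > b) == 0 the three trial
substitutions give high_result = 0, equal_result = 1 and
low_result = 1, i.e. mask binary 011 = 3, which selects LE_EXPR, so
the whole expression folds to a <= b. */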
7762
7763 /* If this is a comparison of a field, we may be able to simplify it. */
7764 if (((TREE_CODE (arg0) == COMPONENT_REF
7765 && (*lang_hooks.can_use_bit_fields_p) ())
7766 || TREE_CODE (arg0) == BIT_FIELD_REF)
7767 && (code == EQ_EXPR || code == NE_EXPR)
7768 /* Handle the constant case even without -O
7769 to make sure the warnings are given. */
7770 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
7771 {
7772 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
7773 if (t1)
7774 return t1;
7775 }
7776
7777 /* If this is a comparison of complex values and either or both sides
7778 are a COMPLEX_EXPR or COMPLEX_CST, it is best to split up the
7779 comparisons and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR.
7780 This may prevent needless evaluations. */
7781 if ((code == EQ_EXPR || code == NE_EXPR)
7782 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
7783 && (TREE_CODE (arg0) == COMPLEX_EXPR
7784 || TREE_CODE (arg1) == COMPLEX_EXPR
7785 || TREE_CODE (arg0) == COMPLEX_CST
7786 || TREE_CODE (arg1) == COMPLEX_CST))
7787 {
7788 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
7789 tree real0, imag0, real1, imag1;
7790
7791 arg0 = save_expr (arg0);
7792 arg1 = save_expr (arg1);
7793 real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
7794 imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
7795 real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
7796 imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
7797
7798 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
7799 : TRUTH_ORIF_EXPR),
7800 type,
7801 fold (build (code, type, real0, real1)),
7802 fold (build (code, type, imag0, imag1))));
7803 }
7804
7805 /* Optimize comparisons of strlen vs zero to a compare of the
7806 first character of the string vs zero. To wit,
7807 strlen(ptr) == 0 => *ptr == 0
7808 strlen(ptr) != 0 => *ptr != 0
7809 Other cases should reduce to one of these two (or a constant)
7810 due to the return value of strlen being unsigned. */
7811 if ((code == EQ_EXPR || code == NE_EXPR)
7812 && integer_zerop (arg1)
7813 && TREE_CODE (arg0) == CALL_EXPR)
7814 {
7815 tree fndecl = get_callee_fndecl (arg0);
7816 tree arglist;
7817
7818 if (fndecl
7819 && DECL_BUILT_IN (fndecl)
7820 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD
7821 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
7822 && (arglist = TREE_OPERAND (arg0, 1))
7823 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE
7824 && ! TREE_CHAIN (arglist))
7825 return fold (build (code, type,
7826 build1 (INDIRECT_REF, char_type_node,
7827 TREE_VALUE(arglist)),
7828 integer_zero_node));
7829 }
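/* Effect of the above (illustrative):
if (strlen (p) == 0) ...
becomes
if (*p == 0) ...
avoiding a scan of the whole string just to test for emptiness. */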
7830
7831 /* From here on, the only cases we handle are when the result is
7832 known to be a constant.
7833
7834 To compute GT, swap the arguments and do LT.
7835 To compute GE, do LT and invert the result.
7836 To compute LE, swap the arguments, do LT and invert the result.
7837 To compute NE, do EQ and invert the result.
7838
7839 Therefore, the code below must handle only EQ and LT. */
7840
7841 if (code == LE_EXPR || code == GT_EXPR)
7842 {
7843 tem = arg0, arg0 = arg1, arg1 = tem;
7844 code = swap_tree_comparison (code);
7845 }
7846
7847 /* Note that it is safe to invert for real values here because we
7848 will check below in the one case where it matters. */
7849
7850 t1 = NULL_TREE;
7851 invert = 0;
7852 if (code == NE_EXPR || code == GE_EXPR)
7853 {
7854 invert = 1;
7855 code = invert_tree_comparison (code);
7856 }
7857
7858 /* Compute a result for LT or EQ if args permit;
7859 otherwise return T. */
7860 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
7861 {
7862 if (code == EQ_EXPR)
7863 t1 = build_int_2 (tree_int_cst_equal (arg0, arg1), 0);
7864 else
7865 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
7866 ? INT_CST_LT_UNSIGNED (arg0, arg1)
7867 : INT_CST_LT (arg0, arg1)),
7868 0);
7869 }
7870
7871 #if 0 /* This is no longer useful, but breaks some real code. */
7872 /* Assume a nonexplicit constant cannot equal an explicit one,
7873 since such code would be undefined anyway.
7874 Exception: on sysvr4, using #pragma weak,
7875 a label can come out as 0. */
7876 else if (TREE_CODE (arg1) == INTEGER_CST
7877 && !integer_zerop (arg1)
7878 && TREE_CONSTANT (arg0)
7879 && TREE_CODE (arg0) == ADDR_EXPR
7880 && code == EQ_EXPR)
7881 t1 = build_int_2 (0, 0);
7882 #endif
7883 /* Two real constants can be compared explicitly. */
7884 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
7885 {
7886 /* If either operand is a NaN, the result is false with two
7887 exceptions: First, an NE_EXPR is true on NaNs, but that case
7888 is already handled correctly since we will be inverting the
7889 result for NE_EXPR. Second, if we had inverted a LE_EXPR
7890 or a GE_EXPR into a LT_EXPR, we must return true so that it
7891 will be inverted into false. */
7892
7893 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
7894 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
7895 t1 = build_int_2 (invert && code == LT_EXPR, 0);
7896
7897 else if (code == EQ_EXPR)
7898 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
7899 TREE_REAL_CST (arg1)),
7900 0);
7901 else
7902 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
7903 TREE_REAL_CST (arg1)),
7904 0);
7905 }
7906
7907 if (t1 == NULL_TREE)
7908 return t;
7909
7910 if (invert)
7911 TREE_INT_CST_LOW (t1) ^= 1;
7912
7913 TREE_TYPE (t1) = type;
7914 if (TREE_CODE (type) == BOOLEAN_TYPE)
7915 return (*lang_hooks.truthvalue_conversion) (t1);
7916 return t1;
7917
7918 case COND_EXPR:
7919 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
7920 so all simple results must be passed through pedantic_non_lvalue. */
7921 if (TREE_CODE (arg0) == INTEGER_CST)
7922 {
7923 tem = TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
7924 /* Only optimize constant conditions when the selected branch
7925 has the same type as the COND_EXPR. This avoids optimizing
7926 away "c ? x : throw", where the throw has a void type. */
7927 if (! VOID_TYPE_P (TREE_TYPE (tem))
7928 || VOID_TYPE_P (TREE_TYPE (t)))
7929 return pedantic_non_lvalue (tem);
7930 return t;
7931 }
7932 if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
7933 return pedantic_omit_one_operand (type, arg1, arg0);
7934
7935 /* If we have A op B ? A : C, we may be able to convert this to a
7936 simpler expression, depending on the operation and the values
7937 of B and C. Signed zeros prevent all of these transformations,
7938 for reasons given above each one. */
7939
7940 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
7941 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
7942 arg1, TREE_OPERAND (arg0, 1))
7943 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
7944 {
7945 tree arg2 = TREE_OPERAND (t, 2);
7946 enum tree_code comp_code = TREE_CODE (arg0);
7947
7948 STRIP_NOPS (arg2);
7949
7950 /* If we have A op 0 ? A : -A, consider applying the following
7951 transformations:
7952
7953 A == 0? A : -A same as -A
7954 A != 0? A : -A same as A
7955 A >= 0? A : -A same as abs (A)
7956 A > 0? A : -A same as abs (A)
7957 A <= 0? A : -A same as -abs (A)
7958 A < 0? A : -A same as -abs (A)
7959
7960 None of these transformations work for modes with signed
7961 zeros. If A is +/-0, the first two transformations will
7962 change the sign of the result (from +0 to -0, or vice
7963 versa). The last four will fix the sign of the result,
7964 even though the original expressions could be positive or
7965 negative, depending on the sign of A.
7966
7967 Note that all these transformations are correct if A is
7968 NaN, since the two alternatives (A and -A) are also NaNs. */
7969 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
7970 ? real_zerop (TREE_OPERAND (arg0, 1))
7971 : integer_zerop (TREE_OPERAND (arg0, 1)))
7972 && TREE_CODE (arg2) == NEGATE_EXPR
7973 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
7974 switch (comp_code)
7975 {
7976 case EQ_EXPR:
7977 return
7978 pedantic_non_lvalue
7979 (convert (type,
7980 negate_expr
7981 (convert (TREE_TYPE (TREE_OPERAND (t, 1)),
7982 arg1))));
7983 case NE_EXPR:
7984 return pedantic_non_lvalue (convert (type, arg1));
7985 case GE_EXPR:
7986 case GT_EXPR:
7987 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7988 arg1 = convert ((*lang_hooks.types.signed_type)
7989 (TREE_TYPE (arg1)), arg1);
7990 return pedantic_non_lvalue
7991 (convert (type, fold (build1 (ABS_EXPR,
7992 TREE_TYPE (arg1), arg1))));
7993 case LE_EXPR:
7994 case LT_EXPR:
7995 if (TREE_UNSIGNED (TREE_TYPE (arg1)))
7996 arg1 = convert ((*lang_hooks.types.signed_type)
7997 (TREE_TYPE (arg1)), arg1);
7998 return pedantic_non_lvalue
7999 (negate_expr (convert (type,
8000 fold (build1 (ABS_EXPR,
8001 TREE_TYPE (arg1),
8002 arg1)))));
8003 default:
8004 abort ();
8005 }
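/* For example (illustrative, signed zeros not honored):
x > 0.0 ? x : -x folds to an ABS_EXPR, i.e. fabs (x),
x < 0.0 ? x : -x folds to the negation of that ABS_EXPR. */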
8006
8007 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
8008 A == 0 ? A : 0 is always 0 unless A is -0. Note that
8009 both transformations are correct when A is NaN: A != 0
8010 is then true, and A == 0 is false. */
8011
8012 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
8013 {
8014 if (comp_code == NE_EXPR)
8015 return pedantic_non_lvalue (convert (type, arg1));
8016 else if (comp_code == EQ_EXPR)
8017 return pedantic_non_lvalue (convert (type, integer_zero_node));
8018 }
8019
8020 /* Try some transformations of A op B ? A : B.
8021
8022 A == B? A : B same as B
8023 A != B? A : B same as A
8024 A >= B? A : B same as max (A, B)
8025 A > B? A : B same as max (B, A)
8026 A <= B? A : B same as min (A, B)
8027 A < B? A : B same as min (B, A)
8028
8029 As above, these transformations don't work in the presence
8030 of signed zeros. For example, if A and B are zeros of
8031 opposite sign, the first two transformations will change
8032 the sign of the result. In the last four, the original
8033 expressions give different results for (A=+0, B=-0) and
8034 (A=-0, B=+0), but the transformed expressions do not.
8035
8036 The first two transformations are correct if either A or B
8037 is a NaN. In the first transformation, the condition will
8038 be false, and B will indeed be chosen. In the case of the
8039 second transformation, the condition A != B will be true,
8040 and A will be chosen.
8041
8042 The conversions to max() and min() are not correct if B is
8043 a number and A is not. The conditions in the original
8044 expressions will be false, so all four give B. The min()
8045 and max() versions would give a NaN instead. */
8046 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
8047 arg2, TREE_OPERAND (arg0, 0)))
8048 {
8049 tree comp_op0 = TREE_OPERAND (arg0, 0);
8050 tree comp_op1 = TREE_OPERAND (arg0, 1);
8051 tree comp_type = TREE_TYPE (comp_op0);
8052
8053 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
8054 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
8055 {
8056 comp_type = type;
8057 comp_op0 = arg1;
8058 comp_op1 = arg2;
8059 }
8060
8061 switch (comp_code)
8062 {
8063 case EQ_EXPR:
8064 return pedantic_non_lvalue (convert (type, arg2));
8065 case NE_EXPR:
8066 return pedantic_non_lvalue (convert (type, arg1));
8067 case LE_EXPR:
8068 case LT_EXPR:
8069 /* In C++ a ?: expression can be an lvalue, so put the
8070 operand which will be used if they are equal first
8071 so that we can convert this back to the
8072 corresponding COND_EXPR. */
8073 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8074 return pedantic_non_lvalue
8075 (convert (type, fold (build (MIN_EXPR, comp_type,
8076 (comp_code == LE_EXPR
8077 ? comp_op0 : comp_op1),
8078 (comp_code == LE_EXPR
8079 ? comp_op1 : comp_op0)))));
8080 break;
8081 case GE_EXPR:
8082 case GT_EXPR:
8083 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
8084 return pedantic_non_lvalue
8085 (convert (type, fold (build (MAX_EXPR, comp_type,
8086 (comp_code == GE_EXPR
8087 ? comp_op0 : comp_op1),
8088 (comp_code == GE_EXPR
8089 ? comp_op1 : comp_op0)))));
8090 break;
8091 default:
8092 abort ();
8093 }
8094 }
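/* Example (added): when NaNs need not be honored,
a < b ? a : b folds to MIN_EXPR (b, a)
a >= b ? a : b folds to MAX_EXPR (a, b)
matching the table in the comment above. */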
8095
8096 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
8097 we might still be able to simplify this. For example,
8098 if C1 is one less or one more than C2, this might have started
8099 out as a MIN or MAX and been transformed by this function.
8100 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
8101
8102 if (INTEGRAL_TYPE_P (type)
8103 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8104 && TREE_CODE (arg2) == INTEGER_CST)
8105 switch (comp_code)
8106 {
8107 case EQ_EXPR:
8108 /* We can replace A with C1 in this case. */
8109 arg1 = convert (type, TREE_OPERAND (arg0, 1));
8110 return fold (build (code, type, TREE_OPERAND (t, 0), arg1,
8111 TREE_OPERAND (t, 2)));
8112
8113 case LT_EXPR:
8114 /* If C1 is C2 + 1, this is min(A, C2). */
8115 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8116 && operand_equal_p (TREE_OPERAND (arg0, 1),
8117 const_binop (PLUS_EXPR, arg2,
8118 integer_one_node, 0), 1))
8119 return pedantic_non_lvalue
8120 (fold (build (MIN_EXPR, type, arg1, arg2)));
8121 break;
8122
8123 case LE_EXPR:
8124 /* If C1 is C2 - 1, this is min(A, C2). */
8125 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8126 && operand_equal_p (TREE_OPERAND (arg0, 1),
8127 const_binop (MINUS_EXPR, arg2,
8128 integer_one_node, 0), 1))
8129 return pedantic_non_lvalue
8130 (fold (build (MIN_EXPR, type, arg1, arg2)));
8131 break;
8132
8133 case GT_EXPR:
8134 /* If C1 is C2 - 1, this is max(A, C2). */
8135 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
8136 && operand_equal_p (TREE_OPERAND (arg0, 1),
8137 const_binop (MINUS_EXPR, arg2,
8138 integer_one_node, 0), 1))
8139 return pedantic_non_lvalue
8140 (fold (build (MAX_EXPR, type, arg1, arg2)));
8141 break;
8142
8143 case GE_EXPR:
8144 /* If C1 is C2 + 1, this is max(A, C2). */
8145 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
8146 && operand_equal_p (TREE_OPERAND (arg0, 1),
8147 const_binop (PLUS_EXPR, arg2,
8148 integer_one_node, 0), 1))
8149 return pedantic_non_lvalue
8150 (fold (build (MAX_EXPR, type, arg1, arg2)));
8151 break;
8152 case NE_EXPR:
8153 break;
8154 default:
8155 abort ();
8156 }
8157 }
8158
8159 /* If the second operand is simpler than the third, swap them
8160 since that produces better jump optimization results. */
8161 if (tree_swap_operands_p (TREE_OPERAND (t, 1),
8162 TREE_OPERAND (t, 2), false))
8163 {
8164 /* See if this can be inverted. If it can't, possibly because
8165 it was a floating-point inequality comparison, don't do
8166 anything. */
8167 tem = invert_truthvalue (arg0);
8168
8169 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8170 return fold (build (code, type, tem,
8171 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1)));
8172 }
8173
8174 /* Convert A ? 1 : 0 to simply A. */
8175 if (integer_onep (TREE_OPERAND (t, 1))
8176 && integer_zerop (TREE_OPERAND (t, 2))
8177 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
8178 call to fold will try to move the conversion inside
8179 a COND, which will recurse. In that case, the COND_EXPR
8180 is probably the best choice, so leave it alone. */
8181 && type == TREE_TYPE (arg0))
8182 return pedantic_non_lvalue (arg0);
8183
8184 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
8185 over COND_EXPR in cases such as floating point comparisons. */
8186 if (integer_zerop (TREE_OPERAND (t, 1))
8187 && integer_onep (TREE_OPERAND (t, 2))
8188 && truth_value_p (TREE_CODE (arg0)))
8189 return pedantic_non_lvalue (convert (type,
8190 invert_truthvalue (arg0)));
8191
8192 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
8193 operation is simply A & 2. */
8194
8195 if (integer_zerop (TREE_OPERAND (t, 2))
8196 && TREE_CODE (arg0) == NE_EXPR
8197 && integer_zerop (TREE_OPERAND (arg0, 1))
8198 && integer_pow2p (arg1)
8199 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
8200 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
8201 arg1, 1))
8202 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
8203
8204 /* Convert A ? B : 0 into A && B if A and B are truth values. */
8205 if (integer_zerop (TREE_OPERAND (t, 2))
8206 && truth_value_p (TREE_CODE (arg0))
8207 && truth_value_p (TREE_CODE (arg1)))
8208 return pedantic_non_lvalue (fold (build (TRUTH_ANDIF_EXPR, type,
8209 arg0, arg1)));
8210
8211 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
8212 if (integer_onep (TREE_OPERAND (t, 2))
8213 && truth_value_p (TREE_CODE (arg0))
8214 && truth_value_p (TREE_CODE (arg1)))
8215 {
8216 /* Only perform transformation if ARG0 is easily inverted. */
8217 tem = invert_truthvalue (arg0);
8218 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
8219 return pedantic_non_lvalue (fold (build (TRUTH_ORIF_EXPR, type,
8220 tem, arg1)));
8221 }
8222
8223 return t;
8224
8225 case COMPOUND_EXPR:
8226 /* When pedantic, a compound expression can be neither an lvalue
8227 nor an integer constant expression. */
8228 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
8229 return t;
8230 /* Don't let (0, 0) be a null pointer constant. */
8231 if (integer_zerop (arg1))
8232 return pedantic_non_lvalue (build1 (NOP_EXPR, type, arg1));
8233 return pedantic_non_lvalue (convert (type, arg1));
8234
8235 case COMPLEX_EXPR:
8236 if (wins)
8237 return build_complex (type, arg0, arg1);
8238 return t;
8239
8240 case REALPART_EXPR:
8241 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8242 return t;
8243 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8244 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
8245 TREE_OPERAND (arg0, 1));
8246 else if (TREE_CODE (arg0) == COMPLEX_CST)
8247 return TREE_REALPART (arg0);
8248 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8249 return fold (build (TREE_CODE (arg0), type,
8250 fold (build1 (REALPART_EXPR, type,
8251 TREE_OPERAND (arg0, 0))),
8252 fold (build1 (REALPART_EXPR,
8253 type, TREE_OPERAND (arg0, 1)))));
8254 return t;
8255
8256 case IMAGPART_EXPR:
8257 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8258 return convert (type, integer_zero_node);
8259 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
8260 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
8261 TREE_OPERAND (arg0, 0));
8262 else if (TREE_CODE (arg0) == COMPLEX_CST)
8263 return TREE_IMAGPART (arg0);
8264 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8265 return fold (build (TREE_CODE (arg0), type,
8266 fold (build1 (IMAGPART_EXPR, type,
8267 TREE_OPERAND (arg0, 0))),
8268 fold (build1 (IMAGPART_EXPR, type,
8269 TREE_OPERAND (arg0, 1)))));
8270 return t;
8271
8272 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
8273 appropriate. */
8274 case CLEANUP_POINT_EXPR:
8275 if (! has_cleanups (arg0))
8276 return TREE_OPERAND (t, 0);
8277
8278 {
8279 enum tree_code code0 = TREE_CODE (arg0);
8280 int kind0 = TREE_CODE_CLASS (code0);
8281 tree arg00 = TREE_OPERAND (arg0, 0);
8282 tree arg01;
8283
8284 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
8285 return fold (build1 (code0, type,
8286 fold (build1 (CLEANUP_POINT_EXPR,
8287 TREE_TYPE (arg00), arg00))));
8288
8289 if (kind0 == '<' || kind0 == '2'
8290 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
8291 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
8292 || code0 == TRUTH_XOR_EXPR)
8293 {
8294 arg01 = TREE_OPERAND (arg0, 1);
8295
8296 if (TREE_CONSTANT (arg00)
8297 || ((code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR)
8298 && ! has_cleanups (arg00)))
8299 return fold (build (code0, type, arg00,
8300 fold (build1 (CLEANUP_POINT_EXPR,
8301 TREE_TYPE (arg01), arg01))));
8302
8303 if (TREE_CONSTANT (arg01))
8304 return fold (build (code0, type,
8305 fold (build1 (CLEANUP_POINT_EXPR,
8306 TREE_TYPE (arg00), arg00)),
8307 arg01));
8308 }
8309
8310 return t;
8311 }
8312
8313 case CALL_EXPR:
8314 /* Check for a built-in function. */
8315 if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR
8316 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (expr, 0), 0))
8317 == FUNCTION_DECL)
8318 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (expr, 0), 0)))
8319 {
8320 tree tmp = fold_builtin (expr);
8321 if (tmp)
8322 return tmp;
8323 }
8324 return t;
8325
8326 default:
8327 return t;
8328 } /* switch (code) */
8329 }
8330
8331 #ifdef ENABLE_FOLD_CHECKING
8332 #undef fold
8333
8334 static void fold_checksum_tree (tree, struct md5_ctx *, htab_t);
8335 static void fold_check_failed (tree, tree);
8336 void print_fold_checksum (tree);
8337
8338 /* When --enable-checking=fold, compute a digest of expr before
8339 and after the actual fold call, to verify that fold did not
8340 accidentally modify the original expr. */
8341
8342 tree
8343 fold (tree expr)
8344 {
8345 tree ret;
8346 struct md5_ctx ctx;
8347 unsigned char checksum_before[16], checksum_after[16];
8348 htab_t ht;
8349
8350 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8351 md5_init_ctx (&ctx);
8352 fold_checksum_tree (expr, &ctx, ht);
8353 md5_finish_ctx (&ctx, checksum_before);
8354 htab_empty (ht);
8355
8356 ret = fold_1 (expr);
8357
8358 md5_init_ctx (&ctx);
8359 fold_checksum_tree (expr, &ctx, ht);
8360 md5_finish_ctx (&ctx, checksum_after);
8361 htab_delete (ht);
8362
8363 if (memcmp (checksum_before, checksum_after, 16))
8364 fold_check_failed (expr, ret);
8365
8366 return ret;
8367 }
8368
8369 void
8370 print_fold_checksum (tree expr)
8371 {
8372 struct md5_ctx ctx;
8373 unsigned char checksum[16], cnt;
8374 htab_t ht;
8375
8376 ht = htab_create (32, htab_hash_pointer, htab_eq_pointer, NULL);
8377 md5_init_ctx (&ctx);
8378 fold_checksum_tree (expr, &ctx, ht);
8379 md5_finish_ctx (&ctx, checksum);
8380 htab_delete (ht);
8381 for (cnt = 0; cnt < 16; ++cnt)
8382 fprintf (stderr, "%02x", checksum[cnt]);
8383 putc ('\n', stderr);
8384 }
8385
8386 static void
8387 fold_check_failed (tree expr ATTRIBUTE_UNUSED, tree ret ATTRIBUTE_UNUSED)
8388 {
8389 internal_error ("fold check: original tree changed by fold");
8390 }
8391
8392 static void
8393 fold_checksum_tree (tree expr, struct md5_ctx *ctx, htab_t ht)
8394 {
8395 void **slot;
8396 enum tree_code code;
8397 char buf[sizeof (struct tree_decl)];
8398 int i, len;
8399
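/* buf must be able to hold a copy of any node that is temporarily
modified below; the sanity check that follows aborts if
struct tree_decl is no longer the largest such structure.  */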
8400 if (sizeof (struct tree_exp) + 5 * sizeof (tree)
8401 > sizeof (struct tree_decl)
8402 || sizeof (struct tree_type) > sizeof (struct tree_decl))
8403 abort ();
8404 if (expr == NULL)
8405 return;
8406 slot = htab_find_slot (ht, expr, INSERT);
8407 if (*slot != NULL)
8408 return;
8409 *slot = expr;
8410 code = TREE_CODE (expr);
8411 if (code == SAVE_EXPR && SAVE_EXPR_NOPLACEHOLDER (expr))
8412 {
8413 /* Allow SAVE_EXPR_NOPLACEHOLDER flag to be modified. */
8414 memcpy (buf, expr, tree_size (expr));
8415 expr = (tree) buf;
8416 SAVE_EXPR_NOPLACEHOLDER (expr) = 0;
8417 }
8418 else if (TREE_CODE_CLASS (code) == 'd' && DECL_ASSEMBLER_NAME_SET_P (expr))
8419 {
8420 /* Allow DECL_ASSEMBLER_NAME to be modified. */
8421 memcpy (buf, expr, tree_size (expr));
8422 expr = (tree) buf;
8423 SET_DECL_ASSEMBLER_NAME (expr, NULL);
8424 }
8425 else if (TREE_CODE_CLASS (code) == 't'
8426 && (TYPE_POINTER_TO (expr) || TYPE_REFERENCE_TO (expr)))
8427 {
8428 /* Allow TYPE_POINTER_TO and TYPE_REFERENCE_TO to be modified. */
8429 memcpy (buf, expr, tree_size (expr));
8430 expr = (tree) buf;
8431 TYPE_POINTER_TO (expr) = NULL;
8432 TYPE_REFERENCE_TO (expr) = NULL;
8433 }
8434 md5_process_bytes (expr, tree_size (expr), ctx);
8435 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
8436 if (TREE_CODE_CLASS (code) != 't' && TREE_CODE_CLASS (code) != 'd')
8437 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
8438 len = TREE_CODE_LENGTH (code);
8439 switch (TREE_CODE_CLASS (code))
8440 {
8441 case 'c':
8442 switch (code)
8443 {
8444 case STRING_CST:
8445 md5_process_bytes (TREE_STRING_POINTER (expr),
8446 TREE_STRING_LENGTH (expr), ctx);
8447 break;
8448 case COMPLEX_CST:
8449 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
8450 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
8451 break;
8452 case VECTOR_CST:
8453 fold_checksum_tree (TREE_VECTOR_CST_ELTS (expr), ctx, ht);
8454 break;
8455 default:
8456 break;
8457 }
8458 break;
8459 case 'x':
8460 switch (code)
8461 {
8462 case TREE_LIST:
8463 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
8464 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
8465 break;
8466 case TREE_VEC:
8467 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
8468 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
8469 break;
8470 default:
8471 break;
8472 }
8473 break;
8474 case 'e':
8475 switch (code)
8476 {
8477 case SAVE_EXPR: len = 2; break;
8478 case GOTO_SUBROUTINE_EXPR: len = 0; break;
8479 case RTL_EXPR: len = 0; break;
8480 case WITH_CLEANUP_EXPR: len = 2; break;
8481 default: break;
8482 }
8483 /* Fall through. */
8484 case 'r':
8485 case '<':
8486 case '1':
8487 case '2':
8488 case 's':
8489 for (i = 0; i < len; ++i)
8490 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
8491 break;
8492 case 'd':
8493 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
8494 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
8495 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
8496 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
8497 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
8498 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
8499 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
8500 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
8501 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
8502 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
8503 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
8504 break;
8505 case 't':
8506 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
8507 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
8508 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
8509 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
8510 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
8511 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
8512 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
8513 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
8514 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
8515 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
8516 break;
8517 default:
8518 break;
8519 }
8520 }
8521
8522 #endif
8523
8524 /* Perform constant folding and related simplification of initializer
8525 expression EXPR. This behaves identically to "fold" but ignores
8526 potential run-time traps and exceptions that fold must preserve. */
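/* For example, a static initializer such as

double d = 1.0 / 0.0;

may be folded to +Inf here even when -ftrapping-math is in effect,
since an initializer is evaluated once at translation time and the
run-time trap that fold would otherwise have to preserve cannot
occur.  */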
8527
8528 tree
8529 fold_initializer (tree expr)
8530 {
8531 int saved_signaling_nans = flag_signaling_nans;
8532 int saved_trapping_math = flag_trapping_math;
8533 int saved_trapv = flag_trapv;
8534 tree result;
8535
8536 flag_signaling_nans = 0;
8537 flag_trapping_math = 0;
8538 flag_trapv = 0;
8539
8540 result = fold (expr);
8541
8542 flag_signaling_nans = saved_signaling_nans;
8543 flag_trapping_math = saved_trapping_math;
8544 flag_trapv = saved_trapv;
8545
8546 return result;
8547 }
8548
8549 /* Determine whether the first argument is a multiple of the second
8550 argument.  Return 0 if it is not, or if we cannot easily determine it to be.
8551
8552 An example of the sort of thing we care about (at this point; this routine
8553 could surely be made more general, and expanded to do what the *_DIV_EXPR's
8554 fold cases do now) is discovering that
8555
8556 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8557
8558 is a multiple of
8559
8560 SAVE_EXPR (J * 8)
8561
8562 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
8563
8564 This code also handles discovering that
8565
8566 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
8567
8568 is a multiple of 8 so we don't have to worry about dealing with a
8569 possible remainder.
8570
8571 Note that we *look* inside a SAVE_EXPR only to determine how it was
8572 calculated; it is not safe for fold to do much of anything else with the
8573 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
8574 at run time. For example, the latter example above *cannot* be implemented
8575 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
8576 evaluation time of the original SAVE_EXPR is not necessarily the same at
8577 the time the new expression is evaluated. The only optimization of this
8578 sort that would be valid is changing
8579
8580 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
8581
8582 divided by 8 to
8583
8584 SAVE_EXPR (I) * SAVE_EXPR (J)
8585
8586 (where the same SAVE_EXPR (J) is used in the original and the
8587 transformed version). */
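/* As a smaller worked example, multiple_of_p returns nonzero for
TOP = (J * 8) + 16 and BOTTOM = 8: the PLUS_EXPR case requires both
operands to be multiples of BOTTOM, the MULT_EXPR case succeeds
through its constant operand 8, and the INTEGER_CST case verifies
that 16 modulo 8 is zero.  */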
8588
8589 static int
8590 multiple_of_p (tree type, tree top, tree bottom)
8591 {
8592 if (operand_equal_p (top, bottom, 0))
8593 return 1;
8594
8595 if (TREE_CODE (type) != INTEGER_TYPE)
8596 return 0;
8597
8598 switch (TREE_CODE (top))
8599 {
8600 case MULT_EXPR:
8601 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8602 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8603
8604 case PLUS_EXPR:
8605 case MINUS_EXPR:
8606 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
8607 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
8608
8609 case LSHIFT_EXPR:
8610 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
8611 {
8612 tree op1, t1;
8613
8614 op1 = TREE_OPERAND (top, 1);
8615 /* const_binop may not detect overflow correctly,
8616 so check for it explicitly here. */
8617 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
8618 > TREE_INT_CST_LOW (op1)
8619 && TREE_INT_CST_HIGH (op1) == 0
8620 && 0 != (t1 = convert (type,
8621 const_binop (LSHIFT_EXPR, size_one_node,
8622 op1, 0)))
8623 && ! TREE_OVERFLOW (t1))
8624 return multiple_of_p (type, t1, bottom);
8625 }
8626 return 0;
8627
8628 case NOP_EXPR:
8629 /* Can't handle conversions from non-integral or wider integral type. */
8630 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
8631 || (TYPE_PRECISION (type)
8632 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
8633 return 0;
8634
8635 /* ... fall through ... */
8636
8637 case SAVE_EXPR:
8638 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
8639
8640 case INTEGER_CST:
8641 if (TREE_CODE (bottom) != INTEGER_CST
8642 || (TREE_UNSIGNED (type)
8643 && (tree_int_cst_sgn (top) < 0
8644 || tree_int_cst_sgn (bottom) < 0)))
8645 return 0;
8646 return integer_zerop (const_binop (TRUNC_MOD_EXPR,
8647 top, bottom, 0));
8648
8649 default:
8650 return 0;
8651 }
8652 }
8653
8654 /* Return true if `t' is known to be non-negative. */
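/* The answer is conservative: a nonzero result guarantees the value
is non-negative, while zero only means the sign could not be
proved.  For example, (int) (unsigned char) a * (int) (unsigned char) b
is recognized by the MULT_EXPR case below on any target where the
two 8-bit operand precisions sum to less than the precision of
int.  */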
8655
8656 int
8657 tree_expr_nonnegative_p (tree t)
8658 {
8659 switch (TREE_CODE (t))
8660 {
8661 case ABS_EXPR:
8662 return 1;
8663
8664 case INTEGER_CST:
8665 return tree_int_cst_sgn (t) >= 0;
8666
8667 case REAL_CST:
8668 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
8669
8670 case PLUS_EXPR:
8671 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8672 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8673 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8674
8675 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
8676 both unsigned and at least 2 bits shorter than the result. */
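/* E.g. for two 8-bit values zero-extended into a 32-bit sum,
MAX (8, 8) + 1 == 9 is below 32, so the addition cannot wrap
around into the sign bit.  */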
8677 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8678 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8679 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8680 {
8681 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8682 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8683 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8684 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8685 {
8686 unsigned int prec = MAX (TYPE_PRECISION (inner1),
8687 TYPE_PRECISION (inner2)) + 1;
8688 return prec < TYPE_PRECISION (TREE_TYPE (t));
8689 }
8690 }
8691 break;
8692
8693 case MULT_EXPR:
8694 if (FLOAT_TYPE_P (TREE_TYPE (t)))
8695 {
8696 /* x * x for floating point x is always non-negative. */
8697 if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
8698 return 1;
8699 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8700 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8701 }
8702
8703 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
8704 both unsigned and the sum of their precisions is less than the result's. */
8705 if (TREE_CODE (TREE_TYPE (t)) == INTEGER_TYPE
8706 && TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
8707 && TREE_CODE (TREE_OPERAND (t, 1)) == NOP_EXPR)
8708 {
8709 tree inner1 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
8710 tree inner2 = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0));
8711 if (TREE_CODE (inner1) == INTEGER_TYPE && TREE_UNSIGNED (inner1)
8712 && TREE_CODE (inner2) == INTEGER_TYPE && TREE_UNSIGNED (inner2))
8713 return TYPE_PRECISION (inner1) + TYPE_PRECISION (inner2)
8714 < TYPE_PRECISION (TREE_TYPE (t));
8715 }
8716 return 0;
8717
8718 case TRUNC_DIV_EXPR:
8719 case CEIL_DIV_EXPR:
8720 case FLOOR_DIV_EXPR:
8721 case ROUND_DIV_EXPR:
8722 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8723 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8724
8725 case TRUNC_MOD_EXPR:
8726 case CEIL_MOD_EXPR:
8727 case FLOOR_MOD_EXPR:
8728 case ROUND_MOD_EXPR:
8729 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8730
8731 case RDIV_EXPR:
8732 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8733 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8734
8735 case NOP_EXPR:
8736 {
8737 tree inner_type = TREE_TYPE (TREE_OPERAND (t, 0));
8738 tree outer_type = TREE_TYPE (t);
8739
8740 if (TREE_CODE (outer_type) == REAL_TYPE)
8741 {
8742 if (TREE_CODE (inner_type) == REAL_TYPE)
8743 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8744 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8745 {
8746 if (TREE_UNSIGNED (inner_type))
8747 return 1;
8748 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8749 }
8750 }
8751 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
8752 {
8753 if (TREE_CODE (inner_type) == REAL_TYPE)
8754 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8755 if (TREE_CODE (inner_type) == INTEGER_TYPE)
8756 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
8757 && TREE_UNSIGNED (inner_type);
8758 }
8759 }
8760 break;
8761
8762 case COND_EXPR:
8763 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1))
8764 && tree_expr_nonnegative_p (TREE_OPERAND (t, 2));
8765 case COMPOUND_EXPR:
8766 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8767 case MIN_EXPR:
8768 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8769 && tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8770 case MAX_EXPR:
8771 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0))
8772 || tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8773 case MODIFY_EXPR:
8774 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8775 case BIND_EXPR:
8776 return tree_expr_nonnegative_p (TREE_OPERAND (t, 1));
8777 case SAVE_EXPR:
8778 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8779 case NON_LVALUE_EXPR:
8780 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8781 case FLOAT_EXPR:
8782 return tree_expr_nonnegative_p (TREE_OPERAND (t, 0));
8783 case RTL_EXPR:
8784 return rtl_expr_nonnegative_p (RTL_EXPR_RTL (t));
8785
8786 case CALL_EXPR:
8787 {
8788 tree fndecl = get_callee_fndecl (t);
8789 tree arglist = TREE_OPERAND (t, 1);
8790 if (fndecl
8791 && DECL_BUILT_IN (fndecl)
8792 && DECL_BUILT_IN_CLASS (fndecl) != BUILT_IN_MD)
8793 switch (DECL_FUNCTION_CODE (fndecl))
8794 {
8795 case BUILT_IN_CABS:
8796 case BUILT_IN_CABSL:
8797 case BUILT_IN_CABSF:
8798 case BUILT_IN_EXP:
8799 case BUILT_IN_EXPF:
8800 case BUILT_IN_EXPL:
8801 case BUILT_IN_EXP2:
8802 case BUILT_IN_EXP2F:
8803 case BUILT_IN_EXP2L:
8804 case BUILT_IN_EXP10:
8805 case BUILT_IN_EXP10F:
8806 case BUILT_IN_EXP10L:
8807 case BUILT_IN_FABS:
8808 case BUILT_IN_FABSF:
8809 case BUILT_IN_FABSL:
8810 case BUILT_IN_FFS:
8811 case BUILT_IN_FFSL:
8812 case BUILT_IN_FFSLL:
8813 case BUILT_IN_PARITY:
8814 case BUILT_IN_PARITYL:
8815 case BUILT_IN_PARITYLL:
8816 case BUILT_IN_POPCOUNT:
8817 case BUILT_IN_POPCOUNTL:
8818 case BUILT_IN_POPCOUNTLL:
8819 case BUILT_IN_POW10:
8820 case BUILT_IN_POW10F:
8821 case BUILT_IN_POW10L:
8822 case BUILT_IN_SQRT:
8823 case BUILT_IN_SQRTF:
8824 case BUILT_IN_SQRTL:
8825 return 1;
8826
8827 case BUILT_IN_ATAN:
8828 case BUILT_IN_ATANF:
8829 case BUILT_IN_ATANL:
8830 case BUILT_IN_CEIL:
8831 case BUILT_IN_CEILF:
8832 case BUILT_IN_CEILL:
8833 case BUILT_IN_FLOOR:
8834 case BUILT_IN_FLOORF:
8835 case BUILT_IN_FLOORL:
8836 case BUILT_IN_NEARBYINT:
8837 case BUILT_IN_NEARBYINTF:
8838 case BUILT_IN_NEARBYINTL:
8839 case BUILT_IN_ROUND:
8840 case BUILT_IN_ROUNDF:
8841 case BUILT_IN_ROUNDL:
8842 case BUILT_IN_TRUNC:
8843 case BUILT_IN_TRUNCF:
8844 case BUILT_IN_TRUNCL:
8845 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8846
8847 case BUILT_IN_POW:
8848 case BUILT_IN_POWF:
8849 case BUILT_IN_POWL:
8850 return tree_expr_nonnegative_p (TREE_VALUE (arglist));
8851
8852 default:
8853 break;
8854 }
8855 }
8856
8857 /* ... fall through ... */
8858
8859 default:
8860 if (truth_value_p (TREE_CODE (t)))
8861 /* Truth values evaluate to 0 or 1, which is nonnegative. */
8862 return 1;
8863 }
8864
8865 /* We don't know the sign of `t', so be conservative and return false. */
8866 return 0;
8867 }
8868
8869 /* Return true if `r' is known to be non-negative.
8870 Only handles constants at the moment. */
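/* For example, (const_int 42), a symbol_ref and a label_ref all
count as nonnegative, while a VOIDmode const_double is treated as
a signed double-word integer and judged by the sign of its high
word.  */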
8871
8872 int
8873 rtl_expr_nonnegative_p (rtx r)
8874 {
8875 switch (GET_CODE (r))
8876 {
8877 case CONST_INT:
8878 return INTVAL (r) >= 0;
8879
8880 case CONST_DOUBLE:
8881 if (GET_MODE (r) == VOIDmode)
8882 return CONST_DOUBLE_HIGH (r) >= 0;
8883 return 0;
8884
8885 case CONST_VECTOR:
8886 {
8887 int units, i;
8888 rtx elt;
8889
8890 units = CONST_VECTOR_NUNITS (r);
8891
8892 for (i = 0; i < units; ++i)
8893 {
8894 elt = CONST_VECTOR_ELT (r, i);
8895 if (!rtl_expr_nonnegative_p (elt))
8896 return 0;
8897 }
8898
8899 return 1;
8900 }
8901
8902 case SYMBOL_REF:
8903 case LABEL_REF:
8904 /* These are always nonnegative. */
8905 return 1;
8906
8907 default:
8908 return 0;
8909 }
8910 }
8911
8912 #include "gt-fold-const.h"