1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20 /*@@ Fix lossage on folding division of big integers. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30
31 /* The entry points in this file are fold, size_int and size_binop.
32
33 fold takes a tree as argument and returns a simplified tree.
34
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
38
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'. */
41
42 #include <stdio.h>
43 #include <setjmp.h>
44 #include "config.h"
45 #include "flags.h"
46 #include "tree.h"
47
48 void lshift_double ();
49 void rshift_double ();
50 void lrotate_double ();
51 void rrotate_double ();
52 static tree const_binop ();
53 \f
54 /* To do constant folding on INTEGER_CST nodes requires 64-bit arithmetic.
55 We do that by representing the 64-bit integer as 8 shorts,
56 with only 8 bits stored in each short, as a positive number. */
57
58 /* Unpack a 64-bit integer into 8 shorts.
59 LOW and HI are the integer, as two `int' pieces.
60 SHORTS points to the array of shorts. */
61
62 static void
63 encode (shorts, low, hi)
64 short *shorts;
65 int low, hi;
66 {
67 shorts[0] = low & 0xff;
68 shorts[1] = (low >> 8) & 0xff;
69 shorts[2] = (low >> 16) & 0xff;
70 shorts[3] = (low >> 24) & 0xff;
71 shorts[4] = hi & 0xff;
72 shorts[5] = (hi >> 8) & 0xff;
73 shorts[6] = (hi >> 16) & 0xff;
74 shorts[7] = (hi >> 24) & 0xff;
75 }
76
77 /* Pack an array of 8 shorts into a 64-bit integer.
78 SHORTS points to the array of shorts.
79 The integer is stored into *LOW and *HI as two `int' pieces. */
80
81 static void
82 decode (shorts, low, hi)
83 short *shorts;
84 int *low, *hi;
85 {
86 /* The casts in the following statement should not be
87 needed, but they get around bugs in some C compilers. */
88 *low = (((long)shorts[3] << 24) | ((long)shorts[2] << 16)
89 | ((long)shorts[1] << 8) | (long)shorts[0]);
90 *hi = (((long)shorts[7] << 24) | ((long)shorts[6] << 16)
91 | ((long)shorts[5] << 8) | (long)shorts[4]);
92 }
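/* Editorial sketch, not part of the original sources: a value
   round-tripping through the 8-shorts representation, assuming a
   32-bit host `int'.  Guarded out because it is illustration only.  */
#if 0
static void
example_round_trip ()
{
  short s[8];
  int lo, hi;

  encode (s, 0x12345678, 0);    /* s[0] == 0x78, s[1] == 0x56,
                                   s[2] == 0x34, s[3] == 0x12,
                                   s[4] through s[7] == 0.  */
  decode (s, &lo, &hi);         /* lo == 0x12345678, hi == 0.  */
}
#endif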
93 \f
94 /* Make the integer constant T valid for its type
95 by setting to 0 or 1 all the bits in the constant
96 that don't belong in the type. */
97
98 static void
99 force_fit_type (t)
100 tree t;
101 {
102 register int prec = TYPE_PRECISION (TREE_TYPE (t));
103
104 if (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE)
105 prec = POINTER_SIZE;
106
107 /* First clear all bits that are beyond the type's precision. */
108
109 if (prec == 2 * HOST_BITS_PER_INT)
110 ;
111 else if (prec > HOST_BITS_PER_INT)
112 {
113 TREE_INT_CST_HIGH (t)
114 &= ~((-1) << (prec - HOST_BITS_PER_INT));
115 }
116 else
117 {
118 TREE_INT_CST_HIGH (t) = 0;
119 if (prec < HOST_BITS_PER_INT)
120 TREE_INT_CST_LOW (t)
121 &= ~((-1) << prec);
122 }
123
124 /* If it's a signed type and value's sign bit is set, extend the sign. */
125
126 if (! TREE_UNSIGNED (TREE_TYPE (t))
127 && prec != 2 * HOST_BITS_PER_INT
128 && (prec > HOST_BITS_PER_INT
129 ? TREE_INT_CST_HIGH (t) & (1 << (prec - HOST_BITS_PER_INT - 1))
130 : TREE_INT_CST_LOW (t) & (1 << (prec - 1))))
131 {
132 /* Value is negative:
133 set to 1 all the bits that are outside this type's precision. */
134 if (prec > HOST_BITS_PER_INT)
135 {
136 TREE_INT_CST_HIGH (t)
137 |= ((-1) << (prec - HOST_BITS_PER_INT));
138 }
139 else
140 {
141 TREE_INT_CST_HIGH (t) = -1;
142 if (prec < HOST_BITS_PER_INT)
143 TREE_INT_CST_LOW (t)
144 |= ((-1) << prec);
145 }
146 }
147 }
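/* Editorial example, not part of the original sources: the effect of
   force_fit_type on a constant too wide for its type, assuming
   HOST_BITS_PER_INT == 32 and an 8-bit signed type node such as the
   C front end's signed_char_type_node.  */
#if 0
  tree t = build_int_2 (0x1ff, 0);
  TREE_TYPE (t) = signed_char_type_node;
  force_fit_type (t);
  /* The bits above bit 7 are cleared, leaving 0xff; bit 7 is the sign
     bit and is set, so all bits outside the precision are then set
     again, and the node ends up holding low == -1, high == -1.  */
#endif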
148 \f
149 /* Add two 64-bit integers with 64-bit result.
150 Each argument is given as two `int' pieces.
151 One argument is L1 and H1; the other, L2 and H2.
152 The value is stored as two `int' pieces in *LV and *HV.
153 We use the 8-shorts representation internally. */
154
155 void
156 add_double (l1, h1, l2, h2, lv, hv)
157 int l1, h1, l2, h2;
158 int *lv, *hv;
159 {
160 short arg1[8];
161 short arg2[8];
162 register int carry = 0;
163 register int i;
164
165 encode (arg1, l1, h1);
166 encode (arg2, l2, h2);
167
168 for (i = 0; i < 8; i++)
169 {
170 carry += arg1[i] + arg2[i];
171 arg1[i] = carry & 0xff;
172 carry >>= 8;
173 }
174
175 decode (arg1, lv, hv);
176 }
177
178 /* Negate a 64-bit integer with a 64-bit result.
179 The argument is given as two `int' pieces in L1 and H1.
180 The value is stored as two `int' pieces in *LV and *HV.
181 In two's complement, -X == ~X + 1; the `+ 1' carries into the high word only when the low word is zero, hence the two cases below. */
182
183 void
184 neg_double (l1, h1, lv, hv)
185 int l1, h1;
186 int *lv, *hv;
187 {
188 if (l1 == 0)
189 {
190 *lv = 0;
191 *hv = - h1;
192 }
193 else
194 {
195 *lv = - l1;
196 *hv = ~ h1;
197 }
198 }
199 \f
200 /* Multiply two 64-bit integers with 64-bit result.
201 Each argument is given as two `int' pieces.
202 One argument is L1 and H1; the other, L2 and H2.
203 The value is stored as two `int' pieces in *LV and *HV.
204 We use the 8-shorts representation internally. */
205
206 void
207 mul_double (l1, h1, l2, h2, lv, hv)
208 int l1, h1, l2, h2;
209 int *lv, *hv;
210 {
211 short arg1[8];
212 short arg2[8];
213 short prod[16];
214 register int carry = 0;
215 register int i, j, k;
216
217 /* These two cases are used extensively, arising from pointer
218 combinations. */
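/* Editorial note: in each doubling below the carry out of the low
   word is recovered as (temp < l1).  Unsigned addition wraps around,
   so the sum is smaller than an operand exactly when a carry was
   lost.  */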
219 if (h2 == 0)
220 {
221 if (l2 == 2)
222 {
223 unsigned temp = l1 + l1;
224 *hv = h1 * 2 + (temp < l1);
225 *lv = temp;
226 return;
227 }
228 if (l2 == 4)
229 {
230 unsigned temp = l1 + l1;
231 h1 = h1 * 4 + ((temp < l1) << 1);
232 l1 = temp;
233 temp += temp;
234 h1 += (temp < l1);
235 *lv = temp;
236 *hv = h1;
237 return;
238 }
239 if (l2 == 8)
240 {
241 unsigned temp = l1 + l1;
242 h1 = h1 * 8 + ((temp < l1) << 2);
243 l1 = temp;
244 temp += temp;
245 h1 += (temp < l1) << 1;
246 l1 = temp;
247 temp += temp;
248 h1 += (temp < l1);
249 *lv = temp;
250 *hv = h1;
251 return;
252 }
253 }
254
255 encode (arg1, l1, h1);
256 encode (arg2, l2, h2);
257
258 bzero (prod, sizeof prod);
259
260 for (i = 0; i < 8; i++)
261 for (j = 0; j < 8; j++)
262 {
263 k = i + j;
264 carry = arg1[i] * arg2[j];
265 while (carry)
266 {
267 carry += prod[k];
268 prod[k] = carry & 0xff;
269 carry >>= 8;
270 k++;
271 }
272 }
273
274 decode (prod, lv, hv); /* @@decode ignores prod[8] -> prod[15] */
275 }
276 \f
277 /* Shift the 64-bit integer in L1, H1 left by COUNT places
278 keeping only PREC bits of result.
279 Shift right if COUNT is negative.
280 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
281 Store the value as two `int' pieces in *LV and *HV. */
282
283 void
284 lshift_double (l1, h1, count, prec, lv, hv, arith)
285 int l1, h1, count, prec;
286 int *lv, *hv;
287 int arith;
288 {
289 short arg1[8];
290 register int i;
291 register int carry;
292
293 if (count < 0)
294 {
295 rshift_double (l1, h1, - count, prec, lv, hv, arith);
296 return;
297 }
298
299 encode (arg1, l1, h1);
300
301 if (count > prec)
302 count = prec;
303
304 while (count > 0)
305 {
306 carry = 0;
307 for (i = 0; i < 8; i++)
308 {
309 carry += arg1[i] << 1;
310 arg1[i] = carry & 0xff;
311 carry >>= 8;
312 }
313 count--;
314 }
315
316 decode (arg1, lv, hv);
317 }
318
319 /* Shift the 64-bit integer in L1, H1 right by COUNT places
320 keeping only PREC bits of result. COUNT must be positive.
321 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
322 Store the value as two `int' pieces in *LV and *HV. */
323
324 void
325 rshift_double (l1, h1, count, prec, lv, hv, arith)
326 int l1, h1, count, prec;
327 int *lv, *hv;
328 int arith;
329 {
330 short arg1[8];
331 register int i;
332 register int carry;
333
334 encode (arg1, l1, h1);
335
336 if (count > prec)
337 count = prec;
338
339 while (count > 0)
340 {
341 carry = arith && arg1[7] >> 7;
342 for (i = 7; i >= 0; i--)
343 {
344 carry <<= 8;
345 carry += arg1[i];
346 arg1[i] = (carry >> 1) & 0xff;
347 }
348 count--;
349 }
350
351 decode (arg1, lv, hv);
352 }
353 \f
354 /* Rotate the 64-bit integer in L1, H1 left by COUNT places
355 keeping only PREC bits of result.
356 Rotate right if COUNT is negative.
357 Store the value as two `int' pieces in *LV and *HV. */
358
359 void
360 lrotate_double (l1, h1, count, prec, lv, hv)
361 int l1, h1, count, prec;
362 int *lv, *hv;
363 {
364 short arg1[8];
365 register int i;
366 register int carry;
367
368 if (count < 0)
369 {
370 rrotate_double (l1, h1, - count, prec, lv, hv);
371 return;
372 }
373
374 encode (arg1, l1, h1);
375
376 if (count > prec)
377 count = prec;
378
379 carry = arg1[7] >> 7;
380 while (count > 0)
381 {
382 for (i = 0; i < 8; i++)
383 {
384 carry += arg1[i] << 1;
385 arg1[i] = carry & 0xff;
386 carry >>= 8;
387 }
388 count--;
389 }
390
391 decode (arg1, lv, hv);
392 }
393
394 /* Rotate the 64-bit integer in L1, H1 right by COUNT places
395 keeping only PREC bits of result. COUNT must be positive.
396 Store the value as two `int' pieces in *LV and *HV. */
397
398 void
399 rrotate_double (l1, h1, count, prec, lv, hv)
400 int l1, h1, count, prec;
401 int *lv, *hv;
402 {
403 short arg1[8];
404 register int i;
405 register int carry;
406
407 encode (arg1, l1, h1);
408
409 if (count > prec)
410 count = prec;
411
412 carry = arg1[0] & 1;
413 while (count > 0)
414 {
415 for (i = 7; i >= 0; i--)
416 {
417 carry <<= 8;
418 carry += arg1[i];
419 arg1[i] = (carry >> 1) & 0xff;
420 }
421 count--;
422 }
423
424 decode (arg1, lv, hv);
425 }
426 \f
427 /* Divide 64-bit integer LNUM, HNUM by 64-bit integer LDEN, HDEN
428 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
429 CODE is a tree code for a kind of division, one of
430 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
431 or EXACT_DIV_EXPR.
432 It controls how the quotient is rounded to an integer.
433 UNS nonzero says do unsigned division. */
434
435 static void
436 div_and_round_double (code, uns,
437 lnum_orig, hnum_orig, lden_orig, hden_orig,
438 lquo, hquo, lrem, hrem)
439 enum tree_code code;
440 int uns;
441 int lnum_orig, hnum_orig; /* num == numerator == dividend */
442 int lden_orig, hden_orig; /* den == denominator == divisor */
443 int *lquo, *hquo, *lrem, *hrem;
444 {
445 int quo_neg = 0;
446 short num[9], den[8], quo[8]; /* extra element for scaling. */
447 register int i, j, work;
448 register int carry = 0;
449 unsigned int lnum = lnum_orig;
450 int hnum = hnum_orig;
451 unsigned int lden = lden_orig;
452 int hden = hden_orig;
453
454 if ((hden == 0) && (lden == 0))
455 abort ();
456
457 /* calculate quotient sign and convert operands to unsigned. */
458 if (!uns)
459 {
460 if (hden < 0)
461 {
462 quo_neg = ~ quo_neg;
463 neg_double (lden, hden, &lden, &hden);
464 }
465 if (hnum < 0)
466 {
467 quo_neg = ~ quo_neg;
468 neg_double (lnum, hnum, &lnum, &hnum);
469 }
470 }
471
472 if (hnum == 0 && hden == 0)
473 { /* single precision */
474 *hquo = *hrem = 0;
475 *lquo = lnum / lden; /* rounds toward zero since positive args */
476 goto finish_up;
477 }
478
479 if (hnum == 0)
480 { /* trivial case: dividend < divisor */
481 /* hden != 0 already checked. */
482 *hquo = *lquo = 0;
483 *hrem = hnum;
484 *lrem = lnum;
485 goto finish_up;
486 }
487
488 bzero (quo, sizeof quo);
489
490 bzero (num, sizeof num); /* to zero 9th element */
491 bzero (den, sizeof den);
492
493 encode (num, lnum, hnum);
494 encode (den, lden, hden);
495
496 /* This code requires more than just hden == 0.
497 We also have to require that we don't need more than three bytes
498 to hold CARRY. If we ever did need four bytes to hold it, we
499 would lose part of it when computing WORK on the next round. */
500 if (hden == 0 && ((lden << 8) >> 8) == lden)
501 { /* simpler algorithm */
502 /* hnum != 0 already checked. */
503 for (i = 7; i >= 0; i--)
504 {
505 work = num[i] + (carry << 8);
506 quo[i] = work / lden;
507 carry = work % lden;
508 }
509 }
510 else { /* full double precision,
511 with thanks to Don Knuth's
512 "Seminumerical Algorithms". */
513 #define BASE 256
514 int quo_est, scale, num_hi_sig, den_hi_sig, quo_hi_sig;
515
516 /* Find the highest non-zero divisor digit. */
517 for (i = 7; ; i--)
518 if (den[i] != 0) {
519 den_hi_sig = i;
520 break;
521 }
522 for (i = 7; ; i--)
523 if (num[i] != 0) {
524 num_hi_sig = i;
525 break;
526 }
527 quo_hi_sig = num_hi_sig - den_hi_sig + 1;
528
529 /* Ensure that the first digit of the divisor is at least BASE/2.
530 This is required by the quotient digit estimation algorithm. */
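/* Editorial note: this is the normalization step of Knuth's
   Algorithm D (The Art of Computer Programming, vol. 2, sec. 4.3.1).
   Scaling dividend and divisor by the same factor leaves the quotient
   unchanged, and a leading divisor digit >= BASE/2 is what bounds the
   error of the two-digit estimate in the main loop.  */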
531
532 scale = BASE / (den[den_hi_sig] + 1);
533 if (scale > 1) { /* scale divisor and dividend */
534 carry = 0;
535 for (i = 0; i <= 8; i++) {
536 work = (num[i] * scale) + carry;
537 num[i] = work & 0xff;
538 carry = work >> 8;
539 if (num[i] != 0) num_hi_sig = i;
540 }
541 carry = 0;
542 for (i = 0; i <= 7; i++) {
543 work = (den[i] * scale) + carry;
544 den[i] = work & 0xff;
545 carry = work >> 8;
546 if (den[i] != 0) den_hi_sig = i;
547 }
548 }
549
550 /* Main loop */
551 for (i = quo_hi_sig; i > 0; i--) {
552 /* guess the next quotient digit, quo_est, by dividing the first
553 two remaining dividend digits by the high order divisor digit.
554 quo_est is never low and is at most 2 high. */
555
556 int num_hi; /* index of highest remaining dividend digit */
557
558 num_hi = i + den_hi_sig;
559
560 work = (num[num_hi] * BASE) + (num_hi > 0 ? num[num_hi - 1] : 0);
561 if (num[num_hi] != den[den_hi_sig]) {
562 quo_est = work / den[den_hi_sig];
563 }
564 else {
565 quo_est = BASE - 1;
566 }
567
568 /* refine quo_est so it's usually correct, and at most one high. */
569 while ((den[den_hi_sig - 1] * quo_est)
570 > (((work - (quo_est * den[den_hi_sig])) * BASE)
571 + ((num_hi - 1) > 0 ? num[num_hi - 2] : 0)))
572 quo_est--;
573
574 /* Try QUO_EST as the quotient digit, by multiplying the
575 divisor by QUO_EST and subtracting from the remaining dividend.
576 Keep in mind that QUO_EST is the I - 1st digit. */
577
578 carry = 0;
579
580 for (j = 0; j <= den_hi_sig; j++)
581 {
582 int digit;
583
584 work = num[i + j - 1] - (quo_est * den[j]) + carry;
585 digit = work & 0xff;
586 carry = work >> 8;
587 if (digit < 0)
588 {
589 digit += BASE;
590 carry--;
591 }
592 num[i + j - 1] = digit;
593 }
594
595 /* if quo_est was high by one, then num[num_hi] went negative and
596 we need to correct things. */
597
598 if (num[num_hi] < 0)
599 {
600 quo_est--;
601 carry = 0; /* add divisor back in */
602 for (j = 0; j <= den_hi_sig; j++)
603 {
604 work = num[i + j - 1] + den[j] + carry;
605 if (work > BASE)
606 {
607 work -= BASE;
608 carry = 1;
609 }
610 else
611 {
612 carry = 0;
613 }
614 num[i + j - 1] = work;
615 }
616 num [num_hi] += carry;
617 }
618
619 /* store the quotient digit. */
620 quo[i - 1] = quo_est;
621 }
622 }
623
624 decode (quo, lquo, hquo);
625
626 finish_up:
627 /* if result is negative, make it so. */
628 if (quo_neg)
629 neg_double (*lquo, *hquo, lquo, hquo);
630
631 /* compute trial remainder: rem = num - (quo * den) */
632 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
633 neg_double (*lrem, *hrem, lrem, hrem);
634 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
635
636 switch (code)
637 {
638 case TRUNC_DIV_EXPR:
639 case TRUNC_MOD_EXPR: /* round toward zero */
640 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
641 return;
642
643 case FLOOR_DIV_EXPR:
644 case FLOOR_MOD_EXPR: /* round toward negative infinity */
645 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
646 {
647 /* quo = quo - 1; */
648 add_double (*lquo, *hquo, -1, -1, lquo, hquo);
649 }
650 else return;
651 break;
652
653 case CEIL_DIV_EXPR:
654 case CEIL_MOD_EXPR: /* round toward positive infinity */
655 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
656 {
657 add_double (*lquo, *hquo, 1, 0, lquo, hquo);
658 }
659 else return;
660 break;
661
662 case ROUND_DIV_EXPR:
663 case ROUND_MOD_EXPR: /* round to closest integer */
664 {
665 int labs_rem = *lrem, habs_rem = *hrem;
666 int labs_den = lden, habs_den = hden, ltwice, htwice;
667
668 /* get absolute values */
669 if (*hrem < 0) neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
670 if (hden < 0) neg_double (lden, hden, &labs_den, &habs_den);
671
672 /* if (2 * abs (lrem) >= abs (lden)) */
673 mul_double (2, 0, labs_rem, habs_rem, &ltwice, &htwice);
674 if (((unsigned) habs_den < (unsigned) htwice)
675 || (((unsigned) habs_den == (unsigned) htwice)
676 && ((unsigned) labs_den < (unsigned) ltwice)))
677 {
678 if (*hquo < 0)
679 /* quo = quo - 1; */
680 add_double (*lquo, *hquo, -1, -1, lquo, hquo);
681 else
682 /* quo = quo + 1; */
683 add_double (*lquo, *hquo, 1, 0, lquo, hquo);
684 }
685 else return;
686 }
687 break;
688
689 default:
690 abort ();
691 }
692
693 /* compute true remainder: rem = num - (quo * den) */
694 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
695 neg_double (*lrem, *hrem, lrem, hrem);
696 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
697 }
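/* Editorial example, not part of the original sources: dividing -7 by
   3 under the rounding modes handled above.
     TRUNC_DIV_EXPR / TRUNC_MOD_EXPR:  quo = -2, rem = -1  (toward zero)
     FLOOR_DIV_EXPR / FLOOR_MOD_EXPR:  quo = -3, rem =  2  (toward -inf)
     CEIL_DIV_EXPR  / CEIL_MOD_EXPR:   quo = -2, rem = -1  (toward +inf)
     ROUND_DIV_EXPR / ROUND_MOD_EXPR:  quo = -2, rem = -1  (closest)
   In every mode the identity num == quo * den + rem holds.  */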
698 \f
699 #if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
700
701 /* Check for infinity in an IEEE double precision number. */
702
703 int
704 target_isinf (x)
705 REAL_VALUE_TYPE x;
706 {
707 /* The IEEE 64-bit double format. */
708 union {
709 REAL_VALUE_TYPE d;
710 struct {
711 unsigned sign : 1;
712 unsigned exponent : 11;
713 unsigned mantissa1 : 20;
714 unsigned mantissa2;
715 } little_endian;
716 struct {
717 unsigned mantissa2;
718 unsigned mantissa1 : 20;
719 unsigned exponent : 11;
720 unsigned sign : 1;
721 } big_endian;
722 } u;
723
724 u.d = dconstm1;
725 if (u.big_endian.sign == 1)
726 {
727 u.d = x;
728 return (u.big_endian.exponent == 2047
729 && u.big_endian.mantissa1 == 0
730 && u.big_endian.mantissa2 == 0);
731 }
732 else
733 {
734 u.d = x;
735 return (u.little_endian.exponent == 2047
736 && u.little_endian.mantissa1 == 0
737 && u.little_endian.mantissa2 == 0);
738 }
739 }
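/* Editorial note: the `u.d = dconstm1' store above (repeated in the
   two functions below) is a run-time endianness probe.  -1.0 has its
   sign bit set, so if that bit is visible through the big_endian view
   the host stores doubles big-endian, and the matching view of X is
   the one to trust.  */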
740
741 /* Check whether an IEEE double precision number is a NaN. */
742
743 int
744 target_isnan (x)
745 REAL_VALUE_TYPE x;
746 {
747 /* The IEEE 64-bit double format. */
748 union {
749 REAL_VALUE_TYPE d;
750 struct {
751 unsigned sign : 1;
752 unsigned exponent : 11;
753 unsigned mantissa1 : 20;
754 unsigned mantissa2;
755 } little_endian;
756 struct {
757 unsigned mantissa2;
758 unsigned mantissa1 : 20;
759 unsigned exponent : 11;
760 unsigned sign : 1;
761 } big_endian;
762 } u;
763
764 u.d = dconstm1;
765 if (u.big_endian.sign == 1)
766 {
767 u.d = x;
768 return (u.big_endian.exponent == 2047
769 && (u.big_endian.mantissa1 != 0
770 || u.big_endian.mantissa2 != 0));
771 }
772 else
773 {
774 u.d = x;
775 return (u.little_endian.exponent == 2047
776 && (u.little_endian.mantissa1 != 0
777 || u.little_endian.mantissa2 != 0));
778 }
779 }
780
781 /* Check for a negative IEEE double precision number. */
782
783 int
784 target_negative (x)
785 REAL_VALUE_TYPE x;
786 {
787 /* The IEEE 64-bit double format. */
788 union {
789 REAL_VALUE_TYPE d;
790 struct {
791 unsigned sign : 1;
792 unsigned exponent : 11;
793 unsigned mantissa1 : 20;
794 unsigned mantissa2;
795 } little_endian;
796 struct {
797 unsigned mantissa2;
798 unsigned mantissa1 : 20;
799 unsigned exponent : 11;
800 unsigned sign : 1;
801 } big_endian;
802 } u;
803
804 u.d = dconstm1;
805 if (u.big_endian.sign == 1)
806 {
807 u.d = x;
808 return u.big_endian.sign;
809 }
810 else
811 {
812 u.d = x;
813 return u.little_endian.sign;
814 }
815 }
816 #else /* Target not IEEE */
817
818 /* Let's assume other float formats don't have infinity.
819 (This can be overridden by redefining REAL_VALUE_ISINF.) */
820
821 target_isinf (x)
822 REAL_VALUE_TYPE x;
823 {
824 return 0;
825 }
826
827 /* Let's assume other float formats don't have NaNs.
828 (This can be overridden by redefining REAL_VALUE_ISNAN.) */
829
830 target_isnan (x)
831 REAL_VALUE_TYPE x;
832 {
833 return 0;
834 }
835
836 /* Let's assume other float formats don't have minus zero.
837 (This can be overridden by redefining REAL_VALUE_NEGATIVE.) */
838
839 target_negative (x)
840 REAL_VALUE_TYPE x;
841 {
842 return x < 0;
843 }
844 #endif /* Target not IEEE */
845 \f
846 /* Split a tree IN into a constant and a variable part
847 that could be combined with CODE to make IN.
848 CODE must be a commutative arithmetic operation.
849 Store the constant part into *CONP and the variable into *VARP.
850 Return 1 if this was done; zero means the tree IN did not decompose
851 this way.
852
853 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.
854 Therefore, we must tell the caller whether the variable part
855 was subtracted. We do this by storing 1 or -1 into *VARSIGNP.
856 The value stored is the coefficient for the variable term.
857 The constant term we return should always be added;
858 we negate it if necessary. */
859
860 static int
861 split_tree (in, code, varp, conp, varsignp)
862 tree in;
863 enum tree_code code;
864 tree *varp, *conp;
865 int *varsignp;
866 {
867 register tree outtype = TREE_TYPE (in);
868 *varp = 0;
869 *conp = 0;
870
871 /* Strip any conversions that don't change the machine mode. */
872 while ((TREE_CODE (in) == NOP_EXPR
873 || TREE_CODE (in) == CONVERT_EXPR)
874 && (TYPE_MODE (TREE_TYPE (in))
875 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (in, 0)))))
876 in = TREE_OPERAND (in, 0);
877
878 if (TREE_CODE (in) == code
879 || (TREE_CODE (TREE_TYPE (in)) != REAL_TYPE
880 /* We can associate addition and subtraction together
881 (even though the C standard doesn't say so)
882 for integers because the value is not affected.
883 For reals, the value might be affected, so we can't. */
884 &&
885 ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
886 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
887 {
888 enum tree_code code = TREE_CODE (TREE_OPERAND (in, 0));
889 if (code == INTEGER_CST)
890 {
891 *conp = TREE_OPERAND (in, 0);
892 *varp = TREE_OPERAND (in, 1);
893 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
894 && TREE_TYPE (*varp) != outtype)
895 *varp = convert (outtype, *varp);
896 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
897 return 1;
898 }
899 if (TREE_CONSTANT (TREE_OPERAND (in, 1)))
900 {
901 *conp = TREE_OPERAND (in, 1);
902 *varp = TREE_OPERAND (in, 0);
903 *varsignp = 1;
904 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
905 && TREE_TYPE (*varp) != outtype)
906 *varp = convert (outtype, *varp);
907 if (TREE_CODE (in) == MINUS_EXPR)
908 {
909 /* If operation is subtraction and constant is second,
910 must negate it to get an additive constant.
911 And this cannot be done unless it is a manifest constant.
912 It could also be the address of a static variable.
913 We cannot negate that, so give up. */
914 if (TREE_CODE (*conp) == INTEGER_CST)
915 /* Subtracting from integer_zero_node loses for long long. */
916 *conp = fold (build1 (NEGATE_EXPR, TREE_TYPE (*conp), *conp));
917 else
918 return 0;
919 }
920 return 1;
921 }
922 if (TREE_CONSTANT (TREE_OPERAND (in, 0)))
923 {
924 *conp = TREE_OPERAND (in, 0);
925 *varp = TREE_OPERAND (in, 1);
926 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
927 && TREE_TYPE (*varp) != outtype)
928 *varp = convert (outtype, *varp);
929 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
930 return 1;
931 }
932 }
933 return 0;
934 }
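/* Editorial example, not part of the original sources: for IN = `x - 5'
   and CODE = PLUS_EXPR, split_tree stores x in *VARP, 1 in *VARSIGNP,
   and the folded constant -5 in *CONP (negated so that the constant
   part can always be added back).  For IN = `5 - x' it stores x, -1
   and 5 respectively.  */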
935 \f
936 /* Combine two constants ARG1 and ARG2 under operation CODE
937 to produce a new constant.
938 We assume ARG1 and ARG2 have the same data type,
939 or at least are the same kind of constant and the same machine mode. */
940
941 /* Handle floating overflow for `const_binop'. */
942 static jmp_buf const_binop_error;
943
944 static tree
945 const_binop (code, arg1, arg2)
946 enum tree_code code;
947 register tree arg1, arg2;
948 {
949 if (TREE_CODE (arg1) == INTEGER_CST)
950 {
951 register int int1l = TREE_INT_CST_LOW (arg1);
952 register int int1h = TREE_INT_CST_HIGH (arg1);
953 int int2l = TREE_INT_CST_LOW (arg2);
954 int int2h = TREE_INT_CST_HIGH (arg2);
955 int low, hi;
956 int garbagel, garbageh;
957 register tree t;
958 int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
959
960 switch (code)
961 {
962 case BIT_IOR_EXPR:
963 t = build_int_2 (int1l | int2l, int1h | int2h);
964 break;
965
966 case BIT_XOR_EXPR:
967 t = build_int_2 (int1l ^ int2l, int1h ^ int2h);
968 break;
969
970 case BIT_AND_EXPR:
971 t = build_int_2 (int1l & int2l, int1h & int2h);
972 break;
973
974 case BIT_ANDTC_EXPR:
975 t = build_int_2 (int1l & ~int2l, int1h & ~int2h);
976 break;
977
978 case RSHIFT_EXPR:
979 int2l = - int2l;
980 case LSHIFT_EXPR:
981 lshift_double (int1l, int1h, int2l,
982 TYPE_PRECISION (TREE_TYPE (arg1)),
983 &low, &hi,
984 !uns);
985 t = build_int_2 (low, hi);
986 break;
987
988 case RROTATE_EXPR:
989 int2l = - int2l;
990 case LROTATE_EXPR:
991 lrotate_double (int1l, int1h, int2l,
992 TYPE_PRECISION (TREE_TYPE (arg1)),
993 &low, &hi);
994 t = build_int_2 (low, hi);
995 break;
996
997 case PLUS_EXPR:
998 if (int1h == 0)
999 {
1000 int2l += int1l;
1001 if ((unsigned) int2l < int1l)
1002 int2h += 1;
1003 t = build_int_2 (int2l, int2h);
1004 break;
1005 }
1006 if (int2h == 0)
1007 {
1008 int1l += int2l;
1009 if ((unsigned) int1l < int2l)
1010 int1h += 1;
1011 t = build_int_2 (int1l, int1h);
1012 break;
1013 }
1014 add_double (int1l, int1h, int2l, int2h, &low, &hi);
1015 t = build_int_2 (low, hi);
1016 break;
1017
1018 case MINUS_EXPR:
1019 if (int2h == 0 && int2l == 0)
1020 {
1021 t = build_int_2 (int1l, int1h);
1022 break;
1023 }
1024 neg_double (int2l, int2h, &int2l, &int2h);
1025 add_double (int1l, int1h, int2l, int2h, &low, &hi);
1026 t = build_int_2 (low, hi);
1027 break;
1028
1029 case MULT_EXPR:
1030 /* Optimize simple cases. */
1031 if (int1h == 0)
1032 {
1033 unsigned temp;
1034
1035 switch (int1l)
1036 {
1037 case 0:
1038 t = build_int_2 (0, 0);
1039 goto got_it;
1040 case 1:
1041 t = build_int_2 (int2l, int2h);
1042 goto got_it;
1043 case 2:
1044 temp = int2l + int2l;
1045 int2h = int2h * 2 + (temp < int2l);
1046 t = build_int_2 (temp, int2h);
1047 goto got_it;
1048 case 3:
1049 temp = int2l + int2l + int2l;
1050 int2h = int2h * 3 + (temp < int2l);
1051 t = build_int_2 (temp, int2h);
1052 goto got_it;
1053 case 4:
1054 temp = int2l + int2l;
1055 int2h = int2h * 4 + ((temp < int2l) << 1);
1056 int2l = temp;
1057 temp += temp;
1058 int2h += (temp < int2l);
1059 t = build_int_2 (temp, int2h);
1060 goto got_it;
1061 case 8:
1062 temp = int2l + int2l;
1063 int2h = int2h * 8 + ((temp < int2l) << 2);
1064 int2l = temp;
1065 temp += temp;
1066 int2h += (temp < int2l) << 1;
1067 int2l = temp;
1068 temp += temp;
1069 int2h += (temp < int2l);
1070 t = build_int_2 (temp, int2h);
1071 goto got_it;
1072 default:
1073 break;
1074 }
1075 }
1076
1077 if (int2h == 0)
1078 {
1079 if (int2l == 0)
1080 {
1081 t = build_int_2 (0, 0);
1082 break;
1083 }
1084 if (int2l == 1)
1085 {
1086 t = build_int_2 (int1l, int1h);
1087 break;
1088 }
1089 }
1090
1091 mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1092 t = build_int_2 (low, hi);
1093 break;
1094
1095 case TRUNC_DIV_EXPR:
1096 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1097 case EXACT_DIV_EXPR:
1098 /* This is a shortcut for a common special case.
1099 It reduces the number of tree nodes generated
1100 and saves time. */
1101 if (int2h == 0 && int2l > 0
1102 && TREE_TYPE (arg1) == sizetype
1103 && int1h == 0 && int1l >= 0)
1104 {
1105 if (code == CEIL_DIV_EXPR)
1106 int1l += int2l-1;
1107 return size_int (int1l / int2l);
1108 }
1109 case ROUND_DIV_EXPR:
1110 if (int2h == 0 && int2l == 1)
1111 {
1112 t = build_int_2 (int1l, int1h);
1113 break;
1114 }
1115 if (int1l == int2l && int1h == int2h)
1116 {
1117 if ((int1l | int1h) == 0)
1118 abort ();
1119 t = build_int_2 (1, 0);
1120 break;
1121 }
1122 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1123 &low, &hi, &garbagel, &garbageh);
1124 t = build_int_2 (low, hi);
1125 break;
1126
1127 case TRUNC_MOD_EXPR: case ROUND_MOD_EXPR:
1128 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1129 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1130 &garbagel, &garbageh, &low, &hi);
1131 t = build_int_2 (low, hi);
1132 break;
1133
1134 case MIN_EXPR:
1135 case MAX_EXPR:
1136 if (uns)
1137 {
1138 low = (((unsigned) int1h < (unsigned) int2h)
1139 || (((unsigned) int1h == (unsigned) int2h)
1140 && ((unsigned) int1l < (unsigned) int2l)));
1141 }
1142 else
1143 {
1144 low = ((int1h < int2h)
1145 || ((int1h == int2h)
1146 && ((unsigned) int1l < (unsigned) int2l)));
1147 }
1148 if (low == (code == MIN_EXPR))
1149 t = build_int_2 (int1l, int1h);
1150 else
1151 t = build_int_2 (int2l, int2h);
1152 break;
1153
1154 default:
1155 abort ();
1156 }
1157 got_it:
1158 TREE_TYPE (t) = TREE_TYPE (arg1);
1159 force_fit_type (t);
1160 return t;
1161 }
1162 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1163 if (TREE_CODE (arg1) == REAL_CST)
1164 {
1165 register REAL_VALUE_TYPE d1;
1166 register REAL_VALUE_TYPE d2;
1167 register REAL_VALUE_TYPE value;
1168
1169 d1 = TREE_REAL_CST (arg1);
1170 d2 = TREE_REAL_CST (arg2);
1171 if (setjmp (const_binop_error))
1172 {
1173 warning ("floating overflow in constant folding");
1174 return build (code, TREE_TYPE (arg1), arg1, arg2);
1175 }
1176 set_float_handler (const_binop_error);
1177
1178 #ifdef REAL_ARITHMETIC
1179 REAL_ARITHMETIC (value, code, d1, d2);
1180 #else
1181 switch (code)
1182 {
1183 case PLUS_EXPR:
1184 value = d1 + d2;
1185 break;
1186
1187 case MINUS_EXPR:
1188 value = d1 - d2;
1189 break;
1190
1191 case MULT_EXPR:
1192 value = d1 * d2;
1193 break;
1194
1195 case RDIV_EXPR:
1196 #ifndef REAL_INFINITY
1197 if (d2 == 0)
1198 abort ();
1199 #endif
1200
1201 value = d1 / d2;
1202 break;
1203
1204 case MIN_EXPR:
1205 value = MIN (d1, d2);
1206 break;
1207
1208 case MAX_EXPR:
1209 value = MAX (d1, d2);
1210 break;
1211
1212 default:
1213 abort ();
1214 }
1215 #endif /* no REAL_ARITHMETIC */
1216 set_float_handler (0);
1217 return build_real (TREE_TYPE (arg1),
1218 REAL_VALUE_TRUNCATE (TYPE_MODE (TREE_TYPE (arg1)), value));
1219 }
1220 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1221 if (TREE_CODE (arg1) == COMPLEX_CST)
1222 {
1223 register tree r1 = TREE_REALPART (arg1);
1224 register tree i1 = TREE_IMAGPART (arg1);
1225 register tree r2 = TREE_REALPART (arg2);
1226 register tree i2 = TREE_IMAGPART (arg2);
1227 register tree t;
1228
1229 switch (code)
1230 {
1231 case PLUS_EXPR:
1232 t = build_complex (const_binop (PLUS_EXPR, r1, r2),
1233 const_binop (PLUS_EXPR, i1, i2));
1234 break;
1235
1236 case MINUS_EXPR:
1237 t = build_complex (const_binop (MINUS_EXPR, r1, r2),
1238 const_binop (MINUS_EXPR, i1, i2));
1239 break;
1240
1241 case MULT_EXPR:
1242 t = build_complex (const_binop (MINUS_EXPR,
1243 const_binop (MULT_EXPR, r1, r2),
1244 const_binop (MULT_EXPR, i1, i2)),
1245 const_binop (PLUS_EXPR,
1246 const_binop (MULT_EXPR, r1, i2),
1247 const_binop (MULT_EXPR, i1, r2)));
1248 break;
1249
1250 case RDIV_EXPR:
1251 {
1252 register tree magsquared
1253 = const_binop (PLUS_EXPR,
1254 const_binop (MULT_EXPR, r2, r2),
1255 const_binop (MULT_EXPR, i2, i2));
1256 t = build_complex (const_binop (RDIV_EXPR,
1257 const_binop (PLUS_EXPR,
1258 const_binop (MULT_EXPR, r1, r2),
1259 const_binop (MULT_EXPR, i1, i2)),
1260 magsquared),
1261 const_binop (RDIV_EXPR,
1262 const_binop (MINUS_EXPR,
1263 const_binop (MULT_EXPR, i1, r2),
1264 const_binop (MULT_EXPR, r1, i2)),
1265 magsquared));
1266 }
1267 break;
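/* Editorial note: the RDIV_EXPR case above is the textbook identity
   (a + bi) / (c + di) == ((ac + bd) + (bc - ad)i) / (c*c + d*d).  */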
1268
1269 default:
1270 abort ();
1271 }
1272 TREE_TYPE (t) = TREE_TYPE (arg1);
1273 return t;
1274 }
1275 return 0;
1276 }
1277 \f
1278 /* Return an INTEGER_CST with value NUMBER and type from `sizetype'. */
1279
1280 tree
1281 size_int (number)
1282 unsigned int number;
1283 {
1284 register tree t;
1285 /* Type-size nodes already made for small sizes. */
1286 static tree size_table[2*HOST_BITS_PER_INT+1];
1287
1288 if (number >= 0 && number < 2*HOST_BITS_PER_INT+1 && size_table[number] != 0)
1289 return size_table[number];
1290 if (number >= 0 && number < 2*HOST_BITS_PER_INT+1)
1291 {
1292 push_obstacks_nochange ();
1293 /* Make this a permanent node. */
1294 end_temporary_allocation ();
1295 t = build_int_2 (number, 0);
1296 TREE_TYPE (t) = sizetype;
1297 size_table[number] = t;
1298 pop_obstacks ();
1299 }
1300 else
1301 {
1302 t = build_int_2 (number, 0);
1303 TREE_TYPE (t) = sizetype;
1304 }
1305 return t;
1306 }
1307
1308 /* Combine operands OP1 and OP2 with arithmetic operation CODE.
1309 CODE is a tree code. Data type is taken from `sizetype'.
1310 If the operands are constant, so is the result. */
1311
1312 tree
1313 size_binop (code, arg0, arg1)
1314 enum tree_code code;
1315 tree arg0, arg1;
1316 {
1317 /* Handle the special case of two integer constants faster. */
1318 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1319 {
1320 /* And some specific cases even faster than that. */
1321 if (code == PLUS_EXPR
1322 && TREE_INT_CST_LOW (arg0) == 0
1323 && TREE_INT_CST_HIGH (arg0) == 0)
1324 return arg1;
1325 if (code == MINUS_EXPR
1326 && TREE_INT_CST_LOW (arg1) == 0
1327 && TREE_INT_CST_HIGH (arg1) == 0)
1328 return arg0;
1329 if (code == MULT_EXPR
1330 && TREE_INT_CST_LOW (arg0) == 1
1331 && TREE_INT_CST_HIGH (arg0) == 0)
1332 return arg1;
1333 /* Handle general case of two integer constants. */
1334 return const_binop (code, arg0, arg1);
1335 }
1336
1337 if (arg0 == error_mark_node || arg1 == error_mark_node)
1338 return error_mark_node;
1339
1340 return fold (build (code, sizetype, arg0, arg1));
1341 }
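/* Editorial sketch, not part of the original sources: a typical use of
   the two entry points above when laying out a type.  size_int caches
   the small nodes this creates, so repeated requests share one tree.  */
#if 0
  tree bytes = size_int (4);
  tree bits = size_binop (MULT_EXPR, bytes, size_int (BITS_PER_UNIT));
#endif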
1342 \f
1343 /* Given T, a tree representing type conversion of ARG1, a constant,
1344 return a constant tree representing the result of conversion. */
1345
1346 static tree
1347 fold_convert (t, arg1)
1348 register tree t;
1349 register tree arg1;
1350 {
1351 register tree type = TREE_TYPE (t);
1352
1353 if (TREE_CODE (type) == POINTER_TYPE
1354 || TREE_CODE (type) == INTEGER_TYPE
1355 || TREE_CODE (type) == ENUMERAL_TYPE)
1356 {
1357 if (TREE_CODE (arg1) == INTEGER_CST)
1358 {
1359 /* Given an integer constant, make new constant with new type,
1360 appropriately sign-extended or truncated. */
1361 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1362 TREE_INT_CST_HIGH (arg1));
1363 TREE_TYPE (t) = type;
1364 force_fit_type (t);
1365 }
1366 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1367 else if (TREE_CODE (arg1) == REAL_CST)
1368 {
1369 REAL_VALUE_TYPE
1370 l = real_value_from_int_cst (TYPE_MIN_VALUE (type)),
1371 x = TREE_REAL_CST (arg1),
1372 u = real_value_from_int_cst (TYPE_MAX_VALUE (type));
1373 if (! ((REAL_VALUES_LESS (l, x) || REAL_VALUES_EQUAL (l, x))
1374 && (REAL_VALUES_LESS (x, u) || REAL_VALUES_EQUAL (x, u))))
1375 {
1376 warning ("real constant out of range for integer conversion");
1377 return t;
1378 }
1379 #ifndef REAL_ARITHMETIC
1380 {
1381 REAL_VALUE_TYPE d;
1382 int low, high;
1383 int half_word = 1 << (HOST_BITS_PER_INT / 2);
1384
1385 d = TREE_REAL_CST (arg1);
1386 if (d < 0)
1387 d = -d;
1388
1389 high = (int) (d / half_word / half_word);
1390 d -= (REAL_VALUE_TYPE) high * half_word * half_word;
1391 low = (unsigned) d;
1392 if (TREE_REAL_CST (arg1) < 0)
1393 neg_double (low, high, &low, &high);
1394 t = build_int_2 (low, high);
1395 }
1396 #else
1397 {
1398 int low, high;
1399 REAL_VALUE_TO_INT (low, high, TREE_REAL_CST (arg1));
1400 t = build_int_2 (low, high);
1401 }
1402 #endif
1403 TREE_TYPE (t) = type;
1404 force_fit_type (t);
1405 }
1406 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1407 TREE_TYPE (t) = type;
1408 }
1409 else if (TREE_CODE (type) == REAL_TYPE)
1410 {
1411 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1412 if (TREE_CODE (arg1) == INTEGER_CST)
1413 return build_real_from_int_cst (type, arg1);
1414 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1415 if (TREE_CODE (arg1) == REAL_CST)
1416 return build_real (type, REAL_VALUE_TRUNCATE (TYPE_MODE (type),
1417 TREE_REAL_CST (arg1)));
1418 }
1419 TREE_CONSTANT (t) = 1;
1420 return t;
1421 }
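/* Editorial example, not part of the original sources: fold_convert
   turns `(signed char) 0x1ff' into a signed char INTEGER_CST of value
   -1 (truncated and sign-extended by force_fit_type above), and warns
   about, rather than folds, a REAL_CST that is out of range for the
   integer type.  */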
1422 \f
1423 /* Return an expr equal to X but certainly not valid as an lvalue. */
1424
1425 tree
1426 non_lvalue (x)
1427 tree x;
1428 {
1429 tree result;
1430
1431 /* These things are certainly not lvalues. */
1432 if (TREE_CODE (x) == NON_LVALUE_EXPR
1433 || TREE_CODE (x) == INTEGER_CST
1434 || TREE_CODE (x) == REAL_CST
1435 || TREE_CODE (x) == STRING_CST
1436 || TREE_CODE (x) == ADDR_EXPR)
1437 return x;
1438
1439 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1440 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1441 return result;
1442 }
1443 \f
1444 /* Given a tree comparison code, return the code that is the logical inverse
1445 of the given code. It is not safe to do this for floating-point
1446 comparisons, except for NE_EXPR and EQ_EXPR. */
1447
1448 static enum tree_code
1449 invert_tree_comparison (code)
1450 enum tree_code code;
1451 {
1452 switch (code)
1453 {
1454 case EQ_EXPR:
1455 return NE_EXPR;
1456 case NE_EXPR:
1457 return EQ_EXPR;
1458 case GT_EXPR:
1459 return LE_EXPR;
1460 case GE_EXPR:
1461 return LT_EXPR;
1462 case LT_EXPR:
1463 return GE_EXPR;
1464 case LE_EXPR:
1465 return GT_EXPR;
1466 default:
1467 abort ();
1468 }
1469 }
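/* Editorial note: the floating-point caveat above is about NaNs.
   Under IEEE semantics every ordered comparison involving a NaN is
   false, so `a > b' being false does not make `a <= b' true; only
   EQ_EXPR and NE_EXPR invert safely.  */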
1470
1471 /* Similar, but return the comparison that results if the operands are
1472 swapped. This is safe for floating-point. */
1473
1474 static enum tree_code
1475 swap_tree_comparison (code)
1476 enum tree_code code;
1477 {
1478 switch (code)
1479 {
1480 case EQ_EXPR:
1481 case NE_EXPR:
1482 return code;
1483 case GT_EXPR:
1484 return LT_EXPR;
1485 case GE_EXPR:
1486 return LE_EXPR;
1487 case LT_EXPR:
1488 return GT_EXPR;
1489 case LE_EXPR:
1490 return GE_EXPR;
1491 default:
1492 abort ();
1493 }
1494 }
1495 \f
1496 /* Return nonzero if two operands are necessarily equal.
1497 If ONLY_CONST is non-zero, only return non-zero for constants. */
1498
1499 int
1500 operand_equal_p (arg0, arg1, only_const)
1501 tree arg0, arg1;
1502 int only_const;
1503 {
1504 /* If the two types differ in signedness, then we can't consider
1505 them equal. We must check this before the STRIP_NOPS calls
1506 because they may change the signedness of the arguments. */
1507 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1508 return 0;
1509
1510 STRIP_NOPS (arg0);
1511 STRIP_NOPS (arg1);
1512
1513 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1514 We don't care about side effects in that case because the SAVE_EXPR
1515 takes care of that for us. */
1516 if (TREE_CODE (arg0) == SAVE_EXPR && arg0 == arg1)
1517 return ! only_const;
1518
1519 if (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1))
1520 return 0;
1521
1522 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1523 && TREE_CODE (arg0) == ADDR_EXPR
1524 && TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0))
1525 return 1;
1526
1527 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1528 && TREE_CODE (arg0) == INTEGER_CST
1529 && TREE_INT_CST_LOW (arg0) == TREE_INT_CST_LOW (arg1)
1530 && TREE_INT_CST_HIGH (arg0) == TREE_INT_CST_HIGH (arg1))
1531 return 1;
1532
1533 /* Detect when real constants are equal.
1534 But reject weird values because we can't be sure what to do with them. */
1535 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1536 && TREE_CODE (arg0) == REAL_CST
1537 && !bcmp (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1),
1538 sizeof (REAL_VALUE_TYPE))
1539 /* Some people say these are not necessary.
1540 But they do little harm, and taking them out would be risky.
1541 So leave them and let's not spend any more time on them--rms. */
1542 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg0))
1543 && !REAL_VALUE_ISNAN (TREE_REAL_CST (arg0)))
1544 return 1;
1545
1546 if (only_const)
1547 return 0;
1548
1549 if (arg0 == arg1)
1550 return 1;
1551
1552 if (TREE_CODE (arg0) != TREE_CODE (arg1))
1553 return 0;
1554 /* This is needed for conversions and for COMPONENT_REF.
1555 Might as well play it safe and always test this. */
1556 if (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1557 return 0;
1558
1559 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1560 {
1561 case '1':
1562 /* Two conversions are equal only if signedness and modes match. */
1563 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1564 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1565 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1566 return 0;
1567
1568 return operand_equal_p (TREE_OPERAND (arg0, 0),
1569 TREE_OPERAND (arg1, 0), 0);
1570
1571 case '<':
1572 case '2':
1573 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1574 TREE_OPERAND (arg1, 0), 0)
1575 && operand_equal_p (TREE_OPERAND (arg0, 1),
1576 TREE_OPERAND (arg1, 1), 0));
1577
1578 case 'r':
1579 switch (TREE_CODE (arg0))
1580 {
1581 case INDIRECT_REF:
1582 return operand_equal_p (TREE_OPERAND (arg0, 0),
1583 TREE_OPERAND (arg1, 0), 0);
1584
1585 case COMPONENT_REF:
1586 case ARRAY_REF:
1587 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1588 TREE_OPERAND (arg1, 0), 0)
1589 && operand_equal_p (TREE_OPERAND (arg0, 1),
1590 TREE_OPERAND (arg1, 1), 0));
1591
1592 case BIT_FIELD_REF:
1593 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1594 TREE_OPERAND (arg1, 0), 0)
1595 && operand_equal_p (TREE_OPERAND (arg0, 1),
1596 TREE_OPERAND (arg1, 1), 0)
1597 && operand_equal_p (TREE_OPERAND (arg0, 2),
1598 TREE_OPERAND (arg1, 2), 0));
1599 }
1600 break;
1601 }
1602
1603 return 0;
1604 }
1605 \f
1606 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1607 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1608
1609 When in doubt, return 0. */
1610
1611 static int
1612 operand_equal_for_comparison_p (arg0, arg1, other)
1613 tree arg0, arg1;
1614 tree other;
1615 {
1616 int unsignedp1, unsignedpo;
1617 tree primarg1, primother;
1618 int correct_width;
1619
1620 if (operand_equal_p (arg0, arg1, 0))
1621 return 1;
1622
1623 if (TREE_CODE (TREE_TYPE (arg0)) != INTEGER_TYPE)
1624 return 0;
1625
1626 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
1627 actual comparison operand, ARG0.
1628
1629 First throw away any conversions to wider types
1630 already present in the operands. */
1631
1632 primarg1 = get_narrower (arg1, &unsignedp1);
1633 primother = get_narrower (other, &unsignedpo);
1634
1635 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
1636 if (unsignedp1 == unsignedpo
1637 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
1638 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
1639 {
1640 tree type = TREE_TYPE (arg0);
1641
1642 /* Make sure shorter operand is extended the right way
1643 to match the longer operand. */
1644 primarg1 = convert (signed_or_unsigned_type (unsignedp1,
1645 TREE_TYPE (primarg1)),
1646 primarg1);
1647
1648 if (operand_equal_p (arg0, convert (type, primarg1), 0))
1649 return 1;
1650 }
1651
1652 return 0;
1653 }
1654 \f
1655 /* See if ARG is an expression that is either a comparison or is performing
1656 arithmetic on comparisons. The comparisons must only be comparing
1657 two different values, which will be stored in *CVAL1 and *CVAL2; if
1658 they are non-zero it means that some operands have already been found.
1659 No variables may be used anywhere else in the expression except in the
1660 comparisons.
1661
1662 If this is true, return 1. Otherwise, return zero. */
1663
1664 static int
1665 twoval_comparison_p (arg, cval1, cval2)
1666 tree arg;
1667 tree *cval1, *cval2;
1668 {
1669 enum tree_code code = TREE_CODE (arg);
1670 char class = TREE_CODE_CLASS (code);
1671
1672 /* We can handle some of the 'e' cases here. */
1673 if (class == 'e'
1674 && (code == TRUTH_NOT_EXPR
1675 || (code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0)))
1676 class = '1';
1677 else if (class == 'e'
1678 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
1679 || code == COMPOUND_EXPR))
1680 class = '2';
1681
1682 switch (class)
1683 {
1684 case '1':
1685 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);
1686
1687 case '2':
1688 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
1689 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));
1690
1691 case 'c':
1692 return 1;
1693
1694 case 'e':
1695 if (code == COND_EXPR)
1696 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
1697 && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
1698 && twoval_comparison_p (TREE_OPERAND (arg, 2),
1699 cval1, cval2));
1700 return 0;
1701
1702 case '<':
1703 /* First see if we can handle the first operand, then the second. For
1704 the second operand, we know *CVAL1 can't be zero. It must be that
1705 one side of the comparison is each of the values; test for the
1706 case where this isn't true by failing if the two operands
1707 are the same. */
1708
1709 if (operand_equal_p (TREE_OPERAND (arg, 0),
1710 TREE_OPERAND (arg, 1), 0))
1711 return 0;
1712
1713 if (*cval1 == 0)
1714 *cval1 = TREE_OPERAND (arg, 0);
1715 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
1716 ;
1717 else if (*cval2 == 0)
1718 *cval2 = TREE_OPERAND (arg, 0);
1719 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
1720 ;
1721 else
1722 return 0;
1723
1724 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
1725 ;
1726 else if (*cval2 == 0)
1727 *cval2 = TREE_OPERAND (arg, 1);
1728 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
1729 ;
1730 else
1731 return 0;
1732
1733 return 1;
1734 }
1735
1736 return 0;
1737 }
1738 \f
1739 /* ARG is a tree that is known to contain just arithmetic operations and
1740 comparisons. Evaluate the operations in the tree substituting NEW0 for
1741 any occurrence of OLD0 as an operand of a comparison and likewise for
1742 NEW1 and OLD1. */
1743
1744 static tree
1745 eval_subst (arg, old0, new0, old1, new1)
1746 tree arg;
1747 tree old0, new0, old1, new1;
1748 {
1749 tree type = TREE_TYPE (arg);
1750 enum tree_code code = TREE_CODE (arg);
1751 char class = TREE_CODE_CLASS (code);
1752
1753 /* We can handle some of the 'e' cases here. */
1754 if (class == 'e' && code == TRUTH_NOT_EXPR)
1755 class = '1';
1756 else if (class == 'e'
1757 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
1758 class = '2';
1759
1760 switch (class)
1761 {
1762 case '1':
1763 return fold (build1 (code, type,
1764 eval_subst (TREE_OPERAND (arg, 0),
1765 old0, new0, old1, new1)));
1766
1767 case '2':
1768 return fold (build (code, type,
1769 eval_subst (TREE_OPERAND (arg, 0),
1770 old0, new0, old1, new1),
1771 eval_subst (TREE_OPERAND (arg, 1),
1772 old0, new0, old1, new1)));
1773
1774 case 'e':
1775 switch (code)
1776 {
1777 case SAVE_EXPR:
1778 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
1779
1780 case COMPOUND_EXPR:
1781 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
1782
1783 case COND_EXPR:
1784 return fold (build (code, type,
1785 eval_subst (TREE_OPERAND (arg, 0),
1786 old0, new0, old1, new1),
1787 eval_subst (TREE_OPERAND (arg, 1),
1788 old0, new0, old1, new1),
1789 eval_subst (TREE_OPERAND (arg, 2),
1790 old0, new0, old1, new1)));
1791 }
1792
1793 case '<':
1794 {
1795 tree arg0 = TREE_OPERAND (arg, 0);
1796 tree arg1 = TREE_OPERAND (arg, 1);
1797
1798 /* We need to check both for exact equality and tree equality. The
1799 former will be true if the operand has a side-effect. In that
1800 case, we know the operand occurred exactly once. */
1801
1802 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
1803 arg0 = new0;
1804 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
1805 arg0 = new1;
1806
1807 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
1808 arg1 = new0;
1809 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
1810 arg1 = new1;
1811
1812 return fold (build (code, type, arg0, arg1));
1813 }
1814 }
1815
1816 return arg;
1817 }
1818 \f
1819 /* Return a tree for the case when the result of an expression is RESULT
1820 converted to TYPE and OMITTED was previously an operand of the expression
1821 but is now not needed (e.g., we folded OMITTED * 0).
1822
1823 If OMITTED has side effects, we must evaluate it. Otherwise, just do
1824 the conversion of RESULT to TYPE. */
1825
1826 static tree
1827 omit_one_operand (type, result, omitted)
1828 tree type, result, omitted;
1829 {
1830 tree t = convert (type, result);
1831
1832 if (TREE_SIDE_EFFECTS (omitted))
1833 return build (COMPOUND_EXPR, type, omitted, t);
1834
1835 return t;
1836 }
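/* Editorial example: when `f () * 0' is folded, f must still be
   called for its side effects, so the result is built as the
   COMPOUND_EXPR `(f (), 0)'; for a plain variable, `x * 0' simply
   yields 0 converted to TYPE.  */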
1837 \f
1838 /* Return a simplified tree node for the truth-negation of ARG
1839 (perhaps by altering ARG). It is known that ARG is an operation that
1840 returns a truth value (0 or 1). */
1841
1842 tree
1843 invert_truthvalue (arg)
1844 tree arg;
1845 {
1846 tree type = TREE_TYPE (arg);
1847 enum tree_code code = TREE_CODE (arg);
1848
1849 /* If this is a comparison, we can simply invert it, except for
1850 floating-point non-equality comparisons, in which case we just
1851 enclose a TRUTH_NOT_EXPR around what we have. */
1852
1853 if (TREE_CODE_CLASS (code) == '<')
1854 {
1855 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 0))) == REAL_TYPE
1856 && code != NE_EXPR && code != EQ_EXPR)
1857 return build1 (TRUTH_NOT_EXPR, type, arg);
1858 else
1859 {
1860 TREE_SET_CODE (arg, invert_tree_comparison (code));
1861 return arg;
1862 }
1863 }
1864
1865 switch (code)
1866 {
1867 case INTEGER_CST:
1868 return convert (type, build_int_2 (TREE_INT_CST_LOW (arg) == 0
1869 && TREE_INT_CST_HIGH (arg) == 0, 0));
1870
1871 case TRUTH_AND_EXPR:
1872 return build (TRUTH_OR_EXPR, type,
1873 invert_truthvalue (TREE_OPERAND (arg, 0)),
1874 invert_truthvalue (TREE_OPERAND (arg, 1)));
1875
1876 case TRUTH_OR_EXPR:
1877 return build (TRUTH_AND_EXPR, type,
1878 invert_truthvalue (TREE_OPERAND (arg, 0)),
1879 invert_truthvalue (TREE_OPERAND (arg, 1)));
1880
1881 case TRUTH_ANDIF_EXPR:
1882 return build (TRUTH_ORIF_EXPR, type,
1883 invert_truthvalue (TREE_OPERAND (arg, 0)),
1884 invert_truthvalue (TREE_OPERAND (arg, 1)));
1885
1886 case TRUTH_ORIF_EXPR:
1887 return build (TRUTH_ANDIF_EXPR, type,
1888 invert_truthvalue (TREE_OPERAND (arg, 0)),
1889 invert_truthvalue (TREE_OPERAND (arg, 1)));
1890
1891 case TRUTH_NOT_EXPR:
1892 return TREE_OPERAND (arg, 0);
1893
1894 case COND_EXPR:
1895 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
1896 invert_truthvalue (TREE_OPERAND (arg, 1)),
1897 invert_truthvalue (TREE_OPERAND (arg, 2)));
1898
1899 case COMPOUND_EXPR:
1900 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
1901 invert_truthvalue (TREE_OPERAND (arg, 1)));
1902
1903 case NON_LVALUE_EXPR:
1904 return invert_truthvalue (TREE_OPERAND (arg, 0));
1905
1906 case NOP_EXPR:
1907 case CONVERT_EXPR:
1908 case FLOAT_EXPR:
1909 return build1 (TREE_CODE (arg), type,
1910 invert_truthvalue (TREE_OPERAND (arg, 0)));
1911
1912 case BIT_AND_EXPR:
1913 if (! integer_onep (TREE_OPERAND (arg, 1)))
1914 abort ();
1915 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
1916 }
1917
1918 abort ();
1919 }
1920
1921 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
1922 operands are another bit-wise operation with a common input. If so,
1923 distribute the bit operations to save an operation and possibly two if
1924 constants are involved. For example, convert
1925 (A | B) & (A | C) into A | (B & C)
1926 Further simplification will occur if B and C are constants.
1927
1928 If this optimization cannot be done, 0 will be returned. */
1929
1930 static tree
1931 distribute_bit_expr (code, type, arg0, arg1)
1932 enum tree_code code;
1933 tree type;
1934 tree arg0, arg1;
1935 {
1936 tree common;
1937 tree left, right;
1938
1939 if (TREE_CODE (arg0) != TREE_CODE (arg1)
1940 || TREE_CODE (arg0) == code
1941 || (TREE_CODE (arg0) != BIT_AND_EXPR
1942 && TREE_CODE (arg0) != BIT_IOR_EXPR))
1943 return 0;
1944
1945 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
1946 {
1947 common = TREE_OPERAND (arg0, 0);
1948 left = TREE_OPERAND (arg0, 1);
1949 right = TREE_OPERAND (arg1, 1);
1950 }
1951 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
1952 {
1953 common = TREE_OPERAND (arg0, 0);
1954 left = TREE_OPERAND (arg0, 1);
1955 right = TREE_OPERAND (arg1, 0);
1956 }
1957 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
1958 {
1959 common = TREE_OPERAND (arg0, 1);
1960 left = TREE_OPERAND (arg0, 0);
1961 right = TREE_OPERAND (arg1, 1);
1962 }
1963 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
1964 {
1965 common = TREE_OPERAND (arg0, 1);
1966 left = TREE_OPERAND (arg0, 0);
1967 right = TREE_OPERAND (arg1, 0);
1968 }
1969 else
1970 return 0;
1971
1972 return fold (build (TREE_CODE (arg0), type, common,
1973 fold (build (code, type, left, right))));
1974 }
1975 \f
1976 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
1977 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
1978
1979 static tree
1980 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
1981 tree inner;
1982 tree type;
1983 int bitsize, bitpos;
1984 int unsignedp;
1985 {
1986 tree result = build (BIT_FIELD_REF, type, inner,
1987 size_int (bitsize), size_int (bitpos));
1988
1989 TREE_UNSIGNED (result) = unsignedp;
1990
1991 return result;
1992 }
1993
1994 /* Optimize a bit-field compare.
1995
1996 There are two cases: First is a compare against a constant and the
1997 second is a comparison of two items where the fields are at the same
1998 bit position relative to the start of a chunk (byte, halfword, word)
1999 large enough to contain it. In these cases we can avoid the shift
2000 implicit in bitfield extractions.
2001
2002 For constants, we emit a compare of the shifted constant with the
2003 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2004 compared. For two fields at the same position, we do the ANDs with the
2005 similar mask and compare the result of the ANDs.
2006
2007 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2008 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2009 are the left and right operands of the comparison, respectively.
2010
2011 If the optimization described above can be done, we return the resulting
2012 tree. Otherwise we return zero. */
2013
2014 static tree
2015 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2016 enum tree_code code;
2017 tree compare_type;
2018 tree lhs, rhs;
2019 {
2020 int lbitpos, lbitsize, rbitpos, rbitsize;
2021 int lnbitpos, lnbitsize, rnbitpos, rnbitsize;
2022 tree type = TREE_TYPE (lhs);
2023 tree signed_type, unsigned_type;
2024 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2025 enum machine_mode lmode, rmode, lnmode, rnmode;
2026 int lunsignedp, runsignedp;
2027 int lvolatilep = 0, rvolatilep = 0;
2028 tree linner, rinner;
2029 tree mask;
2030 tree offset;
2031
2032 /* Get all the information about the extractions being done. If the bit size
2033 is the same as the size of the underlying object, we aren't doing an
2034 extraction at all and so can do nothing. */
2035 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2036 &lunsignedp, &lvolatilep);
2037 if (lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2038 || offset != 0)
2039 return 0;
2040
2041 if (!const_p)
2042 {
2043 /* If this is not a constant, we can only do something if bit positions,
2044 sizes, and signedness are the same. */
2045 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset,
2046 &rmode, &runsignedp, &rvolatilep);
2047
2048 if (lbitpos != rbitpos || lbitsize != rbitsize
2049 || lunsignedp != runsignedp || offset != 0)
2050 return 0;
2051 }
2052
2053 /* See if we can find a mode to refer to this field. We should be able to,
2054 but fail if we can't. */
2055 lnmode = get_best_mode (lbitsize, lbitpos,
2056 TYPE_ALIGN (TREE_TYPE (linner)), word_mode,
2057 lvolatilep);
2058 if (lnmode == VOIDmode)
2059 return 0;
2060
2061 /* Set signed and unsigned types of the precision of this mode for the
2062 shifts below. */
2063 signed_type = type_for_mode (lnmode, 0);
2064 unsigned_type = type_for_mode (lnmode, 1);
2065
2066 if (! const_p)
2067 {
2068 rnmode = get_best_mode (rbitsize, rbitpos,
2069 TYPE_ALIGN (TREE_TYPE (rinner)), word_mode,
2070 rvolatilep);
2071 if (rnmode == VOIDmode)
2072 return 0;
2073 }
2074
2075 /* Compute the bit position and size for the new reference and our offset
2076 within it. If the new reference is the same size as the original, we
2077 won't optimize anything, so return zero. */
2078 lnbitsize = GET_MODE_BITSIZE (lnmode);
2079 lnbitpos = lbitpos & ~ (lnbitsize - 1);
2080 lbitpos -= lnbitpos;
2081 if (lnbitsize == lbitsize)
2082 return 0;
2083
2084 if (! const_p)
2085 {
2086 rnbitsize = GET_MODE_BITSIZE (rnmode);
2087 rnbitpos = rbitpos & ~ (rnbitsize - 1);
2088 rbitpos -= rnbitpos;
2089 if (rnbitsize == rbitsize)
2090 return 0;
2091 }
2092
2093 #if BYTES_BIG_ENDIAN
2094 lbitpos = lnbitsize - lbitsize - lbitpos;
2095 rbitpos = rnbitsize - rbitsize - rbitpos;
2096 #endif
2097
2098 /* Make the mask to be used against the extracted field. */
2099 mask = convert (unsigned_type, build_int_2 (~0, ~0));
2100 mask = const_binop (LSHIFT_EXPR, mask, size_int (lnbitsize - lbitsize));
2101 mask = const_binop (RSHIFT_EXPR, mask,
2102 size_int (lnbitsize - lbitsize - lbitpos));
2103
2104 if (! const_p)
2105 /* If not comparing with a constant, just rework the comparison
2106 and return. */
2107 return build (code, compare_type,
2108 build (BIT_AND_EXPR, type,
2109 make_bit_field_ref (linner, type,
2110 lnbitsize, lnbitpos, lunsignedp),
2111 mask),
2112 build (BIT_AND_EXPR, type,
2113 make_bit_field_ref (rinner, type,
2114 rnbitsize, rnbitpos, runsignedp),
2115 mask));
2116
2117 /* Otherwise, we are handling the constant case. See if the constant is too
2118 big for the field. Warn and return a tree for 0 (false) if so. We do
2119 this not only for its own sake, but to avoid having to test for this
2120 error case below. If we didn't, we might generate wrong code.
2121
2122 For unsigned fields, the constant shifted right by the field length should
2123 be all zero. For signed fields, the high-order bits should agree with
2124 the sign bit. */
2125
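/* E.g. (illustrative): comparing a 3-bit unsigned field for equality
with 9 can never succeed, since 9 >> 3 is nonzero; we warn below and
fold the comparison to a constant. */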
2126 if (lunsignedp)
2127 {
2128 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2129 convert (unsigned_type, rhs),
2130 size_int (lbitsize))))
2131 {
2132 warning ("comparison is always %s due to width of bitfield",
2133 code == NE_EXPR ? "one" : "zero");
2134 return convert (compare_type,
2135 (code == NE_EXPR
2136 ? integer_one_node : integer_zero_node));
2137 }
2138 }
2139 else
2140 {
2141 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2142 size_int (lbitsize - 1));
2143 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2144 {
2145 warning ("comparison is always %s due to width of bitfield",
2146 code == NE_EXPR ? "one" : "zero");
2147 return convert (compare_type,
2148 (code == NE_EXPR
2149 ? integer_one_node : integer_zero_node));
2150 }
2151 }
2152
2153 /* Single-bit compares should always be against zero. */
2154 if (lbitsize == 1 && ! integer_zerop (rhs))
2155 {
2156 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2157 rhs = convert (type, integer_zero_node);
2158 }
2159
2160 /* Make a new bitfield reference, shift the constant over the
2161 appropriate number of bits and mask it with the computed mask
2162 (in case this was a signed field). If we changed it, make a new one. */
2163 lhs = make_bit_field_ref (linner, TREE_TYPE (lhs), lnbitsize, lnbitpos,
2164 lunsignedp);
2165
2166 rhs = fold (build1 (NOP_EXPR, type,
2167 const_binop (BIT_AND_EXPR,
2168 const_binop (LSHIFT_EXPR,
2169 convert (unsigned_type, rhs),
2170 size_int (lbitpos)), mask)));
2171
2172 return build (code, compare_type,
2173 build (BIT_AND_EXPR, type, lhs, mask),
2174 rhs);
2175 }
2176 \f
2177 /* Subroutine for the following routine: decode a field reference.
2178
2179 If EXP is a comparison reference, we return the innermost reference.
2180
2181 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2182 set to the starting bit number.
2183
2184 If the innermost field can be completely contained in a mode-sized
2185 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2186
2187 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2188 otherwise it is not changed.
2189
2190 *PUNSIGNEDP is set to the signedness of the field.
2191
2192 *PMASK is set to the mask used. This is either contained in a
2193 BIT_AND_EXPR or derived from the width of the field.
2194
2195 Return 0 if this is not a component reference or is one that we can't
2196 do anything with. */
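
/* For example (hypothetical, not from the original source): given the
operand `p->flags & 0x70' we strip the BIT_AND_EXPR, record 0x70 in
*PMASK, and return the innermost object containing FLAGS; for a bare
`p->flags' the mask is derived from the width of the field. */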
2197
2198 static tree
2199 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2200 pvolatilep, pmask)
2201 tree exp;
2202 int *pbitsize, *pbitpos;
2203 enum machine_mode *pmode;
2204 int *punsignedp, *pvolatilep;
2205 tree *pmask;
2206 {
2207 tree mask = 0;
2208 tree inner;
2209 tree offset;
2210
2211 STRIP_NOPS (exp);
2212
2213 if (TREE_CODE (exp) == BIT_AND_EXPR)
2214 {
2215 mask = TREE_OPERAND (exp, 1);
2216 exp = TREE_OPERAND (exp, 0);
2217 STRIP_NOPS (exp); STRIP_NOPS (mask);
2218 if (TREE_CODE (mask) != INTEGER_CST)
2219 return 0;
2220 }
2221
2222 if (TREE_CODE (exp) != COMPONENT_REF && TREE_CODE (exp) != ARRAY_REF
2223 && TREE_CODE (exp) != BIT_FIELD_REF)
2224 return 0;
2225
2226 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2227 punsignedp, pvolatilep);
2228 if (*pbitsize < 0 || offset != 0)
2229 return 0;
2230
2231 if (mask == 0)
2232 {
2233 tree unsigned_type = type_for_size (*pbitsize, 1);
2234 int precision = TYPE_PRECISION (unsigned_type);
2235
2236 mask = convert (unsigned_type, build_int_2 (~0, ~0));
2237 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
2238 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
2239 }
2240
2241 *pmask = mask;
2242 return inner;
2243 }
2244
2245 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2246 bit positions. */
2247
2248 static int
2249 all_ones_mask_p (mask, size)
2250 tree mask;
2251 int size;
2252 {
2253 tree type = TREE_TYPE (mask);
2254 int precision = TYPE_PRECISION (type);
2255
2256 return
2257 operand_equal_p (mask,
2258 const_binop (RSHIFT_EXPR,
2259 const_binop (LSHIFT_EXPR,
2260 convert (signed_type (type),
2261 build_int_2 (~0, ~0)),
2262 size_int (precision - size)),
2263 size_int (precision - size)), 0);
2264 }
2265 \f
2266 /* Try to merge two comparisons to the same innermost item.
2267
2268 For example, if we have p->a == 2 && p->b == 4 and we can make an
2269 object large enough to span both A and B, we can do this with a comparison
2270 against the object ANDed with a mask.
2271
2272 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
2273 operations to do this with one comparison.
2274
2275 We check for both normal comparisons and the BIT_AND_EXPRs made by this
2276 function and the one above.
2277
2278 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
2279 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
2280
2281 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
2282 two operands.
2283
2284 We return the simplified tree or 0 if no optimization is possible. */
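
/* Illustrative example (hypothetical little-endian layout): if P->A
occupies bits 0-2 and P->B occupies bits 3-7 of the same byte, then

p->a == 2 && p->b == 4

can fold to the single test `byte == (2 | (4 << 3))', i.e. one load
and one compare instead of two of each. */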
2285
2286 static tree
2287 merge_component_references (code, truth_type, lhs, rhs)
2288 enum tree_code code;
2289 tree truth_type, lhs, rhs;
2290 {
2291 /* If this is the "or" of two comparisons, we can do something if
2292 the comparisons are NE_EXPR. If this is the "and", we can do something
2293 if the comparisons are EQ_EXPR. I.e.,
2294 (a->b == 2 && a->c == 4) can become (a->new == NEW).
2295
2296 WANTED_CODE is this operation code. For single bit fields, we can
2297 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
2298 comparison for one-bit fields. */
2299
2300 enum tree_code wanted_code
2301 = (code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR) ? EQ_EXPR : NE_EXPR;
2302 enum tree_code lcode, rcode;
2303 tree ll_inner, lr_inner, rl_inner, rr_inner;
2304 int ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
2305 int rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
2306 int xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
2307 int lnbitsize, lnbitpos, rnbitsize, rnbitpos;
2308 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
2309 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
2310 enum machine_mode lnmode, rnmode;
2311 tree ll_mask, lr_mask, rl_mask, rr_mask;
2312 tree l_const = 0, r_const = 0;
2313 tree type, result;
2314 int first_bit, end_bit;
2315 int volatilep = 0;
2316
2317 /* Start by getting the comparison codes and seeing if we may be able
2318 to do something. Then get all the parameters for each side. Fail
2319 if anything is volatile. */
2320
2321 lcode = TREE_CODE (lhs);
2322 rcode = TREE_CODE (rhs);
2323 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
2324 || (rcode != EQ_EXPR && rcode != NE_EXPR)
2325 || TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
2326 return 0;
2327
2328 ll_inner = decode_field_reference (TREE_OPERAND (lhs, 0),
2329 &ll_bitsize, &ll_bitpos, &ll_mode,
2330 &ll_unsignedp, &volatilep, &ll_mask);
2331 lr_inner = decode_field_reference (TREE_OPERAND (lhs, 1),
2332 &lr_bitsize, &lr_bitpos, &lr_mode,
2333 &lr_unsignedp, &volatilep, &lr_mask);
2334 rl_inner = decode_field_reference (TREE_OPERAND (rhs, 0),
2335 &rl_bitsize, &rl_bitpos, &rl_mode,
2336 &rl_unsignedp, &volatilep, &rl_mask);
2337 rr_inner = decode_field_reference (TREE_OPERAND (rhs, 1),
2338 &rr_bitsize, &rr_bitpos, &rr_mode,
2339 &rr_unsignedp, &volatilep, &rr_mask);
2340
2341 /* The inner operation on the lhs of each comparison must be the
2342 same if we are to be able to do anything.
2343 Then see if we have constants. If not, the same must be true for
2344 the rhs's. */
2345 if (volatilep || ll_inner == 0 || rl_inner == 0
2346 || ! operand_equal_p (ll_inner, rl_inner, 0))
2347 return 0;
2348
2349 if (TREE_CODE (TREE_OPERAND (lhs, 1)) == INTEGER_CST
2350 && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
2351 l_const = TREE_OPERAND (lhs, 1), r_const = TREE_OPERAND (rhs, 1);
2352 else if (lr_inner == 0 || rr_inner == 0
2353 || ! operand_equal_p (lr_inner, rr_inner, 0))
2354 return 0;
2355
2356 /* If either comparison code is not correct for our logical operation,
2357 fail. However, we can convert a one-bit comparison against zero into
2358 the opposite comparison against that bit being set in the field. */
2359 if (lcode != wanted_code)
2360 {
2361 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
2362 l_const = ll_mask;
2363 else
2364 return 0;
2365 }
2366
2367 if (rcode != wanted_code)
2368 {
2369 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
2370 r_const = rl_mask;
2371 else
2372 return 0;
2373 }
2374
2375 /* See if we can find a mode that contains both fields being compared on
2376 the left. If we can't, fail. Otherwise, update all constants and masks
2377 to be relative to a field of that size. */
2378 first_bit = MIN (ll_bitpos, rl_bitpos);
2379 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
2380 lnmode = get_best_mode (end_bit - first_bit, first_bit,
2381 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
2382 volatilep);
2383 if (lnmode == VOIDmode)
2384 return 0;
2385
2386 lnbitsize = GET_MODE_BITSIZE (lnmode);
2387 lnbitpos = first_bit & ~ (lnbitsize - 1);
2388 type = type_for_size (lnbitsize, 1);
2389 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
2390
2391 #if BYTES_BIG_ENDIAN
2392 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
2393 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
2394 #endif
2395
2396 ll_mask = const_binop (LSHIFT_EXPR, convert (type, ll_mask),
2397 size_int (xll_bitpos));
2398 rl_mask = const_binop (LSHIFT_EXPR, convert (type, rl_mask),
2399 size_int (xrl_bitpos));
2400
2401 /* Make sure the constants are interpreted as unsigned, so we
2402 don't have sign bits outside the range of their type. */
2403
2404 if (l_const)
2405 {
2406 l_const = convert (unsigned_type (TREE_TYPE (l_const)), l_const);
2407 l_const = const_binop (LSHIFT_EXPR, convert (type, l_const),
2408 size_int (xll_bitpos));
2409 }
2410 if (r_const)
2411 {
2412 r_const = convert (unsigned_type (TREE_TYPE (r_const)), r_const);
2413 r_const = const_binop (LSHIFT_EXPR, convert (type, r_const),
2414 size_int (xrl_bitpos));
2415 }
2416
2417 /* If the right sides are not constant, do the same for them. Also,
2418 disallow this optimization if a size or signedness mismatch occurs
2419 between the left and right sides. */
2420 if (l_const == 0)
2421 {
2422 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
2423 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp)
2424 return 0;
2425
2426 first_bit = MIN (lr_bitpos, rr_bitpos);
2427 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
2428 rnmode = get_best_mode (end_bit - first_bit, first_bit,
2429 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
2430 volatilep);
2431 if (rnmode == VOIDmode)
2432 return 0;
2433
2434 rnbitsize = GET_MODE_BITSIZE (rnmode);
2435 rnbitpos = first_bit & ~ (rnbitsize - 1);
2436 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
2437
2438 #if BYTES_BIG_ENDIAN
2439 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
2440 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
2441 #endif
2442
2443 lr_mask = const_binop (LSHIFT_EXPR, convert (type, lr_mask),
2444 size_int (xlr_bitpos));
2445 rr_mask = const_binop (LSHIFT_EXPR, convert (type, rr_mask),
2446 size_int (xrr_bitpos));
2447
2448 /* Make a mask that corresponds to both fields being compared.
2449 Do this for both items being compared. If the masks agree,
2450 we can do this by masking both and comparing the masked
2451 results. */
2452 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
2453 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
2454 if (operand_equal_p (ll_mask, lr_mask, 0) && lnbitsize == rnbitsize)
2455 {
2456 lhs = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
2457 ll_unsignedp || rl_unsignedp);
2458 rhs = make_bit_field_ref (lr_inner, type, rnbitsize, rnbitpos,
2459 lr_unsignedp || rr_unsignedp);
2460 if (! all_ones_mask_p (ll_mask, lnbitsize))
2461 {
2462 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
2463 rhs = build (BIT_AND_EXPR, type, rhs, ll_mask);
2464 }
2465 return build (wanted_code, truth_type, lhs, rhs);
2466 }
2467
2468 /* There is still another way we can do something: If both pairs of
2469 fields being compared are adjacent, we may be able to make a wider
2470 field containing them both. */
2471 if ((ll_bitsize + ll_bitpos == rl_bitpos
2472 && lr_bitsize + lr_bitpos == rr_bitpos)
2473 || (ll_bitpos == rl_bitpos + rl_bitsize
2474 && lr_bitpos == rr_bitpos + rr_bitsize))
2475 return build (wanted_code, truth_type,
2476 make_bit_field_ref (ll_inner, type,
2477 ll_bitsize + rl_bitsize,
2478 MIN (ll_bitpos, rl_bitpos),
2479 ll_unsignedp),
2480 make_bit_field_ref (lr_inner, type,
2481 lr_bitsize + rr_bitsize,
2482 MIN (lr_bitpos, rr_bitpos),
2483 lr_unsignedp));
2484
2485 return 0;
2486 }
2487
2488 /* Handle the case of comparisons with constants. If there is something in
2489 common between the masks, those bits of the constants must be the same.
2490 If not, the condition is always false. Test for this to avoid generating
2491 incorrect code below. */
2492 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
2493 if (! integer_zerop (result)
2494 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
2495 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
2496 {
2497 if (wanted_code == NE_EXPR)
2498 {
2499 warning ("`or' of unmatched not-equal tests is always 1");
2500 return convert (truth_type, integer_one_node);
2501 }
2502 else
2503 {
2504 warning ("`and' of mutually exclusive equal-tests is always zero");
2505 return convert (truth_type, integer_zero_node);
2506 }
2507 }
2508
2509 /* Construct the expression we will return. First get the component
2510 reference we will make. Unless the mask is all ones for the width of
2511 that field, perform the mask operation. Then compare with the
2512 merged constant. */
2513 result = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
2514 ll_unsignedp || rl_unsignedp);
2515
2516 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
2517 if (! all_ones_mask_p (ll_mask, lnbitsize))
2518 result = build (BIT_AND_EXPR, type, result, ll_mask);
2519
2520 return build (wanted_code, truth_type, result,
2521 const_binop (BIT_IOR_EXPR, l_const, r_const));
2522 }
2523 \f
2524 /* Perform constant folding and related simplification of EXPR.
2525 The related simplifications include x*1 => x, x*0 => 0, etc.,
2526 and application of the associative law.
2527 NOP_EXPR conversions may be removed freely (as long as we
2528 are careful not to change the C type of the overall expression).
2529 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
2530 but we can constant-fold them if they have constant operands. */
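
/* A few identities fold performs, for illustration:
x * 1 => x
x + 0 => x (integers; IEEE floats may differ)
- (a - b) => b - a (non-floating-point only)
(a + 1) + 2 => a + 3 (association with constants) */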
2531
2532 tree
2533 fold (expr)
2534 tree expr;
2535 {
2536 register tree t = expr;
2537 tree t1 = NULL_TREE;
2538 tree tem;
2539 tree type = TREE_TYPE (expr);
2540 register tree arg0, arg1;
2541 register enum tree_code code = TREE_CODE (t);
2542 register int kind;
2543 int invert;
2544
2545 /* WINS will be nonzero when the switch is done
2546 if all operands are constant. */
2547
2548 int wins = 1;
2549
2550 /* Return right away if already constant. */
2551 if (TREE_CONSTANT (t))
2552 {
2553 if (code == CONST_DECL)
2554 return DECL_INITIAL (t);
2555 return t;
2556 }
2557
2558 kind = TREE_CODE_CLASS (code);
2559 if (kind == 'e' || kind == '<' || kind == '1' || kind == '2' || kind == 'r')
2560 {
2561 register int len = tree_code_length[(int) code];
2562 register int i;
2563 for (i = 0; i < len; i++)
2564 {
2565 tree op = TREE_OPERAND (t, i);
2566
2567 if (op == 0)
2568 continue; /* Valid for CALL_EXPR, at least. */
2569
2570 /* Strip any conversions that don't change the mode. */
2571 STRIP_NOPS (op);
2572
2573 if (TREE_CODE (op) != INTEGER_CST
2574 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
2575 && TREE_CODE (op) != REAL_CST
2576 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
2577 )
2578 /* Note that TREE_CONSTANT isn't enough:
2579 static var addresses are constant but we can't
2580 do arithmetic on them. */
2581 wins = 0;
2582
2583 if (i == 0)
2584 arg0 = op;
2585 else if (i == 1)
2586 arg1 = op;
2587 }
2588 }
2589
2590 /* If this is a commutative operation, and ARG0 is a constant, move it
2591 to ARG1 to reduce the number of tests below. */
2592 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
2593 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
2594 || code == BIT_AND_EXPR)
2595 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
2596 {
2597 tem = arg0; arg0 = arg1; arg1 = tem;
2598
2599 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
2600 TREE_OPERAND (t, 1) = tem;
2601 }
2602
2603 /* Now WINS is set as described above,
2604 ARG0 is the first operand of EXPR,
2605 and ARG1 is the second operand (if it has more than one operand).
2606
2607 First check for cases where an arithmetic operation is applied to a
2608 compound, conditional, or comparison operation. Push the arithmetic
2609 operation inside the compound or conditional to see if any folding
2610 can then be done. Convert comparison to conditional for this purpose.
2611 This also optimizes non-constant cases that used to be done in
2612 expand_expr. */
2613 if (TREE_CODE_CLASS (code) == '1')
2614 {
2615 if (TREE_CODE (arg0) == COMPOUND_EXPR)
2616 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
2617 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
2618 else if (TREE_CODE (arg0) == COND_EXPR)
2619 return fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
2620 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
2621 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
2622 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
2623 return fold (build (COND_EXPR, type, arg0,
2624 fold (build1 (code, type, integer_one_node)),
2625 fold (build1 (code, type, integer_zero_node))));
2626 }
2627 else if (TREE_CODE_CLASS (code) == '2')
2628 {
2629 if (TREE_CODE (arg1) == COMPOUND_EXPR)
2630 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
2631 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
2632 else if (TREE_CODE (arg1) == COND_EXPR
2633 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
2634 {
2635 tree test, true_value, false_value;
2636
2637 if (TREE_CODE (arg1) == COND_EXPR)
2638 {
2639 test = TREE_OPERAND (arg1, 0);
2640 true_value = TREE_OPERAND (arg1, 1);
2641 false_value = TREE_OPERAND (arg1, 2);
2642 }
2643 else
2644 {
2645 test = arg1;
2646 true_value = integer_one_node;
2647 false_value = integer_zero_node;
2648 }
2649
2650 if (TREE_CODE (arg0) != VAR_DECL && TREE_CODE (arg0) != PARM_DECL)
2651 arg0 = save_expr (arg0);
2652 test = fold (build (COND_EXPR, type, test,
2653 fold (build (code, type, arg0, true_value)),
2654 fold (build (code, type, arg0, false_value))));
2655 if (TREE_CODE (arg0) == SAVE_EXPR)
2656 return build (COMPOUND_EXPR, type,
2657 convert (void_type_node, arg0), test);
2658 else
2659 return convert (type, test);
2660 }
2661
2662 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
2663 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
2664 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
2665 else if (TREE_CODE (arg0) == COND_EXPR
2666 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
2667 {
2668 tree test, true_value, false_value;
2669
2670 if (TREE_CODE (arg0) == COND_EXPR)
2671 {
2672 test = TREE_OPERAND (arg0, 0);
2673 true_value = TREE_OPERAND (arg0, 1);
2674 false_value = TREE_OPERAND (arg0, 2);
2675 }
2676 else
2677 {
2678 test = arg0;
2679 true_value = integer_one_node;
2680 false_value = integer_zero_node;
2681 }
2682
2683 if (TREE_CODE (arg1) != VAR_DECL && TREE_CODE (arg1) != PARM_DECL)
2684 arg1 = save_expr (arg1);
2685 test = fold (build (COND_EXPR, type, test,
2686 fold (build (code, type, true_value, arg1)),
2687 fold (build (code, type, false_value, arg1))));
2688 if (TREE_CODE (arg1) == SAVE_EXPR)
2689 return build (COMPOUND_EXPR, type,
2690 convert (void_type_node, arg1), test);
2691 else
2692 return convert (type, test);
2693 }
2694 }
2695 else if (TREE_CODE_CLASS (code) == '<'
2696 && TREE_CODE (arg0) == COMPOUND_EXPR)
2697 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
2698 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
2699 else if (TREE_CODE_CLASS (code) == '<'
2700 && TREE_CODE (arg1) == COMPOUND_EXPR)
2701 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
2702 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
2703
2704 switch (code)
2705 {
2706 case INTEGER_CST:
2707 case REAL_CST:
2708 case STRING_CST:
2709 case COMPLEX_CST:
2710 case CONSTRUCTOR:
2711 return t;
2712
2713 case CONST_DECL:
2714 return fold (DECL_INITIAL (t));
2715
2716 case NOP_EXPR:
2717 case FLOAT_EXPR:
2718 case CONVERT_EXPR:
2719 case FIX_TRUNC_EXPR:
2720 /* Other kinds of FIX are not handled properly by fold_convert. */
2721 /* Two conversions in a row are not needed unless:
2722 - the intermediate type is narrower than both initial and final, or
2723 - the initial type is a pointer type and the precisions of the
2724 intermediate and final types differ, or
2725 - the final type is a pointer type and the precisions of the
2726 initial and intermediate types differ. */
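/* E.g. (illustrative, assuming 32-bit int and 64-bit long):
(int) (long) i, with I an int, can drop the widening, since widening
and then truncating gives back I; but (int) (char) i cannot, because
the narrow intermediate truncates. */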
2727 if ((TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
2728 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
2729 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
2730 > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
2731 ||
2732 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
2733 > TYPE_PRECISION (TREE_TYPE (t)))
2734 && ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (t, 0)))
2735 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
2736 > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))))
2737 ==
2738 (TREE_UNSIGNED (TREE_TYPE (t))
2739 && (TYPE_PRECISION (TREE_TYPE (t))
2740 > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0))))))
2741 && ! ((TREE_CODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
2742 == POINTER_TYPE)
2743 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
2744 != TYPE_PRECISION (TREE_TYPE (t))))
2745 && ! (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
2746 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
2747 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0))))))
2748 return convert (TREE_TYPE (t), TREE_OPERAND (TREE_OPERAND (t, 0), 0));
2749
2750 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
2751 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1)))
2752 {
2753 /* Don't leave an assignment inside a conversion. */
2754 tree prev = TREE_OPERAND (t, 0);
2755 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
2756 /* First do the assignment, then return converted constant. */
2757 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
2758 TREE_USED (t) = 1;
2759 return t;
2760 }
2761 if (!wins)
2762 {
2763 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
2764 return t;
2765 }
2766 return fold_convert (t, arg0);
2767
2768 #if 0 /* This loses on &"foo"[0]. */
2769 case ARRAY_REF:
2770 {
2771 int i;
2772
2773 /* Fold an expression like: "foo"[2] */
2774 if (TREE_CODE (arg0) == STRING_CST
2775 && TREE_CODE (arg1) == INTEGER_CST
2776 && !TREE_INT_CST_HIGH (arg1)
2777 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
2778 {
2779 t = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
2780 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
2781 force_fit_type (t);
2782 }
2783 }
2784 return t;
2785 #endif /* 0 */
2786
2787 case RANGE_EXPR:
2788 TREE_CONSTANT (t) = wins;
2789 return t;
2790
2791 case NEGATE_EXPR:
2792 if (wins)
2793 {
2794 if (TREE_CODE (arg0) == INTEGER_CST)
2795 {
2796 if (TREE_INT_CST_LOW (arg0) == 0)
2797 t = build_int_2 (0, - TREE_INT_CST_HIGH (arg0));
2798 else
2799 t = build_int_2 (- TREE_INT_CST_LOW (arg0),
2800 ~ TREE_INT_CST_HIGH (arg0));
2801 TREE_TYPE (t) = type;
2802 force_fit_type (t);
2803 }
2804 else if (TREE_CODE (arg0) == REAL_CST)
2805 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
2806 TREE_TYPE (t) = type;
2807 }
2808 else if (TREE_CODE (arg0) == NEGATE_EXPR)
2809 return TREE_OPERAND (arg0, 0);
2810
2811 /* Convert - (a - b) to (b - a) for non-floating-point. */
2812 else if (TREE_CODE (arg0) == MINUS_EXPR && TREE_CODE (type) != REAL_TYPE)
2813 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
2814 TREE_OPERAND (arg0, 0));
2815
2816 return t;
2817
2818 case ABS_EXPR:
2819 if (wins)
2820 {
2821 if (TREE_CODE (arg0) == INTEGER_CST)
2822 {
2823 if (! TREE_UNSIGNED (type)
2824 && TREE_INT_CST_HIGH (arg0) < 0)
2825 {
2826 if (TREE_INT_CST_LOW (arg0) == 0)
2827 t = build_int_2 (0, - TREE_INT_CST_HIGH (arg0));
2828 else
2829 t = build_int_2 (- TREE_INT_CST_LOW (arg0),
2830 ~ TREE_INT_CST_HIGH (arg0));
2831 }
2832 }
2833 else if (TREE_CODE (arg0) == REAL_CST)
2834 {
2835 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
2836 t = build_real (type,
2837 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
2838 }
2839 TREE_TYPE (t) = type;
2840 }
2841 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
2842 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
2843 return t;
2844
2845 case BIT_NOT_EXPR:
2846 if (wins)
2847 {
2848 if (TREE_CODE (arg0) == INTEGER_CST)
2849 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
2850 ~ TREE_INT_CST_HIGH (arg0));
2851 TREE_TYPE (t) = type;
2852 force_fit_type (t);
2853 }
2854 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
2855 return TREE_OPERAND (arg0, 0);
2856 return t;
2857
2858 case PLUS_EXPR:
2859 /* A + (-B) -> A - B */
2860 if (TREE_CODE (arg1) == NEGATE_EXPR)
2861 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
2862 else if (TREE_CODE (type) != REAL_TYPE)
2863 {
2864 if (integer_zerop (arg1))
2865 return non_lvalue (convert (type, arg0));
2866
2867 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
2868 with a constant, and the two constants have no bits in common,
2869 we should treat this as a BIT_IOR_EXPR since this may produce more
2870 simplifications. */
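/* E.g. (illustrative) (a & 0xf0) + (b & 0x0f) can produce no carries,
so it is equivalent to (a & 0xf0) | (b & 0x0f). */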
2871 if (TREE_CODE (arg0) == BIT_AND_EXPR
2872 && TREE_CODE (arg1) == BIT_AND_EXPR
2873 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
2874 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
2875 && integer_zerop (const_binop (BIT_AND_EXPR,
2876 TREE_OPERAND (arg0, 1),
2877 TREE_OPERAND (arg1, 1))))
2878 {
2879 code = BIT_IOR_EXPR;
2880 goto bit_ior;
2881 }
2882 }
2883 /* In IEEE floating point, x+0 may not equal x. */
2884 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
2885 && real_zerop (arg1))
2886 return non_lvalue (convert (type, arg0));
2887 associate:
2888 /* In most languages, we can't associate operations on floats
2889 through parentheses. Rather than remember where the parentheses
2890 were, we don't associate floats at all. It shouldn't matter much. */
2891 if (TREE_CODE (type) == REAL_TYPE)
2892 goto binary;
2893 /* The varsign == -1 cases happen only for addition and subtraction.
2894 It says that the arg that was split was really CON minus VAR.
2895 The rest of the code applies to all associative operations. */
2896 if (!wins)
2897 {
2898 tree var, con;
2899 int varsign;
2900
2901 if (split_tree (arg0, code, &var, &con, &varsign))
2902 {
2903 if (varsign == -1)
2904 {
2905 /* EXPR is (CON-VAR) +- ARG1. */
2906 /* If it is + and VAR==ARG1, return just CONST. */
2907 if (code == PLUS_EXPR && operand_equal_p (var, arg1, 0))
2908 return convert (TREE_TYPE (t), con);
2909
2910 /* Otherwise return (CON +- ARG1) - VAR. */
2911 TREE_SET_CODE (t, MINUS_EXPR);
2912 TREE_OPERAND (t, 1) = var;
2913 TREE_OPERAND (t, 0)
2914 = fold (build (code, TREE_TYPE (t), con, arg1));
2915 }
2916 else
2917 {
2918 /* EXPR is (VAR+CON) +- ARG1. */
2919 /* If it is - and VAR==ARG1, return just CONST. */
2920 if (code == MINUS_EXPR && operand_equal_p (var, arg1, 0))
2921 return convert (TREE_TYPE (t), con);
2922
2923 /* Otherwise return VAR +- (ARG1 +- CON). */
2924 TREE_OPERAND (t, 1) = tem
2925 = fold (build (code, TREE_TYPE (t), arg1, con));
2926 TREE_OPERAND (t, 0) = var;
2927 if (integer_zerop (tem)
2928 && (code == PLUS_EXPR || code == MINUS_EXPR))
2929 return convert (type, var);
2930 /* If we have x +/- (c - d) [c an explicit integer]
2931 change it to x -/+ (d - c) since if d is relocatable
2932 then the latter can be a single immediate insn
2933 and the former cannot. */
2934 if (TREE_CODE (tem) == MINUS_EXPR
2935 && TREE_CODE (TREE_OPERAND (tem, 0)) == INTEGER_CST)
2936 {
2937 tree tem1 = TREE_OPERAND (tem, 1);
2938 TREE_OPERAND (tem, 1) = TREE_OPERAND (tem, 0);
2939 TREE_OPERAND (tem, 0) = tem1;
2940 TREE_SET_CODE (t,
2941 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
2942 }
2943 }
2944 return t;
2945 }
2946
2947 if (split_tree (arg1, code, &var, &con, &varsign))
2948 {
2949 /* EXPR is ARG0 +- (CON +- VAR). */
2950 if (varsign == -1)
2951 TREE_SET_CODE (t,
2952 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
2953 if (TREE_CODE (t) == MINUS_EXPR
2954 && operand_equal_p (var, arg0, 0))
2955 {
2956 /* If VAR and ARG0 cancel, return just CON or -CON. */
2957 if (code == PLUS_EXPR)
2958 return convert (TREE_TYPE (t), con);
2959 return fold (build1 (NEGATE_EXPR, TREE_TYPE (t),
2960 convert (TREE_TYPE (t), con)));
2961 }
2962 TREE_OPERAND (t, 0)
2963 = fold (build (code, TREE_TYPE (t), arg0, con));
2964 TREE_OPERAND (t, 1) = var;
2965 if (integer_zerop (TREE_OPERAND (t, 0))
2966 && TREE_CODE (t) == PLUS_EXPR)
2967 return convert (TREE_TYPE (t), var);
2968 return t;
2969 }
2970 }
2971 binary:
2972 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
2973 if (TREE_CODE (arg1) == REAL_CST)
2974 return t;
2975 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
2976 if (wins)
2977 t1 = const_binop (code, arg0, arg1);
2978 if (t1 != NULL_TREE)
2979 {
2980 /* The return value should always have
2981 the same type as the original expression. */
2982 TREE_TYPE (t1) = TREE_TYPE (t);
2983 return t1;
2984 }
2985 return t;
2986
2987 case MINUS_EXPR:
2988 if (TREE_CODE (type) != REAL_TYPE)
2989 {
2990 if (! wins && integer_zerop (arg0))
2991 return build1 (NEGATE_EXPR, type, arg1);
2992 if (integer_zerop (arg1))
2993 return non_lvalue (convert (type, arg0));
2994 }
2995 /* Convert A - (-B) to A + B. */
2996 else if (TREE_CODE (arg1) == NEGATE_EXPR)
2997 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
2998 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
2999 {
3000 /* Except with IEEE floating point, 0-x equals -x. */
3001 if (! wins && real_zerop (arg0))
3002 return build1 (NEGATE_EXPR, type, arg1);
3003 /* Except with IEEE floating point, x-0 equals x. */
3004 if (real_zerop (arg1))
3005 return non_lvalue (convert (type, arg0));
3006 }
3007 /* Fold &x - &x. This can happen from &x.foo - &x.
3008 Note that this can't be done for certain floats even in non-IEEE formats.
3009 Also note that operand_equal_p is always false if an operand
3010 is volatile. */
3011
3012 if (operand_equal_p (arg0, arg1,
3013 TREE_CODE (type) == REAL_TYPE))
3014 return convert (type, integer_zero_node);
3015 goto associate;
3016
3017 case MULT_EXPR:
3018 if (TREE_CODE (type) != REAL_TYPE)
3019 {
3020 if (integer_zerop (arg1))
3021 return omit_one_operand (type, arg1, arg0);
3022 if (integer_onep (arg1))
3023 return non_lvalue (convert (type, arg0));
3024
3025 /* (a * (1 << b)) is (a << b) */
3026 if (TREE_CODE (arg1) == LSHIFT_EXPR
3027 && integer_onep (TREE_OPERAND (arg1, 0)))
3028 return fold (build (LSHIFT_EXPR, type, arg0,
3029 TREE_OPERAND (arg1, 1)));
3030 if (TREE_CODE (arg0) == LSHIFT_EXPR
3031 && integer_onep (TREE_OPERAND (arg0, 0)))
3032 return fold (build (LSHIFT_EXPR, type, arg1,
3033 TREE_OPERAND (arg0, 1)));
3034 }
3035 else
3036 {
3037 /* x*0 is 0, except for IEEE floating point. */
3038 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3039 && real_zerop (arg1))
3040 return omit_one_operand (type, arg1, arg0);
3041 /* In IEEE floating point, x*1 is not equivalent to x for snans.
3042 However, ANSI says we can drop signals,
3043 so we can do this anyway. */
3044 if (real_onep (arg1))
3045 return non_lvalue (convert (type, arg0));
3046 /* x*2 is x+x */
3047 if (! wins && real_twop (arg1))
3048 {
3049 tree arg = save_expr (arg0);
3050 return build (PLUS_EXPR, type, arg, arg);
3051 }
3052 }
3053 goto associate;
3054
3055 case BIT_IOR_EXPR:
3056 bit_ior:
3057 if (integer_all_onesp (arg1))
3058 return omit_one_operand (type, arg1, arg0);
3059 if (integer_zerop (arg1))
3060 return non_lvalue (convert (type, arg0));
3061 t1 = distribute_bit_expr (code, type, arg0, arg1);
3062 if (t1 != NULL_TREE)
3063 return t1;
3064 goto associate;
3065
3066 case BIT_XOR_EXPR:
3067 if (integer_zerop (arg1))
3068 return non_lvalue (convert (type, arg0));
3069 if (integer_all_onesp (arg1))
3070 return fold (build1 (BIT_NOT_EXPR, type, arg0));
3071 goto associate;
3072
3073 case BIT_AND_EXPR:
3074 bit_and:
3075 if (integer_all_onesp (arg1))
3076 return non_lvalue (convert (type, arg0));
3077 if (integer_zerop (arg1))
3078 return omit_one_operand (type, arg1, arg0);
3079 t1 = distribute_bit_expr (code, type, arg0, arg1);
3080 if (t1 != NULL_TREE)
3081 return t1;
3082 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
3083 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
3084 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
3085 {
3086 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
3087 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_INT
3088 && (~TREE_INT_CST_LOW (arg0) & ((1 << prec) - 1)) == 0)
3089 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
3090 }
3091 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
3092 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
3093 {
3094 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
3095 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_INT
3096 && (~TREE_INT_CST_LOW (arg1) & ((1 << prec) - 1)) == 0)
3097 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
3098 }
3099 goto associate;
3100
3101 case BIT_ANDTC_EXPR:
3102 if (integer_all_onesp (arg0))
3103 return non_lvalue (convert (type, arg1));
3104 if (integer_zerop (arg0))
3105 return omit_one_operand (type, arg0, arg1);
3106 if (TREE_CODE (arg1) == INTEGER_CST)
3107 {
3108 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
3109 code = BIT_AND_EXPR;
3110 goto bit_and;
3111 }
3112 goto binary;
3113
3114 case TRUNC_DIV_EXPR:
3115 case ROUND_DIV_EXPR:
3116 case FLOOR_DIV_EXPR:
3117 case CEIL_DIV_EXPR:
3118 case EXACT_DIV_EXPR:
3119 case RDIV_EXPR:
3120 if (integer_onep (arg1))
3121 return non_lvalue (convert (type, arg0));
3122 if (integer_zerop (arg1))
3123 return t;
3124 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3125 #ifndef REAL_INFINITY
3126 if (TREE_CODE (arg1) == REAL_CST
3127 && real_zerop (arg1))
3128 return t;
3129 #endif
3130 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3131
3132 goto binary;
3133
3134 case CEIL_MOD_EXPR:
3135 case FLOOR_MOD_EXPR:
3136 case ROUND_MOD_EXPR:
3137 case TRUNC_MOD_EXPR:
3138 if (integer_onep (arg1))
3139 return omit_one_operand (type, integer_zero_node, arg0);
3140 if (integer_zerop (arg1))
3141 return t;
3142 goto binary;
3143
3144 case LSHIFT_EXPR:
3145 case RSHIFT_EXPR:
3146 case LROTATE_EXPR:
3147 case RROTATE_EXPR:
3148 if (integer_zerop (arg1))
3149 return non_lvalue (convert (type, arg0));
3150 /* Since a negative shift count is not well-defined,
3151 don't try to compute it in the compiler. */
3152 if (tree_int_cst_lt (arg1, integer_zero_node))
3153 return t;
3154 goto binary;
3155
3156 case MIN_EXPR:
3157 if (operand_equal_p (arg0, arg1, 0))
3158 return arg0;
3159 if (TREE_CODE (type) == INTEGER_TYPE
3160 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
3161 return omit_one_operand (type, arg1, arg0);
3162 goto associate;
3163
3164 case MAX_EXPR:
3165 if (operand_equal_p (arg0, arg1, 0))
3166 return arg0;
3167 if (TREE_CODE (type) == INTEGER_TYPE
3168 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
3169 return omit_one_operand (type, arg1, arg0);
3170 goto associate;
3171
3172 case TRUTH_NOT_EXPR:
3173 /* Note that the operand of this must be an int
3174 and its values must be 0 or 1.
3175 ("true" is a fixed value perhaps depending on the language,
3176 but we don't handle values other than 1 correctly yet.) */
3177 return invert_truthvalue (arg0);
3178
3179 case TRUTH_ANDIF_EXPR:
3180 /* Note that the operands of this must be ints
3181 and their values must be 0 or 1.
3182 ("true" is a fixed value perhaps depending on the language.) */
3183 /* If first arg is constant zero, return it. */
3184 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
3185 return arg0;
3186 case TRUTH_AND_EXPR:
3187 /* If either arg is constant true, drop it. */
3188 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
3189 return non_lvalue (arg1);
3190 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
3191 return non_lvalue (arg0);
3192 /* Both known to be zero => return zero. */
3193 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3194 return arg0;
3195
3196 truth_andor:
3197 /* Check for the possibility of merging component references. If our
3198 lhs is another similar operation, try to merge its rhs with our
3199 rhs. Then try to merge our lhs and rhs. */
3200 if (optimize)
3201 {
3202 if (TREE_CODE (arg0) == code)
3203 {
3204 tem = merge_component_references (code, type,
3205 TREE_OPERAND (arg0, 1), arg1);
3206 if (tem)
3207 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
3208 }
3209
3210 tem = merge_component_references (code, type, arg0, arg1);
3211 if (tem)
3212 return tem;
3213 }
3214 return t;
3215
3216 case TRUTH_ORIF_EXPR:
3217 /* Note that the operands of this must be ints
3218 and their values must be 0 or true.
3219 ("true" is a fixed value perhaps depending on the language.) */
3220 /* If first arg is constant true, return it. */
3221 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
3222 return arg0;
3223 case TRUTH_OR_EXPR:
3224 /* If either arg is constant zero, drop it. */
3225 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
3226 return non_lvalue (arg1);
3227 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
3228 return non_lvalue (arg0);
3229 /* Both known to be true => return true. */
3230 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3231 return arg0;
3232 goto truth_andor;
3233
3234 case EQ_EXPR:
3235 case NE_EXPR:
3236 case LT_EXPR:
3237 case GT_EXPR:
3238 case LE_EXPR:
3239 case GE_EXPR:
3240 /* If one arg is a constant integer, put it last. */
3241 if (TREE_CODE (arg0) == INTEGER_CST
3242 && TREE_CODE (arg1) != INTEGER_CST)
3243 {
3244 TREE_OPERAND (t, 0) = arg1;
3245 TREE_OPERAND (t, 1) = arg0;
3246 arg0 = TREE_OPERAND (t, 0);
3247 arg1 = TREE_OPERAND (t, 1);
3248 code = swap_tree_comparison (code);
3249 TREE_SET_CODE (t, code);
3250 }
3251
3252 /* Convert foo++ == CONST into ++foo == CONST + INCR.
3253 First, see if one arg is constant; find the constant arg
3254 and the other one. */
3255 {
3256 tree constop = 0, varop;
3257 tree *constoploc;
3258
3259 if (TREE_CONSTANT (arg1))
3260 constoploc = &TREE_OPERAND (t, 1), constop = arg1, varop = arg0;
3261 if (TREE_CONSTANT (arg0))
3262 constoploc = &TREE_OPERAND (t, 0), constop = arg0, varop = arg1;
3263
3264 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
3265 {
3266 /* This optimization is invalid for ordered comparisons
3267 if CONST+INCR overflows or if foo+incr might overflow.
3268 This optimization is invalid for floating point due to rounding.
3269 For pointer types we assume overflow doesn't happen. */
3270 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
3271 || (TREE_CODE (TREE_TYPE (varop)) != REAL_TYPE
3272 && (code == EQ_EXPR || code == NE_EXPR)))
3273 {
3274 tree newconst
3275 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
3276 constop, TREE_OPERAND (varop, 1)));
3277 TREE_SET_CODE (varop, PREINCREMENT_EXPR);
3278 *constoploc = newconst;
3279 return t;
3280 }
3281 }
3282 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
3283 {
3284 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
3285 || (TREE_CODE (TREE_TYPE (varop)) != REAL_TYPE
3286 && (code == EQ_EXPR || code == NE_EXPR)))
3287 {
3288 tree newconst
3289 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
3290 constop, TREE_OPERAND (varop, 1)));
3291 TREE_SET_CODE (varop, PREDECREMENT_EXPR);
3292 *constoploc = newconst;
3293 return t;
3294 }
3295 }
3296 }
3297
3298 /* Change X >= CST to X > (CST - 1) if CST is positive. */
3299 if (TREE_CODE (arg1) == INTEGER_CST
3300 && TREE_CODE (arg0) != INTEGER_CST
3301 && ! tree_int_cst_lt (arg1, integer_one_node))
3302 {
3303 switch (TREE_CODE (t))
3304 {
3305 case GE_EXPR:
3306 code = GT_EXPR;
3307 TREE_SET_CODE (t, code);
3308 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
3309 TREE_OPERAND (t, 1) = arg1;
3310 break;
3311
3312 case LT_EXPR:
3313 code = LE_EXPR;
3314 TREE_SET_CODE (t, code);
3315 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
3316 TREE_OPERAND (t, 1) = arg1;
3317 }
3318 }
3319
3320 /* If this is an EQ or NE comparison with zero and ARG0 is
3321 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
3322 two operations, but the latter can be done in one less insn
3323 on machines that have only two-operand insns or on which a
3324 constant cannot be the first operand. */
3325 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
3326 && TREE_CODE (arg0) == BIT_AND_EXPR)
3327 {
3328 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
3329 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
3330 return
3331 fold (build (code, type,
3332 build (BIT_AND_EXPR, TREE_TYPE (arg0),
3333 build (RSHIFT_EXPR,
3334 TREE_TYPE (TREE_OPERAND (arg0, 0)),
3335 TREE_OPERAND (arg0, 1),
3336 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
3337 convert (TREE_TYPE (arg0),
3338 integer_one_node)),
3339 arg1));
3340 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
3341 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
3342 return
3343 fold (build (code, type,
3344 build (BIT_AND_EXPR, TREE_TYPE (arg0),
3345 build (RSHIFT_EXPR,
3346 TREE_TYPE (TREE_OPERAND (arg0, 1)),
3347 TREE_OPERAND (arg0, 0),
3348 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
3349 convert (TREE_TYPE (arg0),
3350 integer_one_node)),
3351 arg1));
3352 }
3353
3354 /* If this is an NE comparison of zero with an AND of one, remove the
3355 comparison since the AND will give the correct value. */
3356 if (code == NE_EXPR && integer_zerop (arg1)
3357 && TREE_CODE (arg0) == BIT_AND_EXPR
3358 && integer_onep (TREE_OPERAND (arg0, 1)))
3359 return convert (type, arg0);
3360
3361 /* If we have (A & C) == C where C is a power of 2, convert this into
3362 (A & C) != 0. Similarly for NE_EXPR. */
3363 if ((code == EQ_EXPR || code == NE_EXPR)
3364 && TREE_CODE (arg0) == BIT_AND_EXPR
3365 && integer_pow2p (TREE_OPERAND (arg0, 1))
3366 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
3367 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
3368 arg0, integer_zero_node);
3369
3370 /* Simplify comparison of something with itself. (For IEEE
3371 floating-point, we can only do some of these simplifications.) */
3372 if (operand_equal_p (arg0, arg1, 0))
3373 {
3374 switch (code)
3375 {
3376 case EQ_EXPR:
3377 case GE_EXPR:
3378 case LE_EXPR:
3379 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE)
3380 {
3381 t = build_int_2 (1, 0);
3382 TREE_TYPE (t) = type;
3383 return t;
3384 }
3385 code = EQ_EXPR;
3386 TREE_SET_CODE (t, code);
3387 break;
3388
3389 case NE_EXPR:
3390 /* For NE, we can only do this simplification if integer. */
3391 if (TREE_CODE (TREE_TYPE (arg0)) != INTEGER_TYPE)
3392 break;
3393 /* ... fall through ... */
3394 case GT_EXPR:
3395 case LT_EXPR:
3396 t = build_int_2 (0, 0);
3397 TREE_TYPE (t) = type;
3398 return t;
3399 }
3400 }
3401
3402 /* An unsigned comparison against 0 can be simplified. */
3403 if (integer_zerop (arg1)
3404 && (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
3405 || TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE)
3406 && TREE_UNSIGNED (TREE_TYPE (arg1)))
3407 {
3408 switch (TREE_CODE (t))
3409 {
3410 case GT_EXPR:
3411 code = NE_EXPR;
3412 TREE_SET_CODE (t, NE_EXPR);
3413 break;
3414 case LE_EXPR:
3415 code = EQ_EXPR;
3416 TREE_SET_CODE (t, EQ_EXPR);
3417 break;
3418 case GE_EXPR:
3419 return omit_one_operand (integer_type_node,
3420 integer_one_node, arg0);
3421 case LT_EXPR:
3422 return omit_one_operand (integer_type_node,
3423 integer_zero_node, arg0);
3424 }
3425 }
3426
3427 /* If we are comparing an expression that just has comparisons
3428 of two integer values, arithmetic expressions of those comparisons,
3429 and constants, we can simplify it. There are only three cases
3430 to check: the two values can either be equal, the first can be
3431 greater, or the second can be greater. Fold the expression for
3432 those three values. Since each value must be 0 or 1, we have
3433 eight possibilities, each of which corresponds to the constant 0
3434 or 1 or one of the six possible comparisons.
3435
3436 This handles common cases like (a > b) == 0 but also handles
3437 expressions like ((x > y) - (y > x)) > 0, which supposedly
3438 occur in macroized code. */
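
/* Illustrative walk-through (hypothetical): for ((x > y) - (y > x)) > 0,
probing the three orderings gives 1 > 0, 0 > 0, and -1 > 0, i.e.
true, false, false; the mask below is then 4 and the whole expression
folds to x > y (assuming X is found as CVAL1 and Y as CVAL2). */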
3439
3440 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
3441 {
3442 tree cval1 = 0, cval2 = 0;
3443
3444 if (twoval_comparison_p (arg0, &cval1, &cval2)
3445 /* Don't handle degenerate cases here; they should already
3446 have been handled anyway. */
3447 && cval1 != 0 && cval2 != 0
3448 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
3449 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
3450 && TREE_CODE (TREE_TYPE (cval1)) == INTEGER_TYPE
3451 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
3452 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
3453 {
3454 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
3455 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
3456
3457 /* We can't just pass T to eval_subst in case cval1 or cval2
3458 was the same as ARG1. */
3459
3460 tree high_result
3461 = fold (build (code, type,
3462 eval_subst (arg0, cval1, maxval, cval2, minval),
3463 arg1));
3464 tree equal_result
3465 = fold (build (code, type,
3466 eval_subst (arg0, cval1, maxval, cval2, maxval),
3467 arg1));
3468 tree low_result
3469 = fold (build (code, type,
3470 eval_subst (arg0, cval1, minval, cval2, maxval),
3471 arg1));
3472
3473 /* All three of these results should be 0 or 1. Confirm they
3474 are. Then use those values to select the proper code
3475 to use. */
3476
3477 if ((integer_zerop (high_result)
3478 || integer_onep (high_result))
3479 && (integer_zerop (equal_result)
3480 || integer_onep (equal_result))
3481 && (integer_zerop (low_result)
3482 || integer_onep (low_result)))
3483 {
3484 /* Make a 3-bit mask with the high-order bit being the
3485 value for `>', the next for `=', and the low for `<'. */
3486 switch ((integer_onep (high_result) * 4)
3487 + (integer_onep (equal_result) * 2)
3488 + integer_onep (low_result))
3489 {
3490 case 0:
3491 /* Always false. */
3492 return omit_one_operand (type, integer_zero_node, arg0);
3493 case 1:
3494 code = LT_EXPR;
3495 break;
3496 case 2:
3497 code = EQ_EXPR;
3498 break;
3499 case 3:
3500 code = LE_EXPR;
3501 break;
3502 case 4:
3503 code = GT_EXPR;
3504 break;
3505 case 5:
3506 code = NE_EXPR;
3507 break;
3508 case 6:
3509 code = GE_EXPR;
3510 break;
3511 case 7:
3512 /* Always true. */
3513 return omit_one_operand (type, integer_one_node, arg0);
3514 }
3515
3516 return fold (build (code, type, cval1, cval2));
3517 }
3518 }
3519 }
3520
3521 /* If this is a comparison of a field, we may be able to simplify it. */
3522 if ((TREE_CODE (arg0) == COMPONENT_REF
3523 || TREE_CODE (arg0) == BIT_FIELD_REF)
3524 && (code == EQ_EXPR || code == NE_EXPR)
3525 /* Handle the constant case even without -O
3526 to make sure the warnings are given. */
3527 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
3528 {
3529 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
3530 return t1 ? t1 : t;
3531 }
3532
3533 /* From here on, the only cases we handle are when the result is
3534 known to be a constant.
3535
3536 To compute GT, swap the arguments and do LT.
3537 To compute GE, do LT and invert the result.
3538 To compute LE, swap the arguments, do LT and invert the result.
3539 To compute NE, do EQ and invert the result.
3540
3541 Therefore, the code below must handle only EQ and LT. */
3542
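/* E.g. (illustrative) 5 >= 3 is computed as ! (5 < 3): the swap and
invert steps below reduce all six codes to EQ and LT. */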
3543 if (code == LE_EXPR || code == GT_EXPR)
3544 {
3545 tem = arg0, arg0 = arg1, arg1 = tem;
3546 code = swap_tree_comparison (code);
3547 }
3548
3549 /* Note that it is safe to invert for real values here because we
3550 will check below in the one case that it matters. */
3551
3552 invert = 0;
3553 if (code == NE_EXPR || code == GE_EXPR)
3554 {
3555 invert = 1;
3556 code = invert_tree_comparison (code);
3557 }
3558
3559 /* Compute a result for LT or EQ if args permit;
3560 otherwise return T. */
3561 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3562 {
3563 if (code == EQ_EXPR)
3564 t1 = build_int_2 ((TREE_INT_CST_LOW (arg0)
3565 == TREE_INT_CST_LOW (arg1))
3566 && (TREE_INT_CST_HIGH (arg0)
3567 == TREE_INT_CST_HIGH (arg1)),
3568 0);
3569 else
3570 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
3571 ? INT_CST_LT_UNSIGNED (arg0, arg1)
3572 : INT_CST_LT (arg0, arg1)),
3573 0);
3574 }
3575
3576 /* Assume a nonexplicit constant cannot equal an explicit one,
3577 since such code would be undefined anyway.
3578 Exception: on sysvr4, using #pragma weak,
3579 a label can come out as 0. */
3580 else if (TREE_CODE (arg1) == INTEGER_CST
3581 && !integer_zerop (arg1)
3582 && TREE_CONSTANT (arg0)
3583 && TREE_CODE (arg0) == ADDR_EXPR
3584 && code == EQ_EXPR)
3585 t1 = build_int_2 (0, 0);
3586
3587 /* Two real constants can be compared explicitly. */
3588 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
3589 {
3590 /* If either operand is a NaN, the result is false with two
3591 exceptions: First, an NE_EXPR is true on NaNs, but that case
3592 is already handled correctly since we will be inverting the
3593 result for NE_EXPR. Second, if we had inverted a LE_EXPR
3594 or a GE_EXPR into a LT_EXPR, we must return true so that it
3595 will be inverted into false. */
3596
3597 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
3598 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
3599 t1 = build_int_2 (invert && code == LT_EXPR, 0);
3600
3601 else if (code == EQ_EXPR)
3602 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
3603 TREE_REAL_CST (arg1)),
3604 0);
3605 else
3606 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
3607 TREE_REAL_CST (arg1)),
3608 0);
3609 }
3610
3611 if (t1 == NULL_TREE)
3612 return t;
3613
3614 if (invert)
3615 TREE_INT_CST_LOW (t1) ^= 1;
3616
3617 TREE_TYPE (t1) = type;
3618 return t1;
3619
3620 case COND_EXPR:
3621 if (TREE_CODE (arg0) == INTEGER_CST)
3622 return TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
3623 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
3624 return omit_one_operand (type, arg1, arg0);
3625
3626 /* If the second operand is zero, invert the comparison and swap
3627 the second and third operands. Likewise if the second operand
3628 is constant and the third is not or if the third operand is
3629 equivalent to the first operand of the comparison. */
3630
3631 if (integer_zerop (arg1)
3632 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
3633 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
3634 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
3635 TREE_OPERAND (t, 2),
3636 TREE_OPERAND (arg0, 1))))
3637 {
3638 /* See if this can be inverted. If it can't, possibly because
3639 it was a floating-point inequality comparison, don't do
3640 anything. */
3641 tem = invert_truthvalue (arg0);
3642
3643 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
3644 {
3645 arg0 = TREE_OPERAND (t, 0) = tem;
3646 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 2);
3647 TREE_OPERAND (t, 2) = arg1;
3648 arg1 = TREE_OPERAND (t, 1);
3649 }
3650 }
3651
3652 /* If we have A op B ? A : C, we may be able to convert this to a
3653 simpler expression, depending on the operation and the values
3654 of B and C. */
3655
3656 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
3657 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
3658 arg1, TREE_OPERAND (arg0, 1)))
3659 {
3660 tree arg2 = TREE_OPERAND (t, 2);
3661 enum tree_code comp_code = TREE_CODE (arg0);
3662
3663 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
3664 depending on the comparison operation. */
3665 if (integer_zerop (TREE_OPERAND (arg0, 1))
3666 && TREE_CODE (arg2) == NEGATE_EXPR
3667 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
3668 switch (comp_code)
3669 {
3670 case EQ_EXPR:
3671 return fold (build1 (NEGATE_EXPR, type, arg1));
3672 case NE_EXPR:
3673 return convert (type, arg1);
3674 case GE_EXPR:
3675 case GT_EXPR:
3676 return fold (build1 (ABS_EXPR, type, arg1));
3677 case LE_EXPR:
3678 case LT_EXPR:
3679 return fold (build1 (NEGATE_EXPR, type,
3680 fold (build1 (ABS_EXPR, type, arg1))));
3681 }
3682
3683 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
3684 always zero. */
3685
3686 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
3687 {
3688 if (comp_code == NE_EXPR)
3689 return convert (type, arg1);
3690 else if (comp_code == EQ_EXPR)
3691 return convert (type, integer_zero_node);
3692 }
3693
3694 /* If this is A op B ? A : B, this is either A, B, min (A, B),
3695 or max (A, B), depending on the operation. */
3696
3697 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
3698 arg2, TREE_OPERAND (arg0, 0)))
3699 switch (comp_code)
3700 {
3701 case EQ_EXPR:
3702 return convert (type, arg2);
3703 case NE_EXPR:
3704 return convert (type, arg1);
3705 case LE_EXPR:
3706 case LT_EXPR:
3707 return fold (build (MIN_EXPR, type, arg1, arg2));
3708 case GE_EXPR:
3709 case GT_EXPR:
3710 return fold (build (MAX_EXPR, type, arg1, arg2));
3711 }
3712
3713 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
3714 we might still be able to simplify this. For example,
3715 if C1 is one less or one more than C2, this might have started
3716 out as a MIN or MAX and been transformed by this function. */
3717
3718 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3719 && TREE_CODE (arg2) == INTEGER_CST)
3720 switch (comp_code)
3721 {
3722 case EQ_EXPR:
3723 /* We can replace A with C1 in this case. */
3724 arg1 = TREE_OPERAND (t, 1)
3725 = convert (type, TREE_OPERAND (arg0, 1));
3726 break;
3727
3728 case LT_EXPR:
3729 /* If C1 is C2 + 1, this is min(A, C2). */
3730 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
3731 && operand_equal_p (TREE_OPERAND (arg0, 1),
3732 const_binop (PLUS_EXPR, arg2,
3733 integer_one_node), 1))
3734 return fold (build (MIN_EXPR, type, arg1, arg2));
3735 break;
3736
3737 case LE_EXPR:
3738 /* If C1 is C2 - 1, this is min(A, C2). */
3739 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
3740 && operand_equal_p (TREE_OPERAND (arg0, 1),
3741 const_binop (MINUS_EXPR, arg2,
3742 integer_one_node), 1))
3743 return fold (build (MIN_EXPR, type, arg1, arg2));
3744 break;
3745
3746 case GT_EXPR:
3747 /* If C1 is C2 - 1, this is max(A, C2). */
3748 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
3749 && operand_equal_p (TREE_OPERAND (arg0, 1),
3750 const_binop (MINUS_EXPR, arg2,
3751 integer_one_node), 1))
3752 return fold (build (MAX_EXPR, type, arg1, arg2));
3753 break;
3754
3755 case GE_EXPR:
3756 /* If C1 is C2 + 1, this is max(A, C2). */
3757 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
3758 && operand_equal_p (TREE_OPERAND (arg0, 1),
3759 const_binop (PLUS_EXPR, arg2,
3760 integer_one_node), 1))
3761 return fold (build (MAX_EXPR, type, arg1, arg2));
3762 break;
3763 }
3764 }
3765
3766 /* Convert A ? 1 : 0 to simply A. */
3767 if (integer_onep (TREE_OPERAND (t, 1))
3768 && integer_zerop (TREE_OPERAND (t, 2))
3769 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
3770 call to fold will try to move the conversion inside
3771 a COND, which will recurse. In that case, the COND_EXPR
3772 is probably the best choice, so leave it alone. */
3773 && type == TREE_TYPE (arg0))
3774 return arg0;
3775
3776
3777 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
3778 operation is simply A & 2. */
3779
3780 if (integer_zerop (TREE_OPERAND (t, 2))
3781 && TREE_CODE (arg0) == NE_EXPR
3782 && integer_zerop (TREE_OPERAND (arg0, 1))
3783 && integer_pow2p (arg1)
3784 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
3785 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
3786 arg1, 1))
3787 return convert (type, TREE_OPERAND (arg0, 0));
3788
3789 return t;
3790
3791 case COMPOUND_EXPR:
3792 if (!TREE_SIDE_EFFECTS (arg0))
3793 return arg1;
3794 return t;
3795
3796 default:
3797 return t;
3798 } /* switch (code) */
3799 }