]> gcc.gnu.org Git - gcc.git/blame - gcc/fold-const.c
*** empty log message ***
[gcc.git] / gcc / fold-const.c
CommitLineData
6d716ca8
RS
1/* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20/*@@ Fix lossage on folding division of big integers. */
21
/*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et. al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30
31/* The entry points in this file are fold, size_int and size_binop.
32
33 fold takes a tree as argument and returns a simplified tree.
34
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
38
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'. */
41
42#include <stdio.h>
43#include <setjmp.h>
44#include "config.h"
45#include "flags.h"
46#include "tree.h"
47
48void lshift_double ();
49void rshift_double ();
50void lrotate_double ();
51void rrotate_double ();
52static tree const_binop ();
53\f
54/* To do constant folding on INTEGER_CST nodes requires 64-bit arithmetic.
55 We do that by representing the 64-bit integer as 8 shorts,
56 with only 8 bits stored in each short, as a positive number. */
57
/* Split a 64-bit integer, given as the two `int' pieces LOW and HI,
   into eight 8-bit digits stored in SHORTS[0..7], least significant
   digit first.  Each short holds one byte as a positive number.  */

static void
encode (shorts, low, hi)
     short *shorts;
     int low, hi;
{
  register int i;

  for (i = 0; i < 4; i++)
    {
      shorts[i] = (low >> (8 * i)) & 0xff;
      shorts[i + 4] = (hi >> (8 * i)) & 0xff;
    }
}
76
/* Reassemble the eight 8-bit digits SHORTS[0..7] (least significant
   first) into a 64-bit integer, storing the two `int' halves in
   *LOW and *HI.  */

static void
decode (shorts, low, hi)
     short *shorts;
     int *low, *hi;
{
  register int i;
  /* Accumulate in `long' rather than `short'-width arithmetic;
     the original code used explicit (long) casts here to dodge
     sign-extension bugs in some old C compilers.  */
  register long lo_acc = 0, hi_acc = 0;

  for (i = 3; i >= 0; i--)
    {
      lo_acc = (lo_acc << 8) | (long) shorts[i];
      hi_acc = (hi_acc << 8) | (long) shorts[i + 4];
    }

  *low = lo_acc;
  *hi = hi_acc;
}
93\f
/* Make the integer constant T valid for its type
   by setting to 0 or 1 all the bits in the constant
   that don't belong in the type.
   In other words: truncate the stored value to the type's precision,
   then sign-extend it if the type is signed and the sign bit is set.
   T is modified in place.  */

static void
force_fit_type (t)
     tree t;
{
  register int prec = TYPE_PRECISION (TREE_TYPE (t));

  /* All pointers share one width, independent of the pointed-to type.  */
  if (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE)
    prec = POINTER_SIZE;

  /* First clear all bits that are beyond the type's precision.  */

  if (prec == 2 * HOST_BITS_PER_INT)
    ;				/* Full double-word: nothing to mask.  */
  else if (prec > HOST_BITS_PER_INT)
    {
      /* Precision spills into the high word; mask only the high part.
	 NOTE(review): `(-1) << n' presumes two's complement and is
	 formally undefined in modern C -- historical idiom throughout
	 this file.  */
      TREE_INT_CST_HIGH (t)
	&= ~((-1) << (prec - HOST_BITS_PER_INT));
    }
  else
    {
      /* Value fits in the low word alone.  */
      TREE_INT_CST_HIGH (t) = 0;
      if (prec < HOST_BITS_PER_INT)
	TREE_INT_CST_LOW (t)
	  &= ~((-1) << prec);
    }

  /* If it's a signed type and value's sign bit is set, extend the sign.  */

  if (! TREE_UNSIGNED (TREE_TYPE (t))
      && prec != 2 * HOST_BITS_PER_INT
      && (prec > HOST_BITS_PER_INT
	  ? TREE_INT_CST_HIGH (t) & (1 << (prec - HOST_BITS_PER_INT - 1))
	  : TREE_INT_CST_LOW (t) & (1 << (prec - 1))))
    {
      /* Value is negative:
	 set to 1 all the bits that are outside this type's precision.  */
      if (prec > HOST_BITS_PER_INT)
	{
	  TREE_INT_CST_HIGH (t)
	    |= ((-1) << (prec - HOST_BITS_PER_INT));
	}
      else
	{
	  TREE_INT_CST_HIGH (t) = -1;
	  if (prec < HOST_BITS_PER_INT)
	    TREE_INT_CST_LOW (t)
	      |= ((-1) << prec);
	}
    }
}
148\f
/* Add two 64-bit integers with 64-bit result.
   Each operand arrives as two `int' pieces: one is L1 and H1,
   the other L2 and H2.  The sum is stored as two `int' pieces
   in *LV and *HV.  The low words are summed as unsigned values
   and the carry out, if any, is folded into the high word.  */

void
add_double (l1, h1, l2, h2, lv, hv)
     int l1, h1, l2, h2;
     int *lv, *hv;
{
  register unsigned int low_sum = (unsigned int) l1 + (unsigned int) l2;

  /* A carry out of the low word happened iff the unsigned sum
     wrapped around, i.e. came out smaller than an addend.  */
  *lv = low_sum;
  *hv = (int) ((unsigned int) h1 + (unsigned int) h2
	       + (low_sum < (unsigned int) l1));
}
177
/* Negate a 64-bit integer with 64-bit result.
   The argument is given as two `int' pieces in L1 and H1;
   the value is stored as two `int' pieces in *LV and *HV.
   Two's-complement negation: negate the low word, and either
   negate (low word zero, so the borrow vanishes) or complement
   (low word nonzero) the high word.  */

void
neg_double (l1, h1, lv, hv)
     int l1, h1;
     int *lv, *hv;
{
  *lv = - l1;			/* -0 == 0, so this covers both cases.  */
  *hv = (l1 == 0) ? - h1 : ~ h1;
}
199\f
/* Multiply two 64-bit integers with 64-bit result
   (the upper 64 bits of the 128-bit product are discarded).
   Each argument is given as two `int' pieces.
   One argument is L1 and H1; the other, L2 and H2.
   The value is stored as two `int' pieces in *LV and *HV.
   We use the 8-shorts representation internally.  */

void
mul_double (l1, h1, l2, h2, lv, hv)
     int l1, h1, l2, h2;
     int *lv, *hv;
{
  short arg1[8];
  short arg2[8];
  short prod[16];
  register int carry = 0;
  register int i, j, k;

  /* These two cases are used extensively, arising from pointer
     combinations.  Each multiplies by a power of two via repeated
     doubling: `temp < l1' (unsigned compare) detects the carry out
     of the low word, which is shifted into place in the high word.  */
  if (h2 == 0)
    {
      if (l2 == 2)
	{
	  unsigned temp = l1 + l1;
	  *hv = h1 * 2 + (temp < l1);
	  *lv = temp;
	  return;
	}
      if (l2 == 4)
	{
	  unsigned temp = l1 + l1;
	  h1 = h1 * 4 + ((temp < l1) << 1);
	  l1 = temp;
	  temp += temp;
	  h1 += (temp < l1);
	  *lv = temp;
	  *hv = h1;
	  return;
	}
      if (l2 == 8)
	{
	  unsigned temp = l1 + l1;
	  h1 = h1 * 8 + ((temp < l1) << 2);
	  l1 = temp;
	  temp += temp;
	  h1 += (temp < l1) << 1;
	  l1 = temp;
	  temp += temp;
	  h1 += (temp < l1);
	  *lv = temp;
	  *hv = h1;
	  return;
	}
    }

  /* General case: schoolbook multiplication on base-256 digits.  */
  encode (arg1, l1, h1);
  encode (arg2, l2, h2);

  bzero (prod, sizeof prod);

  for (i = 0; i < 8; i++)
    for (j = 0; j < 8; j++)
      {
	/* Add digit product arg1[i]*arg2[j] into prod at position i+j,
	   rippling the carry upward until it is absorbed.  */
	k = i + j;
	carry = arg1[i] * arg2[j];
	while (carry)
	  {
	    carry += prod[k];
	    prod[k] = carry & 0xff;
	    carry >>= 8;
	    k++;
	  }
      }

  /* Truncation to 64 bits is intentional here.  */
  decode (prod, lv, hv);	/* @@decode ignores prod[8] -> prod[15] */
}
276\f
/* Shift the 64-bit integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Shift right if COUNT is negative.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   (ARITH only matters on the right-shift path; a left shift is the
   same either way.)
   Store the value as two `int' pieces in *LV and *HV.  */

void
lshift_double (l1, h1, count, prec, lv, hv, arith)
     int l1, h1, count, prec;
     int *lv, *hv;
     int arith;
{
  short arg1[8];
  register int i;
  register int carry;

  /* A negative count means shift the other way.  */
  if (count < 0)
    {
      rshift_double (l1, h1, - count, prec, lv, hv, arith);
      return;
    }

  encode (arg1, l1, h1);

  /* Clamp the count; shifting further cannot be needed by callers.  */
  if (count > prec)
    count = prec;

  /* Shift left one bit per pass, propagating the carry byte by byte.  */
  while (count > 0)
    {
      carry = 0;
      for (i = 0; i < 8; i++)
	{
	  carry += arg1[i] << 1;
	  arg1[i] = carry & 0xff;
	  carry >>= 8;
	}
      count--;
    }

  decode (arg1, lv, hv);
}
318
/* Shift the 64-bit integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
   Store the value as two `int' pieces in *LV and *HV.  */

void
rshift_double (l1, h1, count, prec, lv, hv, arith)
     int l1, h1, count, prec;
     int *lv, *hv;
     int arith;
{
  short arg1[8];
  register int i;
  register int carry;

  encode (arg1, l1, h1);

  /* Clamp the count; shifting further cannot be needed by callers.  */
  if (count > prec)
    count = prec;

  /* Shift right one bit per pass, working from the top byte down.  */
  while (count > 0)
    {
      /* For an arithmetic shift, replicate the sign (top) bit into the
	 vacated position; for a logical shift, shift in a zero.
	 Note `&&' binds looser than `>>', so this is 0 or 1.  */
      carry = arith && arg1[7] >> 7;
      for (i = 7; i >= 0; i--)
	{
	  carry <<= 8;
	  carry += arg1[i];
	  arg1[i] = (carry >> 1) & 0xff;
	}
      count--;
    }

  decode (arg1, lv, hv);
}
353\f
/* Rotate the 64-bit integer in L1, H1 left by COUNT places
   keeping only PREC bits of result.
   Rotate right if COUNT is negative.
   Store the value as two `int' pieces in *LV and *HV.
   NOTE(review): COUNT is clamped to PREC, not reduced modulo PREC,
   and the rotation is over the full 64 bits regardless of PREC --
   preserved from the original; verify against callers.  */

void
lrotate_double (l1, h1, count, prec, lv, hv)
     int l1, h1, count, prec;
     int *lv, *hv;
{
  short arg1[8];
  register int i;
  register int carry;

  if (count < 0)
    {
      rrotate_double (l1, h1, - count, prec, lv, hv);
      return;
    }

  encode (arg1, l1, h1);

  if (count > prec)
    count = prec;

  /* Rotate left one bit per pass.  */
  while (count > 0)
    {
      /* The bit rotated out at the top must reappear at the bottom.
	 Fetch it afresh on EVERY pass: the old code computed it only
	 once before the loop, so each pass after the first wrapped in
	 a stale copy of the original top bit and produced a wrong
	 result for any rotation count >= 2.  */
      carry = arg1[7] >> 7;
      for (i = 0; i < 8; i++)
	{
	  carry += arg1[i] << 1;
	  arg1[i] = carry & 0xff;
	  carry >>= 8;
	}
      count--;
    }

  decode (arg1, lv, hv);
}
393
/* Rotate the 64-bit integer in L1, H1 right by COUNT places
   keeping only PREC bits of result.  COUNT must be positive.
   Store the value as two `int' pieces in *LV and *HV.
   (The old comment said "left"; this is the right-rotate.)  */

void
rrotate_double (l1, h1, count, prec, lv, hv)
     int l1, h1, count, prec;
     int *lv, *hv;
{
  short arg1[8];
  register int i;
  register int carry;

  encode (arg1, l1, h1);

  if (count > prec)
    count = prec;

  /* Rotate right one bit per pass.  */
  while (count > 0)
    {
      /* Bit 0 wraps around to become bit 63.  Recompute it on EVERY
	 pass: the old code fetched it once before the loop (wrapping
	 a stale bit on later passes) and also let CARRY accumulate
	 unmasked high-order garbage from pass to pass.  */
      carry = arg1[0] & 1;
      for (i = 7; i >= 0; i--)
	{
	  carry <<= 8;
	  carry += arg1[i];
	  arg1[i] = (carry >> 1) & 0xff;
	  /* Keep only the bit that falls into the next lower byte.  */
	  carry &= 1;
	}
      count--;
    }

  decode (arg1, lv, hv);
}
426\f
/* Divide 64 bit integer LNUM, HNUM by 64 bit integer LDEN, HDEN
   for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
   CODE is a tree code for a kind of division, one of
   TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
   or EXACT_DIV_EXPR
   It controls how the quotient is rounded to a integer.
   UNS nonzero says do unsigned division.

   Strategy: compute a truncated quotient/remainder first (classical
   base-256 long division, Knuth's Algorithm D), then adjust both
   according to the rounding mode CODE.  Aborts on division by zero.  */

static void
div_and_round_double (code, uns,
		      lnum_orig, hnum_orig, lden_orig, hden_orig,
		      lquo, hquo, lrem, hrem)
     enum tree_code code;
     int uns;
     int lnum_orig, hnum_orig;		/* num == numerator == dividend */
     int lden_orig, hden_orig;		/* den == denominator == divisor */
     int *lquo, *hquo, *lrem, *hrem;
{
  int quo_neg = 0;
  short num[9], den[8], quo[8];	/* extra element for scaling. */
  register int i, j, work;
  register int carry = 0;
  unsigned int lnum = lnum_orig;
  int hnum = hnum_orig;
  unsigned int lden = lden_orig;
  int hden = hden_orig;

  /* Division by zero is a caller error.  */
  if ((hden == 0) && (lden == 0))
    abort ();

  /* calculate quotient sign and convert operands to unsigned. */
  if (!uns)
    {
      if (hden < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lden, hden, &lden, &hden);
	}
      if (hnum < 0)
	{
	  quo_neg = ~ quo_neg;
	  neg_double (lnum, hnum, &lnum, &hnum);
	}
    }

  if (hnum == 0 && hden == 0)
    {				/* single precision */
      *hquo = *hrem = 0;
      *lquo = lnum / lden;	/* rounds toward zero since positive args */
      goto finish_up;
    }

  if (hnum == 0)
    {				/* trivial case: dividend < divisor */
      /* hden != 0 already checked. */
      *hquo = *lquo = 0;
      *hrem = hnum;		/* hnum is 0 here */
      *lrem = lnum;
      goto finish_up;
    }

  bzero (quo, sizeof quo);

  bzero (num, sizeof num);	/* to zero 9th element */
  bzero (den, sizeof den);

  encode (num, lnum, hnum);
  encode (den, lden, hden);

  /* This code requires more than just hden == 0.
     We also have to require that we don't need more than three bytes
     to hold CARRY.  If we ever did need four bytes to hold it, we
     would lose part of it when computing WORK on the next round.  */
  if (hden == 0 && ((lden << 8) >> 8) == lden)
    {				/* simpler algorithm */
      /* hnum != 0 already checked. */
      /* Short division: bring down one dividend byte at a time.  */
      for (i = 7; i >= 0; i--)
	{
	  work = num[i] + (carry << 8);
	  quo[i] = work / lden;
	  carry = work % lden;
	}
    }
  else {			/* full double precision,
				   with thanks to Don Knuth's
				   "Seminumerical Algorithms". */
#define BASE 256
    int quo_est, scale, num_hi_sig, den_hi_sig, quo_hi_sig;

    /* Find the highest non-zero divisor digit. */
    for (i = 7; ; i--)
      if (den[i] != 0) {
	den_hi_sig = i;
	break;
      }
    for (i = 7; ; i--)
      if (num[i] != 0) {
	num_hi_sig = i;
	break;
      }
    quo_hi_sig = num_hi_sig - den_hi_sig + 1;

    /* Ensure that the first digit of the divisor is at least BASE/2.
       This is required by the quotient digit estimation algorithm.  */

    scale = BASE / (den[den_hi_sig] + 1);
    if (scale > 1) {		/* scale divisor and dividend */
      carry = 0;
      for (i = 0; i <= 8; i++) {
	work = (num[i] * scale) + carry;
	num[i] = work & 0xff;
	carry = work >> 8;
	if (num[i] != 0) num_hi_sig = i;
      }
      carry = 0;
      for (i = 0; i <= 7; i++) {
	work = (den[i] * scale) + carry;
	den[i] = work & 0xff;
	carry = work >> 8;
	if (den[i] != 0) den_hi_sig = i;
      }
    }

    /* Main loop */
    for (i = quo_hi_sig; i > 0; i--) {
      /* guess the next quotient digit, quo_est, by dividing the first
	 two remaining dividend digits by the high order quotient digit.
	 quo_est is never low and is at most 2 high.  */

      int num_hi;		/* index of highest remaining dividend digit */

      num_hi = i + den_hi_sig;

      work = (num[num_hi] * BASE) + (num_hi > 0 ? num[num_hi - 1] : 0);
      if (num[num_hi] != den[den_hi_sig]) {
	quo_est = work / den[den_hi_sig];
      }
      else {
	quo_est = BASE - 1;
      }

      /* refine quo_est so it's usually correct, and at most one high.  */
      while ((den[den_hi_sig - 1] * quo_est)
	     > (((work - (quo_est * den[den_hi_sig])) * BASE)
		+ ((num_hi - 1) > 0 ? num[num_hi - 2] : 0)))
	quo_est--;

      /* Try QUO_EST as the quotient digit, by multiplying the
	 divisor by QUO_EST and subtracting from the remaining dividend.
	 Keep in mind that QUO_EST is the I - 1st digit.  */

      carry = 0;

      for (j = 0; j <= den_hi_sig; j++)
	{
	  int digit;

	  work = num[i + j - 1] - (quo_est * den[j]) + carry;
	  digit = work & 0xff;
	  carry = work >> 8;	/* arithmetic shift: borrow goes negative */
	  /* NOTE(review): digit = work & 0xff can never be negative, so
	     this correction looks unreachable -- possibly related to the
	     "@@ Fix lossage on folding division" note at file top.  */
	  if (digit < 0)
	    {
	      digit += BASE;
	      carry--;
	    }
	  num[i + j - 1] = digit;
	}

      /* if quo_est was high by one, then num[i] went negative and
	 we need to correct things.  */

      /* NOTE(review): num[] entries are stored masked to 0..255 above,
	 so this test also looks like it cannot fire; verify against a
	 later fixed revision of this file.  */
      if (num[num_hi] < 0)
	{
	  quo_est--;
	  carry = 0;		/* add divisor back in */
	  for (j = 0; j <= den_hi_sig; j++)
	    {
	      work = num[i + j - 1] + den[j] + carry;
	      /* NOTE(review): `> BASE' where `>= BASE' would be usual
		 for a carry test -- confirm intent.  */
	      if (work > BASE)
		{
		  work -= BASE;
		  carry = 1;
		}
	      else
		{
		  carry = 0;
		}
	      num[i + j - 1] = work;
	    }
	  num [num_hi] += carry;
	}

      /* store the quotient digit.  */
      quo[i - 1] = quo_est;
    }
  }

  decode (quo, lquo, hquo);

 finish_up:
  /* if result is negative, make it so. */
  if (quo_neg)
    neg_double (*lquo, *hquo, lquo, hquo);

  /* compute trial remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);

  /* Now adjust quotient (and recompute the remainder below)
     according to the requested rounding mode.  */
  switch (code)
    {
    case TRUNC_DIV_EXPR:
    case TRUNC_MOD_EXPR:	/* round toward zero */
    case EXACT_DIV_EXPR:	/* for this one, it shouldn't matter */
      return;

    case FLOOR_DIV_EXPR:
    case FLOOR_MOD_EXPR:	/* round toward negative infinity */
      if (quo_neg && (*lrem != 0 || *hrem != 0))   /* ratio < 0 && rem != 0 */
	{
	  /* quo = quo - 1; */
	  add_double (*lquo, *hquo, -1, -1, lquo, hquo);
	}
      else return;
      break;

    case CEIL_DIV_EXPR:
    case CEIL_MOD_EXPR:		/* round toward positive infinity */
      if (!quo_neg && (*lrem != 0 || *hrem != 0))  /* ratio > 0 && rem != 0 */
	{
	  add_double (*lquo, *hquo, 1, 0, lquo, hquo);
	}
      else return;
      break;

    case ROUND_DIV_EXPR:
    case ROUND_MOD_EXPR:	/* round to closest integer */
      {
	int labs_rem = *lrem, habs_rem = *hrem;
	int labs_den = lden, habs_den = hden, ltwice, htwice;

	/* get absolute values */
	if (*hrem < 0) neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
	if (hden < 0) neg_double (lden, hden, &labs_den, &habs_den);

	/* if (2 * abs (lrem) >= abs (lden)) */
	mul_double (2, 0, labs_rem, habs_rem, &ltwice, &htwice);
	if (((unsigned) habs_den < (unsigned) htwice)
	    || (((unsigned) habs_den == (unsigned) htwice)
		&& ((unsigned) labs_den < (unsigned) ltwice)))
	  {
	    /* Round away from zero, in the direction of QUO's sign.  */
	    if (*hquo < 0)
	      /* quo = quo - 1; */
	      add_double (*lquo, *hquo, -1, -1, lquo, hquo);
	    else
	      /* quo = quo + 1; */
	      add_double (*lquo, *hquo, 1, 0, lquo, hquo);
	  }
	else return;
      }
      break;

    default:
      abort ();
    }

  /* compute true remainder:  rem = num - (quo * den)  */
  mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
  neg_double (*lrem, *hrem, lrem, hrem);
  add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
}
698\f
699#if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
700
/* Check for infinity in an IEEE double precision number.
   Returns nonzero iff X has the all-ones exponent (2047) and a zero
   mantissa, i.e. is +Inf or -Inf.  */

int
target_isinf (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format, overlaid both ways so the correct
     field layout can be picked at run time.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign : 1;
      unsigned exponent : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent : 11;
      unsigned sign : 1;
    } big_endian;
  } u;

  /* Detect host endianness: store -1.0 and see which overlay shows
     its sign bit set.  */
  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return (u.big_endian.exponent == 2047
	      && u.big_endian.mantissa1 == 0
	      && u.big_endian.mantissa2 == 0);
    }
  else
    {
      u.d = x;
      return (u.little_endian.exponent == 2047
	      && u.little_endian.mantissa1 == 0
	      && u.little_endian.mantissa2 == 0);
    }
}
740
7d4d4d22
RS
/* Check whether an IEEE double precision number is a NaN.
   Returns nonzero iff X has the all-ones exponent (2047) and a
   nonzero mantissa.  */

int
target_isnan (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format, overlaid both ways so the correct
     field layout can be picked at run time.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign : 1;
      unsigned exponent : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent : 11;
      unsigned sign : 1;
    } big_endian;
  } u;

  /* Detect host endianness via the sign bit of -1.0.  */
  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return (u.big_endian.exponent == 2047
	      && (u.big_endian.mantissa1 != 0
		  || u.big_endian.mantissa2 != 0));
    }
  else
    {
      u.d = x;
      return (u.little_endian.exponent == 2047
	      && (u.little_endian.mantissa1 != 0
		  || u.little_endian.mantissa2 != 0));
    }
}
780
/* Check for a negative IEEE double precision number.
   Returns the raw sign bit of X, so -0.0 also counts as negative.  */

int
target_negative (x)
     REAL_VALUE_TYPE x;
{
  /* The IEEE 64-bit double format, overlaid both ways so the correct
     field layout can be picked at run time.  */
  union {
    REAL_VALUE_TYPE d;
    struct {
      unsigned sign : 1;
      unsigned exponent : 11;
      unsigned mantissa1 : 20;
      unsigned mantissa2;
    } little_endian;
    struct {
      unsigned mantissa2;
      unsigned mantissa1 : 20;
      unsigned exponent : 11;
      unsigned sign : 1;
    } big_endian;
  } u;

  /* Detect host endianness via the sign bit of -1.0.  */
  u.d = dconstm1;
  if (u.big_endian.sign == 1)
    {
      u.d = x;
      return u.big_endian.sign;
    }
  else
    {
      u.d = x;
      return u.little_endian.sign;
    }
}
816#else /* Target not IEEE */
817
/* Let's assume other float formats don't have infinity.
   (This can be overridden by redefining REAL_VALUE_ISINF.)  */

/* K&R definition; return type is implicit `int'.  */
target_isinf (x)
     REAL_VALUE_TYPE x;
{
  return 0;
}
826
7d4d4d22
RS
/* Let's assume other float formats don't have NaNs.
   (This can be overridden by redefining REAL_VALUE_ISNAN.)  */

/* K&R definition; return type is implicit `int'.  */
target_isnan (x)
     REAL_VALUE_TYPE x;
{
  return 0;
}
835
/* Let's assume other float formats don't have minus zero.
   (This can be overridden by redefining REAL_VALUE_NEGATIVE.)  */

/* K&R definition; return type is implicit `int'.  Unlike the IEEE
   version, this compares the value, so -0.0 is not "negative".  */
target_negative (x)
     REAL_VALUE_TYPE x;
{
  return x < 0;
}
844#endif /* Target not IEEE */
845\f
/* Split a tree IN into a constant and a variable part
   that could be combined with CODE to make IN.
   CODE must be a commutative arithmetic operation.
   Store the constant part into *CONP and the variable in *VARP.
   Return 1 if this was done; zero means the tree IN did not decompose
   this way.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.
   Therefore, we must tell the caller whether the variable part
   was subtracted.  We do this by storing 1 or -1 into *VARSIGNP.
   The value stored is the coefficient for the variable term.
   The constant term we return should always be added;
   we negate it if necessary.  */

static int
split_tree (in, code, varp, conp, varsignp)
     tree in;
     enum tree_code code;
     tree *varp, *conp;
     int *varsignp;
{
  register tree outtype = TREE_TYPE (in);
  *varp = 0;
  *conp = 0;

  /* Strip any conversions that don't change the machine mode.  */
  while ((TREE_CODE (in) == NOP_EXPR
	  || TREE_CODE (in) == CONVERT_EXPR)
	 && (TYPE_MODE (TREE_TYPE (in))
	     == TYPE_MODE (TREE_TYPE (TREE_OPERAND (in, 0)))))
    in = TREE_OPERAND (in, 0);

  if (TREE_CODE (in) == code
      || (TREE_CODE (TREE_TYPE (in)) != REAL_TYPE
	  /* We can associate addition and subtraction together
	     (even though the C standard doesn't say so)
	     for integers because the value is not affected.
	     For reals, the value might be affected, so we can't.  */
	  &&
	  ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
	   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      /* NOTE(review): this local shadows the CODE parameter for the
	 rest of the block; it holds the code of operand 0, not the
	 caller's operation.  Looks intentional but verify.  */
      enum tree_code code = TREE_CODE (TREE_OPERAND (in, 0));
      /* Case 1: first operand is an integer constant.  */
      if (code == INTEGER_CST)
	{
	  *conp = TREE_OPERAND (in, 0);
	  *varp = TREE_OPERAND (in, 1);
	  if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
	      && TREE_TYPE (*varp) != outtype)
	    *varp = convert (outtype, *varp);
	  *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
	  return 1;
	}
      /* Case 2: second operand is constant (not necessarily a cst node).  */
      if (TREE_CONSTANT (TREE_OPERAND (in, 1)))
	{
	  *conp = TREE_OPERAND (in, 1);
	  *varp = TREE_OPERAND (in, 0);
	  *varsignp = 1;
	  if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
	      && TREE_TYPE (*varp) != outtype)
	    *varp = convert (outtype, *varp);
	  if (TREE_CODE (in) == MINUS_EXPR)
	    {
	      /* If operation is subtraction and constant is second,
		 must negate it to get an additive constant.
		 And this cannot be done unless it is a manifest constant.
		 It could also be the address of a static variable.
		 We cannot negate that, so give up.  */
	      if (TREE_CODE (*conp) == INTEGER_CST)
		/* Subtracting from integer_zero_node loses for long long.  */
		*conp = fold (build1 (NEGATE_EXPR, TREE_TYPE (*conp), *conp));
	      else
		return 0;
	    }
	  return 1;
	}
      /* Case 3: first operand is constant but not an INTEGER_CST.  */
      if (TREE_CONSTANT (TREE_OPERAND (in, 0)))
	{
	  *conp = TREE_OPERAND (in, 0);
	  *varp = TREE_OPERAND (in, 1);
	  if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
	      && TREE_TYPE (*varp) != outtype)
	    *varp = convert (outtype, *varp);
	  *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
	  return 1;
	}
    }
  return 0;
}
935\f
/* Combine two constants ARG1 and ARG2 under operation CODE
   to produce a new constant.
   We assume ARG1 and ARG2 have the same data type,
   or at least are the same kind of constant and the same machine mode.
   Handles INTEGER_CST, REAL_CST (when host arithmetic permits) and
   COMPLEX_CST operands; returns 0 for anything else.  */

/* Handle floating overflow for `const_binop'.  */
static jmp_buf const_binop_error;

static tree
const_binop (code, arg1, arg2)
     enum tree_code code;
     register tree arg1, arg2;
{
  if (TREE_CODE (arg1) == INTEGER_CST)
    {
      register int int1l = TREE_INT_CST_LOW (arg1);
      register int int1h = TREE_INT_CST_HIGH (arg1);
      int int2l = TREE_INT_CST_LOW (arg2);
      int int2h = TREE_INT_CST_HIGH (arg2);
      int low, hi;
      int garbagel, garbageh;	/* for the unwanted half of div/mod */
      register tree t;
      int uns = TREE_UNSIGNED (TREE_TYPE (arg1));

      switch (code)
	{
	case BIT_IOR_EXPR:
	  t = build_int_2 (int1l | int2l, int1h | int2h);
	  break;

	case BIT_XOR_EXPR:
	  t = build_int_2 (int1l ^ int2l, int1h ^ int2h);
	  break;

	case BIT_AND_EXPR:
	  t = build_int_2 (int1l & int2l, int1h & int2h);
	  break;

	case BIT_ANDTC_EXPR:
	  t = build_int_2 (int1l & ~int2l, int1h & ~int2h);
	  break;

	case RSHIFT_EXPR:
	  /* A right shift is a left shift by a negated count.  */
	  int2l = - int2l;
	  /* fall through */
	case LSHIFT_EXPR:
	  lshift_double (int1l, int1h, int2l,
			 TYPE_PRECISION (TREE_TYPE (arg1)),
			 &low, &hi,
			 !uns);
	  t = build_int_2 (low, hi);
	  break;

	case RROTATE_EXPR:
	  int2l = - int2l;
	  /* fall through */
	case LROTATE_EXPR:
	  lrotate_double (int1l, int1h, int2l,
			  TYPE_PRECISION (TREE_TYPE (arg1)),
			  &low, &hi);
	  t = build_int_2 (low, hi);
	  break;

	case PLUS_EXPR:
	  /* Fast paths when one high word is zero: add low words with
	     an explicit unsigned carry check.  */
	  if (int1h == 0)
	    {
	      int2l += int1l;
	      if ((unsigned) int2l < int1l)
		int2h += 1;
	      t = build_int_2 (int2l, int2h);
	      break;
	    }
	  if (int2h == 0)
	    {
	      int1l += int2l;
	      if ((unsigned) int1l < int2l)
		int1h += 1;
	      t = build_int_2 (int1l, int1h);
	      break;
	    }
	  add_double (int1l, int1h, int2l, int2h, &low, &hi);
	  t = build_int_2 (low, hi);
	  break;

	case MINUS_EXPR:
	  if (int2h == 0 && int2l == 0)
	    {
	      t = build_int_2 (int1l, int1h);
	      break;
	    }
	  /* a - b computed as a + (-b).  */
	  neg_double (int2l, int2h, &int2l, &int2h);
	  add_double (int1l, int1h, int2l, int2h, &low, &hi);
	  t = build_int_2 (low, hi);
	  break;

	case MULT_EXPR:
	  /* Optimize simple cases.  Powers of two (and 3) are handled
	     by repeated doubling with unsigned carry detection.  */
	  if (int1h == 0)
	    {
	      unsigned temp;

	      switch (int1l)
		{
		case 0:
		  t = build_int_2 (0, 0);
		  goto got_it;
		case 1:
		  t = build_int_2 (int2l, int2h);
		  goto got_it;
		case 2:
		  temp = int2l + int2l;
		  int2h = int2h * 2 + (temp < int2l);
		  t = build_int_2 (temp, int2h);
		  goto got_it;
		case 3:
		  temp = int2l + int2l + int2l;
		  int2h = int2h * 3 + (temp < int2l);
		  t = build_int_2 (temp, int2h);
		  goto got_it;
		case 4:
		  temp = int2l + int2l;
		  int2h = int2h * 4 + ((temp < int2l) << 1);
		  int2l = temp;
		  temp += temp;
		  int2h += (temp < int2l);
		  t = build_int_2 (temp, int2h);
		  goto got_it;
		case 8:
		  temp = int2l + int2l;
		  int2h = int2h * 8 + ((temp < int2l) << 2);
		  int2l = temp;
		  temp += temp;
		  int2h += (temp < int2l) << 1;
		  int2l = temp;
		  temp += temp;
		  int2h += (temp < int2l);
		  t = build_int_2 (temp, int2h);
		  goto got_it;
		default:
		  break;
		}
	    }

	  if (int2h == 0)
	    {
	      if (int2l == 0)
		{
		  t = build_int_2 (0, 0);
		  break;
		}
	      if (int2l == 1)
		{
		  t = build_int_2 (int1l, int1h);
		  break;
		}
	    }

	  mul_double (int1l, int1h, int2l, int2h, &low, &hi);
	  t = build_int_2 (low, hi);
	  break;

	case TRUNC_DIV_EXPR:
	case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
	case EXACT_DIV_EXPR:
	  /* This is a shortcut for a common special case.
	     It reduces the number of tree nodes generated
	     and saves time.  */
	  if (int2h == 0 && int2l > 0
	      && TREE_TYPE (arg1) == sizetype
	      && int1h == 0 && int1l >= 0)
	    {
	      if (code == CEIL_DIV_EXPR)
		int1l += int2l-1;
	      return size_int (int1l / int2l);
	    }
	  /* fall through to the general division path */
	case ROUND_DIV_EXPR:
	  if (int2h == 0 && int2l == 1)
	    {
	      t = build_int_2 (int1l, int1h);
	      break;
	    }
	  if (int1l == int2l && int1h == int2h)
	    {
	      /* Equal operands: quotient is 1 (0/0 is a caller bug).  */
	      if ((int1l | int1h) == 0)
		abort ();
	      t = build_int_2 (1, 0);
	      break;
	    }
	  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				&low, &hi, &garbagel, &garbageh);
	  t = build_int_2 (low, hi);
	  break;

	case TRUNC_MOD_EXPR: case ROUND_MOD_EXPR:
	case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
	  /* Same engine as division; keep the remainder instead.  */
	  div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
				&garbagel, &garbageh, &low, &hi);
	  t = build_int_2 (low, hi);
	  break;

	case MIN_EXPR:
	case MAX_EXPR:
	  /* LOW is 1 iff arg1 < arg2, using the type's signedness.  */
	  if (uns)
	    {
	      low = (((unsigned) int1h < (unsigned) int2h)
		     || (((unsigned) int1h == (unsigned) int2h)
			 && ((unsigned) int1l < (unsigned) int2l)));
	    }
	  else
	    {
	      low = ((int1h < int2h)
		     || ((int1h == int2h)
			 && ((unsigned) int1l < (unsigned) int2l)));
	    }
	  if (low == (code == MIN_EXPR))
	    t = build_int_2 (int1l, int1h);
	  else
	    t = build_int_2 (int2l, int2h);
	  break;

	default:
	  abort ();
	}
    got_it:
      /* Give the result ARG1's type and renormalize to that type's
	 precision (truncate / sign-extend).  */
      TREE_TYPE (t) = TREE_TYPE (arg1);
      force_fit_type (t);
      return t;
    }
#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
  if (TREE_CODE (arg1) == REAL_CST)
    {
      register REAL_VALUE_TYPE d1;
      register REAL_VALUE_TYPE d2;
      register REAL_VALUE_TYPE value;

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);
      /* On floating overflow the float handler longjmps back here;
	 give up folding and return the unfolded expression.  */
      if (setjmp (const_binop_error))
	{
	  warning ("floating overflow in constant folding");
	  return build (code, TREE_TYPE (arg1), arg1, arg2);
	}
      set_float_handler (const_binop_error);

#ifdef REAL_ARITHMETIC
      REAL_ARITHMETIC (value, code, d1, d2);
#else
      switch (code)
	{
	case PLUS_EXPR:
	  value = d1 + d2;
	  break;

	case MINUS_EXPR:
	  value = d1 - d2;
	  break;

	case MULT_EXPR:
	  value = d1 * d2;
	  break;

	case RDIV_EXPR:
#ifndef REAL_INFINITY
	  if (d2 == 0)
	    abort ();
#endif

	  value = d1 / d2;
	  break;

	case MIN_EXPR:
	  value = MIN (d1, d2);
	  break;

	case MAX_EXPR:
	  value = MAX (d1, d2);
	  break;

	default:
	  abort ();
	}
#endif /* no REAL_ARITHMETIC */
      set_float_handler (0);
      return build_real (TREE_TYPE (arg1),
			 REAL_VALUE_TRUNCATE (TYPE_MODE (TREE_TYPE (arg1)), value));
    }
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      register tree r1 = TREE_REALPART (arg1);
      register tree i1 = TREE_IMAGPART (arg1);
      register tree r2 = TREE_REALPART (arg2);
      register tree i2 = TREE_IMAGPART (arg2);
      register tree t;

      switch (code)
	{
	case PLUS_EXPR:
	  t = build_complex (const_binop (PLUS_EXPR, r1, r2),
			     const_binop (PLUS_EXPR, i1, i2));
	  break;

	case MINUS_EXPR:
	  t = build_complex (const_binop (MINUS_EXPR, r1, r2),
			     const_binop (MINUS_EXPR, i1, i2));
	  break;

	case MULT_EXPR:
	  /* (r1 + i1 i)(r2 + i2 i) = (r1 r2 - i1 i2) + (r1 i2 + i1 r2) i */
	  t = build_complex (const_binop (MINUS_EXPR,
					  const_binop (MULT_EXPR, r1, r2),
					  const_binop (MULT_EXPR, i1, i2)),
			     const_binop (PLUS_EXPR,
					  const_binop (MULT_EXPR, r1, i2),
					  const_binop (MULT_EXPR, i1, r2)));
	  break;

	case RDIV_EXPR:
	  {
	    /* Divide by multiplying with the conjugate and scaling by
	       |r2 + i2 i|^2.  */
	    register tree magsquared
	      = const_binop (PLUS_EXPR,
			     const_binop (MULT_EXPR, r2, r2),
			     const_binop (MULT_EXPR, i2, i2));
	    t = build_complex (const_binop (RDIV_EXPR,
					    const_binop (PLUS_EXPR,
							 const_binop (MULT_EXPR, r1, r2),
							 const_binop (MULT_EXPR, i1, i2)),
					    magsquared),
			       const_binop (RDIV_EXPR,
					    const_binop (MINUS_EXPR,
							 const_binop (MULT_EXPR, i1, r2),
							 const_binop (MULT_EXPR, r1, i2)),
					    magsquared));
	  }
	  break;

	default:
	  abort ();
	}
      TREE_TYPE (t) = TREE_TYPE (arg1);
      return t;
    }
  return 0;
}
1277\f
1278/* Return an INTEGER_CST with value V and type from `sizetype'. */
1279
1280tree
1281size_int (number)
1282 unsigned int number;
1283{
1284 register tree t;
1285 /* Type-size nodes already made for small sizes. */
1286 static tree size_table[2*HOST_BITS_PER_INT+1];
1287
1288 if (number >= 0 && number < 2*HOST_BITS_PER_INT+1 && size_table[number] != 0)
1289 return size_table[number];
1290 if (number >= 0 && number < 2*HOST_BITS_PER_INT+1)
1291 {
6d716ca8
RS
1292 push_obstacks_nochange ();
1293 /* Make this a permanent node. */
c05a9b68 1294 end_temporary_allocation ();
6d716ca8
RS
1295 t = build_int_2 (number, 0);
1296 TREE_TYPE (t) = sizetype;
1297 size_table[number] = t;
1298 pop_obstacks ();
1299 }
1300 else
1301 {
1302 t = build_int_2 (number, 0);
1303 TREE_TYPE (t) = sizetype;
1304 }
1305 return t;
1306}
1307
1308/* Combine operands OP1 and OP2 with arithmetic operation CODE.
1309 CODE is a tree code. Data type is taken from `sizetype',
1310 If the operands are constant, so is the result. */
1311
1312tree
1313size_binop (code, arg0, arg1)
1314 enum tree_code code;
1315 tree arg0, arg1;
1316{
1317 /* Handle the special case of two integer constants faster. */
1318 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1319 {
1320 /* And some specific cases even faster than that. */
1321 if (code == PLUS_EXPR
1322 && TREE_INT_CST_LOW (arg0) == 0
1323 && TREE_INT_CST_HIGH (arg0) == 0)
1324 return arg1;
1325 if (code == MINUS_EXPR
1326 && TREE_INT_CST_LOW (arg1) == 0
1327 && TREE_INT_CST_HIGH (arg1) == 0)
1328 return arg0;
1329 if (code == MULT_EXPR
1330 && TREE_INT_CST_LOW (arg0) == 1
1331 && TREE_INT_CST_HIGH (arg0) == 0)
1332 return arg1;
1333 /* Handle general case of two integer constants. */
1334 return const_binop (code, arg0, arg1);
1335 }
1336
1337 if (arg0 == error_mark_node || arg1 == error_mark_node)
1338 return error_mark_node;
1339
1340 return fold (build (code, sizetype, arg0, arg1));
1341}
/* Given T, a tree representing type conversion of ARG1, a constant,
   return a constant tree representing the result of conversion.
   T supplies the target type; ARG1 must be an INTEGER_CST or REAL_CST.
   If no constant folding applies, T itself is returned (marked
   TREE_CONSTANT).  */

static tree
fold_convert (t, arg1)
     register tree t;
     register tree arg1;
{
  register tree type = TREE_TYPE (t);

  if (TREE_CODE (type) == POINTER_TYPE
      || TREE_CODE (type) == INTEGER_TYPE
      || TREE_CODE (type) == ENUMERAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
	{
	  /* Given an integer constant, make new constant with new type,
	     appropriately sign-extended or truncated.  */
	  t = build_int_2 (TREE_INT_CST_LOW (arg1),
			   TREE_INT_CST_HIGH (arg1));
	  TREE_TYPE (t) = type;
	  force_fit_type (t);
	}
#if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
      else if (TREE_CODE (arg1) == REAL_CST)
	{
	  /* Real to integer: first verify the value lies within the
	     target type's range; warn and give up if not.  */
	  REAL_VALUE_TYPE
	    l = real_value_from_int_cst (TYPE_MIN_VALUE (type)),
	    x = TREE_REAL_CST (arg1),
	    u = real_value_from_int_cst (TYPE_MAX_VALUE (type));
	  if (! ((REAL_VALUES_LESS (l, x) || REAL_VALUES_EQUAL (l, x))
		 && (REAL_VALUES_LESS (x, u) || REAL_VALUES_EQUAL (x, u))))
	    {
	      warning ("real constant out of range for integer conversion");
	      return t;
	    }
#ifndef REAL_ARITHMETIC
	  {
	    /* Host-float path: split |d| into two half-words by repeated
	       division.  NOTE(review): assumes the value fits in two
	       HOST_BITS_PER_INT words -- guaranteed by the range check
	       above, presumably; confirm for unusual host formats.  */
	    REAL_VALUE_TYPE d;
	    int low, high;
	    int half_word = 1 << (HOST_BITS_PER_INT / 2);

	    d = TREE_REAL_CST (arg1);
	    if (d < 0)
	      d = -d;

	    high = (int) (d / half_word / half_word);
	    d -= (REAL_VALUE_TYPE) high * half_word * half_word;
	    low = (unsigned) d;
	    /* Restore the sign by negating the double-word value.  */
	    if (TREE_REAL_CST (arg1) < 0)
	      neg_double (low, high, &low, &high);
	    t = build_int_2 (low, high);
	  }
#else
	  {
	    /* Target-arithmetic path: let the REAL_VALUE macros do the
	       conversion in the target's representation.  */
	    int low, high;
	    REAL_VALUE_TO_INT (low, high, TREE_REAL_CST (arg1));
	    t = build_int_2 (low, high);
	  }
#endif
	  TREE_TYPE (t) = type;
	  force_fit_type (t);
	}
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
      TREE_TYPE (t) = type;
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
#if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
      if (TREE_CODE (arg1) == INTEGER_CST)
	return build_real_from_int_cst (type, arg1);
#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
      /* Real to real: truncate to the precision of the target mode.  */
      if (TREE_CODE (arg1) == REAL_CST)
	return build_real (type, REAL_VALUE_TRUNCATE (TYPE_MODE (type),
						      TREE_REAL_CST (arg1)));
    }
  TREE_CONSTANT (t) = 1;
  return t;
}
1422\f
1423/* Return an expr equal to X but certainly not valid as an lvalue. */
1424
1425tree
1426non_lvalue (x)
1427 tree x;
1428{
1429 tree result;
1430
1431 /* These things are certainly not lvalues. */
1432 if (TREE_CODE (x) == NON_LVALUE_EXPR
1433 || TREE_CODE (x) == INTEGER_CST
1434 || TREE_CODE (x) == REAL_CST
1435 || TREE_CODE (x) == STRING_CST
1436 || TREE_CODE (x) == ADDR_EXPR)
1437 return x;
1438
1439 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1440 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1441 return result;
1442}
c05a9b68
RS
1443\f
1444/* Given a tree comparison code, return the code that is the logical inverse
1445 of the given code. It is not safe to do this for floating-point
1446 comparisons, except for NE_EXPR and EQ_EXPR. */
6d716ca8 1447
c05a9b68
RS
1448static enum tree_code
1449invert_tree_comparison (code)
1450 enum tree_code code;
1451{
1452 switch (code)
1453 {
1454 case EQ_EXPR:
1455 return NE_EXPR;
1456 case NE_EXPR:
1457 return EQ_EXPR;
1458 case GT_EXPR:
1459 return LE_EXPR;
1460 case GE_EXPR:
1461 return LT_EXPR;
1462 case LT_EXPR:
1463 return GE_EXPR;
1464 case LE_EXPR:
1465 return GT_EXPR;
1466 default:
1467 abort ();
1468 }
1469}
1470
1471/* Similar, but return the comparison that results if the operands are
1472 swapped. This is safe for floating-point. */
1473
1474static enum tree_code
1475swap_tree_comparison (code)
1476 enum tree_code code;
1477{
1478 switch (code)
1479 {
1480 case EQ_EXPR:
1481 case NE_EXPR:
1482 return code;
1483 case GT_EXPR:
1484 return LT_EXPR;
1485 case GE_EXPR:
1486 return LE_EXPR;
1487 case LT_EXPR:
1488 return GT_EXPR;
1489 case LE_EXPR:
1490 return GE_EXPR;
1491 default:
1492 abort ();
1493 }
1494}
1495\f
/* Return nonzero if two operands are necessarily equal.
   If ONLY_CONST is non-zero, only return non-zero for constants.  */

int
operand_equal_p (arg0, arg1, only_const)
     tree arg0, arg1;
     int only_const;
{
  /* If both types don't have the same signedness, then we can't consider
     them equal.  We must check this before the STRIP_NOPS calls
     because they may change the signedness of the arguments.  */
  if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
     We don't care about side effects in that case because the SAVE_EXPR
     takes care of that for us.  */
  if (TREE_CODE (arg0) == SAVE_EXPR && arg0 == arg1)
    return ! only_const;

  /* Expressions with side effects can't be assumed to yield the same
     value twice.  */
  if (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1))
    return 0;

  /* Two ADDR_EXPRs of the identical object are equal.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && TREE_CODE (arg0) == ADDR_EXPR
      && TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0))
    return 1;

  /* Integer constants compare equal by value (both words).  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && TREE_CODE (arg0) == INTEGER_CST
      && TREE_INT_CST_LOW (arg0) == TREE_INT_CST_LOW (arg1)
      && TREE_INT_CST_HIGH (arg0) == TREE_INT_CST_HIGH (arg1))
    return 1;

  /* Detect when real constants are equal.
     But reject weird values because we can't be sure what to do with them.  */
  if (TREE_CODE (arg0) == TREE_CODE (arg1)
      && TREE_CODE (arg0) == REAL_CST
      && !bcmp (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1),
		sizeof (REAL_VALUE_TYPE))
      /* Some people say these are not necessary.
	 But they do little harm, and taking them out would be risky.
	 So leave them and let's not spend any more time on them--rms.  */
      && !REAL_VALUE_ISINF (TREE_REAL_CST (arg0))
      && !REAL_VALUE_ISNAN (TREE_REAL_CST (arg0)))
    return 1;

  if (only_const)
    return 0;

  if (arg0 == arg1)
    return 1;

  if (TREE_CODE (arg0) != TREE_CODE (arg1))
    return 0;
  /* This is needed for conversions and for COMPONENT_REF.
     Might as well play it safe and always test this.  */
  if (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
    return 0;

  /* Structural comparison by tree code class.  */
  switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
    {
    case '1':
      /* Two conversions are equal only if signedness and modes match.  */
      if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
	  && (TREE_UNSIGNED (TREE_TYPE (arg0))
	      != TREE_UNSIGNED (TREE_TYPE (arg1))))
	return 0;

      return operand_equal_p (TREE_OPERAND (arg0, 0),
			      TREE_OPERAND (arg1, 0), 0);

    case '<':
    case '2':
      /* Comparisons and binary operators: both operands must match.  */
      return (operand_equal_p (TREE_OPERAND (arg0, 0),
			       TREE_OPERAND (arg1, 0), 0)
	      && operand_equal_p (TREE_OPERAND (arg0, 1),
				  TREE_OPERAND (arg1, 1), 0));

    case 'r':
      /* References: compare operand by operand for the forms we know.  */
      switch (TREE_CODE (arg0))
	{
	case INDIRECT_REF:
	  return operand_equal_p (TREE_OPERAND (arg0, 0),
				  TREE_OPERAND (arg1, 0), 0);

	case COMPONENT_REF:
	case ARRAY_REF:
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0));

	case BIT_FIELD_REF:
	  return (operand_equal_p (TREE_OPERAND (arg0, 0),
				   TREE_OPERAND (arg1, 0), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 1),
				      TREE_OPERAND (arg1, 1), 0)
		  && operand_equal_p (TREE_OPERAND (arg0, 2),
				      TREE_OPERAND (arg1, 2), 0));
	}
      break;
    }

  return 0;
}
/* Similar to operand_equal_p, but see if ARG0 might have been made by
   shorten_compare from ARG1 when ARG1 was being compared with OTHER.

   When in doubt, return 0.  */

static int
operand_equal_for_comparison_p (arg0, arg1, other)
     tree arg0, arg1;
     tree other;
{
  int unsignedp1, unsignedpo;
  tree primarg1, primother;
  int correct_width;

  /* Exact equivalence needs no further work.  */
  if (operand_equal_p (arg0, arg1, 0))
    return 1;

  /* shorten_compare only applies to integer comparisons.  */
  if (TREE_CODE (TREE_TYPE (arg0)) != INTEGER_TYPE)
    return 0;

  /* Duplicate what shorten_compare does to ARG1 and see if that gives the
     actual comparison operand, ARG0.

     First throw away any conversions to wider types
     already present in the operands.  */

  primarg1 = get_narrower (arg1, &unsignedp1);
  primother = get_narrower (other, &unsignedpo);

  correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
  if (unsignedp1 == unsignedpo
      && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
      && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
    {
      tree type = TREE_TYPE (arg0);

      /* Make sure shorter operand is extended the right way
	 to match the longer operand.  */
      primarg1 = convert (signed_or_unsigned_type (unsignedp1,
						   TREE_TYPE (primarg1)),
			  primarg1);

      if (operand_equal_p (arg0, convert (type, primarg1), 0))
	return 1;
    }

  return 0;
}
/* See if ARG is an expression that is either a comparison or is performing
   arithmetic on comparisons.  The comparisons must only be comparing
   two different values, which will be stored in *CVAL1 and *CVAL2; if
   they are non-zero it means that some operands have already been found.
   No variables may be used anywhere else in the expression except in the
   comparisons.

   If this is true, return 1.  Otherwise, return zero.  */

static int
twoval_comparison_p (arg, cval1, cval2)
     tree arg;
     tree *cval1, *cval2;
{
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e'
      && (code == TRUTH_NOT_EXPR
	  || (code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0)))
    class = '1';
  else if (class == 'e'
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
	       || code == COMPOUND_EXPR))
    class = '2';

  switch (class)
    {
    case '1':
      /* A unary operation qualifies if its operand does.  */
      return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2);

    case '2':
      /* A binary operation qualifies if both operands do.  */
      return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
	      && twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2));

    case 'c':
      /* Constants contain no variables, so they always qualify.  */
      return 1;

    case 'e':
      if (code == COND_EXPR)
	return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2)
		&& twoval_comparison_p (TREE_OPERAND (arg, 1), cval1, cval2)
		&& twoval_comparison_p (TREE_OPERAND (arg, 2),
					cval1, cval2));
      return 0;

    case '<':
      /* First see if we can handle the first operand, then the second.  For
	 the second operand, we know *CVAL1 can't be zero.  It must be that
	 one side of the comparison is each of the values; test for the
	 case where this isn't true by failing if the two operands
	 are the same.  */

      if (operand_equal_p (TREE_OPERAND (arg, 0),
			   TREE_OPERAND (arg, 1), 0))
	return 0;

      /* Match operand 0 against the values found so far, recording it
	 as a new value if there is room.  */
      if (*cval1 == 0)
	*cval1 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 0);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
	;
      else
	return 0;

      /* Likewise for operand 1; *CVAL1 is known non-zero here.  */
      if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
	;
      else if (*cval2 == 0)
	*cval2 = TREE_OPERAND (arg, 1);
      else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
	;
      else
	return 0;

      return 1;
    }

  return 0;
}
/* ARG is a tree that is known to contain just arithmetic operations and
   comparisons.  Evaluate the operations in the tree substituting NEW0 for
   any occurrance of OLD0 as an operand of a comparison and likewise for
   NEW1 and OLD1.  */

static tree
eval_subst (arg, old0, new0, old1, new1)
     tree arg;
     tree old0, new0, old1, new1;
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);
  char class = TREE_CODE_CLASS (code);

  /* We can handle some of the 'e' cases here.  */
  if (class == 'e' && code == TRUTH_NOT_EXPR)
    class = '1';
  else if (class == 'e'
	   && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
    class = '2';

  switch (class)
    {
    case '1':
      /* Rebuild the unary operation over the substituted operand.  */
      return fold (build1 (code, type,
			   eval_subst (TREE_OPERAND (arg, 0),
				       old0, new0, old1, new1)));

    case '2':
      /* Rebuild the binary operation over both substituted operands.  */
      return fold (build (code, type,
			  eval_subst (TREE_OPERAND (arg, 0),
				      old0, new0, old1, new1),
			  eval_subst (TREE_OPERAND (arg, 1),
				      old0, new0, old1, new1)));

    case 'e':
      switch (code)
	{
	case SAVE_EXPR:
	  return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);

	case COMPOUND_EXPR:
	  /* Only the value (second operand) matters.  */
	  return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);

	case COND_EXPR:
	  return fold (build (code, type,
			      eval_subst (TREE_OPERAND (arg, 0),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 1),
					  old0, new0, old1, new1),
			      eval_subst (TREE_OPERAND (arg, 2),
					  old0, new0, old1, new1)));
	}
      /* NOTE(review): an 'e' code not handled above falls through into
	 the '<' case below.  Presumably unreachable given that callers
	 only pass trees accepted by twoval_comparison_p -- confirm
	 before relying on it.  */

    case '<':
      {
	tree arg0 = TREE_OPERAND (arg, 0);
	tree arg1 = TREE_OPERAND (arg, 1);

	/* We need to check both for exact equality and tree equality.  The
	   former will be true if the operand has a side-effect.  In that
	   case, we know the operand occurred exactly once.  */

	if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
	  arg0 = new0;
	else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
	  arg0 = new1;

	if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
	  arg1 = new0;
	else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
	  arg1 = new1;

	return fold (build (code, type, arg0, arg1));
      }
    }

  return arg;
}
6d716ca8
RS
1819/* Return a tree for the case when the result of an expression is RESULT
1820 converted to TYPE and OMITTED was previously an operand of the expression
1821 but is now not needed (e.g., we folded OMITTED * 0).
1822
1823 If OMITTED has side effects, we must evaluate it. Otherwise, just do
1824 the conversion of RESULT to TYPE. */
1825
1826static tree
1827omit_one_operand (type, result, omitted)
1828 tree type, result, omitted;
1829{
1830 tree t = convert (type, result);
1831
1832 if (TREE_SIDE_EFFECTS (omitted))
1833 return build (COMPOUND_EXPR, type, omitted, t);
1834
1835 return t;
1836}
1837\f
/* Return a simplified tree node for the truth-negation of ARG
   (perhaps by altering ARG).  It is known that ARG is an operation that
   returns a truth value (0 or 1).  Aborts if ARG is not a form it
   knows how to negate.  */

tree
invert_truthvalue (arg)
     tree arg;
{
  tree type = TREE_TYPE (arg);
  enum tree_code code = TREE_CODE (arg);

  /* If this is a comparison, we can simply invert it, except for
     floating-point non-equality comparisons, in which case we just
     enclose a TRUTH_NOT_EXPR around what we have.  */

  if (TREE_CODE_CLASS (code) == '<')
    {
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 0))) == REAL_TYPE
	  && code != NE_EXPR && code != EQ_EXPR)
	return build1 (TRUTH_NOT_EXPR, type, arg);
      else
	{
	  /* Note this mutates ARG in place rather than copying it.  */
	  TREE_SET_CODE (arg, invert_tree_comparison (code));
	  return arg;
	}
    }

  switch (code)
    {
    case INTEGER_CST:
      /* A constant negates to 1 if it was zero (both words), else 0.  */
      return convert (type, build_int_2 (TREE_INT_CST_LOW (arg) == 0
					 && TREE_INT_CST_HIGH (arg) == 0, 0));

    case TRUTH_AND_EXPR:
      /* De Morgan: !(a && b) == !a || !b.  */
      return build (TRUTH_OR_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_OR_EXPR:
      /* De Morgan: !(a || b) == !a && !b.  */
      return build (TRUTH_AND_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ANDIF_EXPR:
      /* De Morgan for the short-circuit forms.  */
      return build (TRUTH_ORIF_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_ORIF_EXPR:
      return build (TRUTH_ANDIF_EXPR, type,
		    invert_truthvalue (TREE_OPERAND (arg, 0)),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case TRUTH_NOT_EXPR:
      /* Double negation cancels.  */
      return TREE_OPERAND (arg, 0);

    case COND_EXPR:
      /* Negate both arms; the condition is unchanged.  */
      return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
		    invert_truthvalue (TREE_OPERAND (arg, 1)),
		    invert_truthvalue (TREE_OPERAND (arg, 2)));

    case COMPOUND_EXPR:
      /* Only the value (second operand) is negated.  */
      return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
		    invert_truthvalue (TREE_OPERAND (arg, 1)));

    case NON_LVALUE_EXPR:
      return invert_truthvalue (TREE_OPERAND (arg, 0));

    case NOP_EXPR:
    case CONVERT_EXPR:
    case FLOAT_EXPR:
      /* Negate inside the conversion.  */
      return build1 (TREE_CODE (arg), type,
		     invert_truthvalue (TREE_OPERAND (arg, 0)));

    case BIT_AND_EXPR:
      /* Only (x & 1) is a truth value we know how to negate:
	 it becomes (x & 1) == 0.  */
      if (! integer_onep (TREE_OPERAND (arg, 1)))
	abort ();
      return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
    }

  /* ARG was not an operation yielding a truth value we recognize.  */
  abort ();
}
1920
1921/* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
1922 operands are another bit-wise operation with a common input. If so,
1923 distribute the bit operations to save an operation and possibly two if
1924 constants are involved. For example, convert
1925 (A | B) & (A | C) into A | (B & C)
1926 Further simplification will occur if B and C are constants.
1927
1928 If this optimization cannot be done, 0 will be returned. */
1929
1930static tree
1931distribute_bit_expr (code, type, arg0, arg1)
1932 enum tree_code code;
1933 tree type;
1934 tree arg0, arg1;
1935{
1936 tree common;
1937 tree left, right;
1938
1939 if (TREE_CODE (arg0) != TREE_CODE (arg1)
1940 || TREE_CODE (arg0) == code
1941 || (TREE_CODE (arg0) != BIT_AND_EXPR
1942 && TREE_CODE (arg0) != BIT_IOR_EXPR))
1943 return 0;
1944
1945 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
1946 {
1947 common = TREE_OPERAND (arg0, 0);
1948 left = TREE_OPERAND (arg0, 1);
1949 right = TREE_OPERAND (arg1, 1);
1950 }
1951 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
1952 {
1953 common = TREE_OPERAND (arg0, 0);
1954 left = TREE_OPERAND (arg0, 1);
1955 right = TREE_OPERAND (arg1, 0);
1956 }
1957 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
1958 {
1959 common = TREE_OPERAND (arg0, 1);
1960 left = TREE_OPERAND (arg0, 0);
1961 right = TREE_OPERAND (arg1, 1);
1962 }
1963 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
1964 {
1965 common = TREE_OPERAND (arg0, 1);
1966 left = TREE_OPERAND (arg0, 0);
1967 right = TREE_OPERAND (arg1, 0);
1968 }
1969 else
1970 return 0;
1971
1972 return fold (build (TREE_CODE (arg0), type, common,
1973 fold (build (code, type, left, right))));
1974}
1975\f
1976/* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
1977 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
1978
1979static tree
1980make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
1981 tree inner;
1982 tree type;
1983 int bitsize, bitpos;
1984 int unsignedp;
1985{
1986 tree result = build (BIT_FIELD_REF, type, inner,
1987 size_int (bitsize), size_int (bitpos));
1988
1989 TREE_UNSIGNED (result) = unsignedp;
1990
1991 return result;
1992}
1993
/* Optimize a bit-field compare.

   There are two cases:  First is a compare against a constant and the
   second is a comparison of two items where the fields are at the same
   bit position relative to the start of a chunk (byte, halfword, word)
   large enough to contain it.  In these cases we can avoid the shift
   implicit in bitfield extractions.

   For constants, we emit a compare of the shifted constant with the
   BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
   compared.  For two fields at the same position, we do the ANDs with the
   similar mask and compare the result of the ANDs.

   CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
   COMPARE_TYPE is the type of the comparison, and LHS and RHS
   are the left and right operands of the comparison, respectively.

   If the optimization described above can be done, we return the resulting
   tree.  Otherwise we return zero.  */

static tree
optimize_bit_field_compare (code, compare_type, lhs, rhs)
     enum tree_code code;
     tree compare_type;
     tree lhs, rhs;
{
  int lbitpos, lbitsize, rbitpos, rbitsize;
  int lnbitpos, lnbitsize, rnbitpos, rnbitsize;
  tree type = TREE_TYPE (lhs);
  tree signed_type, unsigned_type;
  int const_p = TREE_CODE (rhs) == INTEGER_CST;
  enum machine_mode lmode, rmode, lnmode, rnmode;
  int lunsignedp, runsignedp;
  int lvolatilep = 0, rvolatilep = 0;
  tree linner, rinner;
  tree mask;

  /* Get all the information about the extractions being done.  If the bit size
     is the same as the size of the underlying object, we aren't doing an
     extraction at all and so can do nothing.  */
  linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &lmode,
				&lunsignedp, &lvolatilep);
  if (lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0)
    return 0;

  if (!const_p)
    {
      /* If this is not a constant, we can only do something if bit positions,
	 sizes, and signedness are the same.   */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos,
				    &rmode, &runsignedp, &rvolatilep);

      if (lbitpos != rbitpos || lbitsize != rbitsize
	  || lunsignedp != runsignedp)
	return 0;
    }

  /* See if we can find a mode to refer to this field.  We should be able to,
     but fail if we can't.  */
  lnmode = get_best_mode (lbitsize, lbitpos,
			  TYPE_ALIGN (TREE_TYPE (linner)), word_mode,
			  lvolatilep);
  if (lnmode == VOIDmode)
    return 0;

  /* Set signed and unsigned types of the precision of this mode for the
     shifts below.  */
  signed_type = type_for_mode (lnmode, 0);
  unsigned_type = type_for_mode (lnmode, 1);

  if (! const_p)
    {
      rnmode = get_best_mode (rbitsize, rbitpos,
			      TYPE_ALIGN (TREE_TYPE (rinner)), word_mode,
			      rvolatilep);
      if (rnmode == VOIDmode)
	return 0;
    }

  /* Compute the bit position and size for the new reference and our offset
     within it.  If the new reference is the same size as the original, we
     won't optimize anything, so return zero.  */
  lnbitsize = GET_MODE_BITSIZE (lnmode);
  lnbitpos = lbitpos & ~ (lnbitsize - 1);
  lbitpos -= lnbitpos;
  if (lnbitsize == lbitsize)
    return 0;

  if (! const_p)
    {
      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = rbitpos & ~ (rnbitsize - 1);
      rbitpos -= rnbitpos;
      if (rnbitsize == rbitsize)
	return 0;
    }

#if BYTES_BIG_ENDIAN
  /* Bit positions count from the opposite end on big-endian targets.  */
  lbitpos = lnbitsize - lbitsize - lbitpos;
  rbitpos = rnbitsize - rbitsize - rbitpos;
#endif

  /* Make the mask to be used against the extracted field.  */
  mask = convert (unsigned_type, build_int_2 (~0, ~0));
  mask = const_binop (LSHIFT_EXPR, mask, size_int (lnbitsize - lbitsize));
  mask = const_binop (RSHIFT_EXPR, mask,
		      size_int (lnbitsize - lbitsize - lbitpos));

  if (! const_p)
    /* If not comparing with constant, just rework the comparison
       and return.  */
    return build (code, compare_type,
		  build (BIT_AND_EXPR, type,
			 make_bit_field_ref (linner, type,
					     lnbitsize, lnbitpos, lunsignedp),
			 mask),
		  build (BIT_AND_EXPR, type,
			 make_bit_field_ref (rinner, type,
					     rnbitsize, rnbitpos, runsignedp),
			 mask));

  /* Otherwise, we are handling the constant case.  See if the constant is too
     big for the field.  Warn and return a tree of for 0 (false) if so.  We do
     this not only for its own sake, but to avoid having to test for this
     error case below.  If we didn't, we might generate wrong code.

     For unsigned fields, the constant shifted right by the field length should
     be all zero.  For signed fields, the high-order bits should agree with
     the sign bit.  */

  if (lunsignedp)
    {
      if (! integer_zerop (const_binop (RSHIFT_EXPR,
					convert (unsigned_type, rhs),
					size_int (lbitsize))))
	{
	  warning ("comparison is always %s due to width of bitfield",
		   code == NE_EXPR ? "one" : "zero");
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }
  else
    {
      tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
			      size_int (lbitsize - 1));
      if (! integer_zerop (tem) && ! integer_all_onesp (tem))
	{
	  warning ("comparison is always %s due to width of bitfield",
		   code == NE_EXPR ? "one" : "zero");
	  return convert (compare_type,
			  (code == NE_EXPR
			   ? integer_one_node : integer_zero_node));
	}
    }

  /* Single-bit compares should always be against zero.  */
  if (lbitsize == 1 && ! integer_zerop (rhs))
    {
      code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
      rhs = convert (type, integer_zero_node);
    }

  /* Make a new bitfield reference, shift the constant over the
     appropriate number of bits and mask it with the computed mask
     (in case this was a signed field).  If we changed it, make a new one.  */
  lhs = make_bit_field_ref (linner, TREE_TYPE (lhs), lnbitsize, lnbitpos,
			    lunsignedp);

  rhs = fold (build1 (NOP_EXPR, type,
		      const_binop (BIT_AND_EXPR,
				   const_binop (LSHIFT_EXPR,
						convert (unsigned_type, rhs),
						size_int (lbitpos)), mask)));

  return build (code, compare_type,
		build (BIT_AND_EXPR, type, lhs, mask),
		rhs);
}
2175/* Subroutine for the following routine: decode a field reference.
2176
2177 If EXP is a comparison reference, we return the innermost reference.
2178
2179 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2180 set to the starting bit number.
2181
2182 If the innermost field can be completely contained in a mode-sized
2183 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2184
2185 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
2186 otherwise it is not changed.
2187
2188 *PUNSIGNEDP is set to the signedness of the field.
2189
2190 *PMASK is set to the mask used. This is either contained in a
2191 BIT_AND_EXPR or derived from the width of the field.
2192
2193 Return 0 if this is not a component reference or is one that we can't
2194 do anything with. */
2195
2196static tree
2197decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2198 pvolatilep, pmask)
2199 tree exp;
2200 int *pbitsize, *pbitpos;
2201 enum machine_mode *pmode;
2202 int *punsignedp, *pvolatilep;
2203 tree *pmask;
2204{
2205 tree mask = 0;
2206 tree inner;
2207
2208 STRIP_NOPS (exp);
2209
2210 if (TREE_CODE (exp) == BIT_AND_EXPR)
2211 {
2212 mask = TREE_OPERAND (exp, 1);
2213 exp = TREE_OPERAND (exp, 0);
2214 STRIP_NOPS (exp); STRIP_NOPS (mask);
2215 if (TREE_CODE (mask) != INTEGER_CST)
2216 return 0;
2217 }
2218
2219 if (TREE_CODE (exp) != COMPONENT_REF && TREE_CODE (exp) != ARRAY_REF
2220 && TREE_CODE (exp) != BIT_FIELD_REF)
2221 return 0;
2222
2223 inner = get_inner_reference (exp, pbitsize, pbitpos, pmode,
2224 punsignedp, pvolatilep);
c05a9b68
RS
2225 if (*pbitsize < 0)
2226 return 0;
6d716ca8
RS
2227
2228 if (mask == 0)
2229 {
2230 tree unsigned_type = type_for_size (*pbitsize, 1);
2231 int precision = TYPE_PRECISION (unsigned_type);
2232
2233 mask = convert (unsigned_type, build_int_2 (~0, ~0));
2234 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
2235 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
2236 }
2237
2238 *pmask = mask;
2239 return inner;
2240}
2241
2242/* Return non-zero if MASK respresents a mask of SIZE ones in the low-order
2243 bit positions. */
2244
2245static int
2246all_ones_mask_p (mask, size)
2247 tree mask;
2248 int size;
2249{
2250 tree type = TREE_TYPE (mask);
2251 int precision = TYPE_PRECISION (type);
2252
2253 return
2254 operand_equal_p (mask,
2255 const_binop (RSHIFT_EXPR,
2256 const_binop (LSHIFT_EXPR,
2257 convert (signed_type (type),
2258 build_int_2 (~0, ~0)),
2259 size_int (precision - size)),
2260 size_int (precision - size)), 0);
2261}
2262\f
/* Try to merge two comparisons to the same innermost item.

   For example, if we have p->a == 2 && p->b == 4 and we can make an
   object large enough to span both A and B, we can do this with a comparison
   against the object ANDed with a mask.

   If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
   operations to do this with one comparison.

   We check for both normal comparisons and the BIT_AND_EXPRs made by this
   function and the one above.

   CODE is the logical operation being done.  It can be TRUTH_ANDIF_EXPR,
   TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.

   TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
   two operands.

   We return the simplified tree or 0 if no optimization is possible.  */

static tree
merge_component_references (code, truth_type, lhs, rhs)
     enum tree_code code;
     tree truth_type, lhs, rhs;
{
  /* If this is the "or" of two comparisons, we can do something if
     the comparisons are NE_EXPR.  If this is the "and", we can do something
     if the comparisons are EQ_EXPR.  I.e.,
	(a->b == 2 && a->c == 4) can become (a->new == NEW).

     WANTED_CODE is this operation code.  For single bit fields, we can
     convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
     comparison for one-bit fields.  */

  /* Variable naming convention: the first letter selects the operand
     of the truth operation (l = LHS comparison, r = RHS comparison);
     the second letter selects the operand within that comparison
     (l = left, r = right).  So "rl_mask" is the mask for the left
     operand of the right-hand comparison.  */
  enum tree_code wanted_code
    = (code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR) ? EQ_EXPR : NE_EXPR;
  enum tree_code lcode, rcode;
  tree ll_inner, lr_inner, rl_inner, rr_inner;
  int ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
  int rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
  int xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
  int lnbitsize, lnbitpos, rnbitsize, rnbitpos;
  int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
  enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
  enum machine_mode lnmode, rnmode;
  tree ll_mask, lr_mask, rl_mask, rr_mask;
  tree l_const = 0, r_const = 0;
  tree type, result;
  int first_bit, end_bit;
  int volatilep = 0;

  /* Start by getting the comparison codes and seeing if we may be able
     to do something.  Then get all the parameters for each side.  Fail
     if anything is volatile.  */

  lcode = TREE_CODE (lhs);
  rcode = TREE_CODE (rhs);
  if ((lcode != EQ_EXPR && lcode != NE_EXPR)
      || (rcode != EQ_EXPR && rcode != NE_EXPR)
      || TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
    return 0;

  /* VOLATILEP is shared by all four calls; any volatile reference
     sets it and makes us fail below.  */
  ll_inner = decode_field_reference (TREE_OPERAND (lhs, 0),
				     &ll_bitsize, &ll_bitpos, &ll_mode,
				     &ll_unsignedp, &volatilep, &ll_mask);
  lr_inner = decode_field_reference (TREE_OPERAND (lhs, 1),
				     &lr_bitsize, &lr_bitpos, &lr_mode,
				     &lr_unsignedp, &volatilep, &lr_mask);
  rl_inner = decode_field_reference (TREE_OPERAND (rhs, 0),
				     &rl_bitsize, &rl_bitpos, &rl_mode,
				     &rl_unsignedp, &volatilep, &rl_mask);
  rr_inner = decode_field_reference (TREE_OPERAND (rhs, 1),
				     &rr_bitsize, &rr_bitpos, &rr_mode,
				     &rr_unsignedp, &volatilep, &rr_mask);

  /* It must be true that the inner operation on the lhs of each
     comparison must be the same if we are to be able to do anything.
     Then see if we have constants.  If not, the same must be true for
     the rhs's.  */
  if (volatilep || ll_inner == 0 || rl_inner == 0
      || ! operand_equal_p (ll_inner, rl_inner, 0))
    return 0;

  if (TREE_CODE (TREE_OPERAND (lhs, 1)) == INTEGER_CST
      && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
    l_const = TREE_OPERAND (lhs, 1), r_const = TREE_OPERAND (rhs, 1);
  else if (lr_inner == 0 || rr_inner == 0
	   || ! operand_equal_p (lr_inner, rr_inner, 0))
    return 0;

  /* If either comparison code is not correct for our logical operation,
     fail.  However, we can convert a one-bit comparison against zero into
     the opposite comparison against that bit being set in the field.  */
  if (lcode != wanted_code)
    {
      if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
	l_const = ll_mask;
      else
	return 0;
    }

  if (rcode != wanted_code)
    {
      if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
	r_const = rl_mask;
      else
	return 0;
    }

  /* See if we can find a mode that contains both fields being compared on
     the left.  If we can't, fail.  Otherwise, update all constants and masks
     to be relative to a field of that size.  */
  first_bit = MIN (ll_bitpos, rl_bitpos);
  end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
  lnmode = get_best_mode (end_bit - first_bit, first_bit,
			  TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
			  volatilep);
  if (lnmode == VOIDmode)
    return 0;

  lnbitsize = GET_MODE_BITSIZE (lnmode);
  /* Align the enclosing field's position down to a multiple of its
     size (LNBITSIZE is a power of two, so the AND works).  */
  lnbitpos = first_bit & ~ (lnbitsize - 1);
  type = type_for_size (lnbitsize, 1);
  /* X*_BITPOS values are the field positions re-expressed relative to
     the start of the enclosing field.  */
  xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;

#if BYTES_BIG_ENDIAN
  /* On big-endian targets bit positions count from the other end of
     the enclosing field.  */
  xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
  xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
#endif

  ll_mask = const_binop (LSHIFT_EXPR, convert (type, ll_mask),
			 size_int (xll_bitpos));
  rl_mask = const_binop (LSHIFT_EXPR, convert (type, rl_mask),
			 size_int (xrl_bitpos));

  /* Make sure the constants are interpreted as unsigned, so we
     don't have sign bits outside the range of their type.  */

  if (l_const)
    {
      l_const = convert (unsigned_type (TREE_TYPE (l_const)), l_const);
      l_const = const_binop (LSHIFT_EXPR, convert (type, l_const),
			     size_int (xll_bitpos));
    }
  if (r_const)
    {
      r_const = convert (unsigned_type (TREE_TYPE (r_const)), r_const);
      r_const = const_binop (LSHIFT_EXPR, convert (type, r_const),
			     size_int (xrl_bitpos));
    }

  /* If the right sides are not constant, do the same for it.  Also,
     disallow this optimization if a size or signedness mismatch occurs
     between the left and right sides.  */
  if (l_const == 0)
    {
      if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
	  || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp)
	return 0;

      first_bit = MIN (lr_bitpos, rr_bitpos);
      end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
      rnmode = get_best_mode (end_bit - first_bit, first_bit,
			      TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
			      volatilep);
      if (rnmode == VOIDmode)
	return 0;

      rnbitsize = GET_MODE_BITSIZE (rnmode);
      rnbitpos = first_bit & ~ (rnbitsize - 1);
      xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;

#if BYTES_BIG_ENDIAN
      xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
      xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
#endif

      lr_mask = const_binop (LSHIFT_EXPR, convert (type, lr_mask),
			     size_int (xlr_bitpos));
      rr_mask = const_binop (LSHIFT_EXPR, convert (type, rr_mask),
			     size_int (xrr_bitpos));

      /* Make a mask that corresponds to both fields being compared.
	 Do this for both items being compared.  If the masks agree,
	 we can do this by masking both and comparing the masked
	 results.  */
      ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
      lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
      if (operand_equal_p (ll_mask, lr_mask, 0) && lnbitsize == rnbitsize)
	{
	  lhs = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
				    ll_unsignedp || rl_unsignedp);
	  rhs = make_bit_field_ref (lr_inner, type, rnbitsize, rnbitpos,
				    lr_unsignedp || rr_unsignedp);
	  /* Skip the AND entirely when the mask covers the whole
	     field anyway.  */
	  if (! all_ones_mask_p (ll_mask, lnbitsize))
	    {
	      lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
	      rhs = build (BIT_AND_EXPR, type, rhs, ll_mask);
	    }
	  return build (wanted_code, truth_type, lhs, rhs);
	}

      /* There is still another way we can do something:  If both pairs of
	 fields being compared are adjacent, we may be able to make a wider
	 field containing them both.  */
      if ((ll_bitsize + ll_bitpos == rl_bitpos
	   && lr_bitsize + lr_bitpos == rr_bitpos)
	  || (ll_bitpos == rl_bitpos + rl_bitsize
	      && lr_bitpos == rr_bitpos + rr_bitsize))
	return build (wanted_code, truth_type,
		      make_bit_field_ref (ll_inner, type,
					  ll_bitsize + rl_bitsize,
					  MIN (ll_bitpos, rl_bitpos),
					  ll_unsignedp),
		      make_bit_field_ref (lr_inner, type,
					  lr_bitsize + rr_bitsize,
					  MIN (lr_bitpos, rr_bitpos),
					  lr_unsignedp));

      return 0;
    }

  /* Handle the case of comparisons with constants.  If there is something in
     common between the masks, those bits of the constants must be the same.
     If not, the condition is always false.  Test for this to avoid generating
     incorrect code below.  */
  result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
  if (! integer_zerop (result)
      && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
			   const_binop (BIT_AND_EXPR, result, r_const)) != 1)
    {
      if (wanted_code == NE_EXPR)
	{
	  warning ("`or' of unmatched not-equal tests is always 1");
	  return convert (truth_type, integer_one_node);
	}
      else
	{
	  warning ("`and' of mutually exclusive equal-tests is always zero");
	  return convert (truth_type, integer_zero_node);
	}
    }

  /* Construct the expression we will return.  First get the component
     reference we will make.  Unless the mask is all ones the width of
     that field, perform the mask operation.  Then compare with the
     merged constant.  */
  result = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
			       ll_unsignedp || rl_unsignedp);

  ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
  if (! all_ones_mask_p (ll_mask, lnbitsize))
    result = build (BIT_AND_EXPR, type, result, ll_mask);

  return build (wanted_code, truth_type, result,
		const_binop (BIT_IOR_EXPR, l_const, r_const));
}
2520\f
2521/* Perform constant folding and related simplification of EXPR.
2522 The related simplifications include x*1 => x, x*0 => 0, etc.,
2523 and application of the associative law.
2524 NOP_EXPR conversions may be removed freely (as long as we
2525 are careful not to change the C type of the overall expression)
2526 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
2527 but we can constant-fold them if they have constant operands. */
2528
2529tree
2530fold (expr)
2531 tree expr;
2532{
2533 register tree t = expr;
2534 tree t1 = NULL_TREE;
c05a9b68 2535 tree tem;
6d716ca8
RS
2536 tree type = TREE_TYPE (expr);
2537 register tree arg0, arg1;
2538 register enum tree_code code = TREE_CODE (t);
2539 register int kind;
c05a9b68 2540 int invert;
6d716ca8
RS
2541
2542 /* WINS will be nonzero when the switch is done
2543 if all operands are constant. */
2544
2545 int wins = 1;
2546
2547 /* Return right away if already constant. */
2548 if (TREE_CONSTANT (t))
2549 {
2550 if (code == CONST_DECL)
2551 return DECL_INITIAL (t);
2552 return t;
2553 }
2554
2555 kind = TREE_CODE_CLASS (code);
2556 if (kind == 'e' || kind == '<' || kind == '1' || kind == '2' || kind == 'r')
2557 {
2558 register int len = tree_code_length[(int) code];
2559 register int i;
2560 for (i = 0; i < len; i++)
2561 {
2562 tree op = TREE_OPERAND (t, i);
2563
2564 if (op == 0)
2565 continue; /* Valid for CALL_EXPR, at least. */
2566
2567 /* Strip any conversions that don't change the mode. */
2568 STRIP_NOPS (op);
2569
2570 if (TREE_CODE (op) != INTEGER_CST
2571#if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
2572 && TREE_CODE (op) != REAL_CST
2573#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
2574 )
2575 /* Note that TREE_CONSTANT isn't enough:
2576 static var addresses are constant but we can't
2577 do arithmetic on them. */
2578 wins = 0;
2579
2580 if (i == 0)
2581 arg0 = op;
2582 else if (i == 1)
2583 arg1 = op;
2584 }
2585 }
2586
2587 /* If this is a commutative operation, and ARG0 is a constant, move it
2588 to ARG1 to reduce the number of tests below. */
2589 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
2590 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
2591 || code == BIT_AND_EXPR)
2592 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
2593 {
c05a9b68 2594 tem = arg0; arg0 = arg1; arg1 = tem;
6d716ca8 2595
c05a9b68
RS
2596 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
2597 TREE_OPERAND (t, 1) = tem;
6d716ca8
RS
2598 }
2599
2600 /* Now WINS is set as described above,
2601 ARG0 is the first operand of EXPR,
2602 and ARG1 is the second operand (if it has more than one operand).
2603
2604 First check for cases where an arithmetic operation is applied to a
2605 compound, conditional, or comparison operation. Push the arithmetic
2606 operation inside the compound or conditional to see if any folding
2607 can then be done. Convert comparison to conditional for this purpose.
2608 The also optimizes non-constant cases that used to be done in
2609 expand_expr. */
2610 if (TREE_CODE_CLASS (code) == '1')
2611 {
2612 if (TREE_CODE (arg0) == COMPOUND_EXPR)
2613 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
2614 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
2615 else if (TREE_CODE (arg0) == COND_EXPR)
2616 return fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
2617 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
2618 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
2619 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
2620 return fold (build (COND_EXPR, type, arg0,
2621 fold (build1 (code, type, integer_one_node)),
2622 fold (build1 (code, type, integer_zero_node))));
2623 }
2624 else if (TREE_CODE_CLASS (code) == '2')
2625 {
2626 if (TREE_CODE (arg1) == COMPOUND_EXPR)
2627 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
2628 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
2629 else if (TREE_CODE (arg1) == COND_EXPR
2630 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
2631 {
2632 tree test, true_value, false_value;
2633
2634 if (TREE_CODE (arg1) == COND_EXPR)
2635 {
2636 test = TREE_OPERAND (arg1, 0);
2637 true_value = TREE_OPERAND (arg1, 1);
2638 false_value = TREE_OPERAND (arg1, 2);
2639 }
2640 else
2641 {
2642 test = arg1;
2643 true_value = integer_one_node;
2644 false_value = integer_zero_node;
2645 }
2646
2647 if (TREE_CODE (arg0) != VAR_DECL && TREE_CODE (arg0) != PARM_DECL)
2648 arg0 = save_expr (arg0);
2649 test = fold (build (COND_EXPR, type, test,
2650 fold (build (code, type, arg0, true_value)),
2651 fold (build (code, type, arg0, false_value))));
2652 if (TREE_CODE (arg0) == SAVE_EXPR)
2653 return build (COMPOUND_EXPR, type,
2654 convert (void_type_node, arg0), test);
2655 else
2656 return convert (type, test);
2657 }
2658
2659 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
2660 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
2661 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
2662 else if (TREE_CODE (arg0) == COND_EXPR
2663 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
2664 {
2665 tree test, true_value, false_value;
2666
2667 if (TREE_CODE (arg0) == COND_EXPR)
2668 {
2669 test = TREE_OPERAND (arg0, 0);
2670 true_value = TREE_OPERAND (arg0, 1);
2671 false_value = TREE_OPERAND (arg0, 2);
2672 }
2673 else
2674 {
2675 test = arg0;
2676 true_value = integer_one_node;
2677 false_value = integer_zero_node;
2678 }
2679
2680 if (TREE_CODE (arg1) != VAR_DECL && TREE_CODE (arg1) != PARM_DECL)
2681 arg1 = save_expr (arg1);
2682 test = fold (build (COND_EXPR, type, test,
2683 fold (build (code, type, true_value, arg1)),
2684 fold (build (code, type, false_value, arg1))));
2685 if (TREE_CODE (arg1) == SAVE_EXPR)
2686 return build (COMPOUND_EXPR, type,
2687 convert (void_type_node, arg1), test);
2688 else
2689 return convert (type, test);
2690 }
2691 }
c05a9b68
RS
2692 else if (TREE_CODE_CLASS (code) == '<'
2693 && TREE_CODE (arg0) == COMPOUND_EXPR)
2694 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
2695 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
2696 else if (TREE_CODE_CLASS (code) == '<'
2697 && TREE_CODE (arg1) == COMPOUND_EXPR)
2698 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
2699 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
6d716ca8
RS
2700
2701 switch (code)
2702 {
2703 case INTEGER_CST:
2704 case REAL_CST:
2705 case STRING_CST:
2706 case COMPLEX_CST:
2707 case CONSTRUCTOR:
2708 return t;
2709
2710 case CONST_DECL:
2711 return fold (DECL_INITIAL (t));
2712
2713 case NOP_EXPR:
2714 case FLOAT_EXPR:
2715 case CONVERT_EXPR:
2716 case FIX_TRUNC_EXPR:
2717 /* Other kinds of FIX are not handled properly by fold_convert. */
2718 /* Two conversions in a row are not needed unless:
2719 - the intermediate type is narrower than both initial and final, or
2720 - the initial type is a pointer type and the precisions of the
2721 intermediate and final types differ, or
2722 - the final type is a pointer type and the precisions of the
2723 initial and intermediate types differ. */
2724 if ((TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
2725 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
2726 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
2727 > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
2728 ||
2729 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
2730 > TYPE_PRECISION (TREE_TYPE (t)))
2731 && ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (t, 0)))
2732 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
2733 > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))))
2734 ==
2735 (TREE_UNSIGNED (TREE_TYPE (t))
2736 && (TYPE_PRECISION (TREE_TYPE (t))
2737 > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0))))))
2738 && ! ((TREE_CODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
2739 == POINTER_TYPE)
2740 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
2741 != TYPE_PRECISION (TREE_TYPE (t))))
2742 && ! (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
2743 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
2744 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0))))))
2745 return convert (TREE_TYPE (t), TREE_OPERAND (TREE_OPERAND (t, 0), 0));
2746
2747 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
2748 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1)))
2749 {
2750 /* Don't leave an assignment inside a conversion. */
2751 tree prev = TREE_OPERAND (t, 0);
2752 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
2753 /* First do the assignment, then return converted constant. */
2754 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
2755 TREE_USED (t) = 1;
2756 return t;
2757 }
2758 if (!wins)
2759 {
2760 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
2761 return t;
2762 }
2763 return fold_convert (t, arg0);
2764
2765#if 0 /* This loses on &"foo"[0]. */
2766 case ARRAY_REF:
2767 {
2768 int i;
2769
2770 /* Fold an expression like: "foo"[2] */
2771 if (TREE_CODE (arg0) == STRING_CST
2772 && TREE_CODE (arg1) == INTEGER_CST
2773 && !TREE_INT_CST_HIGH (arg1)
2774 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
2775 {
2776 t = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
2777 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
2778 force_fit_type (t);
2779 }
2780 }
2781 return t;
2782#endif /* 0 */
2783
2784 case RANGE_EXPR:
2785 TREE_CONSTANT (t) = wins;
2786 return t;
2787
2788 case NEGATE_EXPR:
2789 if (wins)
2790 {
2791 if (TREE_CODE (arg0) == INTEGER_CST)
2792 {
2793 if (TREE_INT_CST_LOW (arg0) == 0)
2794 t = build_int_2 (0, - TREE_INT_CST_HIGH (arg0));
2795 else
2796 t = build_int_2 (- TREE_INT_CST_LOW (arg0),
2797 ~ TREE_INT_CST_HIGH (arg0));
2798 TREE_TYPE (t) = type;
2799 force_fit_type (t);
2800 }
2801 else if (TREE_CODE (arg0) == REAL_CST)
2802 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
2803 TREE_TYPE (t) = type;
2804 }
2805 else if (TREE_CODE (arg0) == NEGATE_EXPR)
2806 return TREE_OPERAND (arg0, 0);
2807
2808 /* Convert - (a - b) to (b - a) for non-floating-point. */
2809 else if (TREE_CODE (arg0) == MINUS_EXPR && TREE_CODE (type) != REAL_TYPE)
2810 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
2811 TREE_OPERAND (arg0, 0));
2812
2813 return t;
2814
2815 case ABS_EXPR:
2816 if (wins)
2817 {
2818 if (TREE_CODE (arg0) == INTEGER_CST)
2819 {
2820 if (! TREE_UNSIGNED (type)
2821 && TREE_INT_CST_HIGH (arg0) < 0)
2822 {
2823 if (TREE_INT_CST_LOW (arg0) == 0)
2824 t = build_int_2 (0, - TREE_INT_CST_HIGH (arg0));
2825 else
2826 t = build_int_2 (- TREE_INT_CST_LOW (arg0),
2827 ~ TREE_INT_CST_HIGH (arg0));
2828 }
2829 }
2830 else if (TREE_CODE (arg0) == REAL_CST)
2831 {
c05a9b68 2832 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
6d716ca8
RS
2833 t = build_real (type,
2834 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
2835 }
2836 TREE_TYPE (t) = type;
2837 }
2838 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
2839 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
2840 return t;
2841
2842 case BIT_NOT_EXPR:
2843 if (wins)
2844 {
2845 if (TREE_CODE (arg0) == INTEGER_CST)
2846 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
2847 ~ TREE_INT_CST_HIGH (arg0));
2848 TREE_TYPE (t) = type;
2849 force_fit_type (t);
2850 }
2851 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
2852 return TREE_OPERAND (arg0, 0);
2853 return t;
2854
2855 case PLUS_EXPR:
2856 /* A + (-B) -> A - B */
2857 if (TREE_CODE (arg1) == NEGATE_EXPR)
2858 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
2859 else if (TREE_CODE (type) != REAL_TYPE)
2860 {
2861 if (integer_zerop (arg1))
2862 return non_lvalue (convert (type, arg0));
2863
2864 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
2865 with a constant, and the two constants have no bits in common,
2866 we should treat this as a BIT_IOR_EXPR since this may produce more
2867 simplifications. */
2868 if (TREE_CODE (arg0) == BIT_AND_EXPR
2869 && TREE_CODE (arg1) == BIT_AND_EXPR
2870 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
2871 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
2872 && integer_zerop (const_binop (BIT_AND_EXPR,
2873 TREE_OPERAND (arg0, 1),
2874 TREE_OPERAND (arg1, 1))))
2875 {
2876 code = BIT_IOR_EXPR;
2877 goto bit_ior;
2878 }
2879 }
2880 /* In IEEE floating point, x+0 may not equal x. */
2881 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
2882 && real_zerop (arg1))
2883 return non_lvalue (convert (type, arg0));
2884 associate:
2885 /* In most languages, can't associate operations on floats
2886 through parentheses. Rather than remember where the parentheses
2887 were, we don't associate floats at all. It shouldn't matter much. */
2888 if (TREE_CODE (type) == REAL_TYPE)
2889 goto binary;
2890 /* The varsign == -1 cases happen only for addition and subtraction.
2891 It says that the arg that was split was really CON minus VAR.
2892 The rest of the code applies to all associative operations. */
2893 if (!wins)
2894 {
c05a9b68 2895 tree var, con;
6d716ca8
RS
2896 int varsign;
2897
2898 if (split_tree (arg0, code, &var, &con, &varsign))
2899 {
2900 if (varsign == -1)
2901 {
2902 /* EXPR is (CON-VAR) +- ARG1. */
2903 /* If it is + and VAR==ARG1, return just CONST. */
2904 if (code == PLUS_EXPR && operand_equal_p (var, arg1, 0))
2905 return convert (TREE_TYPE (t), con);
2906
2907 /* Otherwise return (CON +- ARG1) - VAR. */
2908 TREE_SET_CODE (t, MINUS_EXPR);
2909 TREE_OPERAND (t, 1) = var;
2910 TREE_OPERAND (t, 0)
2911 = fold (build (code, TREE_TYPE (t), con, arg1));
2912 }
2913 else
2914 {
2915 /* EXPR is (VAR+CON) +- ARG1. */
2916 /* If it is - and VAR==ARG1, return just CONST. */
2917 if (code == MINUS_EXPR && operand_equal_p (var, arg1, 0))
2918 return convert (TREE_TYPE (t), con);
2919
2920 /* Otherwise return VAR +- (ARG1 +- CON). */
2921 TREE_OPERAND (t, 1) = tem
2922 = fold (build (code, TREE_TYPE (t), arg1, con));
2923 TREE_OPERAND (t, 0) = var;
2924 if (integer_zerop (tem)
2925 && (code == PLUS_EXPR || code == MINUS_EXPR))
2926 return convert (type, var);
2927 /* If we have x +/- (c - d) [c an explicit integer]
2928 change it to x -/+ (d - c) since if d is relocatable
2929 then the latter can be a single immediate insn
2930 and the former cannot. */
2931 if (TREE_CODE (tem) == MINUS_EXPR
2932 && TREE_CODE (TREE_OPERAND (tem, 0)) == INTEGER_CST)
2933 {
2934 tree tem1 = TREE_OPERAND (tem, 1);
2935 TREE_OPERAND (tem, 1) = TREE_OPERAND (tem, 0);
2936 TREE_OPERAND (tem, 0) = tem1;
2937 TREE_SET_CODE (t,
2938 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
2939 }
2940 }
2941 return t;
2942 }
2943
2944 if (split_tree (arg1, code, &var, &con, &varsign))
2945 {
2946 /* EXPR is ARG0 +- (CON +- VAR). */
2947 if (varsign == -1)
2948 TREE_SET_CODE (t,
2949 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
2950 if (TREE_CODE (t) == MINUS_EXPR
2951 && operand_equal_p (var, arg0, 0))
2952 {
2953 /* If VAR and ARG0 cancel, return just CON or -CON. */
2954 if (code == PLUS_EXPR)
2955 return convert (TREE_TYPE (t), con);
2956 return fold (build1 (NEGATE_EXPR, TREE_TYPE (t),
2957 convert (TREE_TYPE (t), con)));
2958 }
2959 TREE_OPERAND (t, 0)
2960 = fold (build (code, TREE_TYPE (t), arg0, con));
2961 TREE_OPERAND (t, 1) = var;
2962 if (integer_zerop (TREE_OPERAND (t, 0))
2963 && TREE_CODE (t) == PLUS_EXPR)
2964 return convert (TREE_TYPE (t), var);
2965 return t;
2966 }
2967 }
2968 binary:
2969#if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
2970 if (TREE_CODE (arg1) == REAL_CST)
2971 return t;
2972#endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
2973 if (wins)
2974 t1 = const_binop (code, arg0, arg1);
2975 if (t1 != NULL_TREE)
2976 {
2977 /* The return value should always have
2978 the same type as the original expression. */
2979 TREE_TYPE (t1) = TREE_TYPE (t);
2980 return t1;
2981 }
2982 return t;
2983
2984 case MINUS_EXPR:
2985 if (TREE_CODE (type) != REAL_TYPE)
2986 {
2987 if (! wins && integer_zerop (arg0))
2988 return build1 (NEGATE_EXPR, type, arg1);
2989 if (integer_zerop (arg1))
2990 return non_lvalue (convert (type, arg0));
2991 }
2992 /* Convert A - (-B) to A + B. */
2993 else if (TREE_CODE (arg1) == NEGATE_EXPR)
2994 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
c05a9b68 2995 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
6d716ca8 2996 {
c05a9b68 2997 /* Except with IEEE floating point, 0-x equals -x. */
6d716ca8
RS
2998 if (! wins && real_zerop (arg0))
2999 return build1 (NEGATE_EXPR, type, arg1);
c05a9b68
RS
3000 /* Except with IEEE floating point, x-0 equals x. */
3001 if (real_zerop (arg1))
6d716ca8
RS
3002 return non_lvalue (convert (type, arg0));
3003 }
3004 /* Fold &x - &x. This can happen from &x.foo - &x.
3005 Note that can't be done for certain floats even in non-IEEE formats.
3006 Also note that operand_equal_p is always false is an operand
3007 is volatile. */
3008
3009 if (operand_equal_p (arg0, arg1,
3010 TREE_CODE (type) == REAL_TYPE))
3011 return convert (type, integer_zero_node);
3012 goto associate;
3013
3014 case MULT_EXPR:
3015 if (TREE_CODE (type) != REAL_TYPE)
3016 {
3017 if (integer_zerop (arg1))
3018 return omit_one_operand (type, arg1, arg0);
3019 if (integer_onep (arg1))
3020 return non_lvalue (convert (type, arg0));
3021
3022 /* (a * (1 << b)) is (a << b) */
3023 if (TREE_CODE (arg1) == LSHIFT_EXPR
3024 && integer_onep (TREE_OPERAND (arg1, 0)))
3025 return fold (build (LSHIFT_EXPR, type, arg0,
3026 TREE_OPERAND (arg1, 1)));
3027 if (TREE_CODE (arg0) == LSHIFT_EXPR
3028 && integer_onep (TREE_OPERAND (arg0, 0)))
3029 return fold (build (LSHIFT_EXPR, type, arg1,
3030 TREE_OPERAND (arg0, 1)));
3031 }
6d716ca8
RS
3032 else
3033 {
c05a9b68 3034 /* x*0 is 0, except for IEEE floating point. */
6d716ca8
RS
3035 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3036 && real_zerop (arg1))
3037 return omit_one_operand (type, arg1, arg0);
c05a9b68 3038 /* In IEEE floating point, x*1 is not equivalent to x for snans.
6d716ca8
RS
3039 However, ANSI says we can drop signals,
3040 so we can do this anyway. */
3041 if (real_onep (arg1))
3042 return non_lvalue (convert (type, arg0));
3043 /* x*2 is x+x */
3044 if (! wins && real_twop (arg1))
3045 {
3046 tree arg = save_expr (arg0);
3047 return build (PLUS_EXPR, type, arg, arg);
3048 }
3049 }
3050 goto associate;
3051
3052 case BIT_IOR_EXPR:
3053 bit_ior:
3054 if (integer_all_onesp (arg1))
3055 return omit_one_operand (type, arg1, arg0);
3056 if (integer_zerop (arg1))
3057 return non_lvalue (convert (type, arg0));
3058 t1 = distribute_bit_expr (code, type, arg0, arg1);
3059 if (t1 != NULL_TREE)
3060 return t1;
3061 goto associate;
3062
3063 case BIT_XOR_EXPR:
3064 if (integer_zerop (arg1))
3065 return non_lvalue (convert (type, arg0));
3066 if (integer_all_onesp (arg1))
3067 return fold (build1 (BIT_NOT_EXPR, type, arg0));
3068 goto associate;
3069
3070 case BIT_AND_EXPR:
3071 bit_and:
3072 if (integer_all_onesp (arg1))
3073 return non_lvalue (convert (type, arg0));
3074 if (integer_zerop (arg1))
3075 return omit_one_operand (type, arg1, arg0);
3076 t1 = distribute_bit_expr (code, type, arg0, arg1);
3077 if (t1 != NULL_TREE)
3078 return t1;
3079 /* Simplify ((int)c & 0x377) into (int)c, if c is unsigned char. */
3080 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
3081 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
3082 {
3083 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
3084 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_INT
3085 && (~TREE_INT_CST_LOW (arg0) & ((1 << prec) - 1)) == 0)
3086 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
3087 }
3088 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
3089 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
3090 {
3091 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
3092 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_INT
3093 && (~TREE_INT_CST_LOW (arg1) & ((1 << prec) - 1)) == 0)
3094 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
3095 }
3096 goto associate;
3097
3098 case BIT_ANDTC_EXPR:
3099 if (integer_all_onesp (arg0))
3100 return non_lvalue (convert (type, arg1));
3101 if (integer_zerop (arg0))
3102 return omit_one_operand (type, arg0, arg1);
3103 if (TREE_CODE (arg1) == INTEGER_CST)
3104 {
3105 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
3106 code = BIT_AND_EXPR;
3107 goto bit_and;
3108 }
3109 goto binary;
3110
3111 case TRUNC_DIV_EXPR:
3112 case ROUND_DIV_EXPR:
3113 case FLOOR_DIV_EXPR:
3114 case CEIL_DIV_EXPR:
3115 case EXACT_DIV_EXPR:
3116 case RDIV_EXPR:
3117 if (integer_onep (arg1))
3118 return non_lvalue (convert (type, arg0));
3119 if (integer_zerop (arg1))
3120 return t;
3121#if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3122#ifndef REAL_INFINITY
3123 if (TREE_CODE (arg1) == REAL_CST
3124 && real_zerop (arg1))
3125 return t;
3126#endif
3127#endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3128
3129 goto binary;
3130
3131 case CEIL_MOD_EXPR:
3132 case FLOOR_MOD_EXPR:
3133 case ROUND_MOD_EXPR:
3134 case TRUNC_MOD_EXPR:
3135 if (integer_onep (arg1))
3136 return omit_one_operand (type, integer_zero_node, arg0);
3137 if (integer_zerop (arg1))
3138 return t;
3139 goto binary;
3140
3141 case LSHIFT_EXPR:
3142 case RSHIFT_EXPR:
3143 case LROTATE_EXPR:
3144 case RROTATE_EXPR:
3145 if (integer_zerop (arg1))
3146 return non_lvalue (convert (type, arg0));
3147 /* Since negative shift count is not well-defined,
3148 don't try to compute it in the compiler. */
3149 if (tree_int_cst_lt (arg1, integer_zero_node))
3150 return t;
3151 goto binary;
3152
3153 case MIN_EXPR:
3154 if (operand_equal_p (arg0, arg1, 0))
3155 return arg0;
3156 if (TREE_CODE (type) == INTEGER_TYPE
3157 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
3158 return omit_one_operand (type, arg1, arg0);
3159 goto associate;
3160
3161 case MAX_EXPR:
3162 if (operand_equal_p (arg0, arg1, 0))
3163 return arg0;
3164 if (TREE_CODE (type) == INTEGER_TYPE
3165 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
3166 return omit_one_operand (type, arg1, arg0);
3167 goto associate;
3168
3169 case TRUTH_NOT_EXPR:
3170 /* Note that the operand of this must be an int
3171 and its values must be 0 or 1.
3172 ("true" is a fixed value perhaps depending on the language,
3173 but we don't handle values other than 1 correctly yet.) */
3174 return invert_truthvalue (arg0);
3175
3176 case TRUTH_ANDIF_EXPR:
3177 /* Note that the operands of this must be ints
3178 and their values must be 0 or 1.
3179 ("true" is a fixed value perhaps depending on the language.) */
3180 /* If first arg is constant zero, return it. */
3181 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
3182 return arg0;
3183 case TRUTH_AND_EXPR:
3184 /* If either arg is constant true, drop it. */
3185 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
3186 return non_lvalue (arg1);
3187 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
3188 return non_lvalue (arg0);
3189 /* Both known to be zero => return zero. */
3190 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3191 return arg0;
3192
3193 truth_andor:
3194 /* Check for the possibility of merging component references. If our
3195 lhs is another similar operation, try to merge its rhs with our
3196 rhs. Then try to merge our lhs and rhs. */
3197 if (optimize)
3198 {
6d716ca8
RS
3199 if (TREE_CODE (arg0) == code)
3200 {
3201 tem = merge_component_references (code, type,
3202 TREE_OPERAND (arg0, 1), arg1);
3203 if (tem)
3204 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
3205 }
3206
3207 tem = merge_component_references (code, type, arg0, arg1);
3208 if (tem)
3209 return tem;
3210 }
3211 return t;
3212
3213 case TRUTH_ORIF_EXPR:
3214 /* Note that the operands of this must be ints
3215 and their values must be 0 or true.
3216 ("true" is a fixed value perhaps depending on the language.) */
3217 /* If first arg is constant true, return it. */
3218 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
3219 return arg0;
3220 case TRUTH_OR_EXPR:
3221 /* If either arg is constant zero, drop it. */
3222 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
3223 return non_lvalue (arg1);
3224 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
3225 return non_lvalue (arg0);
3226 /* Both known to be true => return true. */
3227 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
3228 return arg0;
3229 goto truth_andor;
3230
3231 case EQ_EXPR:
3232 case NE_EXPR:
3233 case LT_EXPR:
3234 case GT_EXPR:
3235 case LE_EXPR:
3236 case GE_EXPR:
3237 /* If one arg is a constant integer, put it last. */
3238 if (TREE_CODE (arg0) == INTEGER_CST
3239 && TREE_CODE (arg1) != INTEGER_CST)
3240 {
3241 TREE_OPERAND (t, 0) = arg1;
3242 TREE_OPERAND (t, 1) = arg0;
3243 arg0 = TREE_OPERAND (t, 0);
3244 arg1 = TREE_OPERAND (t, 1);
c05a9b68 3245 code = swap_tree_comparison (code);
6d716ca8
RS
3246 TREE_SET_CODE (t, code);
3247 }
3248
3249 /* Convert foo++ == CONST into ++foo == CONST + INCR.
3250 First, see if one arg is constant; find the constant arg
3251 and the other one. */
3252 {
3253 tree constop = 0, varop;
3254 tree *constoploc;
3255
3256 if (TREE_CONSTANT (arg1))
3257 constoploc = &TREE_OPERAND (t, 1), constop = arg1, varop = arg0;
3258 if (TREE_CONSTANT (arg0))
3259 constoploc = &TREE_OPERAND (t, 0), constop = arg0, varop = arg1;
3260
3261 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
3262 {
6d716ca8
RS
3263 /* This optimization is invalid for ordered comparisons
3264 if CONST+INCR overflows or if foo+incr might overflow.
c05a9b68 3265 This optimization is invalid for floating point due to rounding.
6d716ca8
RS
3266 For pointer types we assume overflow doesn't happen. */
3267 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
c05a9b68
RS
3268 || (TREE_CODE (TREE_TYPE (varop)) != REAL_TYPE
3269 && (code == EQ_EXPR || code == NE_EXPR)))
6d716ca8 3270 {
c05a9b68
RS
3271 tree newconst
3272 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
3273 constop, TREE_OPERAND (varop, 1)));
3274 TREE_SET_CODE (varop, PREINCREMENT_EXPR);
3275 *constoploc = newconst;
3276 return t;
6d716ca8
RS
3277 }
3278 }
3279 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
3280 {
6d716ca8 3281 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
c05a9b68
RS
3282 || (TREE_CODE (TREE_TYPE (varop)) != REAL_TYPE
3283 && (code == EQ_EXPR || code == NE_EXPR)))
6d716ca8 3284 {
c05a9b68
RS
3285 tree newconst
3286 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
3287 constop, TREE_OPERAND (varop, 1)));
3288 TREE_SET_CODE (varop, PREDECREMENT_EXPR);
3289 *constoploc = newconst;
3290 return t;
6d716ca8
RS
3291 }
3292 }
3293 }
3294
3295 /* Change X >= CST to X > (CST - 1) if CST is positive. */
3296 if (TREE_CODE (arg1) == INTEGER_CST
3297 && TREE_CODE (arg0) != INTEGER_CST
3298 && ! tree_int_cst_lt (arg1, integer_one_node))
3299 {
3300 switch (TREE_CODE (t))
3301 {
3302 case GE_EXPR:
3303 code = GT_EXPR;
3304 TREE_SET_CODE (t, code);
3305 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
3306 TREE_OPERAND (t, 1) = arg1;
3307 break;
3308
3309 case LT_EXPR:
3310 code = LE_EXPR;
3311 TREE_SET_CODE (t, code);
3312 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
3313 TREE_OPERAND (t, 1) = arg1;
3314 }
3315 }
3316
6d716ca8
RS
3317 /* If this is an EQ or NE comparison with zero and ARG0 is
3318 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
3319 two operations, but the latter can be done in one less insn
3320 on machines that have only two-operand insns or on which a
3321 constant cannot be the first operand. */
3322 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
3323 && TREE_CODE (arg0) == BIT_AND_EXPR)
3324 {
3325 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
3326 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
3327 return
3328 fold (build (code, type,
3329 build (BIT_AND_EXPR, TREE_TYPE (arg0),
3330 build (RSHIFT_EXPR,
3331 TREE_TYPE (TREE_OPERAND (arg0, 0)),
3332 TREE_OPERAND (arg0, 1),
3333 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
3334 convert (TREE_TYPE (arg0),
3335 integer_one_node)),
3336 arg1));
3337 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
3338 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
3339 return
3340 fold (build (code, type,
3341 build (BIT_AND_EXPR, TREE_TYPE (arg0),
3342 build (RSHIFT_EXPR,
3343 TREE_TYPE (TREE_OPERAND (arg0, 1)),
3344 TREE_OPERAND (arg0, 0),
3345 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
3346 convert (TREE_TYPE (arg0),
3347 integer_one_node)),
3348 arg1));
3349 }
3350
3351 /* If this is an NE comparison of zero with an AND of one, remove the
3352 comparison since the AND will give the correct value. */
3353 if (code == NE_EXPR && integer_zerop (arg1)
3354 && TREE_CODE (arg0) == BIT_AND_EXPR
3355 && integer_onep (TREE_OPERAND (arg0, 1)))
3356 return convert (type, arg0);
3357
3358 /* If we have (A & C) == C where C is a power of 2, convert this into
3359 (A & C) != 0. Similarly for NE_EXPR. */
3360 if ((code == EQ_EXPR || code == NE_EXPR)
3361 && TREE_CODE (arg0) == BIT_AND_EXPR
3362 && integer_pow2p (TREE_OPERAND (arg0, 1))
3363 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
3364 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
3365 arg0, integer_zero_node);
3366
c05a9b68
RS
3367 /* Simplify comparison of something with itself. (For IEEE
3368 floating-point, we can only do some of these simplifications.) */
3369 if (operand_equal_p (arg0, arg1, 0))
6d716ca8
RS
3370 {
3371 switch (code)
3372 {
3373 case EQ_EXPR:
3374 case GE_EXPR:
3375 case LE_EXPR:
c05a9b68
RS
3376 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE)
3377 {
3378 t = build_int_2 (1, 0);
3379 TREE_TYPE (t) = type;
3380 return t;
3381 }
3382 code = EQ_EXPR;
3383 TREE_SET_CODE (t, code);
3384 break;
3385
6d716ca8 3386 case NE_EXPR:
c05a9b68
RS
3387 /* For NE, we can only do this simplification if integer. */
3388 if (TREE_CODE (TREE_TYPE (arg0)) != INTEGER_TYPE)
3389 break;
3390 /* ... fall through ... */
6d716ca8
RS
3391 case GT_EXPR:
3392 case LT_EXPR:
3393 t = build_int_2 (0, 0);
3394 TREE_TYPE (t) = type;
3395 return t;
3396 }
3397 }
3398
3399 /* An unsigned comparison against 0 can be simplified. */
3400 if (integer_zerop (arg1)
3401 && (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
3402 || TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE)
3403 && TREE_UNSIGNED (TREE_TYPE (arg1)))
3404 {
3405 switch (TREE_CODE (t))
3406 {
3407 case GT_EXPR:
c05a9b68 3408 code = NE_EXPR;
6d716ca8
RS
3409 TREE_SET_CODE (t, NE_EXPR);
3410 break;
3411 case LE_EXPR:
c05a9b68 3412 code = EQ_EXPR;
6d716ca8
RS
3413 TREE_SET_CODE (t, EQ_EXPR);
3414 break;
3415 case GE_EXPR:
3416 return omit_one_operand (integer_type_node,
3417 integer_one_node, arg0);
3418 case LT_EXPR:
3419 return omit_one_operand (integer_type_node,
3420 integer_zero_node, arg0);
3421 }
3422 }
3423
c05a9b68
RS
3424 /* If we are comparing an expression that just has comparisons
3425 of two integer values, arithmetic expressions of those comparisons,
3426 and constants, we can simplify it. There are only three cases
3427 to check: the two values can either be equal, the first can be
3428 greater, or the second can be greater. Fold the expression for
3429 those three values. Since each value must be 0 or 1, we have
3430 eight possibilities, each of which corresponds to the constant 0
3431 or 1 or one of the six possible comparisons.
3432
3433 This handles common cases like (a > b) == 0 but also handles
3434 expressions like ((x > y) - (y > x)) > 0, which supposedly
3435 occur in macroized code. */
3436
3437 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
3438 {
3439 tree cval1 = 0, cval2 = 0;
3440
3441 if (twoval_comparison_p (arg0, &cval1, &cval2)
3442 /* Don't handle degenerate cases here; they should already
3443 have been handled anyway. */
3444 && cval1 != 0 && cval2 != 0
3445 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
3446 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
3447 && TREE_CODE (TREE_TYPE (cval1)) == INTEGER_TYPE
3448 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
3449 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
3450 {
3451 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
3452 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
3453
3454 /* We can't just pass T to eval_subst in case cval1 or cval2
3455 was the same as ARG1. */
3456
3457 tree high_result
3458 = fold (build (code, type,
3459 eval_subst (arg0, cval1, maxval, cval2, minval),
3460 arg1));
3461 tree equal_result
3462 = fold (build (code, type,
3463 eval_subst (arg0, cval1, maxval, cval2, maxval),
3464 arg1));
3465 tree low_result
3466 = fold (build (code, type,
3467 eval_subst (arg0, cval1, minval, cval2, maxval),
3468 arg1));
3469
3470 /* All three of these results should be 0 or 1. Confirm they
3471 are. Then use those values to select the proper code
3472 to use. */
3473
3474 if ((integer_zerop (high_result)
3475 || integer_onep (high_result))
3476 && (integer_zerop (equal_result)
3477 || integer_onep (equal_result))
3478 && (integer_zerop (low_result)
3479 || integer_onep (low_result)))
3480 {
3481 /* Make a 3-bit mask with the high-order bit being the
3482 value for `>', the next for '=', and the low for '<'. */
3483 switch ((integer_onep (high_result) * 4)
3484 + (integer_onep (equal_result) * 2)
3485 + integer_onep (low_result))
3486 {
3487 case 0:
3488 /* Always false. */
13837058 3489 return omit_one_operand (type, integer_zero_node, arg0);
c05a9b68
RS
3490 case 1:
3491 code = LT_EXPR;
3492 break;
3493 case 2:
3494 code = EQ_EXPR;
3495 break;
3496 case 3:
3497 code = LE_EXPR;
3498 break;
3499 case 4:
3500 code = GT_EXPR;
3501 break;
3502 case 5:
3503 code = NE_EXPR;
3504 break;
3505 case 6:
3506 code = GE_EXPR;
3507 break;
3508 case 7:
3509 /* Always true. */
13837058 3510 return omit_one_operand (type, integer_one_node, arg0);
c05a9b68
RS
3511 }
3512
13837058
TW
3513 return omit_one_operand (type,
3514 fold (build (code, type, cval1, cval2)),
3515 arg0);
c05a9b68
RS
3516 }
3517 }
3518 }
3519
3520 /* If this is a comparison of a field, we may be able to simplify it. */
3521 if ((TREE_CODE (arg0) == COMPONENT_REF
3522 || TREE_CODE (arg0) == BIT_FIELD_REF)
3523 && (code == EQ_EXPR || code == NE_EXPR)
3524 /* Handle the constant case even without -O
3525 to make sure the warnings are given. */
3526 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
3527 {
3528 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
3529 return t1 ? t1 : t;
3530 }
3531
3532 /* From here on, the only cases we handle are when the result is
3533 known to be a constant.
3534
3535 To compute GT, swap the arguments and do LT.
6d716ca8
RS
3536 To compute GE, do LT and invert the result.
3537 To compute LE, swap the arguments, do LT and invert the result.
c05a9b68
RS
3538 To compute NE, do EQ and invert the result.
3539
3540 Therefore, the code below must handle only EQ and LT. */
3541
6d716ca8
RS
3542 if (code == LE_EXPR || code == GT_EXPR)
3543 {
c05a9b68
RS
3544 tem = arg0, arg0 = arg1, arg1 = tem;
3545 code = swap_tree_comparison (code);
3546 }
3547
3548 /* Note that it is safe to invert for real values here because we
3549 will check below in the one case that it matters. */
3550
3551 invert = 0;
3552 if (code == NE_EXPR || code == GE_EXPR)
3553 {
3554 invert = 1;
3555 code = invert_tree_comparison (code);
6d716ca8
RS
3556 }
3557
3558 /* Compute a result for LT or EQ if args permit;
3559 otherwise return T. */
c05a9b68 3560 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
6d716ca8 3561 {
c05a9b68
RS
3562 if (code == EQ_EXPR)
3563 t1 = build_int_2 ((TREE_INT_CST_LOW (arg0)
3564 == TREE_INT_CST_LOW (arg1))
3565 && (TREE_INT_CST_HIGH (arg0)
3566 == TREE_INT_CST_HIGH (arg1)),
3567 0);
6d716ca8 3568 else
c05a9b68
RS
3569 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
3570 ? INT_CST_LT_UNSIGNED (arg0, arg1)
3571 : INT_CST_LT (arg0, arg1)),
3572 0);
6d716ca8 3573 }
c05a9b68 3574
6d716ca8
RS
3575 /* Assume a nonexplicit constant cannot equal an explicit one,
3576 since such code would be undefined anyway.
3577 Exception: on sysvr4, using #pragma weak,
3578 a label can come out as 0. */
3579 else if (TREE_CODE (arg1) == INTEGER_CST
3580 && !integer_zerop (arg1)
3581 && TREE_CONSTANT (arg0)
3582 && TREE_CODE (arg0) == ADDR_EXPR
c05a9b68
RS
3583 && code == EQ_EXPR)
3584 t1 = build_int_2 (0, 0);
3585
6d716ca8 3586 /* Two real constants can be compared explicitly. */
c05a9b68 3587 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
6d716ca8 3588 {
c05a9b68
RS
3589 /* If either operand is a NaN, the result is false with two
3590 exceptions: First, an NE_EXPR is true on NaNs, but that case
3591 is already handled correctly since we will be inverting the
3592 result for NE_EXPR. Second, if we had inverted a LE_EXPR
3593 or a GE_EXPR into a LT_EXPR, we must return true so that it
3594 will be inverted into false. */
3595
3596 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
3597 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
3598 t1 = build_int_2 (invert && code == LT_EXPR, 0);
3599
3600 else if (code == EQ_EXPR)
3601 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
3602 TREE_REAL_CST (arg1)),
3603 0);
6d716ca8 3604 else
c05a9b68
RS
3605 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
3606 TREE_REAL_CST (arg1)),
3607 0);
6d716ca8
RS
3608 }
3609
c05a9b68
RS
3610 if (t1 == NULL_TREE)
3611 return t;
3612
3613 if (invert)
3614 TREE_INT_CST_LOW (t1) ^= 1;
3615
3616 TREE_TYPE (t1) = type;
3617 return t1;
6d716ca8
RS
3618
3619 case COND_EXPR:
3620 if (TREE_CODE (arg0) == INTEGER_CST)
3621 return TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
3622 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
3623 return omit_one_operand (type, arg1, arg0);
6d716ca8 3624
c05a9b68
RS
3625 /* If the second operand is zero, invert the comparison and swap
3626 the second and third operands. Likewise if the second operand
3627 is constant and the third is not or if the third operand is
3628 equivalent to the first operand of the comparison. */
6d716ca8 3629
c05a9b68
RS
3630 if (integer_zerop (arg1)
3631 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
3632 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
3633 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
3634 TREE_OPERAND (t, 2),
3635 TREE_OPERAND (arg0, 1))))
3636 {
3637 /* See if this can be inverted. If it can't, possibly because
3638 it was a floating-point inequality comparison, don't do
3639 anything. */
3640 tem = invert_truthvalue (arg0);
6d716ca8 3641
c05a9b68
RS
3642 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
3643 {
3644 arg0 = TREE_OPERAND (t, 0) = tem;
3645 TREE_OPERAND (t, 1) = TREE_OPERAND (t, 2);
3646 TREE_OPERAND (t, 2) = arg1;
3647 arg1 = TREE_OPERAND (t, 1);
3648 }
3649 }
6d716ca8 3650
c05a9b68
RS
3651 /* If we have A op B ? A : C, we may be able to convert this to a
3652 simpler expression, depending on the operation and the values
3653 of B and C. */
6d716ca8 3654
c05a9b68
RS
3655 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
3656 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
3657 arg1, TREE_OPERAND (arg0, 1)))
6d716ca8 3658 {
c05a9b68
RS
3659 tree arg2 = TREE_OPERAND (t, 2);
3660 enum tree_code comp_code = TREE_CODE (arg0);
3661
3662 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
3663 depending on the comparison operation. */
3664 if (integer_zerop (TREE_OPERAND (arg0, 1))
3665 && TREE_CODE (arg2) == NEGATE_EXPR
3666 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
3667 switch (comp_code)
3668 {
3669 case EQ_EXPR:
3670 return fold (build1 (NEGATE_EXPR, type, arg1));
3671 case NE_EXPR:
cdc54cc9 3672 return convert (type, arg1);
c05a9b68
RS
3673 case GE_EXPR:
3674 case GT_EXPR:
3675 return fold (build1 (ABS_EXPR, type, arg1));
3676 case LE_EXPR:
3677 case LT_EXPR:
3678 return fold (build1 (NEGATE_EXPR, type,
3679 fold (build1 (ABS_EXPR, type, arg1))));
3680 }
6d716ca8 3681
c05a9b68
RS
3682 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
3683 always zero. */
6d716ca8 3684
29ebe69a 3685 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
c05a9b68
RS
3686 {
3687 if (comp_code == NE_EXPR)
cdc54cc9 3688 return convert (type, arg1);
c05a9b68
RS
3689 else if (comp_code == EQ_EXPR)
3690 return convert (type, integer_zero_node);
3691 }
3692
3693 /* If this is A op B ? A : B, this is either A, B, min (A, B),
3694 or max (A, B), depending on the operation. */
3695
3696 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
3697 arg2, TREE_OPERAND (arg0, 0)))
3698 switch (comp_code)
3699 {
3700 case EQ_EXPR:
cdc54cc9 3701 return convert (type, arg2);
c05a9b68 3702 case NE_EXPR:
cdc54cc9 3703 return convert (type, arg1);
c05a9b68
RS
3704 case LE_EXPR:
3705 case LT_EXPR:
3706 return fold (build (MIN_EXPR, type, arg1, arg2));
3707 case GE_EXPR:
3708 case GT_EXPR:
3709 return fold (build (MAX_EXPR, type, arg1, arg2));
3710 }
3711
3712 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
3713 we might still be able to simplify this. For example,
3714 if C1 is one less or one more than C2, this might have started
3715 out as a MIN or MAX and been transformed by this function. */
3716
3717 if (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3718 && TREE_CODE (arg2) == INTEGER_CST)
3719 switch (comp_code)
3720 {
3721 case EQ_EXPR:
3722 /* We can replace A with C1 in this case. */
3723 arg1 = TREE_OPERAND (t, 1)
3724 = convert (type, TREE_OPERAND (arg0, 1));
3725 break;
3726
3727 case LT_EXPR:
3728 /* If C1 is C2 + 1, this is min(A, C2). */
3729 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
3730 && operand_equal_p (TREE_OPERAND (arg0, 1),
3731 const_binop (PLUS_EXPR, arg2,
3732 integer_one_node), 1))
3733 return fold (build (MIN_EXPR, type, arg1, arg2));
3734 break;
3735
3736 case LE_EXPR:
3737 /* If C1 is C2 - 1, this is min(A, C2). */
3738 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
3739 && operand_equal_p (TREE_OPERAND (arg0, 1),
3740 const_binop (MINUS_EXPR, arg2,
3741 integer_one_node), 1))
3742 return fold (build (MIN_EXPR, type, arg1, arg2));
3743 break;
3744
3745 case GT_EXPR:
3746 /* If C1 is C2 - 1, this is max(A, C2). */
3747 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
3748 && operand_equal_p (TREE_OPERAND (arg0, 1),
3749 const_binop (MINUS_EXPR, arg2,
3750 integer_one_node), 1))
3751 return fold (build (MAX_EXPR, type, arg1, arg2));
3752 break;
3753
3754 case GE_EXPR:
3755 /* If C1 is C2 + 1, this is max(A, C2). */
3756 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
3757 && operand_equal_p (TREE_OPERAND (arg0, 1),
3758 const_binop (PLUS_EXPR, arg2,
3759 integer_one_node), 1))
3760 return fold (build (MAX_EXPR, type, arg1, arg2));
3761 break;
3762 }
6d716ca8
RS
3763 }
3764
c05a9b68
RS
3765 /* Convert A ? 1 : 0 to simply A. */
3766 if (integer_onep (TREE_OPERAND (t, 1))
3767 && integer_zerop (TREE_OPERAND (t, 2))
3768 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
3769 call to fold will try to move the conversion inside
3770 a COND, which will recurse. In that case, the COND_EXPR
3771 is probably the best choice, so leave it alone. */
3772 && type == TREE_TYPE (arg0))
3773 return arg0;
6d716ca8 3774
6d716ca8 3775
c05a9b68
RS
3776 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
3777 operation is simply A & 2. */
6d716ca8
RS
3778
3779 if (integer_zerop (TREE_OPERAND (t, 2))
3780 && TREE_CODE (arg0) == NE_EXPR
3781 && integer_zerop (TREE_OPERAND (arg0, 1))
c05a9b68
RS
3782 && integer_pow2p (arg1)
3783 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
3784 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
3785 arg1, 1))
3786 return convert (type, TREE_OPERAND (arg0, 0));
6d716ca8 3787
6d716ca8
RS
3788 return t;
3789
3790 case COMPOUND_EXPR:
3791 if (!TREE_SIDE_EFFECTS (arg0))
3792 return arg1;
3793 return t;
3794
3795 default:
3796 return t;
3797 } /* switch (code) */
3798}
This page took 0.393935 seconds and 5 git commands to generate.