1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 1988, 1992 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20 /*@@ Fix lossage on folding division of big integers. */
21
22 /*@@ This file should be rewritten to use an arbitrary precision
23 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
24 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
25 @@ The routines that translate from the ap rep should
26 @@ warn if precision et al. is lost.
27 @@ This would also make life easier when this technology is used
28 @@ for cross-compilers. */
29
30
31 /* The entry points in this file are fold, size_int and size_binop.
32
33 fold takes a tree as argument and returns a simplified tree.
34
35 size_binop takes a tree code for an arithmetic operation
36 and two operands that are trees, and produces a tree for the
37 result, assuming the type comes from `sizetype'.
38
39 size_int takes an integer value, and creates a tree constant
40 with type from `sizetype'. */
41
42 #include <stdio.h>
43 #include <setjmp.h>
44 #include "config.h"
45 #include "flags.h"
46 #include "tree.h"
47
48 void lshift_double ();
49 void rshift_double ();
50 void lrotate_double ();
51 void rrotate_double ();
52 static tree const_binop ();
53 \f
54 /* To do constant folding on INTEGER_CST nodes requires 64-bit arithmetic.
55 We do that by representing the 64-bit integer as 8 shorts,
56 with only 8 bits stored in each short, as a positive number. */
57
58 /* Unpack a 64-bit integer into 8 shorts.
59 LOW and HI are the integer, as two `int' pieces.
60 SHORTS points to the array of shorts. */
61
62 static void
63 encode (shorts, low, hi)
64 short *shorts;
65 int low, hi;
66 {
67 shorts[0] = low & 0xff;
68 shorts[1] = (low >> 8) & 0xff;
69 shorts[2] = (low >> 16) & 0xff;
70 shorts[3] = (low >> 24) & 0xff;
71 shorts[4] = hi & 0xff;
72 shorts[5] = (hi >> 8) & 0xff;
73 shorts[6] = (hi >> 16) & 0xff;
74 shorts[7] = (hi >> 24) & 0xff;
75 }
76
77 /* Pack an array of 8 shorts into a 64-bit integer.
78 SHORTS points to the array of shorts.
79 The integer is stored into *LOW and *HI as two `int' pieces. */
80
81 static void
82 decode (shorts, low, hi)
83 short *shorts;
84 int *low, *hi;
85 {
86 /* The casts in the following statement should not be
87 needed, but they get around bugs in some C compilers. */
88 *low = (((long)shorts[3] << 24) | ((long)shorts[2] << 16)
89 | ((long)shorts[1] << 8) | (long)shorts[0]);
90 *hi = (((long)shorts[7] << 24) | ((long)shorts[6] << 16)
91 | ((long)shorts[5] << 8) | (long)shorts[4]);
92 }
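/* Worked example (editorial, not part of the original sources):
   with low = 0x05060708 and hi = 0x01020304, `encode' produces
   shorts[] = {0x08, 0x07, 0x06, 0x05, 0x04, 0x03, 0x02, 0x01},
   i.e. the value as little-endian base-256 digits; `decode'
   reassembles exactly the same LOW and HI pieces.  */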
93 \f
94 /* Make the integer constant T valid for its type
95 by setting to 0 or 1 all the bits in the constant
96 that don't belong in the type. */
97
98 static void
99 force_fit_type (t)
100 tree t;
101 {
102 register int prec = TYPE_PRECISION (TREE_TYPE (t));
103
104 if (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE)
105 prec = POINTER_SIZE;
106
107 /* First clear all bits that are beyond the type's precision. */
108
109 if (prec == 2 * HOST_BITS_PER_INT)
110 ;
111 else if (prec > HOST_BITS_PER_INT)
112 {
113 TREE_INT_CST_HIGH (t)
114 &= ~((-1) << (prec - HOST_BITS_PER_INT));
115 }
116 else
117 {
118 TREE_INT_CST_HIGH (t) = 0;
119 if (prec < HOST_BITS_PER_INT)
120 TREE_INT_CST_LOW (t)
121 &= ~((-1) << prec);
122 }
123
124 /* If it's a signed type and value's sign bit is set, extend the sign. */
125
126 if (! TREE_UNSIGNED (TREE_TYPE (t))
127 && prec != 2 * HOST_BITS_PER_INT
128 && (prec > HOST_BITS_PER_INT
129 ? TREE_INT_CST_HIGH (t) & (1 << (prec - HOST_BITS_PER_INT - 1))
130 : TREE_INT_CST_LOW (t) & (1 << (prec - 1))))
131 {
132 /* Value is negative:
133 set to 1 all the bits that are outside this type's precision. */
134 if (prec > HOST_BITS_PER_INT)
135 {
136 TREE_INT_CST_HIGH (t)
137 |= ((-1) << (prec - HOST_BITS_PER_INT));
138 }
139 else
140 {
141 TREE_INT_CST_HIGH (t) = -1;
142 if (prec < HOST_BITS_PER_INT)
143 TREE_INT_CST_LOW (t)
144 |= ((-1) << prec);
145 }
146 }
147 }
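/* Illustration (editorial, assuming HOST_BITS_PER_INT == 32):
   for a signed 8-bit type, a constant with LOW = 0xff, HIGH = 0
   first has the bits above bit 7 cleared; then, because bit 7
   (the sign bit) is set, the excess bits are set to 1, leaving
   LOW = 0xffffffff and HIGH = -1 -- the value -1, which is 0xff
   read as a signed 8-bit quantity.  */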
148 \f
149 /* Add two 64-bit integers with 64-bit result.
150 Each argument is given as two `int' pieces.
151 One argument is L1 and H1; the other, L2 and H2.
152 The value is stored as two `int' pieces in *LV and *HV.
153 We use the 8-shorts representation internally. */
154
155 void
156 add_double (l1, h1, l2, h2, lv, hv)
157 int l1, h1, l2, h2;
158 int *lv, *hv;
159 {
160 short arg1[8];
161 short arg2[8];
162 register int carry = 0;
163 register int i;
164
165 encode (arg1, l1, h1);
166 encode (arg2, l2, h2);
167
168 for (i = 0; i < 8; i++)
169 {
170 carry += arg1[i] + arg2[i];
171 arg1[i] = carry & 0xff;
172 carry >>= 8;
173 }
174
175 decode (arg1, lv, hv);
176 }
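/* Example trace (editorial): adding l1 = 0xffffffff, h1 = 0 to
   l2 = 1, h2 = 0, the byte sum at digit 0 is 0x100, so a carry
   of 1 ripples through digits 1..3 and lands in digit 4, giving
   *lv = 0 and *hv = 1, i.e. the 64-bit value 2^32.  */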
177
178 /* Negate a 64-bit integer with 64-bit result.
179 The argument is given as two `int' pieces in L1 and H1.
180 The value is stored as two `int' pieces in *LV and *HV.
181 We use the 8-shorts representation internally. */
182
183 void
184 neg_double (l1, h1, lv, hv)
185 int l1, h1;
186 int *lv, *hv;
187 {
188 if (l1 == 0)
189 {
190 *lv = 0;
191 *hv = - h1;
192 }
193 else
194 {
195 *lv = - l1;
196 *hv = ~ h1;
197 }
198 }
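/* This is two's complement negation, -x == ~x + 1, split across
   two words: when the low word is zero the + 1 carries all the
   way into the high word, so the high word is negated whole;
   otherwise - l1 cannot wrap to zero, no carry reaches the high
   word, and the high word is simply complemented. For example
   (editorial), negating h1 = 0, l1 = 1 yields *hv = -1,
   *lv = -1, the 64-bit value -1.  */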
199 \f
200 /* Multiply two 64-bit integers with 64-bit result.
201 Each argument is given as two `int' pieces.
202 One argument is L1 and H1; the other, L2 and H2.
203 The value is stored as two `int' pieces in *LV and *HV.
204 We use the 8-shorts representation internally. */
205
206 void
207 mul_double (l1, h1, l2, h2, lv, hv)
208 int l1, h1, l2, h2;
209 int *lv, *hv;
210 {
211 short arg1[8];
212 short arg2[8];
213 short prod[16];
214 register int carry = 0;
215 register int i, j, k;
216
217 /* These special cases are used extensively, arising from pointer
218 combinations. */
219 if (h2 == 0)
220 {
221 if (l2 == 2)
222 {
223 unsigned temp = l1 + l1;
224 *hv = h1 * 2 + (temp < l1);
225 *lv = temp;
226 return;
227 }
228 if (l2 == 4)
229 {
230 unsigned temp = l1 + l1;
231 h1 = h1 * 4 + ((temp < l1) << 1);
232 l1 = temp;
233 temp += temp;
234 h1 += (temp < l1);
235 *lv = temp;
236 *hv = h1;
237 return;
238 }
239 if (l2 == 8)
240 {
241 unsigned temp = l1 + l1;
242 h1 = h1 * 8 + ((temp < l1) << 2);
243 l1 = temp;
244 temp += temp;
245 h1 += (temp < l1) << 1;
246 l1 = temp;
247 temp += temp;
248 h1 += (temp < l1);
249 *lv = temp;
250 *hv = h1;
251 return;
252 }
253 }
254
255 encode (arg1, l1, h1);
256 encode (arg2, l2, h2);
257
258 bzero (prod, sizeof prod);
259
260 for (i = 0; i < 8; i++)
261 for (j = 0; j < 8; j++)
262 {
263 k = i + j;
264 carry = arg1[i] * arg2[j];
265 while (carry)
266 {
267 carry += prod[k];
268 prod[k] = carry & 0xff;
269 carry >>= 8;
270 k++;
271 }
272 }
273
274 decode (prod, lv, hv); /* @@decode ignores prod[8] -> prod[15] */
275 }
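/* Editorial note: the special cases above double L1 once, twice
   or three times, catching the carry out of the low word with
   the unsigned compare (temp < l1). For instance, l1 =
   0x80000000, h1 = 0 times 2 gives temp = 0 with temp < l1,
   hence *hv = 1, *lv = 0, i.e. 2^32. The general path is
   schoolbook base-256 multiplication; only the low 8 of the 16
   product digits are decoded, so the product is truncated to
   64 bits.  */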
276 \f
277 /* Shift the 64-bit integer in L1, H1 left by COUNT places
278 keeping only PREC bits of result.
279 Shift right if COUNT is negative.
280 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
281 Store the value as two `int' pieces in *LV and *HV. */
282
283 void
284 lshift_double (l1, h1, count, prec, lv, hv, arith)
285 int l1, h1, count, prec;
286 int *lv, *hv;
287 int arith;
288 {
289 short arg1[8];
290 register int i;
291 register int carry;
292
293 if (count < 0)
294 {
295 rshift_double (l1, h1, - count, prec, lv, hv, arith);
296 return;
297 }
298
299 encode (arg1, l1, h1);
300
301 if (count > prec)
302 count = prec;
303
304 while (count > 0)
305 {
306 carry = 0;
307 for (i = 0; i < 8; i++)
308 {
309 carry += arg1[i] << 1;
310 arg1[i] = carry & 0xff;
311 carry >>= 8;
312 }
313 count--;
314 }
315
316 decode (arg1, lv, hv);
317 }
318
319 /* Shift the 64-bit integer in L1, H1 right by COUNT places
320 keeping only PREC bits of result. COUNT must be positive.
321 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
322 Store the value as two `int' pieces in *LV and *HV. */
323
324 void
325 rshift_double (l1, h1, count, prec, lv, hv, arith)
326 int l1, h1, count, prec;
327 int *lv, *hv;
328 int arith;
329 {
330 short arg1[8];
331 register int i;
332 register int carry;
333
334 encode (arg1, l1, h1);
335
336 if (count > prec)
337 count = prec;
338
339 while (count > 0)
340 {
341 carry = arith && arg1[7] >> 7;
342 for (i = 7; i >= 0; i--)
343 {
344 carry <<= 8;
345 carry += arg1[i];
346 arg1[i] = (carry >> 1) & 0xff;
347 }
348 count--;
349 }
350
351 decode (arg1, lv, hv);
352 }
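/* Example (editorial): shifting the 64-bit value -2 (l1 =
   0xfffffffe, h1 = 0xffffffff) right by 1 with ARITH nonzero
   reinserts the sign bit each pass and yields -1; with ARITH
   zero the vacated bit is 0, yielding hi = 0x7fffffff,
   low = 0xffffffff.  */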
353 \f
354 /* Rotate the 64-bit integer in L1, H1 left by COUNT places
355 keeping only PREC bits of result.
356 Rotate right if COUNT is negative.
357 Store the value as two `int' pieces in *LV and *HV. */
358
359 void
360 lrotate_double (l1, h1, count, prec, lv, hv)
361 int l1, h1, count, prec;
362 int *lv, *hv;
363 {
364 short arg1[8];
365 register int i;
366 register int carry;
367
368 if (count < 0)
369 {
370 rrotate_double (l1, h1, - count, prec, lv, hv);
371 return;
372 }
373
374 encode (arg1, l1, h1);
375
376 if (count > prec)
377 count = prec;
378
379 while (count > 0)
380 {
381 carry = arg1[7] >> 7;
382 for (i = 0; i < 8; i++)
383 {
384 carry += arg1[i] << 1;
385 arg1[i] = carry & 0xff;
386 carry >>= 8;
387 }
388 count--;
389 }
390
391 decode (arg1, lv, hv);
392 }
393
394 /* Rotate the 64-bit integer in L1, H1 right by COUNT places
395 keeping only PREC bits of result. COUNT must be positive.
396 Store the value as two `int' pieces in *LV and *HV. */
397
398 void
399 rrotate_double (l1, h1, count, prec, lv, hv)
400 int l1, h1, count, prec;
401 int *lv, *hv;
402 {
403 short arg1[8];
404 register int i;
405 register int carry;
406
407 encode (arg1, l1, h1);
408
409 if (count > prec)
410 count = prec;
411
412 while (count > 0)
413 {
414 carry = arg1[0] & 1;
415 for (i = 7; i >= 0; i--)
416 {
417 carry <<= 8;
418 carry += arg1[i];
419 arg1[i] = (carry >> 1) & 0xff;
420 }
421 count--;
422 }
423
424 decode (arg1, lv, hv);
425 }
426 \f
427 /* Divide 64-bit integer LNUM, HNUM by 64-bit integer LDEN, HDEN
428 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
429 CODE is a tree code for a kind of division, one of
430 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
431 or EXACT_DIV_EXPR (or the corresponding *_MOD_EXPR codes).
432 It controls how the quotient is rounded to an integer.
433 UNS nonzero says do unsigned division. */
434
435 static void
436 div_and_round_double (code, uns,
437 lnum_orig, hnum_orig, lden_orig, hden_orig,
438 lquo, hquo, lrem, hrem)
439 enum tree_code code;
440 int uns;
441 int lnum_orig, hnum_orig; /* num == numerator == dividend */
442 int lden_orig, hden_orig; /* den == denominator == divisor */
443 int *lquo, *hquo, *lrem, *hrem;
444 {
445 int quo_neg = 0;
446 short num[9], den[8], quo[8]; /* extra element for scaling. */
447 register int i, j, work;
448 register int carry = 0;
449 unsigned int lnum = lnum_orig;
450 int hnum = hnum_orig;
451 unsigned int lden = lden_orig;
452 int hden = hden_orig;
453
454 if ((hden == 0) && (lden == 0))
455 abort ();
456
457 /* calculate quotient sign and convert operands to unsigned. */
458 if (!uns)
459 {
460 if (hden < 0)
461 {
462 quo_neg = ~ quo_neg;
463 neg_double (lden, hden, &lden, &hden);
464 }
465 if (hnum < 0)
466 {
467 quo_neg = ~ quo_neg;
468 neg_double (lnum, hnum, &lnum, &hnum);
469 }
470 }
471
472 if (hnum == 0 && hden == 0)
473 { /* single precision */
474 *hquo = *hrem = 0;
475 *lquo = lnum / lden; /* rounds toward zero since positive args */
476 goto finish_up;
477 }
478
479 if (hnum == 0)
480 { /* trivial case: dividend < divisor */
481 /* hden != 0 already checked. */
482 *hquo = *lquo = 0;
483 *hrem = hnum;
484 *lrem = lnum;
485 goto finish_up;
486 }
487
488 bzero (quo, sizeof quo);
489
490 bzero (num, sizeof num); /* to zero 9th element */
491 bzero (den, sizeof den);
492
493 encode (num, lnum, hnum);
494 encode (den, lden, hden);
495
496 /* This code requires more than just hden == 0.
497 We also have to require that we don't need more than three bytes
498 to hold CARRY. If we ever did need four bytes to hold it, we
499 would lose part of it when computing WORK on the next round. */
500 if (hden == 0 && ((lden << 8) >> 8) == lden)
501 { /* simpler algorithm */
502 /* hnum != 0 already checked. */
503 for (i = 7; i >= 0; i--)
504 {
505 work = num[i] + (carry << 8);
506 quo[i] = work / lden;
507 carry = work % lden;
508 }
509 }
510 else { /* full double precision,
511 with thanks to Don Knuth's
512 "Semi-Numericial Algorithms". */
513 #define BASE 256
514 int quo_est, scale, num_hi_sig, den_hi_sig, quo_hi_sig;
515
516 /* Find the highest non-zero divisor digit. */
517 for (i = 7; ; i--)
518 if (den[i] != 0) {
519 den_hi_sig = i;
520 break;
521 }
522 for (i = 7; ; i--)
523 if (num[i] != 0) {
524 num_hi_sig = i;
525 break;
526 }
527 quo_hi_sig = num_hi_sig - den_hi_sig + 1;
528
529 /* Ensure that the first digit of the divisor is at least BASE/2.
530 This is required by the quotient digit estimation algorithm. */
531
532 scale = BASE / (den[den_hi_sig] + 1);
533 if (scale > 1) { /* scale divisor and dividend */
534 carry = 0;
535 for (i = 0; i <= 8; i++) {
536 work = (num[i] * scale) + carry;
537 num[i] = work & 0xff;
538 carry = work >> 8;
539 if (num[i] != 0) num_hi_sig = i;
540 }
541 carry = 0;
542 for (i = 0; i <= 7; i++) {
543 work = (den[i] * scale) + carry;
544 den[i] = work & 0xff;
545 carry = work >> 8;
546 if (den[i] != 0) den_hi_sig = i;
547 }
548 }
549
550 /* Main loop */
551 for (i = quo_hi_sig; i > 0; i--) {
552 /* guess the next quotient digit, quo_est, by dividing the first
553 two remaining dividend digits by the high order divisor digit.
554 quo_est is never low and is at most 2 high. */
555
556 int num_hi; /* index of highest remaining dividend digit */
557
558 num_hi = i + den_hi_sig;
559
560 work = (num[num_hi] * BASE) + (num_hi > 0 ? num[num_hi - 1] : 0);
561 if (num[num_hi] != den[den_hi_sig]) {
562 quo_est = work / den[den_hi_sig];
563 }
564 else {
565 quo_est = BASE - 1;
566 }
567
568 /* refine quo_est so it's usually correct, and at most one high. */
569 while ((den[den_hi_sig - 1] * quo_est)
570 > (((work - (quo_est * den[den_hi_sig])) * BASE)
571 + ((num_hi - 1) > 0 ? num[num_hi - 2] : 0)))
572 quo_est--;
573
574 /* Try QUO_EST as the quotient digit, by multiplying the
575 divisor by QUO_EST and subtracting from the remaining dividend.
576 Keep in mind that QUO_EST is the I - 1st digit. */
577
578 carry = 0;
579
580 for (j = 0; j <= den_hi_sig; j++)
581 {
582 int digit;
583
584 work = num[i + j - 1] - (quo_est * den[j]) + carry;
585 digit = work & 0xff;
586 carry = work >> 8;
587 if (digit < 0)
588 {
589 digit += BASE;
590 carry--;
591 }
592 num[i + j - 1] = digit;
593 }
594 num[num_hi] += carry; /* apply the final borrow to the top digit */
595 /* if quo_est was high by one, then num[num_hi] went negative and
596 we need to correct things. */
597
598 if (num[num_hi] < 0)
599 {
600 quo_est--;
601 carry = 0; /* add divisor back in */
602 for (j = 0; j <= den_hi_sig; j++)
603 {
604 work = num[i + j - 1] + den[j] + carry;
605 if (work >= BASE)
606 {
607 work -= BASE;
608 carry = 1;
609 }
610 else
611 {
612 carry = 0;
613 }
614 num[i + j - 1] = work;
615 }
616 num [num_hi] += carry;
617 }
618
619 /* store the quotient digit. */
620 quo[i - 1] = quo_est;
621 }
622 }
623
624 decode (quo, lquo, hquo);
625
626 finish_up:
627 /* if result is negative, make it so. */
628 if (quo_neg)
629 neg_double (*lquo, *hquo, lquo, hquo);
630
631 /* compute trial remainder: rem = num - (quo * den) */
632 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
633 neg_double (*lrem, *hrem, lrem, hrem);
634 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
635
636 switch (code)
637 {
638 case TRUNC_DIV_EXPR:
639 case TRUNC_MOD_EXPR: /* round toward zero */
640 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
641 return;
642
643 case FLOOR_DIV_EXPR:
644 case FLOOR_MOD_EXPR: /* round toward negative infinity */
645 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
646 {
647 /* quo = quo - 1; */
648 add_double (*lquo, *hquo, -1, -1, lquo, hquo);
649 }
650 else return;
651 break;
652
653 case CEIL_DIV_EXPR:
654 case CEIL_MOD_EXPR: /* round toward positive infinity */
655 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
656 {
657 add_double (*lquo, *hquo, 1, 0, lquo, hquo);
658 }
659 else return;
660 break;
661
662 case ROUND_DIV_EXPR:
663 case ROUND_MOD_EXPR: /* round to closest integer */
664 {
665 int labs_rem = *lrem, habs_rem = *hrem;
666 int labs_den = lden, habs_den = hden, ltwice, htwice;
667
668 /* get absolute values */
669 if (*hrem < 0) neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
670 if (hden < 0) neg_double (lden, hden, &labs_den, &habs_den);
671
672 /* if (2 * abs (lrem) >= abs (lden)) */
673 mul_double (2, 0, labs_rem, habs_rem, &ltwice, &htwice);
674 if (((unsigned) habs_den < (unsigned) htwice)
675 || (((unsigned) habs_den == (unsigned) htwice)
676 && ((unsigned) labs_den <= (unsigned) ltwice)))
677 {
678 if (*hquo < 0)
679 /* quo = quo - 1; */
680 add_double (*lquo, *hquo, -1, -1, lquo, hquo);
681 else
682 /* quo = quo + 1; */
683 add_double (*lquo, *hquo, 1, 0, lquo, hquo);
684 }
685 else return;
686 }
687 break;
688
689 default:
690 abort ();
691 }
692
693 /* compute true remainder: rem = num - (quo * den) */
694 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
695 neg_double (*lrem, *hrem, lrem, hrem);
696 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
697 }
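/* Rounding examples (editorial): for -7 / 2 the truncating
   quotient is -3 with remainder -1; FLOOR_DIV_EXPR adjusts that
   to -4 (remainder 1), while CEIL_DIV_EXPR leaves a negative
   ratio alone. For 7 / 4, TRUNC gives 1, but ROUND_DIV_EXPR
   sees 2 * |rem| = 6 >= |den| = 4 and bumps the quotient
   to 2.  */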
698 \f
699 #if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
700
701 /* Check for infinity in an IEEE double precision number. */
702
703 int
704 target_isinf (x)
705 REAL_VALUE_TYPE x;
706 {
707 /* The IEEE 64-bit double format. */
708 union {
709 REAL_VALUE_TYPE d;
710 struct {
711 unsigned sign : 1;
712 unsigned exponent : 11;
713 unsigned mantissa1 : 20;
714 unsigned mantissa2;
715 } little_endian;
716 struct {
717 unsigned mantissa2;
718 unsigned mantissa1 : 20;
719 unsigned exponent : 11;
720 unsigned sign : 1;
721 } big_endian;
722 } u;
723
724 u.d = dconstm1;
725 if (u.big_endian.sign == 1)
726 {
727 u.d = x;
728 return (u.big_endian.exponent == 2047
729 && u.big_endian.mantissa1 == 0
730 && u.big_endian.mantissa2 == 0);
731 }
732 else
733 {
734 u.d = x;
735 return (u.little_endian.exponent == 2047
736 && u.little_endian.mantissa1 == 0
737 && u.little_endian.mantissa2 == 0);
738 }
739 }
740
741 /* Check for minus zero in an IEEE double precision number. */
742
743 int
744 target_minus_zero (x)
745 REAL_VALUE_TYPE x;
746 {
747 REAL_VALUE_TYPE d1, d2;
748
749 d1 = REAL_VALUE_NEGATE (x);
750 d2 = dconst0;
751
752 return !bcmp (&d1, &d2, sizeof (d1));
753 }
754 #else /* Target not IEEE */
755
756 /* Let's assume other float formats don't have infinity.
757 (This can be overridden by redefining REAL_VALUE_ISINF.) */
758
759 target_isinf (x)
760 REAL_VALUE_TYPE x;
761 {
762 return 0;
763 }
764
765 /* Let's assume other float formats don't have minus zero.
766 (This can be overridden by redefining REAL_VALUE_MINUS_ZERO.) */
767
768 target_minus_zero (x)
769 REAL_VALUE_TYPE x;
770 {
771 return 0;
772 }
773 #endif /* Target not IEEE */
774 \f
775 /* Split a tree IN into a constant and a variable part
776 that could be combined with CODE to make IN.
777 CODE must be a commutative arithmetic operation.
778 Store the constant part into *CONP and the variable in *VARP.
779 Return 1 if this was done; zero means the tree IN did not decompose
780 this way.
781
782 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.
783 Therefore, we must tell the caller whether the variable part
784 was subtracted. We do this by storing 1 or -1 into *VARSIGNP.
785 The value stored is the coefficient for the variable term.
786 The constant term we return should always be added;
787 we negate it if necessary. */
788
789 static int
790 split_tree (in, code, varp, conp, varsignp)
791 tree in;
792 enum tree_code code;
793 tree *varp, *conp;
794 int *varsignp;
795 {
796 register tree outtype = TREE_TYPE (in);
797 *varp = 0;
798 *conp = 0;
799
800 /* Strip any conversions that don't change the machine mode. */
801 while ((TREE_CODE (in) == NOP_EXPR
802 || TREE_CODE (in) == CONVERT_EXPR)
803 && (TYPE_MODE (TREE_TYPE (in))
804 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (in, 0)))))
805 in = TREE_OPERAND (in, 0);
806
807 if (TREE_CODE (in) == code
808 || (TREE_CODE (TREE_TYPE (in)) != REAL_TYPE
809 /* We can associate addition and subtraction together
810 (even though the C standard doesn't say so)
811 for integers because the value is not affected.
812 For reals, the value might be affected, so we can't. */
813 &&
814 ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
815 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
816 {
817 enum tree_code code = TREE_CODE (TREE_OPERAND (in, 0));
818 if (code == INTEGER_CST)
819 {
820 *conp = TREE_OPERAND (in, 0);
821 *varp = TREE_OPERAND (in, 1);
822 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
823 && TREE_TYPE (*varp) != outtype)
824 *varp = convert (outtype, *varp);
825 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
826 return 1;
827 }
828 if (TREE_CONSTANT (TREE_OPERAND (in, 1)))
829 {
830 *conp = TREE_OPERAND (in, 1);
831 *varp = TREE_OPERAND (in, 0);
832 *varsignp = 1;
833 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
834 && TREE_TYPE (*varp) != outtype)
835 *varp = convert (outtype, *varp);
836 if (TREE_CODE (in) == MINUS_EXPR)
837 {
838 /* If operation is subtraction and constant is second,
839 must negate it to get an additive constant.
840 And this cannot be done unless it is a manifest constant.
841 It could also be the address of a static variable.
842 We cannot negate that, so give up. */
843 if (TREE_CODE (*conp) == INTEGER_CST)
844 /* Subtracting from integer_zero_node loses for long long. */
845 *conp = fold (build1 (NEGATE_EXPR, TREE_TYPE (*conp), *conp));
846 else
847 return 0;
848 }
849 return 1;
850 }
851 if (TREE_CONSTANT (TREE_OPERAND (in, 0)))
852 {
853 *conp = TREE_OPERAND (in, 0);
854 *varp = TREE_OPERAND (in, 1);
855 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
856 && TREE_TYPE (*varp) != outtype)
857 *varp = convert (outtype, *varp);
858 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
859 return 1;
860 }
861 }
862 return 0;
863 }
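/* Examples (editorial): splitting `x + 5' with CODE == PLUS_EXPR
   gives *VARP = x, *CONP = 5, *VARSIGNP = 1. Splitting `x - 5'
   gives *CONP = -5, since the constant part is negated so that
   it can always be added. Splitting `5 - x' gives *CONP = 5
   with *VARSIGNP = -1.  */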
864 \f
865 /* Combine two constants ARG1 and ARG2 under operation CODE
866 to produce a new constant.
867 We assume ARG1 and ARG2 have the same data type,
868 or at least are the same kind of constant and the same machine mode. */
869
870 /* Handle floating overflow for `const_binop'. */
871 static jmp_buf const_binop_error;
872
873 static tree
874 const_binop (code, arg1, arg2)
875 enum tree_code code;
876 register tree arg1, arg2;
877 {
878 if (TREE_CODE (arg1) == INTEGER_CST)
879 {
880 register int int1l = TREE_INT_CST_LOW (arg1);
881 register int int1h = TREE_INT_CST_HIGH (arg1);
882 int int2l = TREE_INT_CST_LOW (arg2);
883 int int2h = TREE_INT_CST_HIGH (arg2);
884 int low, hi;
885 int garbagel, garbageh;
886 register tree t;
887 int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
888
889 switch (code)
890 {
891 case BIT_IOR_EXPR:
892 t = build_int_2 (int1l | int2l, int1h | int2h);
893 break;
894
895 case BIT_XOR_EXPR:
896 t = build_int_2 (int1l ^ int2l, int1h ^ int2h);
897 break;
898
899 case BIT_AND_EXPR:
900 t = build_int_2 (int1l & int2l, int1h & int2h);
901 break;
902
903 case BIT_ANDTC_EXPR:
904 t = build_int_2 (int1l & ~int2l, int1h & ~int2h);
905 break;
906
907 case RSHIFT_EXPR:
908 int2l = - int2l;
909 case LSHIFT_EXPR:
910 lshift_double (int1l, int1h, int2l,
911 TYPE_PRECISION (TREE_TYPE (arg1)),
912 &low, &hi,
913 !uns);
914 t = build_int_2 (low, hi);
915 break;
916
917 case RROTATE_EXPR:
918 int2l = - int2l;
919 case LROTATE_EXPR:
920 lrotate_double (int1l, int1h, int2l,
921 TYPE_PRECISION (TREE_TYPE (arg1)),
922 &low, &hi);
923 t = build_int_2 (low, hi);
924 break;
925
926 case PLUS_EXPR:
927 if (int1h == 0)
928 {
929 int2l += int1l;
930 if ((unsigned) int2l < int1l)
931 int2h += 1;
932 t = build_int_2 (int2l, int2h);
933 break;
934 }
935 if (int2h == 0)
936 {
937 int1l += int2l;
938 if ((unsigned) int1l < int2l)
939 int1h += 1;
940 t = build_int_2 (int1l, int1h);
941 break;
942 }
943 add_double (int1l, int1h, int2l, int2h, &low, &hi);
944 t = build_int_2 (low, hi);
945 break;
946
947 case MINUS_EXPR:
948 if (int2h == 0 && int2l == 0)
949 {
950 t = build_int_2 (int1l, int1h);
951 break;
952 }
953 neg_double (int2l, int2h, &int2l, &int2h);
954 add_double (int1l, int1h, int2l, int2h, &low, &hi);
955 t = build_int_2 (low, hi);
956 break;
957
958 case MULT_EXPR:
959 /* Optimize simple cases. */
960 if (int1h == 0)
961 {
962 unsigned temp;
963
964 switch (int1l)
965 {
966 case 0:
967 t = build_int_2 (0, 0);
968 goto got_it;
969 case 1:
970 t = build_int_2 (int2l, int2h);
971 goto got_it;
972 case 2:
973 temp = int2l + int2l;
974 int2h = int2h * 2 + (temp < int2l);
975 t = build_int_2 (temp, int2h);
976 goto got_it;
977 case 3:
978 temp = int2l + int2l + int2l;
979 int2h = int2h * 3 + (temp < int2l);
980 t = build_int_2 (temp, int2h);
981 goto got_it;
982 case 4:
983 temp = int2l + int2l;
984 int2h = int2h * 4 + ((temp < int2l) << 1);
985 int2l = temp;
986 temp += temp;
987 int2h += (temp < int2l);
988 t = build_int_2 (temp, int2h);
989 goto got_it;
990 case 8:
991 temp = int2l + int2l;
992 int2h = int2h * 8 + ((temp < int2l) << 2);
993 int2l = temp;
994 temp += temp;
995 int2h += (temp < int2l) << 1;
996 int2l = temp;
997 temp += temp;
998 int2h += (temp < int2l);
999 t = build_int_2 (temp, int2h);
1000 goto got_it;
1001 default:
1002 break;
1003 }
1004 }
1005
1006 if (int2h == 0)
1007 {
1008 if (int2l == 0)
1009 {
1010 t = build_int_2 (0, 0);
1011 break;
1012 }
1013 if (int2l == 1)
1014 {
1015 t = build_int_2 (int1l, int1h);
1016 break;
1017 }
1018 }
1019
1020 mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1021 t = build_int_2 (low, hi);
1022 break;
1023
1024 case TRUNC_DIV_EXPR:
1025 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1026 case EXACT_DIV_EXPR:
1027 /* This is a shortcut for a common special case.
1028 It reduces the number of tree nodes generated
1029 and saves time. */
1030 if (int2h == 0 && int2l > 0
1031 && TREE_TYPE (arg1) == sizetype
1032 && int1h == 0 && int1l >= 0)
1033 {
1034 if (code == CEIL_DIV_EXPR)
1035 int1l += int2l-1;
1036 return size_int (int1l / int2l);
1037 }
1038 case ROUND_DIV_EXPR:
1039 if (int2h == 0 && int2l == 1)
1040 {
1041 t = build_int_2 (int1l, int1h);
1042 break;
1043 }
1044 if (int1l == int2l && int1h == int2h)
1045 {
1046 if ((int1l | int1h) == 0)
1047 abort ();
1048 t = build_int_2 (1, 0);
1049 break;
1050 }
1051 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1052 &low, &hi, &garbagel, &garbageh);
1053 t = build_int_2 (low, hi);
1054 break;
1055
1056 case TRUNC_MOD_EXPR: case ROUND_MOD_EXPR:
1057 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1058 div_and_round_double (code, uns, int1l, int1h, int2l, int2h,
1059 &garbagel, &garbageh, &low, &hi);
1060 t = build_int_2 (low, hi);
1061 break;
1062
1063 case MIN_EXPR:
1064 case MAX_EXPR:
1065 if (uns)
1066 {
1067 low = (((unsigned) int1h < (unsigned) int2h)
1068 || (((unsigned) int1h == (unsigned) int2h)
1069 && ((unsigned) int1l < (unsigned) int2l)));
1070 }
1071 else
1072 {
1073 low = ((int1h < int2h)
1074 || ((int1h == int2h)
1075 && ((unsigned) int1l < (unsigned) int2l)));
1076 }
1077 if (low == (code == MIN_EXPR))
1078 t = build_int_2 (int1l, int1h);
1079 else
1080 t = build_int_2 (int2l, int2h);
1081 break;
1082
1083 default:
1084 abort ();
1085 }
1086 got_it:
1087 TREE_TYPE (t) = TREE_TYPE (arg1);
1088 force_fit_type (t);
1089 return t;
1090 }
1091 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1092 if (TREE_CODE (arg1) == REAL_CST)
1093 {
1094 register REAL_VALUE_TYPE d1;
1095 register REAL_VALUE_TYPE d2;
1096 register REAL_VALUE_TYPE value;
1097
1098 d1 = TREE_REAL_CST (arg1);
1099 d2 = TREE_REAL_CST (arg2);
1100 if (setjmp (const_binop_error))
1101 {
1102 warning ("floating overflow in constant folding");
1103 return build (code, TREE_TYPE (arg1), arg1, arg2);
1104 }
1105 set_float_handler (const_binop_error);
1106
1107 #ifdef REAL_ARITHMETIC
1108 REAL_ARITHMETIC (value, code, d1, d2);
1109 #else
1110 switch (code)
1111 {
1112 case PLUS_EXPR:
1113 value = d1 + d2;
1114 break;
1115
1116 case MINUS_EXPR:
1117 value = d1 - d2;
1118 break;
1119
1120 case MULT_EXPR:
1121 value = d1 * d2;
1122 break;
1123
1124 case RDIV_EXPR:
1125 #ifndef REAL_INFINITY
1126 if (d2 == 0)
1127 abort ();
1128 #endif
1129
1130 value = d1 / d2;
1131 break;
1132
1133 case MIN_EXPR:
1134 value = MIN (d1, d2);
1135 break;
1136
1137 case MAX_EXPR:
1138 value = MAX (d1, d2);
1139 break;
1140
1141 default:
1142 abort ();
1143 }
1144 #endif /* no REAL_ARITHMETIC */
1145 set_float_handler (0);
1146 return build_real (TREE_TYPE (arg1),
1147 REAL_VALUE_TRUNCATE (TYPE_MODE (TREE_TYPE (arg1)), value));
1148 }
1149 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1150 if (TREE_CODE (arg1) == COMPLEX_CST)
1151 {
1152 register tree r1 = TREE_REALPART (arg1);
1153 register tree i1 = TREE_IMAGPART (arg1);
1154 register tree r2 = TREE_REALPART (arg2);
1155 register tree i2 = TREE_IMAGPART (arg2);
1156 register tree t;
1157
1158 switch (code)
1159 {
1160 case PLUS_EXPR:
1161 t = build_complex (const_binop (PLUS_EXPR, r1, r2),
1162 const_binop (PLUS_EXPR, i1, i2));
1163 break;
1164
1165 case MINUS_EXPR:
1166 t = build_complex (const_binop (MINUS_EXPR, r1, r2),
1167 const_binop (MINUS_EXPR, i1, i2));
1168 break;
1169
1170 case MULT_EXPR:
1171 t = build_complex (const_binop (MINUS_EXPR,
1172 const_binop (MULT_EXPR, r1, r2),
1173 const_binop (MULT_EXPR, i1, i2)),
1174 const_binop (PLUS_EXPR,
1175 const_binop (MULT_EXPR, r1, i2),
1176 const_binop (MULT_EXPR, i1, r2)));
1177 break;
1178
1179 case RDIV_EXPR:
1180 {
1181 register tree magsquared
1182 = const_binop (PLUS_EXPR,
1183 const_binop (MULT_EXPR, r2, r2),
1184 const_binop (MULT_EXPR, i2, i2));
1185 t = build_complex (const_binop (RDIV_EXPR,
1186 const_binop (PLUS_EXPR,
1187 const_binop (MULT_EXPR, r1, r2),
1188 const_binop (MULT_EXPR, i1, i2)),
1189 magsquared),
1190 const_binop (RDIV_EXPR,
1191 const_binop (MINUS_EXPR,
1192 const_binop (MULT_EXPR, i1, r2),
1193 const_binop (MULT_EXPR, r1, i2)),
1194 magsquared));
1195 }
1196 break;
1197
1198 default:
1199 abort ();
1200 }
1201 TREE_TYPE (t) = TREE_TYPE (arg1);
1202 return t;
1203 }
1204 return 0;
1205 }
1206 \f
1207 /* Return an INTEGER_CST with value NUMBER and type from `sizetype'. */
1208
1209 tree
1210 size_int (number)
1211 unsigned int number;
1212 {
1213 register tree t;
1214 /* Type-size nodes already made for small sizes. */
1215 static tree size_table[2*HOST_BITS_PER_INT+1];
1216
1217 if (number >= 0 && number < 2*HOST_BITS_PER_INT+1 && size_table[number] != 0)
1218 return size_table[number];
1219 if (number >= 0 && number < 2*HOST_BITS_PER_INT+1)
1220 {
1221 int temp = allocation_temporary_p ();
1222
1223 push_obstacks_nochange ();
1224 /* Make this a permanent node. */
1225 if (temp)
1226 end_temporary_allocation ();
1227 t = build_int_2 (number, 0);
1228 TREE_TYPE (t) = sizetype;
1229 size_table[number] = t;
1230 pop_obstacks ();
1231 }
1232 else
1233 {
1234 t = build_int_2 (number, 0);
1235 TREE_TYPE (t) = sizetype;
1236 }
1237 return t;
1238 }
1239
1240 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE.
1241 CODE is a tree code. Data type is taken from `sizetype'.
1242 If the operands are constant, so is the result. */
1243
1244 tree
1245 size_binop (code, arg0, arg1)
1246 enum tree_code code;
1247 tree arg0, arg1;
1248 {
1249 /* Handle the special case of two integer constants faster. */
1250 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1251 {
1252 /* And some specific cases even faster than that. */
1253 if (code == PLUS_EXPR
1254 && TREE_INT_CST_LOW (arg0) == 0
1255 && TREE_INT_CST_HIGH (arg0) == 0)
1256 return arg1;
1257 if (code == MINUS_EXPR
1258 && TREE_INT_CST_LOW (arg1) == 0
1259 && TREE_INT_CST_HIGH (arg1) == 0)
1260 return arg0;
1261 if (code == MULT_EXPR
1262 && TREE_INT_CST_LOW (arg0) == 1
1263 && TREE_INT_CST_HIGH (arg0) == 0)
1264 return arg1;
1265 /* Handle general case of two integer constants. */
1266 return const_binop (code, arg0, arg1);
1267 }
1268
1269 if (arg0 == error_mark_node || arg1 == error_mark_node)
1270 return error_mark_node;
1271
1272 return fold (build (code, sizetype, arg0, arg1));
1273 }
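/* Usage sketch (editorial): size_binop (PLUS_EXPR, size_int (4),
   size_int (8)) takes the constant fast path through const_binop
   and yields an INTEGER_CST of 12 with type `sizetype'; given a
   non-constant operand it instead builds the PLUS_EXPR node and
   lets `fold' simplify it.  */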
1274 \f
1275 /* Given T, a tree representing type conversion of ARG1, a constant,
1276 return a constant tree representing the result of conversion. */
1277
1278 static tree
1279 fold_convert (t, arg1)
1280 register tree t;
1281 register tree arg1;
1282 {
1283 register tree type = TREE_TYPE (t);
1284
1285 if (TREE_CODE (type) == POINTER_TYPE
1286 || TREE_CODE (type) == INTEGER_TYPE
1287 || TREE_CODE (type) == ENUMERAL_TYPE)
1288 {
1289 if (TREE_CODE (arg1) == INTEGER_CST)
1290 {
1291 /* Given an integer constant, make new constant with new type,
1292 appropriately sign-extended or truncated. */
1293 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1294 TREE_INT_CST_HIGH (arg1));
1295 TREE_TYPE (t) = type;
1296 force_fit_type (t);
1297 }
1298 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1299 else if (TREE_CODE (arg1) == REAL_CST)
1300 {
1301 if (REAL_VALUES_LESS (real_value_from_int_cst (TYPE_MAX_VALUE (type)),
1302 TREE_REAL_CST (arg1))
1303 || REAL_VALUES_LESS (TREE_REAL_CST (arg1),
1304 real_value_from_int_cst (TYPE_MIN_VALUE (type))))
1305 {
1306 warning ("real constant out of range for integer conversion");
1307 return t;
1308 }
1309 #ifndef REAL_ARITHMETIC
1310 {
1311 REAL_VALUE_TYPE d;
1312 int low, high;
1313 int half_word = 1 << (HOST_BITS_PER_INT / 2);
1314
1315 d = TREE_REAL_CST (arg1);
1316 if (d < 0)
1317 d = -d;
1318
1319 high = (int) (d / half_word / half_word);
1320 d -= (REAL_VALUE_TYPE) high * half_word * half_word;
1321 low = (unsigned) d;
1322 if (TREE_REAL_CST (arg1) < 0)
1323 neg_double (low, high, &low, &high);
1324 t = build_int_2 (low, high);
1325 }
1326 #else
1327 {
1328 int low, high;
1329 REAL_VALUE_TO_INT (low, high, TREE_REAL_CST (arg1));
1330 t = build_int_2 (low, high);
1331 }
1332 #endif
1333 TREE_TYPE (t) = type;
1334 force_fit_type (t);
1335 }
1336 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1337 TREE_TYPE (t) = type;
1338 }
1339 else if (TREE_CODE (type) == REAL_TYPE)
1340 {
1341 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1342 if (TREE_CODE (arg1) == INTEGER_CST)
1343 return build_real_from_int_cst (type, arg1);
1344 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1345 if (TREE_CODE (arg1) == REAL_CST)
1346 return build_real (type, REAL_VALUE_TRUNCATE (TYPE_MODE (type),
1347 TREE_REAL_CST (arg1)));
1348 }
1349 TREE_CONSTANT (t) = 1;
1350 return t;
1351 }
1352 \f
1353 /* Return an expr equal to X but certainly not valid as an lvalue. */
1354
1355 tree
1356 non_lvalue (x)
1357 tree x;
1358 {
1359 tree result;
1360
1361 /* These things are certainly not lvalues. */
1362 if (TREE_CODE (x) == NON_LVALUE_EXPR
1363 || TREE_CODE (x) == INTEGER_CST
1364 || TREE_CODE (x) == REAL_CST
1365 || TREE_CODE (x) == STRING_CST
1366 || TREE_CODE (x) == ADDR_EXPR)
1367 return x;
1368
1369 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1370 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1371 return result;
1372 }
1373
1374 /* Return nonzero if two operands are necessarily equal.
1375 If ONLY_CONST is non-zero, only return non-zero for constants. */
1376
1377 int
1378 operand_equal_p (arg0, arg1, only_const)
1379 tree arg0, arg1;
1380 int only_const;
1381 {
1382 /* If both types don't have the same signedness, then we can't consider
1383 them equal. We must check this before the STRIP_NOPS calls
1384 because they may change the signedness of the arguments. */
1385 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1386 return 0;
1387
1388 STRIP_NOPS (arg0);
1389 STRIP_NOPS (arg1);
1390
1391 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1392 We don't care about side effects in that case because the SAVE_EXPR
1393 takes care of that for us. */
1394 if (TREE_CODE (arg0) == SAVE_EXPR && arg0 == arg1)
1395 return ! only_const;
1396
1397 if (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1))
1398 return 0;
1399
1400 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1401 && TREE_CODE (arg0) == ADDR_EXPR
1402 && TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0))
1403 return 1;
1404
1405 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1406 && TREE_CODE (arg0) == INTEGER_CST
1407 && TREE_INT_CST_LOW (arg0) == TREE_INT_CST_LOW (arg1)
1408 && TREE_INT_CST_HIGH (arg0) == TREE_INT_CST_HIGH (arg1))
1409 return 1;
1410
1411 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1412 && TREE_CODE (arg0) == REAL_CST
1413 && REAL_VALUES_EQUAL (TREE_REAL_CST (arg0), TREE_REAL_CST (arg1)))
1414 return 1;
1415
1416 if (only_const)
1417 return 0;
1418
1419 if (arg0 == arg1)
1420 return 1;
1421
1422 if (TREE_CODE (arg0) != TREE_CODE (arg1))
1423 return 0;
1424 /* This is needed for conversions and for COMPONENT_REF.
1425 Might as well play it safe and always test this. */
1426 if (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1427 return 0;
1428
1429 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1430 {
1431 case '1':
1432 /* Two conversions are equal only if signedness and modes match. */
1433 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1434 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1435 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1436 return 0;
1437
1438 return operand_equal_p (TREE_OPERAND (arg0, 0),
1439 TREE_OPERAND (arg1, 0), 0);
1440
1441 case '<':
1442 case '2':
1443 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1444 TREE_OPERAND (arg1, 0), 0)
1445 && operand_equal_p (TREE_OPERAND (arg0, 1),
1446 TREE_OPERAND (arg1, 1), 0));
1447
1448 case 'r':
1449 switch (TREE_CODE (arg0))
1450 {
1451 case INDIRECT_REF:
1452 return operand_equal_p (TREE_OPERAND (arg0, 0),
1453 TREE_OPERAND (arg1, 0), 0);
1454
1455 case COMPONENT_REF:
1456 case ARRAY_REF:
1457 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1458 TREE_OPERAND (arg1, 0), 0)
1459 && operand_equal_p (TREE_OPERAND (arg0, 1),
1460 TREE_OPERAND (arg1, 1), 0));
1461
1462 case BIT_FIELD_REF:
1463 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1464 TREE_OPERAND (arg1, 0), 0)
1465 && operand_equal_p (TREE_OPERAND (arg0, 1),
1466 TREE_OPERAND (arg1, 1), 0)
1467 && operand_equal_p (TREE_OPERAND (arg0, 2),
1468 TREE_OPERAND (arg1, 2), 0));
1469 }
1470 break;
1471 }
1472
1473 return 0;
1474 }
1475
1476 /* Return nonzero if comparing COMP1 with COMP2
1477 gives the same result as comparing OP1 with OP2.
1478 When in doubt, return 0. */
1479
1480 static int
1481 comparison_equiv_p (comp1, comp2, op1, op2)
1482 tree comp1, comp2, op1, op2;
1483 {
1484 int unsignedp1, unsignedp2;
1485 tree primop1, primop2;
1486 int correct_width;
1487
1488 if (operand_equal_p (comp1, op1, 0)
1489 && operand_equal_p (comp2, op2, 0))
1490 return 1;
1491
1492 if (TREE_CODE (TREE_TYPE (op1)) != INTEGER_TYPE)
1493 return 0;
1494
1495 if (TREE_TYPE (op1) != TREE_TYPE (op2))
1496 return 0;
1497
1498 if (TREE_TYPE (comp1) != TREE_TYPE (comp2))
1499 return 0;
1500
1501 /* Duplicate what shorten_compare does to the comparison operands,
1502 and see if that gives the actual comparison operands, COMP1 and COMP2. */
1503
1504 /* Throw away any conversions to wider types
1505 already present in the operands. */
1506 primop1 = get_narrower (op1, &unsignedp1);
1507 primop2 = get_narrower (op2, &unsignedp2);
1508
1509 correct_width = TYPE_PRECISION (TREE_TYPE (op2));
1510 if (unsignedp1 == unsignedp2
1511 && TYPE_PRECISION (TREE_TYPE (primop1)) < correct_width
1512 && TYPE_PRECISION (TREE_TYPE (primop2)) < correct_width)
1513 {
1514 tree type = TREE_TYPE (comp1);
1515
1516 /* Make sure shorter operand is extended the right way
1517 to match the longer operand. */
1518 primop1 = convert (signed_or_unsigned_type (unsignedp1, TREE_TYPE (primop1)),
1519 primop1);
1520 primop2 = convert (signed_or_unsigned_type (unsignedp2, TREE_TYPE (primop2)),
1521 primop2);
1522
1523 primop1 = convert (type, primop1);
1524 primop2 = convert (type, primop2);
1525
1526 if (operand_equal_p (comp1, primop1, 0)
1527 && operand_equal_p (comp2, primop2, 0))
1528 return 1;
1529 }
1530
1531 return 0;
1532 }
1533 \f
1534 /* Return a tree for the case when the result of an expression is RESULT
1535 converted to TYPE and OMITTED was previously an operand of the expression
1536 but is now not needed (e.g., we folded OMITTED * 0).
1537
1538 If OMITTED has side effects, we must evaluate it. Otherwise, just do
1539 the conversion of RESULT to TYPE. */
1540
1541 static tree
1542 omit_one_operand (type, result, omitted)
1543 tree type, result, omitted;
1544 {
1545 tree t = convert (type, result);
1546
1547 if (TREE_SIDE_EFFECTS (omitted))
1548 return build (COMPOUND_EXPR, type, omitted, t);
1549
1550 return t;
1551 }
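/* For instance (editorial), when `f () * 0' folds to 0 the call
   cannot simply be dropped: omit_one_operand wraps it as
   (f (), 0), a COMPOUND_EXPR that still evaluates f () for its
   side effects but yields the constant.  */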
1552 \f
1553 /* Return a simplified tree node for the truth-negation of ARG
1554 (perhaps by altering ARG). It is known that ARG is an operation that
1555 returns a truth value (0 or 1). */
1556
1557 tree
1558 invert_truthvalue (arg)
1559 tree arg;
1560 {
1561 tree type = TREE_TYPE (arg);
1562
1563 /* For floating-point comparisons, it isn't safe to invert the condition.
1564 So just enclose a TRUTH_NOT_EXPR around what we have. */
1565 if (TREE_CODE (type) == REAL_TYPE
1566 && TREE_CODE_CLASS (TREE_CODE (arg)) == '<')
1567 return build1 (TRUTH_NOT_EXPR, type, arg);
1568
1569 switch (TREE_CODE (arg))
1570 {
1571 case NE_EXPR:
1572 TREE_SET_CODE (arg, EQ_EXPR);
1573 return arg;
1574
1575 case EQ_EXPR:
1576 TREE_SET_CODE (arg, NE_EXPR);
1577 return arg;
1578
1579 case GE_EXPR:
1580 TREE_SET_CODE (arg, LT_EXPR);
1581 return arg;
1582
1583 case GT_EXPR:
1584 TREE_SET_CODE (arg, LE_EXPR);
1585 return arg;
1586
1587 case LE_EXPR:
1588 TREE_SET_CODE (arg, GT_EXPR);
1589 return arg;
1590
1591 case LT_EXPR:
1592 TREE_SET_CODE (arg, GE_EXPR);
1593 return arg;
1594
1595 case INTEGER_CST:
1596 return convert (type, build_int_2 (TREE_INT_CST_LOW (arg) == 0
1597 && TREE_INT_CST_HIGH (arg) == 0, 0));
1598
1599 case TRUTH_AND_EXPR:
1600 return build (TRUTH_OR_EXPR, type,
1601 invert_truthvalue (TREE_OPERAND (arg, 0)),
1602 invert_truthvalue (TREE_OPERAND (arg, 1)));
1603
1604 case TRUTH_OR_EXPR:
1605 return build (TRUTH_AND_EXPR, type,
1606 invert_truthvalue (TREE_OPERAND (arg, 0)),
1607 invert_truthvalue (TREE_OPERAND (arg, 1)));
1608
1609 case TRUTH_ANDIF_EXPR:
1610 return build (TRUTH_ORIF_EXPR, type,
1611 invert_truthvalue (TREE_OPERAND (arg, 0)),
1612 invert_truthvalue (TREE_OPERAND (arg, 1)));
1613
1614 case TRUTH_ORIF_EXPR:
1615 return build (TRUTH_ANDIF_EXPR, type,
1616 invert_truthvalue (TREE_OPERAND (arg, 0)),
1617 invert_truthvalue (TREE_OPERAND (arg, 1)));
1618
1619 case TRUTH_NOT_EXPR:
1620 return TREE_OPERAND (arg, 0);
1621
1622 case COND_EXPR:
1623 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
1624 invert_truthvalue (TREE_OPERAND (arg, 1)),
1625 invert_truthvalue (TREE_OPERAND (arg, 2)));
1626
1627 case COMPOUND_EXPR:
1628 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
1629 invert_truthvalue (TREE_OPERAND (arg, 1)));
1630
1631 case NON_LVALUE_EXPR:
1632 return invert_truthvalue (TREE_OPERAND (arg, 0));
1633
1634 case NOP_EXPR:
1635 case CONVERT_EXPR:
1636 case FLOAT_EXPR:
1637 return build1 (TREE_CODE (arg), type,
1638 invert_truthvalue (TREE_OPERAND (arg, 0)));
1639
1640 case BIT_AND_EXPR:
1641 if (! integer_onep (TREE_OPERAND (arg, 1)))
1642 abort ();
1643 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
1644 }
1645
1646 abort ();
1647 }
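/* Example (editorial): `a < b' is inverted in place to `a >= b'.
   The REAL_TYPE escape above exists because the flipped
   comparison is not the true negation for floating point (with
   IEEE NaNs both `a < b' and `a >= b' can be false), so a
   TRUTH_NOT_EXPR wrapper is built instead.  */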
1648
1649 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
1650 operands are another bit-wise operation with a common input. If so,
1651 distribute the bit operations to save an operation and possibly two if
1652 constants are involved. For example, convert
1653 (A | B) & (A | C) into A | (B & C)
1654 Further simplification will occur if B and C are constants.
1655
1656 If this optimization cannot be done, 0 will be returned. */
1657
1658 static tree
1659 distribute_bit_expr (code, type, arg0, arg1)
1660 enum tree_code code;
1661 tree type;
1662 tree arg0, arg1;
1663 {
1664 tree common;
1665 tree left, right;
1666
1667 if (TREE_CODE (arg0) != TREE_CODE (arg1)
1668 || TREE_CODE (arg0) == code
1669 || (TREE_CODE (arg0) != BIT_AND_EXPR
1670 && TREE_CODE (arg0) != BIT_IOR_EXPR))
1671 return 0;
1672
1673 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
1674 {
1675 common = TREE_OPERAND (arg0, 0);
1676 left = TREE_OPERAND (arg0, 1);
1677 right = TREE_OPERAND (arg1, 1);
1678 }
1679 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
1680 {
1681 common = TREE_OPERAND (arg0, 0);
1682 left = TREE_OPERAND (arg0, 1);
1683 right = TREE_OPERAND (arg1, 0);
1684 }
1685 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
1686 {
1687 common = TREE_OPERAND (arg0, 1);
1688 left = TREE_OPERAND (arg0, 0);
1689 right = TREE_OPERAND (arg1, 1);
1690 }
1691 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
1692 {
1693 common = TREE_OPERAND (arg0, 1);
1694 left = TREE_OPERAND (arg0, 0);
1695 right = TREE_OPERAND (arg1, 0);
1696 }
1697 else
1698 return 0;
1699
1700 return fold (build (TREE_CODE (arg0), type, common,
1701 fold (build (code, type, left, right))));
1702 }
1703 \f
1704 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
1705 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
1706
1707 static tree
1708 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
1709 tree inner;
1710 tree type;
1711 int bitsize, bitpos;
1712 int unsignedp;
1713 {
1714 tree result = build (BIT_FIELD_REF, type, inner,
1715 size_int (bitsize), size_int (bitpos));
1716
1717 TREE_UNSIGNED (result) = unsignedp;
1718
1719 return result;
1720 }
1721
1722 /* Optimize a bit-field compare.
1723
1724 There are two cases: First is a compare against a constant and the
1725 second is a comparison of two items where the fields are at the same
1726 bit position relative to the start of a chunk (byte, halfword, word)
1727 large enough to contain it. In these cases we can avoid the shift
1728 implicit in bitfield extractions.
1729
1730 For constants, we emit a compare of the shifted constant with the
1731 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
1732 compared. For two fields at the same position, we do the ANDs with the
1733 similar mask and compare the result of the ANDs.
1734
1735 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
1736 COMPARE_TYPE is the type of the comparison, and LHS and RHS
1737 are the left and right operands of the comparison, respectively.
1738
1739 If the optimization described above can be done, we return the resulting
1740 tree. Otherwise we return zero. */
1741
1742 static tree
1743 optimize_bit_field_compare (code, compare_type, lhs, rhs)
1744 enum tree_code code;
1745 tree compare_type;
1746 tree lhs, rhs;
1747 {
1748 int lbitpos, lbitsize, rbitpos, rbitsize;
1749 int lnbitpos, lnbitsize, rnbitpos, rnbitsize;
1750 tree type = TREE_TYPE (lhs);
1751 tree signed_type, unsigned_type;
1752 int const_p = TREE_CODE (rhs) == INTEGER_CST;
1753 enum machine_mode lmode, rmode, lnmode, rnmode;
1754 int lunsignedp, runsignedp;
1755 int lvolatilep = 0, rvolatilep = 0;
1756 tree linner, rinner;
1757 tree mask;
1758
1759 /* Get all the information about the extractions being done. If the bit size
1760 is the same as the size of the underlying object, we aren't doing an
1761 extraction at all and so can do nothing. */
1762 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &lmode,
1763 &lunsignedp, &lvolatilep);
1764 if (lbitsize == GET_MODE_BITSIZE (lmode))
1765 return 0;
1766
1767 if (!const_p)
1768 {
1769 /* If this is not a constant, we can only do something if bit positions,
1770 sizes, and signedness are the same. */
1771 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos,
1772 &rmode, &runsignedp, &rvolatilep);
1773
1774 if (lbitpos != rbitpos || lbitsize != rbitsize
1775 || lunsignedp != runsignedp)
1776 return 0;
1777 }
1778
1779 /* See if we can find a mode to refer to this field. We should be able to,
1780 but fail if we can't. */
1781 lnmode = get_best_mode (lbitsize, lbitpos,
1782 TYPE_ALIGN (TREE_TYPE (linner)), word_mode,
1783 lvolatilep);
1784 if (lnmode == VOIDmode)
1785 return 0;
1786
1787 /* Set signed and unsigned types of the precision of this mode for the
1788 shifts below. */
1789 signed_type = type_for_mode (lnmode, 0);
1790 unsigned_type = type_for_mode (lnmode, 1);
1791
1792 if (! const_p)
1793 {
1794 rnmode = get_best_mode (rbitsize, rbitpos,
1795 TYPE_ALIGN (TREE_TYPE (rinner)), word_mode,
1796 rvolatilep);
1797 if (rnmode == VOIDmode)
1798 return 0;
1799 }
1800
1801 /* Compute the bit position and size for the new reference and our offset
1802 within it. If the new reference is the same size as the original, we
1803 won't optimize anything, so return zero. */
1804 lnbitsize = GET_MODE_BITSIZE (lnmode);
1805 lnbitpos = lbitpos & ~ (lnbitsize - 1);
1806 lbitpos -= lnbitpos;
1807 if (lnbitsize == lbitsize)
1808 return 0;
1809
1810 if (! const_p)
1811 {
1812 rnbitsize = GET_MODE_BITSIZE (rnmode);
1813 rnbitpos = rbitpos & ~ (rnbitsize - 1);
1814 rbitpos -= rnbitpos;
1815 if (rnbitsize == rbitsize)
1816 return 0;
1817 }
1818
1819 #if BYTES_BIG_ENDIAN
1820 lbitpos = lnbitsize - lbitsize - lbitpos;
1821 rbitpos = rnbitsize - rbitsize - rbitpos;
1822 #endif
1823
1824 /* Make the mask to be used against the extracted field. */
1825 mask = convert (unsigned_type, build_int_2 (~0, ~0));
1826 mask = const_binop (LSHIFT_EXPR, mask, size_int (lnbitsize - lbitsize));
1827 mask = const_binop (RSHIFT_EXPR, mask,
1828 size_int (lnbitsize - lbitsize - lbitpos));
1829
1830 if (! const_p)
1831 /* If not comparing with constant, just rework the comparison
1832 and return. */
1833 return build (code, compare_type,
1834 build (BIT_AND_EXPR, type,
1835 make_bit_field_ref (linner, type,
1836 lnbitsize, lnbitpos, lunsignedp),
1837 mask),
1838 build (BIT_AND_EXPR, type,
1839 make_bit_field_ref (rinner, type,
1840 rnbitsize, rnbitpos, runsignedp),
1841 mask));
1842
1843 /* Otherwise, we are handling the constant case. See if the constant is too
1844 big for the field. Warn and return a tree for 0 (false) if so. We do
1845 this not only for its own sake, but to avoid having to test for this
1846 error case below. If we didn't, we might generate wrong code.
1847
1848 For unsigned fields, the constant shifted right by the field length should
1849 be all zero. For signed fields, the high-order bits should agree with
1850 the sign bit. */
1851
1852 if (lunsignedp)
1853 {
1854 if (! integer_zerop (const_binop (RSHIFT_EXPR,
1855 convert (unsigned_type, rhs),
1856 size_int (lbitsize))))
1857 {
1858 warning ("comparison is always %s due to width of bitfield",
1859 code == NE_EXPR ? "one" : "zero");
1860 return convert (compare_type,
1861 (code == NE_EXPR
1862 ? integer_one_node : integer_zero_node));
1863 }
1864 }
1865 else
1866 {
1867 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
1868 size_int (lbitsize - 1));
1869 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
1870 {
1871 warning ("comparison is always %s due to width of bitfield",
1872 code == NE_EXPR ? "one" : "zero");
1873 return convert (compare_type,
1874 (code == NE_EXPR
1875 ? integer_one_node : integer_zero_node));
1876 }
1877 }
1878
1879 /* Single-bit compares should always be against zero. */
1880 if (lbitsize == 1 && ! integer_zerop (rhs))
1881 {
1882 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
1883 rhs = convert (type, integer_zero_node);
1884 }
1885
1886 /* Make a new bitfield reference, shift the constant over the
1887 appropriate number of bits and mask it with the computed mask
1888 (in case this was a signed field). If we changed it, make a new one. */
1889 lhs = make_bit_field_ref (linner, TREE_TYPE (lhs), lnbitsize, lnbitpos,
1890 lunsignedp);
1891
1892 rhs = fold (build1 (NOP_EXPR, type,
1893 const_binop (BIT_AND_EXPR,
1894 const_binop (LSHIFT_EXPR,
1895 convert (unsigned_type, rhs),
1896 size_int (lbitpos)), mask)));
1897
1898 return build (code, compare_type,
1899 build (BIT_AND_EXPR, type, lhs, mask),
1900 rhs);
1901 }
1902 \f
1903 /* Subroutine for the following routine: decode a field reference.
1904
1905 If EXP is a component reference, we return the innermost reference.
1906
1907 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
1908 set to the starting bit number.
1909
1910 If the innermost field can be completely contained in a mode-sized
1911 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
1912
1913 *PVOLATILEP is set to 1 if any expression encountered is volatile;
1914 otherwise it is not changed.
1915
1916 *PUNSIGNEDP is set to the signedness of the field.
1917
1918 *PMASK is set to the mask used. This is either contained in a
1919 BIT_AND_EXPR or derived from the width of the field.
1920
1921 Return 0 if this is not a component reference or is one that we can't
1922 do anything with. */
1923
1924 static tree
1925 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
1926 pvolatilep, pmask)
1927 tree exp;
1928 int *pbitsize, *pbitpos;
1929 enum machine_mode *pmode;
1930 int *punsignedp, *pvolatilep;
1931 tree *pmask;
1932 {
1933 tree mask = 0;
1934 tree inner;
1935
1936 STRIP_NOPS (exp);
1937
1938 if (TREE_CODE (exp) == BIT_AND_EXPR)
1939 {
1940 mask = TREE_OPERAND (exp, 1);
1941 exp = TREE_OPERAND (exp, 0);
1942 STRIP_NOPS (exp); STRIP_NOPS (mask);
1943 if (TREE_CODE (mask) != INTEGER_CST)
1944 return 0;
1945 }
1946
1947 if (TREE_CODE (exp) != COMPONENT_REF && TREE_CODE (exp) != ARRAY_REF
1948 && TREE_CODE (exp) != BIT_FIELD_REF)
1949 return 0;
1950
1951 inner = get_inner_reference (exp, pbitsize, pbitpos, pmode,
1952 punsignedp, pvolatilep);
1953
1954 if (mask == 0)
1955 {
1956 tree unsigned_type = type_for_size (*pbitsize, 1);
1957 int precision = TYPE_PRECISION (unsigned_type);
1958
1959 mask = convert (unsigned_type, build_int_2 (~0, ~0));
1960 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
1961 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
1962 }
1963
1964 *pmask = mask;
1965 return inner;
1966 }
1967
1968 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
1969 bit positions. */
1970
1971 static int
1972 all_ones_mask_p (mask, size)
1973 tree mask;
1974 int size;
1975 {
1976 tree type = TREE_TYPE (mask);
1977 int precision = TYPE_PRECISION (type);
1978
1979 return
1980 operand_equal_p (mask,
1981 const_binop (RSHIFT_EXPR,
1982 const_binop (LSHIFT_EXPR,
1983 convert (signed_type (type),
1984 build_int_2 (~0, ~0)),
1985 size_int (precision - size)),
1986 size_int (precision - size)), 0);
1987 }
1988 \f
1989 /* Try to merge two comparisons to the same innermost item.
1990
1991 For example, if we have p->a == 2 && p->b == 4 and we can make an
1992 object large enough to span both A and B, we can do this with a comparison
1993 against the object ANDed with the a mask.
1994
1995 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
1996 operations to do this with one comparison.
1997
1998 We check for both normal comparisons and the BIT_AND_EXPRs made by
1999 this function and the one above.
2000
2001 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
2002 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
2003
2004 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
2005 two operands.
2006
2007 We return the simplified tree or 0 if no optimization is possible. */
2008
2009 static tree
2010 merge_component_references (code, truth_type, lhs, rhs)
2011 enum tree_code code;
2012 tree truth_type, lhs, rhs;
2013 {
2014 /* If this is the "or" of two comparisons, we can do something if we
2015 the comparisons are NE_EXPR. If this is the "and", we can do something
2016 if the comparisons are EQ_EXPR. I.e.,
2017 (a->b == 2 && a->c == 4) can become (a->new == NEW).
2018
2019 WANTED_CODE is the comparison code we want both sides to use.  For
2020 single-bit fields, we can convert EQ_EXPR to NE_EXPR, so we need not
2021 reject the "wrong" comparison for one-bit fields.  */
2022
2023 enum tree_code wanted_code
2024 = (code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR) ? EQ_EXPR : NE_EXPR;
2025 enum tree_code lcode, rcode;
2026 tree ll_inner, lr_inner, rl_inner, rr_inner;
2027 int ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
2028 int rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
2029 int xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
2030 int lnbitsize, lnbitpos, rnbitsize, rnbitpos;
2031 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
2032 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
2033 enum machine_mode lnmode, rnmode;
2034 tree ll_mask, lr_mask, rl_mask, rr_mask;
2035 tree l_const = 0, r_const = 0;
2036 tree type, result;
2037 int first_bit, end_bit;
2038 int volatilep = 0;
2039
2040 /* Start by getting the comparison codes and seeing if we may be able
2041 to do something. Then get all the parameters for each side. Fail
2042 if anything is volatile. */
2043
2044 lcode = TREE_CODE (lhs);
2045 rcode = TREE_CODE (rhs);
2046 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
2047 || (rcode != EQ_EXPR && rcode != NE_EXPR)
2048 || TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
2049 return 0;
2050
2051 ll_inner = decode_field_reference (TREE_OPERAND (lhs, 0),
2052 &ll_bitsize, &ll_bitpos, &ll_mode,
2053 &ll_unsignedp, &volatilep, &ll_mask);
2054 lr_inner = decode_field_reference (TREE_OPERAND (lhs, 1),
2055 &lr_bitsize, &lr_bitpos, &lr_mode,
2056 &lr_unsignedp, &volatilep, &lr_mask);
2057 rl_inner = decode_field_reference (TREE_OPERAND (rhs, 0),
2058 &rl_bitsize, &rl_bitpos, &rl_mode,
2059 &rl_unsignedp, &volatilep, &rl_mask);
2060 rr_inner = decode_field_reference (TREE_OPERAND (rhs, 1),
2061 &rr_bitsize, &rr_bitpos, &rr_mode,
2062 &rr_unsignedp, &volatilep, &rr_mask);
2063
2064 /* The inner operand on the lhs of each comparison must be the same
2065 if we are to be able to do anything.
2066 Then see if we have constants. If not, the same must be true for
2067 the rhs's. */
2068 if (volatilep || ll_inner == 0 || rl_inner == 0
2069 || ! operand_equal_p (ll_inner, rl_inner, 0))
2070 return 0;
2071
2072 if (TREE_CODE (TREE_OPERAND (lhs, 1)) == INTEGER_CST
2073 && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
2074 l_const = TREE_OPERAND (lhs, 1), r_const = TREE_OPERAND (rhs, 1);
2075 else if (lr_inner == 0 || rr_inner == 0
2076 || ! operand_equal_p (lr_inner, rr_inner, 0))
2077 return 0;
2078
2079 /* If either comparison code is not correct for our logical operation,
2080 fail. However, we can convert a one-bit comparison against zero into
2081 the opposite comparison against that bit being set in the field. */
2082 if (lcode != wanted_code)
2083 {
2084 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
2085 l_const = ll_mask;
2086 else
2087 return 0;
2088 }
2089
2090 if (rcode != wanted_code)
2091 {
2092 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
2093 r_const = rl_mask;
2094 else
2095 return 0;
2096 }
2097
2098 /* See if we can find a mode that contains both fields being compared on
2099 the left. If we can't, fail. Otherwise, update all constants and masks
2100 to be relative to a field of that size. */
2101 first_bit = MIN (ll_bitpos, rl_bitpos);
2102 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
2103 lnmode = get_best_mode (end_bit - first_bit, first_bit,
2104 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
2105 volatilep);
2106 if (lnmode == VOIDmode)
2107 return 0;
2108
2109 lnbitsize = GET_MODE_BITSIZE (lnmode);
2110 lnbitpos = first_bit & ~ (lnbitsize - 1);
2111 type = type_for_size (lnbitsize, 1);
2112 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
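  /* For instance (illustrative numbers only): with lnbitsize == 32 and
     first_bit == 37, lnbitpos becomes 32, the aligned position, and each
     x*_bitpos is then the field's offset within that 32-bit unit.  */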
2113
2114 #if BYTES_BIG_ENDIAN
2115 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
2116 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
2117 #endif
2118
2119 ll_mask = const_binop (LSHIFT_EXPR, convert (type, ll_mask),
2120 size_int (xll_bitpos));
2121 rl_mask = const_binop (LSHIFT_EXPR, convert (type, rl_mask),
2122 size_int (xrl_bitpos));
2123
2124 /* Make sure the constants are interpreted as unsigned, so we
2125 don't have sign bits outside the range of their type. */
2126
2127 if (l_const)
2128 {
2129 l_const = convert (unsigned_type (TREE_TYPE (l_const)), l_const);
2130 l_const = const_binop (LSHIFT_EXPR, convert (type, l_const),
2131 size_int (xll_bitpos));
2132 }
2133 if (r_const)
2134 {
2135 r_const = convert (unsigned_type (TREE_TYPE (r_const)), r_const);
2136 r_const = const_binop (LSHIFT_EXPR, convert (type, r_const),
2137 size_int (xrl_bitpos));
2138 }
2139
2140 /* If the right sides are not constant, do the same for them.  Also,
2141 disallow this optimization if a size or signedness mismatch occurs
2142 between the left and right sides. */
2143 if (l_const == 0)
2144 {
2145 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
2146 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp)
2147 return 0;
2148
2149 first_bit = MIN (lr_bitpos, rr_bitpos);
2150 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
2151 rnmode = get_best_mode (end_bit - first_bit, first_bit,
2152 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
2153 volatilep);
2154 if (rnmode == VOIDmode)
2155 return 0;
2156
2157 rnbitsize = GET_MODE_BITSIZE (rnmode);
2158 rnbitpos = first_bit & ~ (rnbitsize - 1);
2159 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
2160
2161 #if BYTES_BIG_ENDIAN
2162 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
2163 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
2164 #endif
2165
2166 lr_mask = const_binop (LSHIFT_EXPR, convert (type, lr_mask),
2167 size_int (xlr_bitpos));
2168 rr_mask = const_binop (LSHIFT_EXPR, convert (type, rr_mask),
2169 size_int (xrr_bitpos));
2170
2171 /* Make a mask that corresponds to both fields being compared.
2172 Do this for both items being compared. If the masks agree,
2173 we can do this by masking both and comparing the masked
2174 results. */
2175 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
2176 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
2177 if (operand_equal_p (ll_mask, lr_mask, 0) && lnbitsize == rnbitsize)
2178 {
2179 lhs = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
2180 ll_unsignedp || rl_unsignedp);
2181 rhs = make_bit_field_ref (lr_inner, type, rnbitsize, rnbitpos,
2182 lr_unsignedp || rr_unsignedp);
2183 if (! all_ones_mask_p (ll_mask, lnbitsize))
2184 {
2185 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
2186 rhs = build (BIT_AND_EXPR, type, rhs, ll_mask);
2187 }
2188 return build (wanted_code, truth_type, lhs, rhs);
2189 }
2190
2191 /* There is still another way we can do something: If both pairs of
2192 fields being compared are adjacent, we may be able to make a wider
2193 field containing them both. */
2194 if ((ll_bitsize + ll_bitpos == rl_bitpos
2195 && lr_bitsize + lr_bitpos == rr_bitpos)
2196 || (ll_bitpos == rl_bitpos + rl_bitsize
2197 && lr_bitpos == rr_bitpos + rr_bitsize))
2198 return build (wanted_code, truth_type,
2199 make_bit_field_ref (ll_inner, type,
2200 ll_bitsize + rl_bitsize,
2201 MIN (ll_bitpos, rl_bitpos),
2202 ll_unsignedp),
2203 make_bit_field_ref (lr_inner, type,
2204 lr_bitsize + rr_bitsize,
2205 MIN (lr_bitpos, rr_bitpos),
2206 lr_unsignedp));
2207
2208 return 0;
2209 }
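  /* Illustration (hypothetical layout): if p->a and p->b are adjacent
     4-bit fields, and likewise q->a and q->b, the two 4-bit comparisons
     in `p->a == q->a && p->b == q->b' merge above into a single 8-bit
     bit-field comparison.  */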
2210
2211 /* Handle the case of comparisons with constants. If there is something in
2212 common between the masks, those bits of the constants must be the same.
2213 If not, the condition is always false for `and' and always true for
2214 `or'; test for this to avoid generating incorrect code below.  */
2215 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
2216 if (! integer_zerop (result)
2217 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
2218 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
2219 {
2220 if (wanted_code == NE_EXPR)
2221 {
2222 warning ("`or' of unmatched not-equal tests is always 1");
2223 return convert (truth_type, integer_one_node);
2224 }
2225 else
2226 {
2227 warning ("`and' of mutually exclusive equal-tests is always zero");
2228 return convert (truth_type, integer_zero_node);
2229 }
2230 }
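  /* Example (illustrative): in `(p->f & 3) == 1 && (p->f & 3) == 2' the
     masks overlap but the constants differ in the common bits, so the
     `and' folds to 0; the analogous `!=' tests or'd together fold to 1.  */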
2231
2232 /* Construct the expression we will return. First get the component
2233 reference we will make. Unless the mask is all ones the width of
2234 that field, perform the mask operation. Then compare with the
2235 merged constant. */
2236 result = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
2237 ll_unsignedp || rl_unsignedp);
2238
2239 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
2240 if (! all_ones_mask_p (ll_mask, lnbitsize))
2241 result = build (BIT_AND_EXPR, type, result, ll_mask);
2242
2243 return build (wanted_code, truth_type, result,
2244 const_binop (BIT_IOR_EXPR, l_const, r_const));
2245 }
2246 \f
2247 /* Perform constant folding and related simplification of EXPR.
2248 The related simplifications include x*1 => x, x*0 => 0, etc.,
2249 and application of the associative law.
2250 NOP_EXPR conversions may be removed freely (as long as we
2251 are careful not to change the C type of the overall expression).
2252 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
2253 but we can constant-fold them if they have constant operands. */
2254
2255 tree
2256 fold (expr)
2257 tree expr;
2258 {
2259 register tree t = expr;
2260 tree t1 = NULL_TREE;
2261 tree type = TREE_TYPE (expr);
2262 register tree arg0, arg1;
2263 register enum tree_code code = TREE_CODE (t);
2264 register int kind;
2265
2266 /* WINS will be nonzero when the switch is done
2267 if all operands are constant. */
2268
2269 int wins = 1;
2270
2271 /* Return right away if already constant. */
2272 if (TREE_CONSTANT (t))
2273 {
2274 if (code == CONST_DECL)
2275 return DECL_INITIAL (t);
2276 return t;
2277 }
2278
2279 kind = TREE_CODE_CLASS (code);
2280 if (kind == 'e' || kind == '<' || kind == '1' || kind == '2' || kind == 'r')
2281 {
2282 register int len = tree_code_length[(int) code];
2283 register int i;
2284 for (i = 0; i < len; i++)
2285 {
2286 tree op = TREE_OPERAND (t, i);
2287
2288 if (op == 0)
2289 continue; /* Valid for CALL_EXPR, at least. */
2290
2291 /* Strip any conversions that don't change the mode. */
2292 STRIP_NOPS (op);
2293
2294 if (TREE_CODE (op) != INTEGER_CST
2295 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
2296 && TREE_CODE (op) != REAL_CST
2297 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
2298 )
2299 /* Note that TREE_CONSTANT isn't enough:
2300 static var addresses are constant but we can't
2301 do arithmetic on them. */
2302 wins = 0;
2303
2304 if (i == 0)
2305 arg0 = op;
2306 else if (i == 1)
2307 arg1 = op;
2308 }
2309 }
2310
2311 /* If this is a commutative operation, and ARG0 is a constant, move it
2312 to ARG1 to reduce the number of tests below. */
2313 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
2314 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
2315 || code == BIT_AND_EXPR)
2316 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
2317 {
2318 tree tem = arg0;
2319 arg0 = arg1; arg1 = tem;
2320
2321 TREE_OPERAND (t, 0) = arg0;
2322 TREE_OPERAND (t, 1) = arg1;
2323 }
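  /* Thus, e.g., `3 + x' is canonicalized as `x + 3', and the cases below
     need only check ARG1 for a constant.  */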
2324
2325 /* Now WINS is set as described above,
2326 ARG0 is the first operand of EXPR,
2327 and ARG1 is the second operand (if it has more than one operand).
2328
2329 First check for cases where an arithmetic operation is applied to a
2330 compound, conditional, or comparison operation. Push the arithmetic
2331 operation inside the compound or conditional to see if any folding
2332 can then be done. Convert comparison to conditional for this purpose.
2333 This also optimizes non-constant cases that used to be done in
2334 expand_expr. */
2335 if (TREE_CODE_CLASS (code) == '1')
2336 {
2337 if (TREE_CODE (arg0) == COMPOUND_EXPR)
2338 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
2339 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
2340 else if (TREE_CODE (arg0) == COND_EXPR)
2341 return fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
2342 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
2343 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
2344 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
2345 return fold (build (COND_EXPR, type, arg0,
2346 fold (build1 (code, type, integer_one_node)),
2347 fold (build1 (code, type, integer_zero_node))));
2348 }
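  /* For example, `-(a ? b : c)' becomes `a ? -b : -c', and a unary
     operator applied to a comparison becomes a conditional selecting
     between the operator applied to 1 and to 0.  */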
2349 else if (TREE_CODE_CLASS (code) == '2')
2350 {
2351 if (TREE_CODE (arg1) == COMPOUND_EXPR)
2352 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
2353 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
2354 else if (TREE_CODE (arg1) == COND_EXPR
2355 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
2356 {
2357 tree test, true_value, false_value;
2358
2359 if (TREE_CODE (arg1) == COND_EXPR)
2360 {
2361 test = TREE_OPERAND (arg1, 0);
2362 true_value = TREE_OPERAND (arg1, 1);
2363 false_value = TREE_OPERAND (arg1, 2);
2364 }
2365 else
2366 {
2367 test = arg1;
2368 true_value = integer_one_node;
2369 false_value = integer_zero_node;
2370 }
2371
2372 if (TREE_CODE (arg0) != VAR_DECL && TREE_CODE (arg0) != PARM_DECL)
2373 arg0 = save_expr (arg0);
2374 test = fold (build (COND_EXPR, type, test,
2375 fold (build (code, type, arg0, true_value)),
2376 fold (build (code, type, arg0, false_value))));
2377 if (TREE_CODE (arg0) == SAVE_EXPR)
2378 return build (COMPOUND_EXPR, type,
2379 convert (void_type_node, arg0), test);
2380 else
2381 return convert (type, test);
2382 }
2383
2384 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
2385 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
2386 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
2387 else if (TREE_CODE (arg0) == COND_EXPR
2388 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
2389 {
2390 tree test, true_value, false_value;
2391
2392 if (TREE_CODE (arg0) == COND_EXPR)
2393 {
2394 test = TREE_OPERAND (arg0, 0);
2395 true_value = TREE_OPERAND (arg0, 1);
2396 false_value = TREE_OPERAND (arg0, 2);
2397 }
2398 else
2399 {
2400 test = arg0;
2401 true_value = integer_one_node;
2402 false_value = integer_zero_node;
2403 }
2404
2405 if (TREE_CODE (arg1) != VAR_DECL && TREE_CODE (arg1) != PARM_DECL)
2406 arg1 = save_expr (arg1);
2407 test = fold (build (COND_EXPR, type, test,
2408 fold (build (code, type, true_value, arg1)),
2409 fold (build (code, type, false_value, arg1))));
2410 if (TREE_CODE (arg1) == SAVE_EXPR)
2411 return build (COMPOUND_EXPR, type,
2412 convert (void_type_node, arg1), test);
2413 else
2414 return convert (type, test);
2415 }
2416 }
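  /* Likewise, e.g., `x + (a ? b : c)' is rewritten as `a ? x + b : x + c'
     (with X wrapped in a SAVE_EXPR first, unless it is a simple variable
     or parameter), so that each arm can fold further.  */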
2417
2418 switch (code)
2419 {
2420 case INTEGER_CST:
2421 case REAL_CST:
2422 case STRING_CST:
2423 case COMPLEX_CST:
2424 case CONSTRUCTOR:
2425 return t;
2426
2427 case CONST_DECL:
2428 return fold (DECL_INITIAL (t));
2429
2430 case NOP_EXPR:
2431 case FLOAT_EXPR:
2432 case CONVERT_EXPR:
2433 case FIX_TRUNC_EXPR:
2434 /* Other kinds of FIX are not handled properly by fold_convert. */
2435 /* Two conversions in a row are not needed unless:
2436 - the intermediate type is narrower than both initial and final, or
2437 - the initial type is a pointer type and the precisions of the
2438 intermediate and final types differ, or
2439 - the final type is a pointer type and the precisions of the
2440 initial and intermediate types differ. */
2441 if ((TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
2442 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
2443 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
2444 > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
2445 ||
2446 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
2447 > TYPE_PRECISION (TREE_TYPE (t)))
2448 && ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (t, 0)))
2449 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
2450 > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))))
2451 ==
2452 (TREE_UNSIGNED (TREE_TYPE (t))
2453 && (TYPE_PRECISION (TREE_TYPE (t))
2454 > TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0))))))
2455 && ! ((TREE_CODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
2456 == POINTER_TYPE)
2457 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0)))
2458 != TYPE_PRECISION (TREE_TYPE (t))))
2459 && ! (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE
2460 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
2461 != TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (t, 0))))))
2462 return convert (TREE_TYPE (t), TREE_OPERAND (TREE_OPERAND (t, 0), 0));
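      /* E.g., with 8-bit char and 32-bit int (both signed, as an
         illustration), `(short) (int) c' keeps its value as `(short) c':
         the intermediate type is wider than both, so nothing is lost.  */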
2463
2464 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
2465 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1)))
2466 {
2467 /* Don't leave an assignment inside a conversion. */
2468 tree prev = TREE_OPERAND (t, 0);
2469 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
2470 /* First do the assignment, then return converted constant. */
2471 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
2472 TREE_USED (t) = 1;
2473 return t;
2474 }
2475 if (!wins)
2476 {
2477 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
2478 return t;
2479 }
2480 return fold_convert (t, arg0);
2481
2482 #if 0 /* This loses on &"foo"[0]. */
2483 case ARRAY_REF:
2484 {
2485 int i;
2486
2487 /* Fold an expression like: "foo"[2] */
2488 if (TREE_CODE (arg0) == STRING_CST
2489 && TREE_CODE (arg1) == INTEGER_CST
2490 && !TREE_INT_CST_HIGH (arg1)
2491 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
2492 {
2493 t = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
2494 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
2495 force_fit_type (t);
2496 }
2497 }
2498 return t;
2499 #endif /* 0 */
2500
2501 case RANGE_EXPR:
2502 TREE_CONSTANT (t) = wins;
2503 return t;
2504
2505 case NEGATE_EXPR:
2506 if (wins)
2507 {
2508 if (TREE_CODE (arg0) == INTEGER_CST)
2509 {
2510 if (TREE_INT_CST_LOW (arg0) == 0)
2511 t = build_int_2 (0, - TREE_INT_CST_HIGH (arg0));
2512 else
2513 t = build_int_2 (- TREE_INT_CST_LOW (arg0),
2514 ~ TREE_INT_CST_HIGH (arg0));
2515 TREE_TYPE (t) = type;
2516 force_fit_type (t);
2517 }
2518 else if (TREE_CODE (arg0) == REAL_CST)
2519 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
2520 TREE_TYPE (t) = type;
2521 }
2522 else if (TREE_CODE (arg0) == NEGATE_EXPR)
2523 return TREE_OPERAND (arg0, 0);
2524
2525 /* Convert - (a - b) to (b - a) for non-floating-point. */
2526 else if (TREE_CODE (arg0) == MINUS_EXPR && TREE_CODE (type) != REAL_TYPE)
2527 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
2528 TREE_OPERAND (arg0, 0));
2529
2530 return t;
2531
2532 case ABS_EXPR:
2533 if (wins)
2534 {
2535 if (TREE_CODE (arg0) == INTEGER_CST)
2536 {
2537 if (! TREE_UNSIGNED (type)
2538 && TREE_INT_CST_HIGH (arg0) < 0)
2539 {
2540 if (TREE_INT_CST_LOW (arg0) == 0)
2541 t = build_int_2 (0, - TREE_INT_CST_HIGH (arg0));
2542 else
2543 t = build_int_2 (- TREE_INT_CST_LOW (arg0),
2544 ~ TREE_INT_CST_HIGH (arg0));
2545 }
2546 }
2547 else if (TREE_CODE (arg0) == REAL_CST)
2548 {
2549 if (REAL_VALUES_LESS (TREE_REAL_CST (arg0), dconst0))
2550 t = build_real (type,
2551 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
2552 }
2553 TREE_TYPE (t) = type;
2554 }
2555 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
2556 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
2557 return t;
2558
2559 case BIT_NOT_EXPR:
2560 if (wins)
2561 {
2562 if (TREE_CODE (arg0) == INTEGER_CST)
2563 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
2564 ~ TREE_INT_CST_HIGH (arg0));
2565 TREE_TYPE (t) = type;
2566 force_fit_type (t);
2567 }
2568 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
2569 return TREE_OPERAND (arg0, 0);
2570 return t;
2571
2572 case PLUS_EXPR:
2573 /* A + (-B) -> A - B */
2574 if (TREE_CODE (arg1) == NEGATE_EXPR)
2575 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
2576 else if (TREE_CODE (type) != REAL_TYPE)
2577 {
2578 if (integer_zerop (arg1))
2579 return non_lvalue (convert (type, arg0));
2580
2581 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
2582 with a constant, and the two constants have no bits in common,
2583 we should treat this as a BIT_IOR_EXPR since this may produce more
2584 simplifications. */
2585 if (TREE_CODE (arg0) == BIT_AND_EXPR
2586 && TREE_CODE (arg1) == BIT_AND_EXPR
2587 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
2588 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
2589 && integer_zerop (const_binop (BIT_AND_EXPR,
2590 TREE_OPERAND (arg0, 1),
2591 TREE_OPERAND (arg1, 1))))
2592 {
2593 code = BIT_IOR_EXPR;
2594 goto bit_ior;
2595 }
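          /* E.g., `(x & 0xf0) + (y & 0x0f)': the constant masks share no
             bits, so no carries can occur and the addition is equivalent
             to a bitwise inclusive or.  */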
2596 }
2597 /* In IEEE floating point, x+0 may not equal x. */
2598 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
2599 && real_zerop (arg1))
2600 return non_lvalue (convert (type, arg0));
2601 associate:
2602 /* In most languages, we can't associate operations on floats
2603 through parentheses. Rather than remember where the parentheses
2604 were, we don't associate floats at all. It shouldn't matter much. */
2605 if (TREE_CODE (type) == REAL_TYPE)
2606 goto binary;
2607 /* The varsign == -1 cases happen only for addition and subtraction.
2608 It says that the arg that was split was really CON minus VAR.
2609 The rest of the code applies to all associative operations. */
2610 if (!wins)
2611 {
2612 tree var, con, tem;
2613 int varsign;
2614
2615 if (split_tree (arg0, code, &var, &con, &varsign))
2616 {
2617 if (varsign == -1)
2618 {
2619 /* EXPR is (CON-VAR) +- ARG1. */
2620 /* If it is + and VAR==ARG1, return just CONST. */
2621 if (code == PLUS_EXPR && operand_equal_p (var, arg1, 0))
2622 return convert (TREE_TYPE (t), con);
2623
2624 /* Otherwise return (CON +- ARG1) - VAR. */
2625 TREE_SET_CODE (t, MINUS_EXPR);
2626 TREE_OPERAND (t, 1) = var;
2627 TREE_OPERAND (t, 0)
2628 = fold (build (code, TREE_TYPE (t), con, arg1));
2629 }
2630 else
2631 {
2632 /* EXPR is (VAR+CON) +- ARG1. */
2633 /* If it is - and VAR==ARG1, return just CONST. */
2634 if (code == MINUS_EXPR && operand_equal_p (var, arg1, 0))
2635 return convert (TREE_TYPE (t), con);
2636
2637 /* Otherwise return VAR +- (ARG1 +- CON). */
2638 TREE_OPERAND (t, 1) = tem
2639 = fold (build (code, TREE_TYPE (t), arg1, con));
2640 TREE_OPERAND (t, 0) = var;
2641 if (integer_zerop (tem)
2642 && (code == PLUS_EXPR || code == MINUS_EXPR))
2643 return convert (type, var);
2644 /* If we have x +/- (c - d) [c an explicit integer]
2645 change it to x -/+ (d - c) since if d is relocatable
2646 then the latter can be a single immediate insn
2647 and the former cannot. */
2648 if (TREE_CODE (tem) == MINUS_EXPR
2649 && TREE_CODE (TREE_OPERAND (tem, 0)) == INTEGER_CST)
2650 {
2651 tree tem1 = TREE_OPERAND (tem, 1);
2652 TREE_OPERAND (tem, 1) = TREE_OPERAND (tem, 0);
2653 TREE_OPERAND (tem, 0) = tem1;
2654 TREE_SET_CODE (t,
2655 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
2656 }
2657 }
2658 return t;
2659 }
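      /* So, e.g., `(x + 3) + 5' is reassociated to `x + (5 + 3)', which
         the inner fold collapses to `x + 8'; and `(8 - x) + 5' becomes
         `(8 + 5) - x'.  */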
2660
2661 if (split_tree (arg1, code, &var, &con, &varsign))
2662 {
2663 /* EXPR is ARG0 +- (CON +- VAR). */
2664 if (varsign == -1)
2665 TREE_SET_CODE (t,
2666 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
2667 if (TREE_CODE (t) == MINUS_EXPR
2668 && operand_equal_p (var, arg0, 0))
2669 {
2670 /* If VAR and ARG0 cancel, return just CON or -CON. */
2671 if (code == PLUS_EXPR)
2672 return convert (TREE_TYPE (t), con);
2673 return fold (build1 (NEGATE_EXPR, TREE_TYPE (t),
2674 convert (TREE_TYPE (t), con)));
2675 }
2676 TREE_OPERAND (t, 0)
2677 = fold (build (code, TREE_TYPE (t), arg0, con));
2678 TREE_OPERAND (t, 1) = var;
2679 if (integer_zerop (TREE_OPERAND (t, 0))
2680 && TREE_CODE (t) == PLUS_EXPR)
2681 return convert (TREE_TYPE (t), var);
2682 return t;
2683 }
2684 }
2685 binary:
2686 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
2687 if (TREE_CODE (arg1) == REAL_CST)
2688 return t;
2689 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
2690 if (wins)
2691 t1 = const_binop (code, arg0, arg1);
2692 if (t1 != NULL_TREE)
2693 {
2694 /* The return value should always have
2695 the same type as the original expression. */
2696 TREE_TYPE (t1) = TREE_TYPE (t);
2697 return t1;
2698 }
2699 return t;
2700
2701 case MINUS_EXPR:
2702 if (TREE_CODE (type) != REAL_TYPE)
2703 {
2704 if (! wins && integer_zerop (arg0))
2705 return build1 (NEGATE_EXPR, type, arg1);
2706 if (integer_zerop (arg1))
2707 return non_lvalue (convert (type, arg0));
2708 }
2709 /* Convert A - (-B) to A + B. */
2710 else if (TREE_CODE (arg1) == NEGATE_EXPR)
2711 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
2712 else
2713 {
2714 if (! wins && real_zerop (arg0))
2715 return build1 (NEGATE_EXPR, type, arg1);
2716 /* In IEEE floating point, x-0 may not equal x. */
2717 if (real_zerop (arg1) && TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
2718 return non_lvalue (convert (type, arg0));
2719 }
2720 /* Fold &x - &x. This can happen from &x.foo - &x.
2721 Note that this can't be done for certain floats even in non-IEEE formats.
2722 Also note that operand_equal_p is always false if an operand
2723 is volatile. */
2724
2725 if (operand_equal_p (arg0, arg1,
2726 TREE_CODE (type) == REAL_TYPE))
2727 return convert (type, integer_zero_node);
2728 goto associate;
2729
2730 case MULT_EXPR:
2731 if (TREE_CODE (type) != REAL_TYPE)
2732 {
2733 if (integer_zerop (arg1))
2734 return omit_one_operand (type, arg1, arg0);
2735 if (integer_onep (arg1))
2736 return non_lvalue (convert (type, arg0));
2737
2738 /* (a * (1 << b)) is (a << b) */
2739 if (TREE_CODE (arg1) == LSHIFT_EXPR
2740 && integer_onep (TREE_OPERAND (arg1, 0)))
2741 return fold (build (LSHIFT_EXPR, type, arg0,
2742 TREE_OPERAND (arg1, 1)));
2743 if (TREE_CODE (arg0) == LSHIFT_EXPR
2744 && integer_onep (TREE_OPERAND (arg0, 0)))
2745 return fold (build (LSHIFT_EXPR, type, arg1,
2746 TREE_OPERAND (arg0, 1)));
2747 }
2748 /* In IEEE floating point, these optimizations are not correct. */
2749 else
2750 {
2751 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
2752 && real_zerop (arg1))
2753 return omit_one_operand (type, arg1, arg0);
2754 /* In IEEE floating point, x*1 is not equivalent to x for nans.
2755 However, ANSI says we can drop signals,
2756 so we can do this anyway. */
2757 if (real_onep (arg1))
2758 return non_lvalue (convert (type, arg0));
2759 /* x*2 is x+x */
2760 if (! wins && real_twop (arg1))
2761 {
2762 tree arg = save_expr (arg0);
2763 return build (PLUS_EXPR, type, arg, arg);
2764 }
2765 }
2766 goto associate;
2767
2768 case BIT_IOR_EXPR:
2769 bit_ior:
2770 if (integer_all_onesp (arg1))
2771 return omit_one_operand (type, arg1, arg0);
2772 if (integer_zerop (arg1))
2773 return non_lvalue (convert (type, arg0));
2774 t1 = distribute_bit_expr (code, type, arg0, arg1);
2775 if (t1 != NULL_TREE)
2776 return t1;
2777 goto associate;
2778
2779 case BIT_XOR_EXPR:
2780 if (integer_zerop (arg1))
2781 return non_lvalue (convert (type, arg0));
2782 if (integer_all_onesp (arg1))
2783 return fold (build1 (BIT_NOT_EXPR, type, arg0));
2784 goto associate;
2785
2786 case BIT_AND_EXPR:
2787 bit_and:
2788 if (integer_all_onesp (arg1))
2789 return non_lvalue (convert (type, arg0));
2790 if (integer_zerop (arg1))
2791 return omit_one_operand (type, arg1, arg0);
2792 t1 = distribute_bit_expr (code, type, arg0, arg1);
2793 if (t1 != NULL_TREE)
2794 return t1;
2795 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char.  */
2796 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
2797 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
2798 {
2799 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
2800 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_INT
2801 && (~TREE_INT_CST_LOW (arg0) & ((1 << prec) - 1)) == 0)
2802 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
2803 }
2804 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
2805 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
2806 {
2807 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
2808 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_INT
2809 && (~TREE_INT_CST_LOW (arg1) & ((1 << prec) - 1)) == 0)
2810 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
2811 }
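      /* E.g., with 8-bit unsigned char c, prec is 8 and 0377 has all
         eight low bits set, so `(int) c & 0377' is simply `(int) c'.  */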
2812 goto associate;
2813
2814 case BIT_ANDTC_EXPR:
2815 if (integer_all_onesp (arg0))
2816 return non_lvalue (convert (type, arg1));
2817 if (integer_zerop (arg0))
2818 return omit_one_operand (type, arg0, arg1);
2819 if (TREE_CODE (arg1) == INTEGER_CST)
2820 {
2821 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
2822 code = BIT_AND_EXPR;
2823 goto bit_and;
2824 }
2825 goto binary;
2826
2827 case TRUNC_DIV_EXPR:
2828 case ROUND_DIV_EXPR:
2829 case FLOOR_DIV_EXPR:
2830 case CEIL_DIV_EXPR:
2831 case EXACT_DIV_EXPR:
2832 case RDIV_EXPR:
2833 if (integer_onep (arg1))
2834 return non_lvalue (convert (type, arg0));
2835 if (integer_zerop (arg1))
2836 return t;
2837 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
2838 #ifndef REAL_INFINITY
2839 if (TREE_CODE (arg1) == REAL_CST
2840 && real_zerop (arg1))
2841 return t;
2842 #endif
2843 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
2844
2845 goto binary;
2846
2847 case CEIL_MOD_EXPR:
2848 case FLOOR_MOD_EXPR:
2849 case ROUND_MOD_EXPR:
2850 case TRUNC_MOD_EXPR:
2851 if (integer_onep (arg1))
2852 return omit_one_operand (type, integer_zero_node, arg0);
2853 if (integer_zerop (arg1))
2854 return t;
2855 goto binary;
2856
2857 case LSHIFT_EXPR:
2858 case RSHIFT_EXPR:
2859 case LROTATE_EXPR:
2860 case RROTATE_EXPR:
2861 if (integer_zerop (arg1))
2862 return non_lvalue (convert (type, arg0));
2863 /* Since a negative shift count is not well-defined,
2864 don't try to compute it in the compiler.  */
2865 if (tree_int_cst_lt (arg1, integer_zero_node))
2866 return t;
2867 goto binary;
2868
2869 case MIN_EXPR:
2870 if (operand_equal_p (arg0, arg1, 0))
2871 return arg0;
2872 if (TREE_CODE (type) == INTEGER_TYPE
2873 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
2874 return omit_one_operand (type, arg1, arg0);
2875 goto associate;
2876
2877 case MAX_EXPR:
2878 if (operand_equal_p (arg0, arg1, 0))
2879 return arg0;
2880 if (TREE_CODE (type) == INTEGER_TYPE
2881 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
2882 return omit_one_operand (type, arg1, arg0);
2883 goto associate;
2884
2885 case TRUTH_NOT_EXPR:
2886 /* Note that the operand of this must be an int
2887 and its values must be 0 or 1.
2888 ("true" is a fixed value perhaps depending on the language,
2889 but we don't handle values other than 1 correctly yet.) */
2890 return invert_truthvalue (arg0);
2891
2892 case TRUTH_ANDIF_EXPR:
2893 /* Note that the operands of this must be ints
2894 and their values must be 0 or 1.
2895 ("true" is a fixed value perhaps depending on the language.) */
2896 /* If first arg is constant zero, return it. */
2897 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
2898 return arg0;
2899 case TRUTH_AND_EXPR:
2900 /* If either arg is constant true, drop it. */
2901 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
2902 return non_lvalue (arg1);
2903 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
2904 return non_lvalue (arg0);
2905 /* Both known to be zero => return zero. */
2906 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2907 return arg0;
2908
2909 truth_andor:
2910 /* Check for the possibility of merging component references. If our
2911 lhs is another similar operation, try to merge its rhs with our
2912 rhs. Then try to merge our lhs and rhs. */
2913 if (optimize)
2914 {
2915 tree tem;
2916
2917 if (TREE_CODE (arg0) == code)
2918 {
2919 tem = merge_component_references (code, type,
2920 TREE_OPERAND (arg0, 1), arg1);
2921 if (tem)
2922 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
2923 }
2924
2925 tem = merge_component_references (code, type, arg0, arg1);
2926 if (tem)
2927 return tem;
2928 }
2929 return t;
2930
2931 case TRUTH_ORIF_EXPR:
2932 /* Note that the operands of this must be ints
2933 and their values must be 0 or true.
2934 ("true" is a fixed value perhaps depending on the language.) */
2935 /* If first arg is constant true, return it. */
2936 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
2937 return arg0;
2938 case TRUTH_OR_EXPR:
2939 /* If either arg is constant zero, drop it. */
2940 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
2941 return non_lvalue (arg1);
2942 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
2943 return non_lvalue (arg0);
2944 /* Both known to be true => return true. */
2945 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2946 return arg0;
2947 goto truth_andor;
2948
2949 case EQ_EXPR:
2950 case NE_EXPR:
2951 case LT_EXPR:
2952 case GT_EXPR:
2953 case LE_EXPR:
2954 case GE_EXPR:
2955 /* If one arg is a constant integer, put it last. */
2956 if (TREE_CODE (arg0) == INTEGER_CST
2957 && TREE_CODE (arg1) != INTEGER_CST)
2958 {
2959 TREE_OPERAND (t, 0) = arg1;
2960 TREE_OPERAND (t, 1) = arg0;
2961 arg0 = TREE_OPERAND (t, 0);
2962 arg1 = TREE_OPERAND (t, 1);
2963 switch (code)
2964 {
2965 case GT_EXPR:
2966 code = LT_EXPR;
2967 break;
2968 case GE_EXPR:
2969 code = LE_EXPR;
2970 break;
2971 case LT_EXPR:
2972 code = GT_EXPR;
2973 break;
2974 case LE_EXPR:
2975 code = GE_EXPR;
2976 break;
2977 }
2978 TREE_SET_CODE (t, code);
2979 }
2980
2981 /* Convert foo++ == CONST into ++foo == CONST + INCR.
2982 First, see if one arg is constant; find the constant arg
2983 and the other one. */
2984 {
2985 tree constop = 0, varop;
2986 tree *constoploc;
2987
2988 if (TREE_CONSTANT (arg1))
2989 constoploc = &TREE_OPERAND (t, 1), constop = arg1, varop = arg0;
2990 if (TREE_CONSTANT (arg0))
2991 constoploc = &TREE_OPERAND (t, 0), constop = arg0, varop = arg1;
2992
2993 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
2994 {
2995 tree newconst
2996 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
2997 constop, TREE_OPERAND (varop, 1)));
2998 /* This optimization is invalid for ordered comparisons
2999 if CONST+INCR overflows or if foo+incr might overflow.
3000 For pointer types we assume overflow doesn't happen. */
3001 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
3002 || code == EQ_EXPR || code == NE_EXPR)
3003 {
3004 /* This optimization is invalid for floating point
3005 if adding one to the constant does not change it. */
3006 if (TREE_CODE (TREE_TYPE (newconst)) != REAL_TYPE
3007 || !REAL_VALUES_EQUAL (TREE_REAL_CST (newconst),
3008 TREE_REAL_CST (constop)))
3009 {
3010 TREE_SET_CODE (varop, PREINCREMENT_EXPR);
3011 *constoploc = newconst;
3012 return t;
3013 }
3014 }
3015 }
3016 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
3017 {
3018 tree newconst
3019 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
3020 constop, TREE_OPERAND (varop, 1)));
3021 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
3022 || code == EQ_EXPR || code == NE_EXPR)
3023 {
3024 if (TREE_CODE (TREE_TYPE (newconst)) != REAL_TYPE
3025 || !REAL_VALUES_EQUAL (TREE_REAL_CST (newconst),
3026 TREE_REAL_CST (constop)))
3027 {
3028 TREE_SET_CODE (varop, PREDECREMENT_EXPR);
3029 *constoploc = newconst;
3030 return t;
3031 }
3032 }
3033 }
3034 }
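      /* E.g., for integral foo, `foo++ == 5' becomes `++foo == 6';
         the increment is folded into the constant.  */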
3035
3036 /* Change X >= CST to X > (CST - 1) if CST is positive. */
3037 if (TREE_CODE (arg1) == INTEGER_CST
3038 && TREE_CODE (arg0) != INTEGER_CST
3039 && ! tree_int_cst_lt (arg1, integer_one_node))
3040 {
3041 switch (TREE_CODE (t))
3042 {
3043 case GE_EXPR:
3044 code = GT_EXPR;
3045 TREE_SET_CODE (t, code);
3046 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
3047 TREE_OPERAND (t, 1) = arg1;
3048 break;
3049
3050 case LT_EXPR:
3051 code = LE_EXPR;
3052 TREE_SET_CODE (t, code);
3053 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
3054 TREE_OPERAND (t, 1) = arg1;
3055 }
3056 }
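      /* E.g., `x >= 1' becomes `x > 0' and `x < 1' becomes `x <= 0',
         which the zero-comparison cases below can simplify further.  */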
3057
3058 /* If we are comparing the result of a comparison to a constant,
3059 we can often simplify this, since the comparison result is known to
3060 be either 0 or 1. We can ignore conversions if the LHS is a
3061 comparison. */
3062
3063 if (TREE_CODE (arg1) == INTEGER_CST)
3064 {
3065 tree comparison = arg0;
3066
3067 while (TREE_CODE (comparison) == NOP_EXPR
3068 || TREE_CODE (comparison) == CONVERT_EXPR)
3069 comparison = TREE_OPERAND (comparison, 0);
3070
3071 if (TREE_CODE_CLASS (TREE_CODE (comparison)) == '<'
3072 || TREE_CODE (comparison) == TRUTH_ANDIF_EXPR
3073 || TREE_CODE (comparison) == TRUTH_ORIF_EXPR
3074 || TREE_CODE (comparison) == TRUTH_AND_EXPR
3075 || TREE_CODE (comparison) == TRUTH_OR_EXPR
3076 || TREE_CODE (comparison) == TRUTH_NOT_EXPR)
3077 {
3078 /* We do different things depending on whether the
3079 constant being compared against is < 0, == 0, == 1, or > 1.
3080 Each of those cases, in order, corresponds to one
3081 character in a string. The value of the character is
3082 the result to return. A '0' or '1' means return always true
3083 or always false, respectively; 'c' means return the result
3084 of the comparison, and 'i' means return the result of the
3085 inverted comparison. */
3086
3087 char *actions, action;
3088
3089 switch (code)
3090 {
3091 case EQ_EXPR:
3092 actions = "0ic0";
3093 break;
3094 case NE_EXPR:
3095 actions = "1ci1";
3096 break;
3097 case LE_EXPR:
3098 actions = "0i11";
3099 break;
3100 case LT_EXPR:
3101 actions = "00i1";
3102 break;
3103 case GE_EXPR:
3104 actions = "11c0";
3105 break;
3106 case GT_EXPR:
3107 actions = "1c00";
3108 break;
3109 }
3110
3111 if (tree_int_cst_lt (arg1, integer_zero_node))
3112 action = actions[0];
3113 else if (integer_zerop (arg1))
3114 action = actions[1];
3115 else if (integer_onep (arg1))
3116 action = actions[2];
3117 else
3118 action = actions[3];
3119
3120 switch (action)
3121 {
3122 case '0':
3123 return omit_one_operand (type, integer_zero_node,
3124 comparison);
3125
3126 case '1':
3127 return omit_one_operand (type, integer_one_node, comparison);
3128
3129 case 'c':
3130 return convert (type, comparison);
3131
3132 case 'i':
3133 return convert (type, invert_truthvalue (comparison));
3134
3135 default:
3136 abort ();
3137 }
3138 }
3139 }
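      /* E.g., `(a < b) == 0' folds to the inverted comparison `a >= b',
         and `(a < b) <= 1', being always true, folds to 1.  */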
3140
3141 /* If this is an EQ or NE comparison with zero and ARG0 is
3142 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
3143 two operations, but the latter can be done in one less insn
3144 on machines that have only two-operand insns or on which a
3145 constant cannot be the first operand. */
3146 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
3147 && TREE_CODE (arg0) == BIT_AND_EXPR)
3148 {
3149 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
3150 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
3151 return
3152 fold (build (code, type,
3153 build (BIT_AND_EXPR, TREE_TYPE (arg0),
3154 build (RSHIFT_EXPR,
3155 TREE_TYPE (TREE_OPERAND (arg0, 0)),
3156 TREE_OPERAND (arg0, 1),
3157 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
3158 convert (TREE_TYPE (arg0),
3159 integer_one_node)),
3160 arg1));
3161 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
3162 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
3163 return
3164 fold (build (code, type,
3165 build (BIT_AND_EXPR, TREE_TYPE (arg0),
3166 build (RSHIFT_EXPR,
3167 TREE_TYPE (TREE_OPERAND (arg0, 1)),
3168 TREE_OPERAND (arg0, 0),
3169 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
3170 convert (TREE_TYPE (arg0),
3171 integer_one_node)),
3172 arg1));
3173 }
3174
3175 /* If this is an NE comparison of zero with an AND of one, remove the
3176 comparison since the AND will give the correct value. */
3177 if (code == NE_EXPR && integer_zerop (arg1)
3178 && TREE_CODE (arg0) == BIT_AND_EXPR
3179 && integer_onep (TREE_OPERAND (arg0, 1)))
3180 return convert (type, arg0);
3181
3182 /* If we have (A & C) == C where C is a power of 2, convert this into
3183 (A & C) != 0. Similarly for NE_EXPR. */
3184 if ((code == EQ_EXPR || code == NE_EXPR)
3185 && TREE_CODE (arg0) == BIT_AND_EXPR
3186 && integer_pow2p (TREE_OPERAND (arg0, 1))
3187 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
3188 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
3189 arg0, integer_zero_node);
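      /* E.g., `(a & 4) == 4' tests a single bit and so is rewritten as
         `(a & 4) != 0'.  */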
3190
3191 /* Simplify comparison of an integer with itself.
3192 (This may not be safe with IEEE floats if they are nans.) */
3193 if (operand_equal_p (arg0, arg1, 0)
3194 && TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE)
3195 {
3196 switch (code)
3197 {
3198 case EQ_EXPR:
3199 case GE_EXPR:
3200 case LE_EXPR:
3201 t = build_int_2 (1, 0);
3202 TREE_TYPE (t) = type;
3203 return t;
3204 case NE_EXPR:
3205 case GT_EXPR:
3206 case LT_EXPR:
3207 t = build_int_2 (0, 0);
3208 TREE_TYPE (t) = type;
3209 return t;
3210 }
3211 }
3212
3213 /* An unsigned comparison against 0 can be simplified. */
3214 if (integer_zerop (arg1)
3215 && (TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
3216 || TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE)
3217 && TREE_UNSIGNED (TREE_TYPE (arg1)))
3218 {
3219 switch (TREE_CODE (t))
3220 {
3221 case GT_EXPR:
3222 TREE_SET_CODE (t, NE_EXPR);
3223 break;
3224 case LE_EXPR:
3225 TREE_SET_CODE (t, EQ_EXPR);
3226 break;
3227 case GE_EXPR:
3228 return omit_one_operand (integer_type_node,
3229 integer_one_node, arg0);
3230 case LT_EXPR:
3231 return omit_one_operand (integer_type_node,
3232 integer_zero_node, arg0);
3233 }
3234 }
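      /* E.g., for unsigned U, `u > 0' is `u != 0', `u <= 0' is `u == 0',
         `u >= 0' is always 1, and `u < 0' is always 0.  */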
3235
3236 /* To compute GT, swap the arguments and do LT.
3237 To compute GE, do LT and invert the result.
3238 To compute LE, swap the arguments, do LT and invert the result.
3239 To compute NE, do EQ and invert the result. */
3240 if (code == LE_EXPR || code == GT_EXPR)
3241 {
3242 register tree temp = arg0;
3243 arg0 = arg1;
3244 arg1 = temp;
3245 }
3246
3247 /* Compute a result for LT or EQ if args permit;
3248 otherwise return T. */
3249 if (TREE_CODE (arg0) == INTEGER_CST
3250 && TREE_CODE (arg1) == INTEGER_CST)
3251 {
3252 if (code == EQ_EXPR || code == NE_EXPR)
3253 t = build_int_2
3254 (TREE_INT_CST_LOW (arg0) == TREE_INT_CST_LOW (arg1)
3255 && TREE_INT_CST_HIGH (arg0) == TREE_INT_CST_HIGH (arg1),
3256 0);
3257 else
3258 t = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
3259 ? INT_CST_LT_UNSIGNED (arg0, arg1)
3260 : INT_CST_LT (arg0, arg1)),
3261 0);
3262 }
3263 /* Assume a nonexplicit constant cannot equal an explicit one,
3264 since such code would be undefined anyway.
3265 Exception: on sysvr4, using #pragma weak,
3266 a label can come out as 0. */
3267 else if (TREE_CODE (arg1) == INTEGER_CST
3268 && !integer_zerop (arg1)
3269 && TREE_CONSTANT (arg0)
3270 && TREE_CODE (arg0) == ADDR_EXPR
3271 && (code == EQ_EXPR || code == NE_EXPR))
3272 {
3273 t = build_int_2 (0, 0);
3274 }
3275 /* Two real constants can be compared explicitly. */
3276 else if (TREE_CODE (arg0) == REAL_CST
3277 && TREE_CODE (arg1) == REAL_CST)
3278 {
3279 if (code == EQ_EXPR || code == NE_EXPR)
3280 t = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
3281 TREE_REAL_CST (arg1)),
3282 0);
3283 else
3284 t = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
3285 TREE_REAL_CST (arg1)),
3286 0);
3287 }
3288 else if ((TREE_CODE (arg0) == COMPONENT_REF
3289 || TREE_CODE (arg0) == BIT_FIELD_REF)
3290 && (code == EQ_EXPR || code == NE_EXPR)
3291 /* Handle the constant case even without -O
3292 to make sure the warnings are given. */
3293 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
3294 {
3295 tree tem = optimize_bit_field_compare (code, type, arg0, arg1);
3296 return tem ? tem : t;
3297 }
3298
3299 /* If what we want is other than LT or EQ, invert the result. */
3300 if ((code == GE_EXPR || code == LE_EXPR || code == NE_EXPR)
3301 && TREE_CODE (t) == INTEGER_CST)
3302 TREE_INT_CST_LOW (t) ^= 1;
3303 TREE_TYPE (t) = type;
3304 return t;
3305
3306 case COND_EXPR:
3307 if (TREE_CODE (arg0) == INTEGER_CST)
3308 return TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1));
3309 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
3310 return omit_one_operand (type, arg1, arg0);
3311 else if (integer_onep (TREE_OPERAND (t, 1))
3312 && integer_zerop (TREE_OPERAND (t, 2))
3313 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
3314 call to fold will try to move the conversion inside
3315 a COND, which will recurse. In that case, the COND_EXPR
3316 is probably the best choice, so leave it alone. */
3317 && type == TREE_TYPE (arg0))
3318 return arg0;
3319 else if (integer_zerop (arg1) && integer_onep (TREE_OPERAND (t, 2)))
3320 return convert (type, invert_truthvalue (arg0));
3321
3322 /* If we have (a >= 0 ? a : -a) or the same with ">", this is an
3323 absolute value expression. */
3324
3325 if ((TREE_CODE (arg0) == GE_EXPR || TREE_CODE (arg0) == GT_EXPR)
3326 && integer_zerop (TREE_OPERAND (arg0, 1))
3327 && TREE_CODE (TREE_OPERAND (t, 2)) == NEGATE_EXPR
3328 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
3329 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (t, 2), 0), arg1, 0))
3330 return fold (build1 (ABS_EXPR, type, arg1));
3331
3332 /* Similarly for (a <= 0 ? -a : a). */
3333
3334 if ((TREE_CODE (arg0) == LE_EXPR || TREE_CODE (arg0) == LT_EXPR)
3335 && integer_zerop (TREE_OPERAND (arg0, 1))
3336 && TREE_CODE (arg1) == NEGATE_EXPR
3337 && operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (t, 2), 0)
3338 && operand_equal_p (TREE_OPERAND (arg1, 0), TREE_OPERAND (t, 2), 0))
3339 return fold (build1 (ABS_EXPR, type, TREE_OPERAND (t, 2)));
3340
3341 /* If we have a GT, GE, LT, or LE comparison, this might be a MIN or
3342 MAX test. If so, make a MIN_EXPR or MAX_EXPR. */
3343
3344 if (TREE_CODE (arg0) == GT_EXPR || TREE_CODE (arg0) == GE_EXPR
3345 || TREE_CODE (arg0) == LT_EXPR || TREE_CODE (arg0) == LE_EXPR)
3346 {
3347 tree hi_true, lo_true;
3348
3349 if (TREE_CODE (arg0) == GT_EXPR || TREE_CODE (arg0) == GE_EXPR)
3350 hi_true = TREE_OPERAND (arg0, 0), lo_true = TREE_OPERAND (arg0, 1);
3351 else
3352 hi_true = TREE_OPERAND (arg0, 1), lo_true = TREE_OPERAND (arg0, 0);
3353
3354 if (comparison_equiv_p (hi_true, lo_true, arg1, TREE_OPERAND (t, 2)))
3355 /* We use arg1 and the other arg because they must have the same
3356 type as the intended result.
3357 The values being compared might have a narrower type. */
3358 return fold (build (MAX_EXPR, type, arg1, TREE_OPERAND (t, 2)));
3359 else if (comparison_equiv_p (lo_true, hi_true,
3360 arg1, TREE_OPERAND (t, 2)))
3361 return fold (build (MIN_EXPR, type, arg1, TREE_OPERAND (t, 2)));
3362 }
3363
3364 /* Look for cases when we are comparing some expression A for equality
3365 with zero and the result is to be zero if A is zero. In that case,
3366 check to see if the value of A is the same as the value to be
3367 returned when A is non-zero.
3368
3369 There are two cases: One is where we have (A ? A : 0) and the
3370 other is when a single bit is tested (e.g., A & 2 ? 2 : 0).
3371 In these cases, the result of the conditional is simply A.
3372
3373 Start by setting ARG1 to be the true value and ARG0 to be the thing
3374 compared with zero. Then check for the two cases above. */
3375
3376 if (integer_zerop (TREE_OPERAND (t, 2))
3377 && TREE_CODE (arg0) == NE_EXPR
3378 && integer_zerop (TREE_OPERAND (arg0, 1))
3379 && ! TREE_SIDE_EFFECTS (arg1))
3380 ;
3381 else if (integer_zerop (arg1)
3382 && TREE_CODE (arg0) == EQ_EXPR
3383 && integer_zerop (TREE_OPERAND (arg0, 1))
3384 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
3385 arg1 = TREE_OPERAND (t, 2);
3386 else
3387 return t;
3388
3389 arg0 = TREE_OPERAND (arg0, 0);
3390
3391 STRIP_NOPS (arg1);
3392 if (operand_equal_p (arg0, arg1, 0)
3393 || (TREE_CODE (arg1) == INTEGER_CST
3394 && integer_pow2p (arg1)
3395 && TREE_CODE (arg0) == BIT_AND_EXPR
3396 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0)))
3397 return convert (type, arg0);
3398 return t;
3399
3400 case COMPOUND_EXPR:
3401 if (!TREE_SIDE_EFFECTS (arg0))
3402 return arg1;
3403 return t;
3404
3405 default:
3406 return t;
3407 } /* switch (code) */
3408 }