1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 88, 92, 93, 94, 1995 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21 /*@@ This file should be rewritten to use an arbitrary precision
22 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
23 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
24 @@ The routines that translate from the ap rep should
25 @@ warn if precision et al. is lost.
26 @@ This would also make life easier when this technology is used
27 @@ for cross-compilers. */
28
29
30 /* The entry points in this file are fold, size_int and size_binop.
31
32 fold takes a tree as argument and returns a simplified tree.
33
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
37
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'. */
40
41 #include <stdio.h>
42 #include <setjmp.h>
43 #include "config.h"
44 #include "flags.h"
45 #include "tree.h"
46
47 /* Handle floating overflow for `const_binop'. */
48 static jmp_buf float_error;
49
50 static void encode PROTO((HOST_WIDE_INT *, HOST_WIDE_INT, HOST_WIDE_INT));
51 static void decode PROTO((HOST_WIDE_INT *, HOST_WIDE_INT *, HOST_WIDE_INT *));
52 int div_and_round_double PROTO((enum tree_code, int, HOST_WIDE_INT,
53 HOST_WIDE_INT, HOST_WIDE_INT,
54 HOST_WIDE_INT, HOST_WIDE_INT *,
55 HOST_WIDE_INT *, HOST_WIDE_INT *,
56 HOST_WIDE_INT *));
57 static int split_tree PROTO((tree, enum tree_code, tree *, tree *, int *));
58 static tree const_binop PROTO((enum tree_code, tree, tree, int));
59 static tree fold_convert PROTO((tree, tree));
60 static enum tree_code invert_tree_comparison PROTO((enum tree_code));
61 static enum tree_code swap_tree_comparison PROTO((enum tree_code));
62 static int truth_value_p PROTO((enum tree_code));
63 static int operand_equal_for_comparison_p PROTO((tree, tree, tree));
64 static int twoval_comparison_p PROTO((tree, tree *, tree *, int *));
65 static tree eval_subst PROTO((tree, tree, tree, tree, tree));
66 static tree omit_one_operand PROTO((tree, tree, tree));
67 static tree pedantic_omit_one_operand PROTO((tree, tree, tree));
68 static tree distribute_bit_expr PROTO((enum tree_code, tree, tree, tree));
69 static tree make_bit_field_ref PROTO((tree, tree, int, int, int));
70 static tree optimize_bit_field_compare PROTO((enum tree_code, tree,
71 tree, tree));
72 static tree decode_field_reference PROTO((tree, int *, int *,
73 enum machine_mode *, int *,
74 int *, tree *, tree *));
75 static int all_ones_mask_p PROTO((tree, int));
76 static int simple_operand_p PROTO((tree));
77 static tree range_test PROTO((enum tree_code, tree, enum tree_code,
78 enum tree_code, tree, tree, tree));
79 static tree unextend PROTO((tree, int, int, tree));
80 static tree fold_truthop PROTO((enum tree_code, tree, tree, tree));
81 static tree strip_compound_expr PROTO((tree, tree));
82
83 #ifndef BRANCH_COST
84 #define BRANCH_COST 1
85 #endif
86
87 /* Yield nonzero if a signed left shift of A by B bits overflows. */
88 #define left_shift_overflows(a, b) ((a) != ((a) << (b)) >> (b))
89
90 /* Suppose A1 + B1 = SUM1, using 2's complement arithmetic ignoring overflow.
91 Suppose A, B and SUM have the same respective signs as A1, B1, and SUM1.
92 Then this yields nonzero if overflow occurred during the addition.
93 Overflow occurs if A and B have the same sign, but A and SUM differ in sign.
94 Use `^' to test whether signs differ, and `< 0' to isolate the sign. */
95 #define overflow_sum_sign(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
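/* Worked example, using 4-bit two's complement for brevity: with
   a = 0101 (5) and b = 0110 (6), the true sum 11 wraps to sum = 1011 (-5).
   Then ~(a ^ b) = 1100 and (a ^ sum) = 1110, so the AND is 1100, whose
   sign bit is set: overflow.  With b = 1110 (-2) instead, sum = 0011 (3),
   ~(a ^ b) = 0100, (a ^ sum) = 0110, and the AND 0100 is nonnegative:
   no overflow, as expected, since operands of opposite sign cannot
   overflow on addition. */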
96 \f
97 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
98 We do that by representing the two-word integer in 4 words, with only
99 HOST_BITS_PER_WIDE_INT/2 bits stored in each word, as a positive number. */
100
101 #define LOWPART(x) \
102 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT/2)) - 1))
103 #define HIGHPART(x) \
104 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT/2)
105 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT/2)
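/* Illustration, assuming HOST_BITS_PER_WIDE_INT == 32: the half-words
   are 16 bits wide, so

     LOWPART (0x12345678)  == 0x5678
     HIGHPART (0x12345678) == 0x1234
     BASE                  == 0x10000

   and x == HIGHPART (x) * BASE + LOWPART (x) for any 32-bit x. */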
106
107 /* Unpack a two-word integer into 4 words.
108 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
109 WORDS points to the array of HOST_WIDE_INTs. */
110
111 static void
112 encode (words, low, hi)
113 HOST_WIDE_INT *words;
114 HOST_WIDE_INT low, hi;
115 {
116 words[0] = LOWPART (low);
117 words[1] = HIGHPART (low);
118 words[2] = LOWPART (hi);
119 words[3] = HIGHPART (hi);
120 }
121
122 /* Pack an array of 4 words into a two-word integer.
123 WORDS points to the array of words.
124 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
125
126 static void
127 decode (words, low, hi)
128 HOST_WIDE_INT *words;
129 HOST_WIDE_INT *low, *hi;
130 {
131 *low = words[0] | words[1] * BASE;
132 *hi = words[2] | words[3] * BASE;
133 }
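/* Round-trip example (illustrative, HOST_BITS_PER_WIDE_INT == 32):

     HOST_WIDE_INT w[4], lo, hi;
     encode (w, 0x12345678, 0x0000ABCD);
       ==> w[] is { 0x5678, 0x1234, 0xABCD, 0x0000 }
     decode (w, &lo, &hi);
       ==> lo == 0x12345678 and hi == 0x0000ABCD again

   Keeping every element below BASE leaves headroom in an unsigned
   HOST_WIDE_INT for the carries accumulated by mul_double and
   div_and_round_double below. */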
134 \f
135 /* Make the integer constant T valid for its type
136 by setting to 0 or 1 all the bits in the constant
137 that don't belong in the type.
138 Yield 1 if a signed overflow occurs, 0 otherwise.
139 If OVERFLOW is nonzero, a signed overflow has already occurred
140 in calculating T, so propagate it.
141
142 Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
143 if it exists. */
144
145 int
146 force_fit_type (t, overflow)
147 tree t;
148 int overflow;
149 {
150 HOST_WIDE_INT low, high;
151 register int prec;
152
153 if (TREE_CODE (t) == REAL_CST)
154 {
155 #ifdef CHECK_FLOAT_VALUE
156 CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t)), TREE_REAL_CST (t),
157 overflow);
158 #endif
159 return overflow;
160 }
161
162 else if (TREE_CODE (t) != INTEGER_CST)
163 return overflow;
164
165 low = TREE_INT_CST_LOW (t);
166 high = TREE_INT_CST_HIGH (t);
167
168 if (TREE_CODE (TREE_TYPE (t)) == POINTER_TYPE)
169 prec = POINTER_SIZE;
170 else
171 prec = TYPE_PRECISION (TREE_TYPE (t));
172
173 /* First clear all bits that are beyond the type's precision. */
174
175 if (prec == 2 * HOST_BITS_PER_WIDE_INT)
176 ;
177 else if (prec > HOST_BITS_PER_WIDE_INT)
178 {
179 TREE_INT_CST_HIGH (t)
180 &= ~((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
181 }
182 else
183 {
184 TREE_INT_CST_HIGH (t) = 0;
185 if (prec < HOST_BITS_PER_WIDE_INT)
186 TREE_INT_CST_LOW (t) &= ~((HOST_WIDE_INT) (-1) << prec);
187 }
188
189 /* Unsigned types do not suffer sign extension or overflow. */
190 if (TREE_UNSIGNED (TREE_TYPE (t)))
191 return overflow;
192
193 /* If the value's sign bit is set, extend the sign. */
194 if (prec != 2 * HOST_BITS_PER_WIDE_INT
195 && (prec > HOST_BITS_PER_WIDE_INT
196 ? (TREE_INT_CST_HIGH (t)
197 & ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)))
198 : TREE_INT_CST_LOW (t) & ((HOST_WIDE_INT) 1 << (prec - 1))))
199 {
200 /* Value is negative:
201 set to 1 all the bits that are outside this type's precision. */
202 if (prec > HOST_BITS_PER_WIDE_INT)
203 {
204 TREE_INT_CST_HIGH (t)
205 |= ((HOST_WIDE_INT) (-1) << (prec - HOST_BITS_PER_WIDE_INT));
206 }
207 else
208 {
209 TREE_INT_CST_HIGH (t) = -1;
210 if (prec < HOST_BITS_PER_WIDE_INT)
211 TREE_INT_CST_LOW (t) |= ((HOST_WIDE_INT) (-1) << prec);
212 }
213 }
214
215 /* Yield nonzero if signed overflow occurred. */
216 return
217 ((overflow | (low ^ TREE_INT_CST_LOW (t)) | (high ^ TREE_INT_CST_HIGH (t)))
218 != 0);
219 }
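/* Example (illustrative): for a signed type of precision 8, a constant
   whose low word holds 0x1FF first has all bits above bit 7 cleared,
   leaving 0xFF; since bit 7 (the sign bit) is set, the bits outside the
   precision are then set, sign-extending the value to -1.  Because the
   stored words changed, the function returns 1 to signal overflow. */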
220 \f
221 /* Add two doubleword integers with doubleword result.
222 Each argument is given as two `HOST_WIDE_INT' pieces.
223 One argument is L1 and H1; the other, L2 and H2.
224 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
225
226 int
227 add_double (l1, h1, l2, h2, lv, hv)
228 HOST_WIDE_INT l1, h1, l2, h2;
229 HOST_WIDE_INT *lv, *hv;
230 {
231 HOST_WIDE_INT l, h;
232
233 l = l1 + l2;
234 h = h1 + h2 + ((unsigned HOST_WIDE_INT) l < l1);
235
236 *lv = l;
237 *hv = h;
238 return overflow_sum_sign (h1, h2, h);
239 }
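/* Example (illustrative, 32-bit pieces): adding the pair (0xFFFFFFFF, 0)
   to (1, 0) gives l == 0, and the unsigned comparison (0 < 0xFFFFFFFF)
   contributes the carry, so h == 1: the doubleword value 2**32.
   overflow_sum_sign is applied only to the high words, where any carry
   out of the low word has already been folded in. */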
240
241 /* Negate a doubleword integer with doubleword result.
242 Return nonzero if the operation overflows, assuming it's signed.
243 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
244 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
245
246 int
247 neg_double (l1, h1, lv, hv)
248 HOST_WIDE_INT l1, h1;
249 HOST_WIDE_INT *lv, *hv;
250 {
251 if (l1 == 0)
252 {
253 *lv = 0;
254 *hv = - h1;
255 return (*hv & h1) < 0;
256 }
257 else
258 {
259 *lv = - l1;
260 *hv = ~ h1;
261 return 0;
262 }
263 }
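/* Two's complement negation is ~x + 1.  When the low word is nonzero
   the "+ 1" is absorbed by the low word, so the high word is simply
   complemented; when the low word is zero the increment carries all the
   way up, hence *hv = -h1 in the first branch.  The only signed
   overflow is negating the most negative value: e.g. (0, 0x80000000)
   with 32-bit pieces negates to itself, and (*hv & h1) < 0 then
   reports it. */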
264 \f
265 /* Multiply two doubleword integers with doubleword result.
266 Return nonzero if the operation overflows, assuming it's signed.
267 Each argument is given as two `HOST_WIDE_INT' pieces.
268 One argument is L1 and H1; the other, L2 and H2.
269 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
270
271 int
272 mul_double (l1, h1, l2, h2, lv, hv)
273 HOST_WIDE_INT l1, h1, l2, h2;
274 HOST_WIDE_INT *lv, *hv;
275 {
276 HOST_WIDE_INT arg1[4];
277 HOST_WIDE_INT arg2[4];
278 HOST_WIDE_INT prod[4 * 2];
279 register unsigned HOST_WIDE_INT carry;
280 register int i, j, k;
281 HOST_WIDE_INT toplow, tophigh, neglow, neghigh;
282
283 encode (arg1, l1, h1);
284 encode (arg2, l2, h2);
285
286 bzero ((char *) prod, sizeof prod);
287
288 for (i = 0; i < 4; i++)
289 {
290 carry = 0;
291 for (j = 0; j < 4; j++)
292 {
293 k = i + j;
294 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
295 carry += arg1[i] * arg2[j];
296 /* Since prod[k] <= 0xFFFF, this sum <= 0xFFFFFFFF. */
297 carry += prod[k];
298 prod[k] = LOWPART (carry);
299 carry = HIGHPART (carry);
300 }
301 prod[i + 4] = carry;
302 }
303
304 decode (prod, lv, hv); /* This ignores prod[4] through prod[4*2-1] */
305
306 /* Check for overflow by calculating the top half of the answer in full;
307 it should agree with the low half's sign bit. */
308 decode (prod+4, &toplow, &tophigh);
309 if (h1 < 0)
310 {
311 neg_double (l2, h2, &neglow, &neghigh);
312 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
313 }
314 if (h2 < 0)
315 {
316 neg_double (l1, h1, &neglow, &neghigh);
317 add_double (neglow, neghigh, toplow, tophigh, &toplow, &tophigh);
318 }
319 return (*hv < 0 ? ~(toplow & tophigh) : toplow | tophigh) != 0;
320 }
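/* The loops above perform schoolbook long multiplication in base
   BASE == 2**(HOST_BITS_PER_WIDE_INT/2), adding the partial product of
   digits i and j into position i + j.  Small illustration with 16-bit
   digits: 0x00010001 * 0x00010001 has digit vector {1, 1, 0, 0}, so the
   product digits come out {1, 2, 1, 0, ...}, i.e. low word 0x00020001
   and high word 1 -- the exact value 0x100020001. */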
321 \f
322 /* Shift the doubleword integer in L1, H1 left by COUNT places
323 keeping only PREC bits of result.
324 Shift right if COUNT is negative.
325 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
326 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
327
328 void
329 lshift_double (l1, h1, count, prec, lv, hv, arith)
330 HOST_WIDE_INT l1, h1, count;
331 int prec;
332 HOST_WIDE_INT *lv, *hv;
333 int arith;
334 {
335 if (count < 0)
336 {
337 rshift_double (l1, h1, - count, prec, lv, hv, arith);
338 return;
339 }
340
341 #ifdef SHIFT_COUNT_TRUNCATED
342 if (SHIFT_COUNT_TRUNCATED)
343 count %= prec;
344 #endif
345
346 if (count >= HOST_BITS_PER_WIDE_INT)
347 {
348 *hv = (unsigned HOST_WIDE_INT) l1 << count - HOST_BITS_PER_WIDE_INT;
349 *lv = 0;
350 }
351 else
352 {
353 *hv = (((unsigned HOST_WIDE_INT) h1 << count)
354 | ((unsigned HOST_WIDE_INT) l1 >> HOST_BITS_PER_WIDE_INT - count - 1 >> 1));
355 *lv = (unsigned HOST_WIDE_INT) l1 << count;
356 }
357 }
358
359 /* Shift the doubleword integer in L1, H1 right by COUNT places
360 keeping only PREC bits of result. COUNT must be positive.
361 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
362 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
363
364 void
365 rshift_double (l1, h1, count, prec, lv, hv, arith)
366 HOST_WIDE_INT l1, h1, count;
367 int prec;
368 HOST_WIDE_INT *lv, *hv;
369 int arith;
370 {
371 unsigned HOST_WIDE_INT signmask;
372 signmask = (arith
373 ? -((unsigned HOST_WIDE_INT) h1 >> (HOST_BITS_PER_WIDE_INT - 1))
374 : 0);
375
376 #ifdef SHIFT_COUNT_TRUNCATED
377 if (SHIFT_COUNT_TRUNCATED)
378 count %= prec;
379 #endif
380
381 if (count >= HOST_BITS_PER_WIDE_INT)
382 {
383 *hv = signmask;
384 *lv = ((signmask << 2 * HOST_BITS_PER_WIDE_INT - count - 1 << 1)
385 | ((unsigned HOST_WIDE_INT) h1 >> count - HOST_BITS_PER_WIDE_INT));
386 }
387 else
388 {
389 *lv = (((unsigned HOST_WIDE_INT) l1 >> count)
390 | ((unsigned HOST_WIDE_INT) h1 << HOST_BITS_PER_WIDE_INT - count - 1 << 1));
391 *hv = ((signmask << HOST_BITS_PER_WIDE_INT - count)
392 | ((unsigned HOST_WIDE_INT) h1 >> count));
393 }
394 }
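/* Note the split shifts above, e.g.
     h1 << HOST_BITS_PER_WIDE_INT - count - 1 << 1
   which move the bits by HOST_BITS_PER_WIDE_INT - count positions in
   two steps.  COUNT may be 0 here, and shifting a value by its full
   width is undefined in C, so each individual shift amount is kept
   strictly below the word width.  SIGNMASK is all ones exactly when an
   arithmetic shift of a negative value must fill the vacated high bits
   with ones. */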
395 \f
396 /* Rotate the doubleword integer in L1, H1 left by COUNT places
397 keeping only PREC bits of result.
398 Rotate right if COUNT is negative.
399 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
400
401 void
402 lrotate_double (l1, h1, count, prec, lv, hv)
403 HOST_WIDE_INT l1, h1, count;
404 int prec;
405 HOST_WIDE_INT *lv, *hv;
406 {
407 HOST_WIDE_INT s1l, s1h, s2l, s2h;
408
409 count %= prec;
410 if (count < 0)
411 count += prec;
412
413 lshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
414 rshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
415 *lv = s1l | s2l;
416 *hv = s1h | s2h;
417 }
418
419 /* Rotate the doubleword integer in L1, H1 left by COUNT places
420 keeping only PREC bits of result. COUNT must be positive.
421 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
422
423 void
424 rrotate_double (l1, h1, count, prec, lv, hv)
425 HOST_WIDE_INT l1, h1, count;
426 int prec;
427 HOST_WIDE_INT *lv, *hv;
428 {
429 HOST_WIDE_INT s1l, s1h, s2l, s2h;
430
431 count %= prec;
432 if (count < 0)
433 count += prec;
434
435 rshift_double (l1, h1, count, prec, &s1l, &s1h, 0);
436 lshift_double (l1, h1, prec - count, prec, &s2l, &s2h, 0);
437 *lv = s1l | s2l;
438 *hv = s1h | s2h;
439 }
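/* Both rotates use the identity
     rotate_left (x, n) == (x << n) | (x >> (prec - n))   (logical shifts)
   after reducing COUNT into [0, prec).  E.g. for prec == 8, rotating
   0x81 left by 1 gives (0x02) | (0x01) == 0x03. */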
440 \f
441 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
442 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
443 CODE is a tree code for a kind of division, one of
444 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
445 or EXACT_DIV_EXPR.
446 It controls how the quotient is rounded to an integer.
447 Return nonzero if the operation overflows.
448 UNS nonzero says do unsigned division. */
449
450 int
451 div_and_round_double (code, uns,
452 lnum_orig, hnum_orig, lden_orig, hden_orig,
453 lquo, hquo, lrem, hrem)
454 enum tree_code code;
455 int uns;
456 HOST_WIDE_INT lnum_orig, hnum_orig; /* num == numerator == dividend */
457 HOST_WIDE_INT lden_orig, hden_orig; /* den == denominator == divisor */
458 HOST_WIDE_INT *lquo, *hquo, *lrem, *hrem;
459 {
460 int quo_neg = 0;
461 HOST_WIDE_INT num[4 + 1]; /* extra element for scaling. */
462 HOST_WIDE_INT den[4], quo[4];
463 register int i, j;
464 unsigned HOST_WIDE_INT work;
465 register unsigned HOST_WIDE_INT carry = 0;
466 HOST_WIDE_INT lnum = lnum_orig;
467 HOST_WIDE_INT hnum = hnum_orig;
468 HOST_WIDE_INT lden = lden_orig;
469 HOST_WIDE_INT hden = hden_orig;
470 int overflow = 0;
471
472 if ((hden == 0) && (lden == 0))
473 abort ();
474
475 /* calculate quotient sign and convert operands to unsigned. */
476 if (!uns)
477 {
478 if (hnum < 0)
479 {
480 quo_neg = ~ quo_neg;
481 /* (minimum integer) / (-1) is the only overflow case. */
482 if (neg_double (lnum, hnum, &lnum, &hnum) && (lden & hden) == -1)
483 overflow = 1;
484 }
485 if (hden < 0)
486 {
487 quo_neg = ~ quo_neg;
488 neg_double (lden, hden, &lden, &hden);
489 }
490 }
491
492 if (hnum == 0 && hden == 0)
493 { /* single precision */
494 *hquo = *hrem = 0;
495 /* This unsigned division rounds toward zero. */
496 *lquo = lnum / (unsigned HOST_WIDE_INT) lden;
497 goto finish_up;
498 }
499
500 if (hnum == 0)
501 { /* trivial case: dividend < divisor */
502 /* hden != 0 already checked. */
503 *hquo = *lquo = 0;
504 *hrem = hnum;
505 *lrem = lnum;
506 goto finish_up;
507 }
508
509 bzero ((char *) quo, sizeof quo);
510
511 bzero ((char *) num, sizeof num); /* to zero the extra scaling element */
512 bzero ((char *) den, sizeof den);
513
514 encode (num, lnum, hnum);
515 encode (den, lden, hden);
516
517 /* Special code for when the divisor < BASE. */
518 if (hden == 0 && lden < BASE)
519 {
520 /* hnum != 0 already checked. */
521 for (i = 4 - 1; i >= 0; i--)
522 {
523 work = num[i] + carry * BASE;
524 quo[i] = work / (unsigned HOST_WIDE_INT) lden;
525 carry = work % (unsigned HOST_WIDE_INT) lden;
526 }
527 }
528 else
529 {
530 /* Full double precision division,
531 with thanks to Don Knuth's "Seminumerical Algorithms". */
532 int num_hi_sig, den_hi_sig;
533 unsigned HOST_WIDE_INT quo_est, scale;
534
535 /* Find the highest non-zero divisor digit. */
536 for (i = 4 - 1; ; i--)
537 if (den[i] != 0) {
538 den_hi_sig = i;
539 break;
540 }
541
542 /* Ensure that the first digit of the divisor is at least BASE/2.
543 This is required by the quotient digit estimation algorithm. */
544
545 scale = BASE / (den[den_hi_sig] + 1);
546 if (scale > 1) { /* scale divisor and dividend */
547 carry = 0;
548 for (i = 0; i <= 4 - 1; i++) {
549 work = (num[i] * scale) + carry;
550 num[i] = LOWPART (work);
551 carry = HIGHPART (work);
552 } num[4] = carry;
553 carry = 0;
554 for (i = 0; i <= 4 - 1; i++) {
555 work = (den[i] * scale) + carry;
556 den[i] = LOWPART (work);
557 carry = HIGHPART (work);
558 if (den[i] != 0) den_hi_sig = i;
559 }
560 }
561
562 num_hi_sig = 4;
563
564 /* Main loop */
565 for (i = num_hi_sig - den_hi_sig - 1; i >= 0; i--) {
566 /* Guess the next quotient digit, quo_est, by dividing the first
567 two remaining dividend digits by the high order divisor digit.
568 quo_est is never low and is at most 2 high. */
569 unsigned HOST_WIDE_INT tmp;
570
571 num_hi_sig = i + den_hi_sig + 1;
572 work = num[num_hi_sig] * BASE + num[num_hi_sig - 1];
573 if (num[num_hi_sig] != den[den_hi_sig])
574 quo_est = work / den[den_hi_sig];
575 else
576 quo_est = BASE - 1;
577
578 /* refine quo_est so it's usually correct, and at most one high. */
579 tmp = work - quo_est * den[den_hi_sig];
580 if (tmp < BASE
581 && den[den_hi_sig - 1] * quo_est > (tmp * BASE + num[num_hi_sig - 2]))
582 quo_est--;
583
584 /* Try QUO_EST as the quotient digit, by multiplying the
585 divisor by QUO_EST and subtracting from the remaining dividend.
586 Keep in mind that QUO_EST is the I - 1st digit. */
587
588 carry = 0;
589 for (j = 0; j <= den_hi_sig; j++)
590 {
591 work = quo_est * den[j] + carry;
592 carry = HIGHPART (work);
593 work = num[i + j] - LOWPART (work);
594 num[i + j] = LOWPART (work);
595 carry += HIGHPART (work) != 0;
596 }
597
598 /* if quo_est was high by one, then num[i] went negative and
599 we need to correct things. */
600
601 if (num[num_hi_sig] < carry)
602 {
603 quo_est--;
604 carry = 0; /* add divisor back in */
605 for (j = 0; j <= den_hi_sig; j++)
606 {
607 work = num[i + j] + den[j] + carry;
608 carry = HIGHPART (work);
609 num[i + j] = LOWPART (work);
610 }
611 num [num_hi_sig] += carry;
612 }
613
614 /* store the quotient digit. */
615 quo[i] = quo_est;
616 }
617 }
618
619 decode (quo, lquo, hquo);
620
621 finish_up:
622 /* If the quotient should be negative, negate it. */
623 if (quo_neg)
624 neg_double (*lquo, *hquo, lquo, hquo);
625
626 /* compute trial remainder: rem = num - (quo * den) */
627 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
628 neg_double (*lrem, *hrem, lrem, hrem);
629 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
630
631 switch (code)
632 {
633 case TRUNC_DIV_EXPR:
634 case TRUNC_MOD_EXPR: /* round toward zero */
635 case EXACT_DIV_EXPR: /* for this one, it shouldn't matter */
636 return overflow;
637
638 case FLOOR_DIV_EXPR:
639 case FLOOR_MOD_EXPR: /* round toward negative infinity */
640 if (quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio < 0 && rem != 0 */
641 {
642 /* quo = quo - 1; */
643 add_double (*lquo, *hquo, (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1,
644 lquo, hquo);
645 }
646 else return overflow;
647 break;
648
649 case CEIL_DIV_EXPR:
650 case CEIL_MOD_EXPR: /* round toward positive infinity */
651 if (!quo_neg && (*lrem != 0 || *hrem != 0)) /* ratio > 0 && rem != 0 */
652 {
653 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
654 lquo, hquo);
655 }
656 else return overflow;
657 break;
658
659 case ROUND_DIV_EXPR:
660 case ROUND_MOD_EXPR: /* round to closest integer */
661 {
662 HOST_WIDE_INT labs_rem = *lrem, habs_rem = *hrem;
663 HOST_WIDE_INT labs_den = lden, habs_den = hden, ltwice, htwice;
664
665 /* get absolute values */
666 if (*hrem < 0) neg_double (*lrem, *hrem, &labs_rem, &habs_rem);
667 if (hden < 0) neg_double (lden, hden, &labs_den, &habs_den);
668
669 /* if (2 * abs (lrem) >= abs (lden)) */
670 mul_double ((HOST_WIDE_INT) 2, (HOST_WIDE_INT) 0,
671 labs_rem, habs_rem, &ltwice, &htwice);
672 if (((unsigned HOST_WIDE_INT) habs_den
673 < (unsigned HOST_WIDE_INT) htwice)
674 || (((unsigned HOST_WIDE_INT) habs_den
675 == (unsigned HOST_WIDE_INT) htwice)
676 && ((unsigned HOST_WIDE_INT) labs_den
677 < (unsigned HOST_WIDE_INT) ltwice)))
678 {
679 if (*hquo < 0)
680 /* quo = quo - 1; */
681 add_double (*lquo, *hquo,
682 (HOST_WIDE_INT) -1, (HOST_WIDE_INT) -1, lquo, hquo);
683 else
684 /* quo = quo + 1; */
685 add_double (*lquo, *hquo, (HOST_WIDE_INT) 1, (HOST_WIDE_INT) 0,
686 lquo, hquo);
687 }
688 else return overflow;
689 }
690 break;
691
692 default:
693 abort ();
694 }
695
696 /* compute true remainder: rem = num - (quo * den) */
697 mul_double (*lquo, *hquo, lden_orig, hden_orig, lrem, hrem);
698 neg_double (*lrem, *hrem, lrem, hrem);
699 add_double (lnum_orig, hnum_orig, *lrem, *hrem, lrem, hrem);
700 return overflow;
701 }
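/* Worked example of the rounding modes for -8 / 3 (trial quotient -2,
   trial remainder -2):

     TRUNC_DIV_EXPR  ->  quotient -2, remainder -2  (toward zero)
     FLOOR_DIV_EXPR  ->  quotient -3, remainder  1  (toward -infinity)
     CEIL_DIV_EXPR   ->  quotient -2, remainder -2  (only positive
                         quotients are bumped toward +infinity)
     ROUND_DIV_EXPR  ->  quotient -3, remainder  1  (2 * |-2| > |3|,
                         so round away from zero)

   The remainders are recomputed at the end as num - quo * den, so the
   identity quo * den + rem == num holds in every mode. */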
702 \f
703 #ifndef REAL_ARITHMETIC
704 /* Effectively truncate a real value to represent the nearest possible value
705 in a narrower mode. The result is actually represented in the same data
706 type as the argument, but its value is usually different.
707
708 A trap may occur during the FP operations and it is the responsibility
709 of the calling function to have a handler established. */
710
711 REAL_VALUE_TYPE
712 real_value_truncate (mode, arg)
713 enum machine_mode mode;
714 REAL_VALUE_TYPE arg;
715 {
716 return REAL_VALUE_TRUNCATE (mode, arg);
717 }
718
719 #if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
720
721 /* Check for infinity in an IEEE double precision number. */
722
723 int
724 target_isinf (x)
725 REAL_VALUE_TYPE x;
726 {
727 /* The IEEE 64-bit double format. */
728 union {
729 REAL_VALUE_TYPE d;
730 struct {
731 unsigned sign : 1;
732 unsigned exponent : 11;
733 unsigned mantissa1 : 20;
734 unsigned mantissa2;
735 } little_endian;
736 struct {
737 unsigned mantissa2;
738 unsigned mantissa1 : 20;
739 unsigned exponent : 11;
740 unsigned sign : 1;
741 } big_endian;
742 } u;
743
744 u.d = dconstm1;
745 if (u.big_endian.sign == 1)
746 {
747 u.d = x;
748 return (u.big_endian.exponent == 2047
749 && u.big_endian.mantissa1 == 0
750 && u.big_endian.mantissa2 == 0);
751 }
752 else
753 {
754 u.d = x;
755 return (u.little_endian.exponent == 2047
756 && u.little_endian.mantissa1 == 0
757 && u.little_endian.mantissa2 == 0);
758 }
759 }
760
761 /* Check whether an IEEE double precision number is a NaN. */
762
763 int
764 target_isnan (x)
765 REAL_VALUE_TYPE x;
766 {
767 /* The IEEE 64-bit double format. */
768 union {
769 REAL_VALUE_TYPE d;
770 struct {
771 unsigned sign : 1;
772 unsigned exponent : 11;
773 unsigned mantissa1 : 20;
774 unsigned mantissa2;
775 } little_endian;
776 struct {
777 unsigned mantissa2;
778 unsigned mantissa1 : 20;
779 unsigned exponent : 11;
780 unsigned sign : 1;
781 } big_endian;
782 } u;
783
784 u.d = dconstm1;
785 if (u.big_endian.sign == 1)
786 {
787 u.d = x;
788 return (u.big_endian.exponent == 2047
789 && (u.big_endian.mantissa1 != 0
790 || u.big_endian.mantissa2 != 0));
791 }
792 else
793 {
794 u.d = x;
795 return (u.little_endian.exponent == 2047
796 && (u.little_endian.mantissa1 != 0
797 || u.little_endian.mantissa2 != 0));
798 }
799 }
800
801 /* Check for a negative IEEE double precision number. */
802
803 int
804 target_negative (x)
805 REAL_VALUE_TYPE x;
806 {
807 /* The IEEE 64-bit double format. */
808 union {
809 REAL_VALUE_TYPE d;
810 struct {
811 unsigned sign : 1;
812 unsigned exponent : 11;
813 unsigned mantissa1 : 20;
814 unsigned mantissa2;
815 } little_endian;
816 struct {
817 unsigned mantissa2;
818 unsigned mantissa1 : 20;
819 unsigned exponent : 11;
820 unsigned sign : 1;
821 } big_endian;
822 } u;
823
824 u.d = dconstm1;
825 if (u.big_endian.sign == 1)
826 {
827 u.d = x;
828 return u.big_endian.sign;
829 }
830 else
831 {
832 u.d = x;
833 return u.little_endian.sign;
834 }
835 }
836 #else /* Target not IEEE */
837
838 /* Let's assume other float formats don't have infinity.
839 (This can be overridden by redefining REAL_VALUE_ISINF.) */
840
841 target_isinf (x)
842 REAL_VALUE_TYPE x;
843 {
844 return 0;
845 }
846
847 /* Let's assume other float formats don't have NaNs.
848 (This can be overridden by redefining REAL_VALUE_ISNAN.) */
849
850 target_isnan (x)
851 REAL_VALUE_TYPE x;
852 {
853 return 0;
854 }
855
856 /* Let's assume other float formats don't have minus zero.
857 (This can be overridden by redefining REAL_VALUE_NEGATIVE.) */
858
859 target_negative (x)
860 REAL_VALUE_TYPE x;
861 {
862 return x < 0;
863 }
864 #endif /* Target not IEEE */
865 #endif /* no REAL_ARITHMETIC */
866 \f
867 /* Split a tree IN into a constant and a variable part
868 that could be combined with CODE to make IN.
869 CODE must be a commutative arithmetic operation.
870 Store the constant part into *CONP and the variable in *VARP.
871 Return 1 if this was done; zero means the tree IN did not decompose
872 this way.
873
874 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.
875 Therefore, we must tell the caller whether the variable part
876 was subtracted. We do this by storing 1 or -1 into *VARSIGNP.
877 The value stored is the coefficient for the variable term.
878 The constant term we return should always be added;
879 we negate it if necessary. */
880
881 static int
882 split_tree (in, code, varp, conp, varsignp)
883 tree in;
884 enum tree_code code;
885 tree *varp, *conp;
886 int *varsignp;
887 {
888 register tree outtype = TREE_TYPE (in);
889 *varp = 0;
890 *conp = 0;
891
892 /* Strip any conversions that don't change the machine mode. */
893 while ((TREE_CODE (in) == NOP_EXPR
894 || TREE_CODE (in) == CONVERT_EXPR)
895 && (TYPE_MODE (TREE_TYPE (in))
896 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (in, 0)))))
897 in = TREE_OPERAND (in, 0);
898
899 if (TREE_CODE (in) == code
900 || (! FLOAT_TYPE_P (TREE_TYPE (in))
901 /* We can associate addition and subtraction together
902 (even though the C standard doesn't say so)
903 for integers because the value is not affected.
904 For reals, the value might be affected, so we can't. */
905 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
906 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
907 {
908 enum tree_code code = TREE_CODE (TREE_OPERAND (in, 0));
909 if (code == INTEGER_CST)
910 {
911 *conp = TREE_OPERAND (in, 0);
912 *varp = TREE_OPERAND (in, 1);
913 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
914 && TREE_TYPE (*varp) != outtype)
915 *varp = convert (outtype, *varp);
916 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
917 return 1;
918 }
919 if (TREE_CONSTANT (TREE_OPERAND (in, 1)))
920 {
921 *conp = TREE_OPERAND (in, 1);
922 *varp = TREE_OPERAND (in, 0);
923 *varsignp = 1;
924 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
925 && TREE_TYPE (*varp) != outtype)
926 *varp = convert (outtype, *varp);
927 if (TREE_CODE (in) == MINUS_EXPR)
928 {
929 /* If operation is subtraction and constant is second,
930 must negate it to get an additive constant.
931 And this cannot be done unless it is a manifest constant.
932 It could also be the address of a static variable.
933 We cannot negate that, so give up. */
934 if (TREE_CODE (*conp) == INTEGER_CST)
935 /* Subtracting from integer_zero_node loses for long long. */
936 *conp = fold (build1 (NEGATE_EXPR, TREE_TYPE (*conp), *conp));
937 else
938 return 0;
939 }
940 return 1;
941 }
942 if (TREE_CONSTANT (TREE_OPERAND (in, 0)))
943 {
944 *conp = TREE_OPERAND (in, 0);
945 *varp = TREE_OPERAND (in, 1);
946 if (TYPE_MODE (TREE_TYPE (*varp)) != TYPE_MODE (outtype)
947 && TREE_TYPE (*varp) != outtype)
948 *varp = convert (outtype, *varp);
949 *varsignp = (TREE_CODE (in) == MINUS_EXPR) ? -1 : 1;
950 return 1;
951 }
952 }
953 return 0;
954 }
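/* Example (illustrative): splitting IN = a - 3 with CODE == PLUS_EXPR
   finds the constant as operand 1, so *varp = a and *varsignp = 1, and
   the INTEGER_CST 3 is negated to keep the returned constant additive:
   *conp = -3, since a - 3 == a + (-3).  Splitting IN = 3 - a instead
   yields *conp = 3, *varp = a and *varsignp = -1. */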
955 \f
956 /* Combine two constants ARG1 and ARG2 under operation CODE
957 to produce a new constant.
958 We assume ARG1 and ARG2 have the same data type,
959 or at least are the same kind of constant and the same machine mode.
960
961 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
962
963 static tree
964 const_binop (code, arg1, arg2, notrunc)
965 enum tree_code code;
966 register tree arg1, arg2;
967 int notrunc;
968 {
969 if (TREE_CODE (arg1) == INTEGER_CST)
970 {
971 register HOST_WIDE_INT int1l = TREE_INT_CST_LOW (arg1);
972 register HOST_WIDE_INT int1h = TREE_INT_CST_HIGH (arg1);
973 HOST_WIDE_INT int2l = TREE_INT_CST_LOW (arg2);
974 HOST_WIDE_INT int2h = TREE_INT_CST_HIGH (arg2);
975 HOST_WIDE_INT low, hi;
976 HOST_WIDE_INT garbagel, garbageh;
977 register tree t;
978 int uns = TREE_UNSIGNED (TREE_TYPE (arg1));
979 int overflow = 0;
980
981 switch (code)
982 {
983 case BIT_IOR_EXPR:
984 t = build_int_2 (int1l | int2l, int1h | int2h);
985 break;
986
987 case BIT_XOR_EXPR:
988 t = build_int_2 (int1l ^ int2l, int1h ^ int2h);
989 break;
990
991 case BIT_AND_EXPR:
992 t = build_int_2 (int1l & int2l, int1h & int2h);
993 break;
994
995 case BIT_ANDTC_EXPR:
996 t = build_int_2 (int1l & ~int2l, int1h & ~int2h);
997 break;
998
999 case RSHIFT_EXPR:
1000 int2l = - int2l;
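/* A right shift is a left shift with a negated count: fall through. */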
1001 case LSHIFT_EXPR:
1002 /* It's unclear from the C standard whether shifts can overflow.
1003 The following code ignores overflow; perhaps a C standard
1004 interpretation ruling is needed. */
1005 lshift_double (int1l, int1h, int2l,
1006 TYPE_PRECISION (TREE_TYPE (arg1)),
1007 &low, &hi,
1008 !uns);
1009 t = build_int_2 (low, hi);
1010 TREE_TYPE (t) = TREE_TYPE (arg1);
1011 if (!notrunc)
1012 force_fit_type (t, 0);
1013 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1014 TREE_CONSTANT_OVERFLOW (t)
1015 = TREE_CONSTANT_OVERFLOW (arg1) | TREE_CONSTANT_OVERFLOW (arg2);
1016 return t;
1017
1018 case RROTATE_EXPR:
1019 int2l = - int2l;
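/* Likewise, a right rotate is a left rotate with a negated count: fall through. */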
1020 case LROTATE_EXPR:
1021 lrotate_double (int1l, int1h, int2l,
1022 TYPE_PRECISION (TREE_TYPE (arg1)),
1023 &low, &hi);
1024 t = build_int_2 (low, hi);
1025 break;
1026
1027 case PLUS_EXPR:
1028 if (int1h == 0)
1029 {
1030 int2l += int1l;
1031 if ((unsigned HOST_WIDE_INT) int2l < int1l)
1032 {
1033 hi = int2h++;
1034 overflow = int2h < hi;
1035 }
1036 t = build_int_2 (int2l, int2h);
1037 break;
1038 }
1039 if (int2h == 0)
1040 {
1041 int1l += int2l;
1042 if ((unsigned HOST_WIDE_INT) int1l < int2l)
1043 {
1044 hi = int1h++;
1045 overflow = int1h < hi;
1046 }
1047 t = build_int_2 (int1l, int1h);
1048 break;
1049 }
1050 overflow = add_double (int1l, int1h, int2l, int2h, &low, &hi);
1051 t = build_int_2 (low, hi);
1052 break;
1053
1054 case MINUS_EXPR:
1055 if (int2h == 0 && int2l == 0)
1056 {
1057 t = build_int_2 (int1l, int1h);
1058 break;
1059 }
1060 neg_double (int2l, int2h, &low, &hi);
1061 add_double (int1l, int1h, low, hi, &low, &hi);
1062 overflow = overflow_sum_sign (hi, int2h, int1h);
1063 t = build_int_2 (low, hi);
1064 break;
1065
1066 case MULT_EXPR:
1067 overflow = mul_double (int1l, int1h, int2l, int2h, &low, &hi);
1068 t = build_int_2 (low, hi);
1069 break;
1070
1071 case TRUNC_DIV_EXPR:
1072 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1073 case EXACT_DIV_EXPR:
1074 /* This is a shortcut for a common special case.
1075 It reduces the number of tree nodes generated
1076 and saves time. */
1077 if (int2h == 0 && int2l > 0
1078 && TREE_TYPE (arg1) == sizetype
1079 && int1h == 0 && int1l >= 0)
1080 {
1081 if (code == CEIL_DIV_EXPR)
1082 int1l += int2l-1;
1083 return size_int (int1l / int2l);
1084 }
1085 case ROUND_DIV_EXPR:
1086 if (int2h == 0 && int2l == 1)
1087 {
1088 t = build_int_2 (int1l, int1h);
1089 break;
1090 }
1091 if (int1l == int2l && int1h == int2h)
1092 {
1093 if ((int1l | int1h) == 0)
1094 abort ();
1095 t = build_int_2 (1, 0);
1096 break;
1097 }
1098 overflow = div_and_round_double (code, uns,
1099 int1l, int1h, int2l, int2h,
1100 &low, &hi, &garbagel, &garbageh);
1101 t = build_int_2 (low, hi);
1102 break;
1103
1104 case TRUNC_MOD_EXPR: case ROUND_MOD_EXPR:
1105 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1106 overflow = div_and_round_double (code, uns,
1107 int1l, int1h, int2l, int2h,
1108 &garbagel, &garbageh, &low, &hi);
1109 t = build_int_2 (low, hi);
1110 break;
1111
1112 case MIN_EXPR:
1113 case MAX_EXPR:
1114 if (uns)
1115 {
1116 low = (((unsigned HOST_WIDE_INT) int1h
1117 < (unsigned HOST_WIDE_INT) int2h)
1118 || (((unsigned HOST_WIDE_INT) int1h
1119 == (unsigned HOST_WIDE_INT) int2h)
1120 && ((unsigned HOST_WIDE_INT) int1l
1121 < (unsigned HOST_WIDE_INT) int2l)));
1122 }
1123 else
1124 {
1125 low = ((int1h < int2h)
1126 || ((int1h == int2h)
1127 && ((unsigned HOST_WIDE_INT) int1l
1128 < (unsigned HOST_WIDE_INT) int2l)));
1129 }
1130 if (low == (code == MIN_EXPR))
1131 t = build_int_2 (int1l, int1h);
1132 else
1133 t = build_int_2 (int2l, int2h);
1134 break;
1135
1136 default:
1137 abort ();
1138 }
1139 got_it:
1140 TREE_TYPE (t) = TREE_TYPE (arg1);
1141 TREE_OVERFLOW (t)
1142 = ((notrunc ? !uns && overflow : force_fit_type (t, overflow && !uns))
1143 | TREE_OVERFLOW (arg1)
1144 | TREE_OVERFLOW (arg2));
1145 TREE_CONSTANT_OVERFLOW (t) = (TREE_OVERFLOW (t)
1146 | TREE_CONSTANT_OVERFLOW (arg1)
1147 | TREE_CONSTANT_OVERFLOW (arg2));
1148 return t;
1149 }
1150 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1151 if (TREE_CODE (arg1) == REAL_CST)
1152 {
1153 REAL_VALUE_TYPE d1;
1154 REAL_VALUE_TYPE d2;
1155 int overflow = 0;
1156 REAL_VALUE_TYPE value;
1157 tree t;
1158
1159 d1 = TREE_REAL_CST (arg1);
1160 d2 = TREE_REAL_CST (arg2);
1161
1162 /* If either operand is a NaN, just return it. Otherwise, set up
1163 a floating-point trap handler; if a trap occurs we return an overflow. */
1164 if (REAL_VALUE_ISNAN (d1))
1165 return arg1;
1166 else if (REAL_VALUE_ISNAN (d2))
1167 return arg2;
1168 else if (setjmp (float_error))
1169 {
1170 t = copy_node (arg1);
1171 overflow = 1;
1172 goto got_float;
1173 }
1174
1175 set_float_handler (float_error);
1176
1177 #ifdef REAL_ARITHMETIC
1178 REAL_ARITHMETIC (value, code, d1, d2);
1179 #else
1180 switch (code)
1181 {
1182 case PLUS_EXPR:
1183 value = d1 + d2;
1184 break;
1185
1186 case MINUS_EXPR:
1187 value = d1 - d2;
1188 break;
1189
1190 case MULT_EXPR:
1191 value = d1 * d2;
1192 break;
1193
1194 case RDIV_EXPR:
1195 #ifndef REAL_INFINITY
1196 if (d2 == 0)
1197 abort ();
1198 #endif
1199
1200 value = d1 / d2;
1201 break;
1202
1203 case MIN_EXPR:
1204 value = MIN (d1, d2);
1205 break;
1206
1207 case MAX_EXPR:
1208 value = MAX (d1, d2);
1209 break;
1210
1211 default:
1212 abort ();
1213 }
1214 #endif /* no REAL_ARITHMETIC */
1215 t = build_real (TREE_TYPE (arg1),
1216 real_value_truncate (TYPE_MODE (TREE_TYPE (arg1)), value));
1217 got_float:
1218 set_float_handler (NULL_PTR);
1219
1220 TREE_OVERFLOW (t)
1221 = (force_fit_type (t, overflow)
1222 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1223 TREE_CONSTANT_OVERFLOW (t)
1224 = TREE_OVERFLOW (t)
1225 | TREE_CONSTANT_OVERFLOW (arg1)
1226 | TREE_CONSTANT_OVERFLOW (arg2);
1227 return t;
1228 }
1229 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1230 if (TREE_CODE (arg1) == COMPLEX_CST)
1231 {
1232 register tree r1 = TREE_REALPART (arg1);
1233 register tree i1 = TREE_IMAGPART (arg1);
1234 register tree r2 = TREE_REALPART (arg2);
1235 register tree i2 = TREE_IMAGPART (arg2);
1236 register tree t;
1237
1238 switch (code)
1239 {
1240 case PLUS_EXPR:
1241 t = build_complex (const_binop (PLUS_EXPR, r1, r2, notrunc),
1242 const_binop (PLUS_EXPR, i1, i2, notrunc));
1243 break;
1244
1245 case MINUS_EXPR:
1246 t = build_complex (const_binop (MINUS_EXPR, r1, r2, notrunc),
1247 const_binop (MINUS_EXPR, i1, i2, notrunc));
1248 break;
1249
1250 case MULT_EXPR:
1251 t = build_complex (const_binop (MINUS_EXPR,
1252 const_binop (MULT_EXPR,
1253 r1, r2, notrunc),
1254 const_binop (MULT_EXPR,
1255 i1, i2, notrunc),
1256 notrunc),
1257 const_binop (PLUS_EXPR,
1258 const_binop (MULT_EXPR,
1259 r1, i2, notrunc),
1260 const_binop (MULT_EXPR,
1261 i1, r2, notrunc),
1262 notrunc));
1263 break;
1264
1265 case RDIV_EXPR:
1266 {
1267 register tree magsquared
1268 = const_binop (PLUS_EXPR,
1269 const_binop (MULT_EXPR, r2, r2, notrunc),
1270 const_binop (MULT_EXPR, i2, i2, notrunc),
1271 notrunc);
1272
1273 t = build_complex
1274 (const_binop (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1275 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1276 const_binop (PLUS_EXPR,
1277 const_binop (MULT_EXPR, r1, r2,
1278 notrunc),
1279 const_binop (MULT_EXPR, i1, i2,
1280 notrunc),
1281 notrunc),
1282 magsquared, notrunc),
1283 const_binop (INTEGRAL_TYPE_P (TREE_TYPE (r1))
1284 ? TRUNC_DIV_EXPR : RDIV_EXPR,
1285 const_binop (MINUS_EXPR,
1286 const_binop (MULT_EXPR, i1, r2,
1287 notrunc),
1288 const_binop (MULT_EXPR, r1, i2,
1289 notrunc),
1290 notrunc),
1291 magsquared, notrunc));
1292 }
1293 break;
1294
1295 default:
1296 abort ();
1297 }
1298 TREE_TYPE (t) = TREE_TYPE (arg1);
1299 return t;
1300 }
1301 return 0;
1302 }
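/* The complex RDIV_EXPR case above implements the textbook formula
     (r1 + i1*i) / (r2 + i2*i)
       == ((r1*r2 + i1*i2) + (i1*r2 - r1*i2)*i) / (r2*r2 + i2*i2)
   with each part built from recursive const_binop calls; MAGSQUARED is
   the common denominator r2*r2 + i2*i2. */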
1303 \f
1304 /* Return an INTEGER_CST with value V and type from `sizetype'. */
1305
1306 tree
1307 size_int (number)
1308 unsigned HOST_WIDE_INT number;
1309 {
1310 register tree t;
1311 /* Type-size nodes already made for small sizes. */
1312 static tree size_table[2*HOST_BITS_PER_WIDE_INT + 1];
1313
1314 if (number < 2*HOST_BITS_PER_WIDE_INT + 1
1315 && size_table[number] != 0)
1316 return size_table[number];
1317 if (number < 2*HOST_BITS_PER_WIDE_INT + 1)
1318 {
1319 push_obstacks_nochange ();
1320 /* Make this a permanent node. */
1321 end_temporary_allocation ();
1322 t = build_int_2 (number, 0);
1323 TREE_TYPE (t) = sizetype;
1324 size_table[number] = t;
1325 pop_obstacks ();
1326 }
1327 else
1328 {
1329 t = build_int_2 (number, 0);
1330 TREE_TYPE (t) = sizetype;
1331 }
1332 return t;
1333 }
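/* For example, size_int (4) builds an INTEGER_CST of value 4 with type
   `sizetype'; values up to 2*HOST_BITS_PER_WIDE_INT are allocated once
   as permanent nodes and cached in size_table, so repeated calls return
   the same tree -- such small sizes recur constantly during type
   layout. */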
1334
1335 /* Combine operands OP1 and OP2 with arithmetic operation CODE.
1336 CODE is a tree code. Data type is taken from `sizetype'.
1337 If the operands are constant, so is the result. */
1338
1339 tree
1340 size_binop (code, arg0, arg1)
1341 enum tree_code code;
1342 tree arg0, arg1;
1343 {
1344 /* Handle the special case of two integer constants faster. */
1345 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1346 {
1347 /* And some specific cases even faster than that. */
1348 if (code == PLUS_EXPR
1349 && TREE_INT_CST_LOW (arg0) == 0
1350 && TREE_INT_CST_HIGH (arg0) == 0)
1351 return arg1;
1352 if (code == MINUS_EXPR
1353 && TREE_INT_CST_LOW (arg1) == 0
1354 && TREE_INT_CST_HIGH (arg1) == 0)
1355 return arg0;
1356 if (code == MULT_EXPR
1357 && TREE_INT_CST_LOW (arg0) == 1
1358 && TREE_INT_CST_HIGH (arg0) == 0)
1359 return arg1;
1360 /* Handle general case of two integer constants. */
1361 return const_binop (code, arg0, arg1, 0);
1362 }
1363
1364 if (arg0 == error_mark_node || arg1 == error_mark_node)
1365 return error_mark_node;
1366
1367 return fold (build (code, sizetype, arg0, arg1));
1368 }
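/* Typical use (illustrative): size_binop (PLUS_EXPR, size_int (4),
   size_int (8)) folds directly to an INTEGER_CST 12 via const_binop,
   and size_binop (PLUS_EXPR, size_int (0), size_int (12)) returns the
   second operand unchanged through the fast-path checks above; only
   non-constant operands cause a new expression tree to be built and
   folded. */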
1369 \f
1370 /* Given T, a tree representing type conversion of ARG1, a constant,
1371 return a constant tree representing the result of conversion. */
1372
1373 static tree
1374 fold_convert (t, arg1)
1375 register tree t;
1376 register tree arg1;
1377 {
1378 register tree type = TREE_TYPE (t);
1379 int overflow = 0;
1380
1381 if (TREE_CODE (type) == POINTER_TYPE || INTEGRAL_TYPE_P (type))
1382 {
1383 if (TREE_CODE (arg1) == INTEGER_CST)
1384 {
1385 /* If we would build a constant wider than GCC supports,
1386 leave the conversion unfolded. */
1387 if (TYPE_PRECISION (type) > 2 * HOST_BITS_PER_WIDE_INT)
1388 return t;
1389
1390 /* Given an integer constant, make new constant with new type,
1391 appropriately sign-extended or truncated. */
1392 t = build_int_2 (TREE_INT_CST_LOW (arg1),
1393 TREE_INT_CST_HIGH (arg1));
1394 TREE_TYPE (t) = type;
1395 /* Indicate an overflow if (1) ARG1 already overflowed,
1396 or (2) force_fit_type indicates an overflow.
1397 Tell force_fit_type that an overflow has already occurred
1398 if ARG1 is a too-large unsigned value and T is signed. */
1399 TREE_OVERFLOW (t)
1400 = (TREE_OVERFLOW (arg1)
1401 | force_fit_type (t,
1402 (TREE_INT_CST_HIGH (arg1) < 0
1403 & (TREE_UNSIGNED (type)
1404 < TREE_UNSIGNED (TREE_TYPE (arg1))))));
1405 TREE_CONSTANT_OVERFLOW (t)
1406 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1407 }
1408 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1409 else if (TREE_CODE (arg1) == REAL_CST)
1410 {
1411 /* Don't initialize these, use assignments.
1412 Initialized local aggregates don't work on old compilers. */
1413 REAL_VALUE_TYPE x;
1414 REAL_VALUE_TYPE l;
1415 REAL_VALUE_TYPE u;
1416
1417 x = TREE_REAL_CST (arg1);
1418 l = real_value_from_int_cst (TYPE_MIN_VALUE (type));
1419 u = real_value_from_int_cst (TYPE_MAX_VALUE (type));
1420 /* See if X will be in range after truncation towards 0.
1421 To compensate for truncation, move the bounds away from 0,
1422 but reject if X exactly equals the adjusted bounds. */
1423 #ifdef REAL_ARITHMETIC
1424 REAL_ARITHMETIC (l, MINUS_EXPR, l, dconst1);
1425 REAL_ARITHMETIC (u, PLUS_EXPR, u, dconst1);
1426 #else
1427 l--;
1428 u++;
1429 #endif
1430 /* If X is a NaN, use zero instead and show we have an overflow.
1431 Otherwise, range check. */
1432 if (REAL_VALUE_ISNAN (x))
1433 overflow = 1, x = dconst0;
1434 else if (! (REAL_VALUES_LESS (l, x) && REAL_VALUES_LESS (x, u)))
1435 overflow = 1;
1436
1437 #ifndef REAL_ARITHMETIC
1438 {
1439 HOST_WIDE_INT low, high;
1440 HOST_WIDE_INT half_word
1441 = (HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT / 2);
1442
1443 if (x < 0)
1444 x = -x;
1445
1446 high = (HOST_WIDE_INT) (x / half_word / half_word);
1447 x -= (REAL_VALUE_TYPE) high * half_word * half_word;
1448 if (x >= (REAL_VALUE_TYPE) half_word * half_word / 2)
1449 {
1450 low = x - (REAL_VALUE_TYPE) half_word * half_word / 2;
1451 low |= (HOST_WIDE_INT) -1 << (HOST_BITS_PER_WIDE_INT - 1);
1452 }
1453 else
1454 low = (HOST_WIDE_INT) x;
1455 if (TREE_REAL_CST (arg1) < 0)
1456 neg_double (low, high, &low, &high);
1457 t = build_int_2 (low, high);
1458 }
1459 #else
1460 {
1461 HOST_WIDE_INT low, high;
1462 REAL_VALUE_TO_INT (&low, &high, x);
1463 t = build_int_2 (low, high);
1464 }
1465 #endif
1466 TREE_TYPE (t) = type;
1467 TREE_OVERFLOW (t)
1468 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1469 TREE_CONSTANT_OVERFLOW (t)
1470 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1471 }
1472 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1473 TREE_TYPE (t) = type;
1474 }
1475 else if (TREE_CODE (type) == REAL_TYPE)
1476 {
1477 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1478 if (TREE_CODE (arg1) == INTEGER_CST)
1479 return build_real_from_int_cst (type, arg1);
1480 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1481 if (TREE_CODE (arg1) == REAL_CST)
1482 {
1483 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
1484 {
1485 t = arg1;
1486 TREE_TYPE (arg1) = type;
1487 return t;
1488 }
1489 else if (setjmp (float_error))
1490 {
1491 overflow = 1;
1492 t = copy_node (arg1);
1493 goto got_it;
1494 }
1495 set_float_handler (float_error);
1496
1497 t = build_real (type, real_value_truncate (TYPE_MODE (type),
1498 TREE_REAL_CST (arg1)));
1499 set_float_handler (NULL_PTR);
1500
1501 got_it:
1502 TREE_OVERFLOW (t)
1503 = TREE_OVERFLOW (arg1) | force_fit_type (t, overflow);
1504 TREE_CONSTANT_OVERFLOW (t)
1505 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg1);
1506 return t;
1507 }
1508 }
1509 TREE_CONSTANT (t) = 1;
1510 return t;
1511 }
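/* Behaviour sketch (illustrative): converting the REAL_CST 3.9 to an
   integer type truncates toward zero, which is why the bounds l and u
   above are moved one away from zero before the range check; converting
   a NaN to an integer type yields 0 with TREE_OVERFLOW set; and
   converting a NaN to another REAL_TYPE keeps the NaN, merely rewriting
   its TREE_TYPE, as the early return in the REAL_TYPE case shows. */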
1512 \f
1513 /* Return an expr equal to X but certainly not valid as an lvalue.
1514 Also make sure it is not valid as a null pointer constant. */
1515
1516 tree
1517 non_lvalue (x)
1518 tree x;
1519 {
1520 tree result;
1521
1522 /* These things are certainly not lvalues. */
1523 if (TREE_CODE (x) == NON_LVALUE_EXPR
1524 || TREE_CODE (x) == INTEGER_CST
1525 || TREE_CODE (x) == REAL_CST
1526 || TREE_CODE (x) == STRING_CST
1527 || TREE_CODE (x) == ADDR_EXPR)
1528 {
1529 if (TREE_CODE (x) == INTEGER_CST && integer_zerop (x))
1530 {
1531 /* Use NOP_EXPR instead of NON_LVALUE_EXPR
1532 so convert_for_assignment won't strip it.
1533 This is so this 0 won't be treated as a null pointer constant. */
1534 result = build1 (NOP_EXPR, TREE_TYPE (x), x);
1535 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1536 return result;
1537 }
1538 return x;
1539 }
1540
1541 result = build1 (NON_LVALUE_EXPR, TREE_TYPE (x), x);
1542 TREE_CONSTANT (result) = TREE_CONSTANT (x);
1543 return result;
1544 }
1545
1546 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1547 Zero means allow extended lvalues. */
1548
1549 int pedantic_lvalues;
1550
1551 /* When pedantic, return an expr equal to X but certainly not valid as a
1552 pedantic lvalue. Otherwise, return X. */
1553
1554 tree
1555 pedantic_non_lvalue (x)
1556 tree x;
1557 {
1558 if (pedantic_lvalues)
1559 return non_lvalue (x);
1560 else
1561 return x;
1562 }
1563 \f
1564 /* Given a tree comparison code, return the code that is the logical inverse
1565 of the given code. It is not safe to do this for floating-point
1566 comparisons, except for NE_EXPR and EQ_EXPR. */
1567
1568 static enum tree_code
1569 invert_tree_comparison (code)
1570 enum tree_code code;
1571 {
1572 switch (code)
1573 {
1574 case EQ_EXPR:
1575 return NE_EXPR;
1576 case NE_EXPR:
1577 return EQ_EXPR;
1578 case GT_EXPR:
1579 return LE_EXPR;
1580 case GE_EXPR:
1581 return LT_EXPR;
1582 case LT_EXPR:
1583 return GE_EXPR;
1584 case LE_EXPR:
1585 return GT_EXPR;
1586 default:
1587 abort ();
1588 }
1589 }
1590
1591 /* Similar, but return the comparison that results if the operands are
1592 swapped. This is safe for floating-point. */
1593
1594 static enum tree_code
1595 swap_tree_comparison (code)
1596 enum tree_code code;
1597 {
1598 switch (code)
1599 {
1600 case EQ_EXPR:
1601 case NE_EXPR:
1602 return code;
1603 case GT_EXPR:
1604 return LT_EXPR;
1605 case GE_EXPR:
1606 return LE_EXPR;
1607 case LT_EXPR:
1608 return GT_EXPR;
1609 case LE_EXPR:
1610 return GE_EXPR;
1611 default:
1612 abort ();
1613 }
1614 }
1615
1616 /* Return nonzero if CODE is a tree code that represents a truth value. */
1617
1618 static int
1619 truth_value_p (code)
1620 enum tree_code code;
1621 {
1622 return (TREE_CODE_CLASS (code) == '<'
1623 || code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR
1624 || code == TRUTH_OR_EXPR || code == TRUTH_ORIF_EXPR
1625 || code == TRUTH_XOR_EXPR || code == TRUTH_NOT_EXPR);
1626 }
1627 \f
1628 /* Return nonzero if two operands are necessarily equal.
1629 If ONLY_CONST is non-zero, only return non-zero for constants.
1630 This function tests whether the operands are indistinguishable;
1631 it does not test whether they are equal using C's == operation.
1632 The distinction is important for IEEE floating point, because
1633 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1634 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
1635
1636 int
1637 operand_equal_p (arg0, arg1, only_const)
1638 tree arg0, arg1;
1639 int only_const;
1640 {
1641 /* If both types don't have the same signedness, then we can't consider
1642 them equal. We must check this before the STRIP_NOPS calls
1643 because they may change the signedness of the arguments. */
1644 if (TREE_UNSIGNED (TREE_TYPE (arg0)) != TREE_UNSIGNED (TREE_TYPE (arg1)))
1645 return 0;
1646
1647 STRIP_NOPS (arg0);
1648 STRIP_NOPS (arg1);
1649
1650 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1651 We don't care about side effects in that case because the SAVE_EXPR
1652 takes care of that for us. */
1653 if (TREE_CODE (arg0) == SAVE_EXPR && arg0 == arg1)
1654 return ! only_const;
1655
1656 if (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1))
1657 return 0;
1658
1659 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1660 && TREE_CODE (arg0) == ADDR_EXPR
1661 && TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0))
1662 return 1;
1663
1664 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1665 && TREE_CODE (arg0) == INTEGER_CST
1666 && TREE_INT_CST_LOW (arg0) == TREE_INT_CST_LOW (arg1)
1667 && TREE_INT_CST_HIGH (arg0) == TREE_INT_CST_HIGH (arg1))
1668 return 1;
1669
1670 /* Detect when real constants are equal. */
1671 if (TREE_CODE (arg0) == TREE_CODE (arg1)
1672 && TREE_CODE (arg0) == REAL_CST)
1673 return !bcmp ((char *) &TREE_REAL_CST (arg0),
1674 (char *) &TREE_REAL_CST (arg1),
1675 sizeof (REAL_VALUE_TYPE));
1676
1677 if (only_const)
1678 return 0;
1679
1680 if (arg0 == arg1)
1681 return 1;
1682
1683 if (TREE_CODE (arg0) != TREE_CODE (arg1))
1684 return 0;
1685 /* This is needed for conversions and for COMPONENT_REF.
1686 Might as well play it safe and always test this. */
1687 if (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
1688 return 0;
1689
1690 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
1691 {
1692 case '1':
1693 /* Two conversions are equal only if signedness and modes match. */
1694 if ((TREE_CODE (arg0) == NOP_EXPR || TREE_CODE (arg0) == CONVERT_EXPR)
1695 && (TREE_UNSIGNED (TREE_TYPE (arg0))
1696 != TREE_UNSIGNED (TREE_TYPE (arg1))))
1697 return 0;
1698
1699 return operand_equal_p (TREE_OPERAND (arg0, 0),
1700 TREE_OPERAND (arg1, 0), 0);
1701
1702 case '<':
1703 case '2':
1704 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1705 TREE_OPERAND (arg1, 0), 0)
1706 && operand_equal_p (TREE_OPERAND (arg0, 1),
1707 TREE_OPERAND (arg1, 1), 0));
1708
1709 case 'r':
1710 switch (TREE_CODE (arg0))
1711 {
1712 case INDIRECT_REF:
1713 return operand_equal_p (TREE_OPERAND (arg0, 0),
1714 TREE_OPERAND (arg1, 0), 0);
1715
1716 case COMPONENT_REF:
1717 case ARRAY_REF:
1718 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1719 TREE_OPERAND (arg1, 0), 0)
1720 && operand_equal_p (TREE_OPERAND (arg0, 1),
1721 TREE_OPERAND (arg1, 1), 0));
1722
1723 case BIT_FIELD_REF:
1724 return (operand_equal_p (TREE_OPERAND (arg0, 0),
1725 TREE_OPERAND (arg1, 0), 0)
1726 && operand_equal_p (TREE_OPERAND (arg0, 1),
1727 TREE_OPERAND (arg1, 1), 0)
1728 && operand_equal_p (TREE_OPERAND (arg0, 2),
1729 TREE_OPERAND (arg1, 2), 0));
1730 }
1731 break;
1732 }
1733
1734 return 0;
1735 }
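/* Example of the IEEE distinction documented above: for the REAL_CSTs
   -0.0 and 0.0, the bcmp of bit patterns makes operand_equal_p answer 0
   (they are distinguishable), even though -0.0 == 0.0 in C arithmetic;
   conversely, two identically-encoded NaN constants compare equal here
   although NaN != NaN at run time. */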
1736 \f
1737 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1738 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1739
1740 When in doubt, return 0. */
1741
1742 static int
1743 operand_equal_for_comparison_p (arg0, arg1, other)
1744 tree arg0, arg1;
1745 tree other;
1746 {
1747 int unsignedp1, unsignedpo;
1748 tree primarg1, primother;
1749 unsigned correct_width;
1750
1751 if (operand_equal_p (arg0, arg1, 0))
1752 return 1;
1753
1754 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
1755 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
1756 return 0;
1757
1758 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
1759 actual comparison operand, ARG0.
1760
1761 First throw away any conversions to wider types
1762 already present in the operands. */
1763
1764 primarg1 = get_narrower (arg1, &unsignedp1);
1765 primother = get_narrower (other, &unsignedpo);
1766
1767 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
1768 if (unsignedp1 == unsignedpo
1769 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
1770 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
1771 {
1772 tree type = TREE_TYPE (arg0);
1773
1774 /* Make sure shorter operand is extended the right way
1775 to match the longer operand. */
1776 primarg1 = convert (signed_or_unsigned_type (unsignedp1,
1777 TREE_TYPE (primarg1)),
1778 primarg1);
1779
1780 if (operand_equal_p (arg0, convert (type, primarg1), 0))
1781 return 1;
1782 }
1783
1784 return 0;
1785 }
1786 \f
1787 /* See if ARG is an expression that is either a comparison or is performing
1788 arithmetic on comparisons. The comparisons must only be comparing
1789 two different values, which will be stored in *CVAL1 and *CVAL2; if
1790 they are non-zero it means that some operands have already been found.
1791 No variables may be used anywhere else in the expression except in the
1792 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
1793 the expression and save_expr needs to be called with CVAL1 and CVAL2.
1794
1795 If this is true, return 1. Otherwise, return zero. */
1796
1797 static int
1798 twoval_comparison_p (arg, cval1, cval2, save_p)
1799 tree arg;
1800 tree *cval1, *cval2;
1801 int *save_p;
1802 {
1803 enum tree_code code = TREE_CODE (arg);
1804 char class = TREE_CODE_CLASS (code);
1805
1806 /* We can handle some of the 'e' cases here. */
1807 if (class == 'e' && code == TRUTH_NOT_EXPR)
1808 class = '1';
1809 else if (class == 'e'
1810 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
1811 || code == COMPOUND_EXPR))
1812 class = '2';
1813
1814 /* ??? Disable this since the SAVE_EXPR might already be in use outside
1815 the expression. There may be no way to make this work, but it needs
1816 to be looked at again for 2.6. */
1817 #if 0
1818 else if (class == 'e' && code == SAVE_EXPR && SAVE_EXPR_RTL (arg) == 0)
1819 {
1820 /* If we've already found a CVAL1 or CVAL2, this expression is
1821 too complex to handle. */
1822 if (*cval1 || *cval2)
1823 return 0;
1824
1825 class = '1';
1826 *save_p = 1;
1827 }
1828 #endif
1829
1830 switch (class)
1831 {
1832 case '1':
1833 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
1834
1835 case '2':
1836 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
1837 && twoval_comparison_p (TREE_OPERAND (arg, 1),
1838 cval1, cval2, save_p));
1839
1840 case 'c':
1841 return 1;
1842
1843 case 'e':
1844 if (code == COND_EXPR)
1845 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
1846 cval1, cval2, save_p)
1847 && twoval_comparison_p (TREE_OPERAND (arg, 1),
1848 cval1, cval2, save_p)
1849 && twoval_comparison_p (TREE_OPERAND (arg, 2),
1850 cval1, cval2, save_p));
1851 return 0;
1852
1853 case '<':
1854 /* First see if we can handle the first operand, then the second. For
1855 the second operand, we know *CVAL1 can't be zero. It must be that
1856 one side of the comparison is each of the values; test for the
1857 case where this isn't true by failing if the two operands
1858 are the same. */
1859
1860 if (operand_equal_p (TREE_OPERAND (arg, 0),
1861 TREE_OPERAND (arg, 1), 0))
1862 return 0;
1863
1864 if (*cval1 == 0)
1865 *cval1 = TREE_OPERAND (arg, 0);
1866 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
1867 ;
1868 else if (*cval2 == 0)
1869 *cval2 = TREE_OPERAND (arg, 0);
1870 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
1871 ;
1872 else
1873 return 0;
1874
1875 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
1876 ;
1877 else if (*cval2 == 0)
1878 *cval2 = TREE_OPERAND (arg, 1);
1879 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
1880 ;
1881 else
1882 return 0;
1883
1884 return 1;
1885 }
1886
1887 return 0;
1888 }
1889 \f
1890 /* ARG is a tree that is known to contain just arithmetic operations and
1891 comparisons. Evaluate the operations in the tree substituting NEW0 for
1892 any occurrence of OLD0 as an operand of a comparison and likewise for
1893 NEW1 and OLD1. */
1894
1895 static tree
1896 eval_subst (arg, old0, new0, old1, new1)
1897 tree arg;
1898 tree old0, new0, old1, new1;
1899 {
1900 tree type = TREE_TYPE (arg);
1901 enum tree_code code = TREE_CODE (arg);
1902 char class = TREE_CODE_CLASS (code);
1903
1904 /* We can handle some of the 'e' cases here. */
1905 if (class == 'e' && code == TRUTH_NOT_EXPR)
1906 class = '1';
1907 else if (class == 'e'
1908 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
1909 class = '2';
1910
1911 switch (class)
1912 {
1913 case '1':
1914 return fold (build1 (code, type,
1915 eval_subst (TREE_OPERAND (arg, 0),
1916 old0, new0, old1, new1)));
1917
1918 case '2':
1919 return fold (build (code, type,
1920 eval_subst (TREE_OPERAND (arg, 0),
1921 old0, new0, old1, new1),
1922 eval_subst (TREE_OPERAND (arg, 1),
1923 old0, new0, old1, new1)));
1924
1925 case 'e':
1926 switch (code)
1927 {
1928 case SAVE_EXPR:
1929 return eval_subst (TREE_OPERAND (arg, 0), old0, new0, old1, new1);
1930
1931 case COMPOUND_EXPR:
1932 return eval_subst (TREE_OPERAND (arg, 1), old0, new0, old1, new1);
1933
1934 case COND_EXPR:
1935 return fold (build (code, type,
1936 eval_subst (TREE_OPERAND (arg, 0),
1937 old0, new0, old1, new1),
1938 eval_subst (TREE_OPERAND (arg, 1),
1939 old0, new0, old1, new1),
1940 eval_subst (TREE_OPERAND (arg, 2),
1941 old0, new0, old1, new1)));
1942 }
1943
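/* For any other 'e' code, control falls through to the comparison case. */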
1944 case '<':
1945 {
1946 tree arg0 = TREE_OPERAND (arg, 0);
1947 tree arg1 = TREE_OPERAND (arg, 1);
1948
1949 /* We need to check both for exact equality and tree equality. The
1950 former will be true if the operand has a side-effect. In that
1951 case, we know the operand occurred exactly once. */
1952
1953 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
1954 arg0 = new0;
1955 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
1956 arg0 = new1;
1957
1958 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
1959 arg1 = new0;
1960 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
1961 arg1 = new1;
1962
1963 return fold (build (code, type, arg0, arg1));
1964 }
1965 }
1966
1967 return arg;
1968 }
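
/* Illustrative sketch, not part of this file: eval_subst paired with
   twoval_comparison_p above. If ARG is equivalent to "x < y || x == y"
   and OLD0/OLD1 are the x/y found by twoval_comparison_p, then
   eval_subst (arg, x, a, y, b) folds the tree for "a < b || a == b".
   Only operands of comparisons are replaced; the arithmetic structure
   of ARG is kept. */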
1969 \f
1970 /* Return a tree for the case when the result of an expression is RESULT
1971 converted to TYPE and OMITTED was previously an operand of the expression
1972 but is now not needed (e.g., we folded OMITTED * 0).
1973
1974 If OMITTED has side effects, we must evaluate it. Otherwise, just do
1975 the conversion of RESULT to TYPE. */
1976
1977 static tree
1978 omit_one_operand (type, result, omitted)
1979 tree type, result, omitted;
1980 {
1981 tree t = convert (type, result);
1982
1983 if (TREE_SIDE_EFFECTS (omitted))
1984 return build (COMPOUND_EXPR, type, omitted, t);
1985
1986 return non_lvalue (t);
1987 }
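
/* Illustrative sketch, not part of this file: why the COMPOUND_EXPR is
   needed. Folding "f () * 0" straight to 0 would drop the call, so
   the folder instead builds the tree for "(f (), 0)". CALL below is a
   hypothetical tree for the call f (). */
#if 0
tree zero = omit_one_operand (integer_type_node, integer_zero_node, call);
/* ZERO evaluates f () for its side effects, then yields 0, and is not
   an lvalue. */
#endif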
1988
1989 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
1990
1991 static tree
1992 pedantic_omit_one_operand (type, result, omitted)
1993 tree type, result, omitted;
1994 {
1995 tree t = convert (type, result);
1996
1997 if (TREE_SIDE_EFFECTS (omitted))
1998 return build (COMPOUND_EXPR, type, omitted, t);
1999
2000 return pedantic_non_lvalue (t);
2001 }
2002
2003
2004 \f
2005 /* Return a simplified tree node for the truth-negation of ARG. This
2006 never alters ARG itself. We assume that ARG is an operation that
2007 returns a truth value (0 or 1). */
2008
2009 tree
2010 invert_truthvalue (arg)
2011 tree arg;
2012 {
2013 tree type = TREE_TYPE (arg);
2014 enum tree_code code = TREE_CODE (arg);
2015
2016 if (code == ERROR_MARK)
2017 return arg;
2018
2019 /* If this is a comparison, we can simply invert it, except for
2020 floating-point non-equality comparisons, in which case we just
2021 enclose a TRUTH_NOT_EXPR around what we have. */
2022
2023 if (TREE_CODE_CLASS (code) == '<')
2024 {
2025 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg, 0)))
2026 && code != NE_EXPR && code != EQ_EXPR)
2027 return build1 (TRUTH_NOT_EXPR, type, arg);
2028 else
2029 return build (invert_tree_comparison (code), type,
2030 TREE_OPERAND (arg, 0), TREE_OPERAND (arg, 1));
2031 }
2032
2033 switch (code)
2034 {
2035 case INTEGER_CST:
2036 return convert (type, build_int_2 (TREE_INT_CST_LOW (arg) == 0
2037 && TREE_INT_CST_HIGH (arg) == 0, 0));
2038
2039 case TRUTH_AND_EXPR:
2040 return build (TRUTH_OR_EXPR, type,
2041 invert_truthvalue (TREE_OPERAND (arg, 0)),
2042 invert_truthvalue (TREE_OPERAND (arg, 1)));
2043
2044 case TRUTH_OR_EXPR:
2045 return build (TRUTH_AND_EXPR, type,
2046 invert_truthvalue (TREE_OPERAND (arg, 0)),
2047 invert_truthvalue (TREE_OPERAND (arg, 1)));
2048
2049 case TRUTH_XOR_EXPR:
2050 /* Here we can invert either operand. We invert the first operand
2051 unless the second operand is a TRUTH_NOT_EXPR in which case our
2052 result is the XOR of the first operand with the inside of the
2053 negation of the second operand. */
2054
2055 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
2056 return build (TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
2057 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
2058 else
2059 return build (TRUTH_XOR_EXPR, type,
2060 invert_truthvalue (TREE_OPERAND (arg, 0)),
2061 TREE_OPERAND (arg, 1));
2062
2063 case TRUTH_ANDIF_EXPR:
2064 return build (TRUTH_ORIF_EXPR, type,
2065 invert_truthvalue (TREE_OPERAND (arg, 0)),
2066 invert_truthvalue (TREE_OPERAND (arg, 1)));
2067
2068 case TRUTH_ORIF_EXPR:
2069 return build (TRUTH_ANDIF_EXPR, type,
2070 invert_truthvalue (TREE_OPERAND (arg, 0)),
2071 invert_truthvalue (TREE_OPERAND (arg, 1)));
2072
2073 case TRUTH_NOT_EXPR:
2074 return TREE_OPERAND (arg, 0);
2075
2076 case COND_EXPR:
2077 return build (COND_EXPR, type, TREE_OPERAND (arg, 0),
2078 invert_truthvalue (TREE_OPERAND (arg, 1)),
2079 invert_truthvalue (TREE_OPERAND (arg, 2)));
2080
2081 case COMPOUND_EXPR:
2082 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg, 0),
2083 invert_truthvalue (TREE_OPERAND (arg, 1)));
2084
2085 case NON_LVALUE_EXPR:
2086 return invert_truthvalue (TREE_OPERAND (arg, 0));
2087
2088 case NOP_EXPR:
2089 case CONVERT_EXPR:
2090 case FLOAT_EXPR:
2091 return build1 (TREE_CODE (arg), type,
2092 invert_truthvalue (TREE_OPERAND (arg, 0)));
2093
2094 case BIT_AND_EXPR:
2095 if (!integer_onep (TREE_OPERAND (arg, 1)))
2096 break;
2097 return build (EQ_EXPR, type, arg, convert (type, integer_zero_node));
2098
2099 case SAVE_EXPR:
2100 return build1 (TRUTH_NOT_EXPR, type, arg);
2101
2102 case CLEANUP_POINT_EXPR:
2103 return build1 (CLEANUP_POINT_EXPR, type,
2104 invert_truthvalue (TREE_OPERAND (arg, 0)));
2105 }
2106 if (TREE_CODE (TREE_TYPE (arg)) != BOOLEAN_TYPE)
2107 abort ();
2108 return build1 (TRUTH_NOT_EXPR, type, arg);
2109 }
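
/* Illustrative sketch, not part of this file: the inversion identities
   used above, checked exhaustively over truth values in standalone C. */
#if 0
#include <assert.h>

int
main ()
{
  int a, b;

  for (a = 0; a <= 1; a++)
    for (b = 0; b <= 1; b++)
      {
        assert (!(a && b) == (!a || !b)); /* TRUTH_AND -> TRUTH_OR */
        assert (!(a || b) == (!a && !b)); /* TRUTH_OR -> TRUTH_AND */
        assert (!(a ^ b) == ((!a) ^ b));  /* invert one XOR operand */
      }
  return 0;
}
#endif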
2110
2111 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2112 operands are another bit-wise operation with a common input. If so,
2113 distribute the bit operations to save an operation and possibly two if
2114 constants are involved. For example, convert
2115 (A | B) & (A | C) into A | (B & C)
2116 Further simplification will occur if B and C are constants.
2117
2118 If this optimization cannot be done, 0 will be returned. */
2119
2120 static tree
2121 distribute_bit_expr (code, type, arg0, arg1)
2122 enum tree_code code;
2123 tree type;
2124 tree arg0, arg1;
2125 {
2126 tree common;
2127 tree left, right;
2128
2129 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2130 || TREE_CODE (arg0) == code
2131 || (TREE_CODE (arg0) != BIT_AND_EXPR
2132 && TREE_CODE (arg0) != BIT_IOR_EXPR))
2133 return 0;
2134
2135 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
2136 {
2137 common = TREE_OPERAND (arg0, 0);
2138 left = TREE_OPERAND (arg0, 1);
2139 right = TREE_OPERAND (arg1, 1);
2140 }
2141 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
2142 {
2143 common = TREE_OPERAND (arg0, 0);
2144 left = TREE_OPERAND (arg0, 1);
2145 right = TREE_OPERAND (arg1, 0);
2146 }
2147 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
2148 {
2149 common = TREE_OPERAND (arg0, 1);
2150 left = TREE_OPERAND (arg0, 0);
2151 right = TREE_OPERAND (arg1, 1);
2152 }
2153 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
2154 {
2155 common = TREE_OPERAND (arg0, 1);
2156 left = TREE_OPERAND (arg0, 0);
2157 right = TREE_OPERAND (arg1, 0);
2158 }
2159 else
2160 return 0;
2161
2162 return fold (build (TREE_CODE (arg0), type, common,
2163 fold (build (code, type, left, right))));
2164 }
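
/* Illustrative sketch, not part of this file: the distributive
   identities applied above, spot-checked in standalone C. */
#if 0
#include <assert.h>

int
main ()
{
  unsigned a = 0xf0f0, b = 0x3c3c, c = 0x0ff0;

  assert (((a | b) & (a | c)) == (a | (b & c)));
  assert (((a & b) | (a & c)) == (a & (b | c)));
  return 0;
}
#endif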
2165 \f
2166 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2167 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2168
2169 static tree
2170 make_bit_field_ref (inner, type, bitsize, bitpos, unsignedp)
2171 tree inner;
2172 tree type;
2173 int bitsize, bitpos;
2174 int unsignedp;
2175 {
2176 tree result = build (BIT_FIELD_REF, type, inner,
2177 size_int (bitsize), size_int (bitpos));
2178
2179 TREE_UNSIGNED (result) = unsignedp;
2180
2181 return result;
2182 }
2183
2184 /* Optimize a bit-field compare.
2185
2186 There are two cases: First is a compare against a constant and the
2187 second is a comparison of two items where the fields are at the same
2188 bit position relative to the start of a chunk (byte, halfword, word)
2189 large enough to contain it. In these cases we can avoid the shift
2190 implicit in bitfield extractions.
2191
2192 For constants, we emit a compare of the shifted constant with the
2193 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2194 compared. For two fields at the same position, we do the ANDs with the
2195 similar mask and compare the result of the ANDs.
2196
2197 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2198 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2199 are the left and right operands of the comparison, respectively.
2200
2201 If the optimization described above can be done, we return the resulting
2202 tree. Otherwise we return zero. */
2203
2204 static tree
2205 optimize_bit_field_compare (code, compare_type, lhs, rhs)
2206 enum tree_code code;
2207 tree compare_type;
2208 tree lhs, rhs;
2209 {
2210 int lbitpos, lbitsize, rbitpos, rbitsize;
2211 int lnbitpos, lnbitsize, rnbitpos, rnbitsize;
2212 tree type = TREE_TYPE (lhs);
2213 tree signed_type, unsigned_type;
2214 int const_p = TREE_CODE (rhs) == INTEGER_CST;
2215 enum machine_mode lmode, rmode, lnmode, rnmode;
2216 int lunsignedp, runsignedp;
2217 int lvolatilep = 0, rvolatilep = 0;
2218 tree linner, rinner;
2219 tree mask;
2220 tree offset;
2221
2222 /* Get all the information about the extractions being done. If the bit size
2223 is the same as the size of the underlying object, we aren't doing an
2224 extraction at all and so can do nothing. */
2225 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
2226 &lunsignedp, &lvolatilep);
2227 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
2228 || offset != 0)
2229 return 0;
2230
2231 if (!const_p)
2232 {
2233 /* If this is not a constant, we can only do something if bit positions,
2234 sizes, and signedness are the same. */
2235 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset,
2236 &rmode, &runsignedp, &rvolatilep);
2237
2238 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
2239 || lunsignedp != runsignedp || offset != 0)
2240 return 0;
2241 }
2242
2243 /* See if we can find a mode to refer to this field. We should be able to,
2244 but fail if we can't. */
2245 lnmode = get_best_mode (lbitsize, lbitpos,
2246 TYPE_ALIGN (TREE_TYPE (linner)), word_mode,
2247 lvolatilep);
2248 if (lnmode == VOIDmode)
2249 return 0;
2250
2251 /* Set signed and unsigned types of the precision of this mode for the
2252 shifts below. */
2253 signed_type = type_for_mode (lnmode, 0);
2254 unsigned_type = type_for_mode (lnmode, 1);
2255
2256 if (! const_p)
2257 {
2258 rnmode = get_best_mode (rbitsize, rbitpos,
2259 TYPE_ALIGN (TREE_TYPE (rinner)), word_mode,
2260 rvolatilep);
2261 if (rnmode == VOIDmode)
2262 return 0;
2263 }
2264
2265 /* Compute the bit position and size for the new reference and our offset
2266 within it. If the new reference is the same size as the original, we
2267 won't optimize anything, so return zero. */
2268 lnbitsize = GET_MODE_BITSIZE (lnmode);
2269 lnbitpos = lbitpos & ~ (lnbitsize - 1);
2270 lbitpos -= lnbitpos;
2271 if (lnbitsize == lbitsize)
2272 return 0;
2273
2274 if (! const_p)
2275 {
2276 rnbitsize = GET_MODE_BITSIZE (rnmode);
2277 rnbitpos = rbitpos & ~ (rnbitsize - 1);
2278 rbitpos -= rnbitpos;
2279 if (rnbitsize == rbitsize)
2280 return 0;
2281 }
2282
2283 if (BYTES_BIG_ENDIAN)
2284 lbitpos = lnbitsize - lbitsize - lbitpos;
2285
2286 /* Make the mask to be used against the extracted field. */
2287 mask = build_int_2 (~0, ~0);
2288 TREE_TYPE (mask) = unsigned_type;
2289 force_fit_type (mask, 0);
2290 mask = convert (unsigned_type, mask);
2291 mask = const_binop (LSHIFT_EXPR, mask, size_int (lnbitsize - lbitsize), 0);
2292 mask = const_binop (RSHIFT_EXPR, mask,
2293 size_int (lnbitsize - lbitsize - lbitpos), 0);
2294
2295 if (! const_p)
2296 /* If not comparing with constant, just rework the comparison
2297 and return. */
2298 return build (code, compare_type,
2299 build (BIT_AND_EXPR, unsigned_type,
2300 make_bit_field_ref (linner, unsigned_type,
2301 lnbitsize, lnbitpos, 1),
2302 mask),
2303 build (BIT_AND_EXPR, unsigned_type,
2304 make_bit_field_ref (rinner, unsigned_type,
2305 rnbitsize, rnbitpos, 1),
2306 mask));
2307
2308 /* Otherwise, we are handling the constant case. See if the constant is too
2309 big for the field. Warn and return a tree for 0 (false) if so. We do
2310 this not only for its own sake, but to avoid having to test for this
2311 error case below. If we didn't, we might generate wrong code.
2312
2313 For unsigned fields, the constant shifted right by the field length should
2314 be all zero. For signed fields, the high-order bits should agree with
2315 the sign bit. */
2316
2317 if (lunsignedp)
2318 {
2319 if (! integer_zerop (const_binop (RSHIFT_EXPR,
2320 convert (unsigned_type, rhs),
2321 size_int (lbitsize), 0)))
2322 {
2323 warning ("comparison is always %s due to width of bitfield",
2324 code == NE_EXPR ? "one" : "zero");
2325 return convert (compare_type,
2326 (code == NE_EXPR
2327 ? integer_one_node : integer_zero_node));
2328 }
2329 }
2330 else
2331 {
2332 tree tem = const_binop (RSHIFT_EXPR, convert (signed_type, rhs),
2333 size_int (lbitsize - 1), 0);
2334 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
2335 {
2336 warning ("comparison is always %s due to width of bitfield",
2337 code == NE_EXPR ? "one" : "zero");
2338 return convert (compare_type,
2339 (code == NE_EXPR
2340 ? integer_one_node : integer_zero_node));
2341 }
2342 }
2343
2344 /* Single-bit compares should always be against zero. */
2345 if (lbitsize == 1 && ! integer_zerop (rhs))
2346 {
2347 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
2348 rhs = convert (type, integer_zero_node);
2349 }
2350
2351 /* Make a new bitfield reference, shift the constant over the
2352 appropriate number of bits and mask it with the computed mask
2353 (in case this was a signed field). If we changed it, make a new one. */
2354 lhs = make_bit_field_ref (linner, unsigned_type, lnbitsize, lnbitpos, 1);
2355 if (lvolatilep)
2356 {
2357 TREE_SIDE_EFFECTS (lhs) = 1;
2358 TREE_THIS_VOLATILE (lhs) = 1;
2359 }
2360
2361 rhs = fold (const_binop (BIT_AND_EXPR,
2362 const_binop (LSHIFT_EXPR,
2363 convert (unsigned_type, rhs),
2364 size_int (lbitpos), 0),
2365 mask, 0));
2366
2367 return build (code, compare_type,
2368 build (BIT_AND_EXPR, unsigned_type, lhs, mask),
2369 rhs);
2370 }
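
/* Illustrative sketch, not part of this file: the shape of code this
   transformation aims for, in source-level terms. The byte offset and
   bit positions below assume a little-endian layout with F placed
   after a 3-bit pad; they are schematic, not guaranteed. */
#if 0
struct s { unsigned pad : 3, f : 4; };

/* "sp->f == 5" can be tested without extracting (shifting) the field: */
int
test (sp)
     struct s *sp;
{
  unsigned char word = *(unsigned char *) sp; /* chunk containing F */
  return (word & (0xf << 3)) == (5 << 3);     /* mask, shifted constant */
}
#endif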
2371 \f
2372 /* Subroutine for fold_truthop: decode a field reference.
2373
2374 If EXP is a comparison reference, we return the innermost reference.
2375
2376 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2377 set to the starting bit number.
2378
2379 If the innermost field can be completely contained in a mode-sized
2380 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2381
2382 *PVOLATILEP is set to 1 if any expression encountered is volatile;
2383 otherwise it is not changed.
2384
2385 *PUNSIGNEDP is set to the signedness of the field.
2386
2387 *PMASK is set to the mask used. This is either contained in a
2388 BIT_AND_EXPR or derived from the width of the field.
2389
2390 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
2391
2392 Return 0 if this is not a component reference or is one that we can't
2393 do anything with. */
2394
2395 static tree
2396 decode_field_reference (exp, pbitsize, pbitpos, pmode, punsignedp,
2397 pvolatilep, pmask, pand_mask)
2398 tree exp;
2399 int *pbitsize, *pbitpos;
2400 enum machine_mode *pmode;
2401 int *punsignedp, *pvolatilep;
2402 tree *pmask;
2403 tree *pand_mask;
2404 {
2405 tree and_mask = 0;
2406 tree mask, inner, offset;
2407 tree unsigned_type;
2408 int precision;
2409
2410 /* All the optimizations using this function assume integer fields.
2411 There are problems with FP fields since the type_for_size call
2412 below can fail for, e.g., XFmode. */
2413 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
2414 return 0;
2415
2416 STRIP_NOPS (exp);
2417
2418 if (TREE_CODE (exp) == BIT_AND_EXPR)
2419 {
2420 and_mask = TREE_OPERAND (exp, 1);
2421 exp = TREE_OPERAND (exp, 0);
2422 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
2423 if (TREE_CODE (and_mask) != INTEGER_CST)
2424 return 0;
2425 }
2426
2427
2428 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
2429 punsignedp, pvolatilep);
2430 if ((inner == exp && and_mask == 0)
2431 || *pbitsize < 0 || offset != 0)
2432 return 0;
2433
2434 /* Compute the mask to access the bitfield. */
2435 unsigned_type = type_for_size (*pbitsize, 1);
2436 precision = TYPE_PRECISION (unsigned_type);
2437
2438 mask = build_int_2 (~0, ~0);
2439 TREE_TYPE (mask) = unsigned_type;
2440 force_fit_type (mask, 0);
2441 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2442 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize), 0);
2443
2444 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2445 if (and_mask != 0)
2446 mask = fold (build (BIT_AND_EXPR, unsigned_type,
2447 convert (unsigned_type, and_mask), mask));
2448
2449 *pmask = mask;
2450 *pand_mask = and_mask;
2451 return inner;
2452 }
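
/* Illustrative sketch, not part of this file: the mask computed above.
   For *PBITSIZE == 5 in a 16-bit unsigned type, shifting all-ones left
   and then right by (16 - 5) leaves 0x1f, the five low-order ones; any
   mask taken from a BIT_AND_EXPR is then ANDed into it. */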
2453
2454 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2455 bit positions. */
2456
2457 static int
2458 all_ones_mask_p (mask, size)
2459 tree mask;
2460 int size;
2461 {
2462 tree type = TREE_TYPE (mask);
2463 int precision = TYPE_PRECISION (type);
2464 tree tmask;
2465
2466 tmask = build_int_2 (~0, ~0);
2467 TREE_TYPE (tmask) = signed_type (type);
2468 force_fit_type (tmask, 0);
2469 return
2470 tree_int_cst_equal (mask,
2471 const_binop (RSHIFT_EXPR,
2472 const_binop (LSHIFT_EXPR, tmask,
2473 size_int (precision - size),
2474 0),
2475 size_int (precision - size), 0));
2476 }
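
/* Illustrative sketch, not part of this file: the test above in plain
   C for a 32-bit precision. MASK == 0xff passes with SIZE == 8; 0x7f
   or 0xff0 would fail. */
#if 0
#include <assert.h>

int
main ()
{
  int size = 8, precision = 32;
  unsigned tmask = ~0U;

  tmask = (tmask << (precision - size)) >> (precision - size);
  assert (tmask == 0xff); /* SIZE low-order ones */
  return 0;
}
#endif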
2477
2478 /* Subroutine for fold_truthop: determine if an operand is simple enough
2479 to be evaluated unconditionally. */
2480
2481 static int
2482 simple_operand_p (exp)
2483 tree exp;
2484 {
2485 /* Strip any conversions that don't change the machine mode. */
2486 while ((TREE_CODE (exp) == NOP_EXPR
2487 || TREE_CODE (exp) == CONVERT_EXPR)
2488 && (TYPE_MODE (TREE_TYPE (exp))
2489 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
2490 exp = TREE_OPERAND (exp, 0);
2491
2492 return (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c'
2493 || (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
2494 && ! TREE_ADDRESSABLE (exp)
2495 && ! TREE_THIS_VOLATILE (exp)
2496 && ! DECL_NONLOCAL (exp)
2497 /* Don't regard global variables as simple. They may be
2498 allocated in ways unknown to the compiler (shared memory,
2499 #pragma weak, etc). */
2500 && ! TREE_PUBLIC (exp)
2501 && ! DECL_EXTERNAL (exp)
2502 /* Loading a static variable is unduly expensive, but global
2503 registers aren't expensive. */
2504 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
2505 }
2506 \f
2507 /* Subroutine for fold_truthop: try to optimize a range test.
2508
2509 For example, "i >= 2 && i <= 9" can be done as "(unsigned) (i - 2) <= 7".
2510
2511 JCODE is the logical combination of the two terms. It is TRUTH_AND_EXPR
2512 (representing TRUTH_ANDIF_EXPR and TRUTH_AND_EXPR) or TRUTH_OR_EXPR
2513 (representing TRUTH_ORIF_EXPR and TRUTH_OR_EXPR). TYPE is the type of
2514 the result.
2515
2516 VAR is the value being tested. LO_CODE and HI_CODE are the comparison
2517 operators comparing VAR to LO_CST and HI_CST. LO_CST is known to be no
2518 larger than HI_CST (they may be equal).
2519
2520 We return the simplified tree or 0 if no optimization is possible. */
2521
2522 static tree
2523 range_test (jcode, type, lo_code, hi_code, var, lo_cst, hi_cst)
2524 enum tree_code jcode, lo_code, hi_code;
2525 tree type, var, lo_cst, hi_cst;
2526 {
2527 tree utype;
2528 enum tree_code rcode;
2529
2530 /* See if this is a range test and normalize the constant terms. */
2531
2532 if (jcode == TRUTH_AND_EXPR)
2533 {
2534 switch (lo_code)
2535 {
2536 case NE_EXPR:
2537 /* See if we have VAR != CST && VAR != CST+1. */
2538 if (! (hi_code == NE_EXPR
2539 && TREE_INT_CST_LOW (hi_cst) - TREE_INT_CST_LOW (lo_cst) == 1
2540 && tree_int_cst_equal (integer_one_node,
2541 const_binop (MINUS_EXPR,
2542 hi_cst, lo_cst, 0))))
2543 return 0;
2544
2545 rcode = GT_EXPR;
2546 break;
2547
2548 case GT_EXPR:
2549 case GE_EXPR:
2550 if (hi_code == LT_EXPR)
2551 hi_cst = const_binop (MINUS_EXPR, hi_cst, integer_one_node, 0);
2552 else if (hi_code != LE_EXPR)
2553 return 0;
2554
2555 if (lo_code == GT_EXPR)
2556 lo_cst = const_binop (PLUS_EXPR, lo_cst, integer_one_node, 0);
2557
2558 /* We now have VAR >= LO_CST && VAR <= HI_CST. */
2559 rcode = LE_EXPR;
2560 break;
2561
2562 default:
2563 return 0;
2564 }
2565 }
2566 else
2567 {
2568 switch (lo_code)
2569 {
2570 case EQ_EXPR:
2571 /* See if we have VAR == CST || VAR == CST+1. */
2572 if (! (hi_code == EQ_EXPR
2573 && TREE_INT_CST_LOW (hi_cst) - TREE_INT_CST_LOW (lo_cst) == 1
2574 && tree_int_cst_equal (integer_one_node,
2575 const_binop (MINUS_EXPR,
2576 hi_cst, lo_cst, 0))))
2577 return 0;
2578
2579 rcode = LE_EXPR;
2580 break;
2581
2582 case LE_EXPR:
2583 case LT_EXPR:
2584 if (hi_code == GE_EXPR)
2585 hi_cst = const_binop (MINUS_EXPR, hi_cst, integer_one_node, 0);
2586 else if (hi_code != GT_EXPR)
2587 return 0;
2588
2589 if (lo_code == LE_EXPR)
2590 lo_cst = const_binop (PLUS_EXPR, lo_cst, integer_one_node, 0);
2591
2592 /* We now have VAR < LO_CST || VAR > HI_CST. */
2593 rcode = GT_EXPR;
2594 break;
2595
2596 default:
2597 return 0;
2598 }
2599 }
2600
2601 /* When normalizing, it is possible to both increment the smaller constant
2602 and decrement the larger constant. See if they are still ordered. */
2603 if (tree_int_cst_lt (hi_cst, lo_cst))
2604 return 0;
2605
2606 /* Fail if VAR isn't an integer. */
2607 utype = TREE_TYPE (var);
2608 if (! INTEGRAL_TYPE_P (utype))
2609 return 0;
2610
2611 /* The range test is invalid if subtracting the two constants results
2612 in overflow. This can happen in traditional mode. */
2613 if (! int_fits_type_p (hi_cst, TREE_TYPE (var))
2614 || ! int_fits_type_p (lo_cst, TREE_TYPE (var)))
2615 return 0;
2616
2617 if (! TREE_UNSIGNED (utype))
2618 {
2619 utype = unsigned_type (utype);
2620 var = convert (utype, var);
2621 lo_cst = convert (utype, lo_cst);
2622 hi_cst = convert (utype, hi_cst);
2623 }
2624
2625 return fold (convert (type,
2626 build (rcode, utype,
2627 build (MINUS_EXPR, utype, var, lo_cst),
2628 const_binop (MINUS_EXPR, hi_cst, lo_cst, 0))));
2629 }
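
/* Illustrative sketch, not part of this file: the unsigned-subtract
   range test in standalone C. Values below the low bound wrap around
   to large unsigned numbers and fail the single comparison. */
#if 0
#include <assert.h>

int
main ()
{
  int i;

  for (i = -20; i <= 20; i++)
    assert ((i >= 2 && i <= 9) == ((unsigned) (i - 2) <= 7));
  return 0;
}
#endif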
2630 \f
2631 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
2632 bit value. Arrange things so the extra bits will be set to zero if and
2633 only if C is sign-extended to its full width. If MASK is nonzero,
2634 it is an INTEGER_CST that should be AND'ed with the extra bits. */
2635
2636 static tree
2637 unextend (c, p, unsignedp, mask)
2638 tree c;
2639 int p;
2640 int unsignedp;
2641 tree mask;
2642 {
2643 tree type = TREE_TYPE (c);
2644 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
2645 tree temp;
2646
2647 if (p == modesize || unsignedp)
2648 return c;
2649
2650 if (TREE_UNSIGNED (type))
2651 c = convert (signed_type (type), c);
2652
2653 /* We work by getting just the sign bit into the low-order bit, then
2654 into the high-order bit, then sign-extend. We then XOR that value
2655 with C. */
2656 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1), 0);
2657 temp = const_binop (BIT_AND_EXPR, temp, size_int (1), 0);
2658 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1), 0);
2659 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1), 0);
2660 if (mask != 0)
2661 temp = const_binop (BIT_AND_EXPR, temp, convert (TREE_TYPE (c), mask), 0);
2662
2663 return convert (type, const_binop (BIT_XOR_EXPR, c, temp, 0));
2664 }
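
/* Illustrative sketch, not part of this file: the sign-bit trick above
   for P == 4 in an 8-bit mode, in standalone C. Assumes two's
   complement and arithmetic right shift of negative values, which the
   constant folder guarantees for its own arithmetic. */
#if 0
#include <assert.h>

int
main ()
{
  int p = 4, modesize = 8;
  unsigned char c = 0xfd; /* -3 sign-extended into 8 bits */
  signed char temp;

  temp = (c >> (p - 1)) & 1;                     /* the field's sign bit */
  temp = (signed char) (temp << (modesize - 1)); /* move it to the top */
  temp >>= modesize - p - 1;                     /* smear over extra bits */
  assert ((unsigned char) (c ^ temp) == 0x0d);   /* extension bits cleared */
  return 0;
}
#endif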
2665 \f
2666 /* Find ways of folding logical expressions of LHS and RHS:
2667 Try to merge two comparisons to the same innermost item.
2668 Look for range tests like "ch >= '0' && ch <= '9'".
2669 Look for combinations of simple terms on machines with expensive branches
2670 and evaluate the RHS unconditionally.
2671
2672 For example, if we have p->a == 2 && p->b == 4 and we can make an
2673 object large enough to span both A and B, we can do this with a comparison
2674 against the object ANDed with a mask.
2675
2676 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
2677 operations to do this with one comparison.
2678
2679 We check for both normal comparisons and the BIT_AND_EXPRs made by this
2680 function and the one above.
2681
2682 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
2683 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
2684
2685 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
2686 two operands.
2687
2688 We return the simplified tree or 0 if no optimization is possible. */
2689
2690 static tree
2691 fold_truthop (code, truth_type, lhs, rhs)
2692 enum tree_code code;
2693 tree truth_type, lhs, rhs;
2694 {
2695 /* If this is the "or" of two comparisons, we can do something if
2696 the comparisons are NE_EXPR. If this is the "and", we can do something
2697 if the comparisons are EQ_EXPR. I.e.,
2698 (a->b == 2 && a->c == 4) can become (a->new == NEW).
2699
2700 WANTED_CODE is this operation code. For single bit fields, we can
2701 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
2702 comparison for one-bit fields. */
2703
2704 enum tree_code wanted_code;
2705 enum tree_code lcode, rcode;
2706 tree ll_arg, lr_arg, rl_arg, rr_arg;
2707 tree ll_inner, lr_inner, rl_inner, rr_inner;
2708 int ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
2709 int rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
2710 int xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
2711 int lnbitsize, lnbitpos, rnbitsize, rnbitpos;
2712 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
2713 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
2714 enum machine_mode lnmode, rnmode;
2715 tree ll_mask, lr_mask, rl_mask, rr_mask;
2716 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
2717 tree l_const, r_const;
2718 tree type, result;
2719 int first_bit, end_bit;
2720 int volatilep;
2721
2722 /* Start by getting the comparison codes and seeing if this looks like
2723 a range test. Fail if anything is volatile. If one operand is a
2724 BIT_AND_EXPR with the constant one, treat it as if it were surrounded
2725 with a NE_EXPR. */
2726
2727 if (TREE_SIDE_EFFECTS (lhs)
2728 || TREE_SIDE_EFFECTS (rhs))
2729 return 0;
2730
2731 lcode = TREE_CODE (lhs);
2732 rcode = TREE_CODE (rhs);
2733
2734 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
2735 lcode = NE_EXPR, lhs = build (NE_EXPR, truth_type, lhs, integer_zero_node);
2736
2737 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
2738 rcode = NE_EXPR, rhs = build (NE_EXPR, truth_type, rhs, integer_zero_node);
2739
2740 if (TREE_CODE_CLASS (lcode) != '<'
2741 || TREE_CODE_CLASS (rcode) != '<')
2742 return 0;
2743
2744 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
2745 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
2746
2747 ll_arg = TREE_OPERAND (lhs, 0);
2748 lr_arg = TREE_OPERAND (lhs, 1);
2749 rl_arg = TREE_OPERAND (rhs, 0);
2750 rr_arg = TREE_OPERAND (rhs, 1);
2751
2752 if (TREE_CODE (lr_arg) == INTEGER_CST
2753 && TREE_CODE (rr_arg) == INTEGER_CST
2754 && operand_equal_p (ll_arg, rl_arg, 0))
2755 {
2756 if (tree_int_cst_lt (lr_arg, rr_arg))
2757 result = range_test (code, truth_type, lcode, rcode,
2758 ll_arg, lr_arg, rr_arg);
2759 else
2760 result = range_test (code, truth_type, rcode, lcode,
2761 ll_arg, rr_arg, lr_arg);
2762
2763 /* If this isn't a range test, it also isn't a comparison that
2764 can be merged. However, it wins to evaluate the RHS unconditionally
2765 on machines with expensive branches. */
2766
2767 if (result == 0 && BRANCH_COST >= 2)
2768 {
2769 if (TREE_CODE (ll_arg) != VAR_DECL
2770 && TREE_CODE (ll_arg) != PARM_DECL)
2771 {
2772 /* Avoid evaluating the variable part twice. */
2773 ll_arg = save_expr (ll_arg);
2774 lhs = build (lcode, TREE_TYPE (lhs), ll_arg, lr_arg);
2775 rhs = build (rcode, TREE_TYPE (rhs), ll_arg, rr_arg);
2776 }
2777 return build (code, truth_type, lhs, rhs);
2778 }
2779 return result;
2780 }
2781
2782 /* If the RHS can be evaluated unconditionally and its operands are
2783 simple, it wins to evaluate the RHS unconditionally on machines
2784 with expensive branches. In this case, this isn't a comparison
2785 that can be merged. */
2786
2787 /* @@ I'm not sure it wins on the m88110 to do this if the comparisons
2788 are with zero (tmw). */
2789
2790 if (BRANCH_COST >= 2
2791 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
2792 && simple_operand_p (rl_arg)
2793 && simple_operand_p (rr_arg))
2794 return build (code, truth_type, lhs, rhs);
2795
2796 /* See if the comparisons can be merged. Then get all the parameters for
2797 each side. */
2798
2799 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
2800 || (rcode != EQ_EXPR && rcode != NE_EXPR))
2801 return 0;
2802
2803 volatilep = 0;
2804 ll_inner = decode_field_reference (ll_arg,
2805 &ll_bitsize, &ll_bitpos, &ll_mode,
2806 &ll_unsignedp, &volatilep, &ll_mask,
2807 &ll_and_mask);
2808 lr_inner = decode_field_reference (lr_arg,
2809 &lr_bitsize, &lr_bitpos, &lr_mode,
2810 &lr_unsignedp, &volatilep, &lr_mask,
2811 &lr_and_mask);
2812 rl_inner = decode_field_reference (rl_arg,
2813 &rl_bitsize, &rl_bitpos, &rl_mode,
2814 &rl_unsignedp, &volatilep, &rl_mask,
2815 &rl_and_mask);
2816 rr_inner = decode_field_reference (rr_arg,
2817 &rr_bitsize, &rr_bitpos, &rr_mode,
2818 &rr_unsignedp, &volatilep, &rr_mask,
2819 &rr_and_mask);
2820
2821 /* The inner operation on the lhs of each comparison must be the
2822 same if we are to be able to do anything.
2823 Then see if we have constants. If not, the same must be true for
2824 the rhs's. */
2825 if (volatilep || ll_inner == 0 || rl_inner == 0
2826 || ! operand_equal_p (ll_inner, rl_inner, 0))
2827 return 0;
2828
2829 if (TREE_CODE (lr_arg) == INTEGER_CST
2830 && TREE_CODE (rr_arg) == INTEGER_CST)
2831 l_const = lr_arg, r_const = rr_arg;
2832 else if (lr_inner == 0 || rr_inner == 0
2833 || ! operand_equal_p (lr_inner, rr_inner, 0))
2834 return 0;
2835 else
2836 l_const = r_const = 0;
2837
2838 /* If either comparison code is not correct for our logical operation,
2839 fail. However, we can convert a one-bit comparison against zero into
2840 the opposite comparison against that bit being set in the field. */
2841
2842 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
2843 if (lcode != wanted_code)
2844 {
2845 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
2846 l_const = ll_mask;
2847 else
2848 return 0;
2849 }
2850
2851 if (rcode != wanted_code)
2852 {
2853 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
2854 r_const = rl_mask;
2855 else
2856 return 0;
2857 }
2858
2859 /* See if we can find a mode that contains both fields being compared on
2860 the left. If we can't, fail. Otherwise, update all constants and masks
2861 to be relative to a field of that size. */
2862 first_bit = MIN (ll_bitpos, rl_bitpos);
2863 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
2864 lnmode = get_best_mode (end_bit - first_bit, first_bit,
2865 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
2866 volatilep);
2867 if (lnmode == VOIDmode)
2868 return 0;
2869
2870 lnbitsize = GET_MODE_BITSIZE (lnmode);
2871 lnbitpos = first_bit & ~ (lnbitsize - 1);
2872 type = type_for_size (lnbitsize, 1);
2873 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
2874
2875 if (BYTES_BIG_ENDIAN)
2876 {
2877 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
2878 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
2879 }
2880
2881 ll_mask = const_binop (LSHIFT_EXPR, convert (type, ll_mask),
2882 size_int (xll_bitpos), 0);
2883 rl_mask = const_binop (LSHIFT_EXPR, convert (type, rl_mask),
2884 size_int (xrl_bitpos), 0);
2885
2886 if (l_const)
2887 {
2888 l_const = convert (type, l_const);
2889 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
2890 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos), 0);
2891 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
2892 fold (build1 (BIT_NOT_EXPR,
2893 type, ll_mask)),
2894 0)))
2895 {
2896 warning ("comparison is always %s",
2897 wanted_code == NE_EXPR ? "one" : "zero");
2898
2899 return convert (truth_type,
2900 wanted_code == NE_EXPR
2901 ? integer_one_node : integer_zero_node);
2902 }
2903 }
2904 if (r_const)
2905 {
2906 r_const = convert (type, r_const);
2907 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
2908 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos), 0);
2909 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
2910 fold (build1 (BIT_NOT_EXPR,
2911 type, rl_mask)),
2912 0)))
2913 {
2914 warning ("comparison is always %s",
2915 wanted_code == NE_EXPR ? "one" : "zero");
2916
2917 return convert (truth_type,
2918 wanted_code == NE_EXPR
2919 ? integer_one_node : integer_zero_node);
2920 }
2921 }
2922
2923 /* If the right sides are not constant, do the same for them. Also,
2924 disallow this optimization if a size or signedness mismatch occurs
2925 between the left and right sides. */
2926 if (l_const == 0)
2927 {
2928 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
2929 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
2930 /* Make sure the two fields on the right
2931 correspond to the left without being swapped. */
2932 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
2933 return 0;
2934
2935 first_bit = MIN (lr_bitpos, rr_bitpos);
2936 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
2937 rnmode = get_best_mode (end_bit - first_bit, first_bit,
2938 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
2939 volatilep);
2940 if (rnmode == VOIDmode)
2941 return 0;
2942
2943 rnbitsize = GET_MODE_BITSIZE (rnmode);
2944 rnbitpos = first_bit & ~ (rnbitsize - 1);
2945 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
2946
2947 if (BYTES_BIG_ENDIAN)
2948 {
2949 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
2950 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
2951 }
2952
2953 lr_mask = const_binop (LSHIFT_EXPR, convert (type, lr_mask),
2954 size_int (xlr_bitpos), 0);
2955 rr_mask = const_binop (LSHIFT_EXPR, convert (type, rr_mask),
2956 size_int (xrr_bitpos), 0);
2957
2958 /* Make a mask that corresponds to both fields being compared.
2959 Do this for both items being compared. If the masks agree,
2960 we can do this by masking both and comparing the masked
2961 results. */
2962 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
2963 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask, 0);
2964 if (operand_equal_p (ll_mask, lr_mask, 0) && lnbitsize == rnbitsize)
2965 {
2966 lhs = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
2967 ll_unsignedp || rl_unsignedp);
2968 rhs = make_bit_field_ref (lr_inner, type, rnbitsize, rnbitpos,
2969 lr_unsignedp || rr_unsignedp);
2970 if (! all_ones_mask_p (ll_mask, lnbitsize))
2971 {
2972 lhs = build (BIT_AND_EXPR, type, lhs, ll_mask);
2973 rhs = build (BIT_AND_EXPR, type, rhs, ll_mask);
2974 }
2975 return build (wanted_code, truth_type, lhs, rhs);
2976 }
2977
2978 /* There is still another way we can do something: If both pairs of
2979 fields being compared are adjacent, we may be able to make a wider
2980 field containing them both. */
2981 if ((ll_bitsize + ll_bitpos == rl_bitpos
2982 && lr_bitsize + lr_bitpos == rr_bitpos)
2983 || (ll_bitpos == rl_bitpos + rl_bitsize
2984 && lr_bitpos == rr_bitpos + rr_bitsize))
2985 return build (wanted_code, truth_type,
2986 make_bit_field_ref (ll_inner, type,
2987 ll_bitsize + rl_bitsize,
2988 MIN (ll_bitpos, rl_bitpos),
2989 ll_unsignedp),
2990 make_bit_field_ref (lr_inner, type,
2991 lr_bitsize + rr_bitsize,
2992 MIN (lr_bitpos, rr_bitpos),
2993 lr_unsignedp));
2994
2995 return 0;
2996 }
2997
2998 /* Handle the case of comparisons with constants. If there is something in
2999 common between the masks, those bits of the constants must be the same.
3000 If not, the condition is always false. Test for this to avoid generating
3001 incorrect code below. */
3002 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask, 0);
3003 if (! integer_zerop (result)
3004 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const, 0),
3005 const_binop (BIT_AND_EXPR, result, r_const, 0)) != 1)
3006 {
3007 if (wanted_code == NE_EXPR)
3008 {
3009 warning ("`or' of unmatched not-equal tests is always 1");
3010 return convert (truth_type, integer_one_node);
3011 }
3012 else
3013 {
3014 warning ("`and' of mutually exclusive equal-tests is always zero");
3015 return convert (truth_type, integer_zero_node);
3016 }
3017 }
3018
3019 /* Construct the expression we will return. First get the component
3020 reference we will make. Unless the mask is all ones the width of
3021 that field, perform the mask operation. Then compare with the
3022 merged constant. */
3023 result = make_bit_field_ref (ll_inner, type, lnbitsize, lnbitpos,
3024 ll_unsignedp || rl_unsignedp);
3025
3026 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask, 0);
3027 if (! all_ones_mask_p (ll_mask, lnbitsize))
3028 result = build (BIT_AND_EXPR, type, result, ll_mask);
3029
3030 return build (wanted_code, truth_type, result,
3031 const_binop (BIT_IOR_EXPR, l_const, r_const, 0));
3032 }
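
/* Illustrative sketch, not part of this file: the payoff of the
   merging above in source-level terms. With both fields in one byte
   (a schematic little-endian layout), the two tests collapse to one
   load, one mask, and one compare against the merged constant. */
#if 0
struct s { unsigned b : 3, c : 3; };

/* "sp->b == 2 && sp->c == 4" becomes, in effect: */
int
test (sp)
     struct s *sp;
{
  unsigned char word = *(unsigned char *) sp;
  return (word & 0x3f) == (2 | (4 << 3));
}
#endif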
3033 \f
3034 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
3035 S, a SAVE_EXPR, return the expression actually being evaluated. Note
3036 that we may sometimes modify the tree. */
3037
3038 static tree
3039 strip_compound_expr (t, s)
3040 tree t;
3041 tree s;
3042 {
3043 tree type = TREE_TYPE (t);
3044 enum tree_code code = TREE_CODE (t);
3045
3046 /* See if this is the COMPOUND_EXPR we want to eliminate. */
3047 if (code == COMPOUND_EXPR && TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR
3048 && TREE_OPERAND (TREE_OPERAND (t, 0), 0) == s)
3049 return TREE_OPERAND (t, 1);
3050
3051 /* See if this is a COND_EXPR or a simple arithmetic operator. We
3052 don't bother handling any other types. */
3053 else if (code == COND_EXPR)
3054 {
3055 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3056 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
3057 TREE_OPERAND (t, 2) = strip_compound_expr (TREE_OPERAND (t, 2), s);
3058 }
3059 else if (TREE_CODE_CLASS (code) == '1')
3060 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3061 else if (TREE_CODE_CLASS (code) == '<'
3062 || TREE_CODE_CLASS (code) == '2')
3063 {
3064 TREE_OPERAND (t, 0) = strip_compound_expr (TREE_OPERAND (t, 0), s);
3065 TREE_OPERAND (t, 1) = strip_compound_expr (TREE_OPERAND (t, 1), s);
3066 }
3067
3068 return t;
3069 }
3070 \f
3071 /* Perform constant folding and related simplification of EXPR.
3072 The related simplifications include x*1 => x, x*0 => 0, etc.,
3073 and application of the associative law.
3074 NOP_EXPR conversions may be removed freely (as long as we
3075 are careful not to change the C type of the overall expression)
3076 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
3077 but we can constant-fold them if they have constant operands. */
3078
3079 tree
3080 fold (expr)
3081 tree expr;
3082 {
3083 register tree t = expr;
3084 tree t1 = NULL_TREE;
3085 tree tem;
3086 tree type = TREE_TYPE (expr);
3087 register tree arg0, arg1;
3088 register enum tree_code code = TREE_CODE (t);
3089 register int kind;
3090 int invert;
3091
3092 /* WINS will be nonzero when the switch is done
3093 if all operands are constant. */
3094
3095 int wins = 1;
3096
3097 /* Don't try to process an RTL_EXPR since its operands aren't trees. */
3098 if (code == RTL_EXPR)
3099 return t;
3100
3101 /* Return right away if already constant. */
3102 if (TREE_CONSTANT (t))
3103 {
3104 if (code == CONST_DECL)
3105 return DECL_INITIAL (t);
3106 return t;
3107 }
3108
3109 kind = TREE_CODE_CLASS (code);
3110 if (code == NOP_EXPR || code == FLOAT_EXPR || code == CONVERT_EXPR)
3111 {
3112 tree subop;
3113
3114 /* Special case for conversion ops that can have fixed point args. */
3115 arg0 = TREE_OPERAND (t, 0);
3116
3117 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
3118 if (arg0 != 0)
3119 STRIP_TYPE_NOPS (arg0);
3120
3121 if (arg0 != 0 && TREE_CODE (arg0) == COMPLEX_CST)
3122 subop = TREE_REALPART (arg0);
3123 else
3124 subop = arg0;
3125
3126 if (subop != 0 && TREE_CODE (subop) != INTEGER_CST
3127 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3128 && TREE_CODE (subop) != REAL_CST
3129 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3130 )
3131 /* Note that TREE_CONSTANT isn't enough:
3132 static var addresses are constant but we can't
3133 do arithmetic on them. */
3134 wins = 0;
3135 }
3136 else if (kind == 'e' || kind == '<'
3137 || kind == '1' || kind == '2' || kind == 'r')
3138 {
3139 register int len = tree_code_length[(int) code];
3140 register int i;
3141 for (i = 0; i < len; i++)
3142 {
3143 tree op = TREE_OPERAND (t, i);
3144 tree subop;
3145
3146 if (op == 0)
3147 continue; /* Valid for CALL_EXPR, at least. */
3148
3149 if (kind == '<' || code == RSHIFT_EXPR)
3150 {
3151 /* Signedness matters here. Perhaps we can refine this
3152 later. */
3153 STRIP_TYPE_NOPS (op);
3154 }
3155 else
3156 {
3157 /* Strip any conversions that don't change the mode. */
3158 STRIP_NOPS (op);
3159 }
3160
3161 if (TREE_CODE (op) == COMPLEX_CST)
3162 subop = TREE_REALPART (op);
3163 else
3164 subop = op;
3165
3166 if (TREE_CODE (subop) != INTEGER_CST
3167 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3168 && TREE_CODE (subop) != REAL_CST
3169 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3170 )
3171 /* Note that TREE_CONSTANT isn't enough:
3172 static var addresses are constant but we can't
3173 do arithmetic on them. */
3174 wins = 0;
3175
3176 if (i == 0)
3177 arg0 = op;
3178 else if (i == 1)
3179 arg1 = op;
3180 }
3181 }
3182
3183 /* If this is a commutative operation, and ARG0 is a constant, move it
3184 to ARG1 to reduce the number of tests below. */
3185 if ((code == PLUS_EXPR || code == MULT_EXPR || code == MIN_EXPR
3186 || code == MAX_EXPR || code == BIT_IOR_EXPR || code == BIT_XOR_EXPR
3187 || code == BIT_AND_EXPR)
3188 && (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST))
3189 {
3190 tem = arg0; arg0 = arg1; arg1 = tem;
3191
3192 tem = TREE_OPERAND (t, 0); TREE_OPERAND (t, 0) = TREE_OPERAND (t, 1);
3193 TREE_OPERAND (t, 1) = tem;
3194 }
3195
3196 /* Now WINS is set as described above,
3197 ARG0 is the first operand of EXPR,
3198 and ARG1 is the second operand (if it has more than one operand).
3199
3200 First check for cases where an arithmetic operation is applied to a
3201 compound, conditional, or comparison operation. Push the arithmetic
3202 operation inside the compound or conditional to see if any folding
3203 can then be done. Convert comparison to conditional for this purpose.
3204 This also optimizes non-constant cases that used to be done in
3205 expand_expr.
3206
3207 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
3208 one of the operands is a truth value and the other is a truth value or a
3209 BIT_AND_EXPR with the constant 1. In that case, the
3210 code below would make the expression more complex. Change it to a
3211 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
3212 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
3213
3214 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
3215 || code == EQ_EXPR || code == NE_EXPR)
3216 && ((truth_value_p (TREE_CODE (arg0))
3217 && (truth_value_p (TREE_CODE (arg1))
3218 || (TREE_CODE (arg1) == BIT_AND_EXPR
3219 && integer_onep (TREE_OPERAND (arg1, 1)))))
3220 || (truth_value_p (TREE_CODE (arg1))
3221 && (truth_value_p (TREE_CODE (arg0))
3222 || (TREE_CODE (arg0) == BIT_AND_EXPR
3223 && integer_onep (TREE_OPERAND (arg0, 1)))))))
3224 {
3225 t = fold (build (code == BIT_AND_EXPR ? TRUTH_AND_EXPR
3226 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
3227 : TRUTH_XOR_EXPR,
3228 type, arg0, arg1));
3229
3230 if (code == EQ_EXPR)
3231 t = invert_truthvalue (t);
3232
3233 return t;
3234 }
3235
3236 if (TREE_CODE_CLASS (code) == '1')
3237 {
3238 if (TREE_CODE (arg0) == COMPOUND_EXPR)
3239 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3240 fold (build1 (code, type, TREE_OPERAND (arg0, 1))));
3241 else if (TREE_CODE (arg0) == COND_EXPR)
3242 {
3243 t = fold (build (COND_EXPR, type, TREE_OPERAND (arg0, 0),
3244 fold (build1 (code, type, TREE_OPERAND (arg0, 1))),
3245 fold (build1 (code, type, TREE_OPERAND (arg0, 2)))));
3246
3247 /* If this was a conversion, and all we did was to move it
3248 inside the COND_EXPR, bring it back out. But leave it if
3249 it is a conversion from integer to integer and the
3250 result precision is no wider than a word since such a
3251 conversion is cheap and may be optimized away by combine,
3252 while it couldn't if it were outside the COND_EXPR. Then return
3253 so we don't get into an infinite recursion loop taking the
3254 conversion out and then back in. */
3255
3256 if ((code == NOP_EXPR || code == CONVERT_EXPR
3257 || code == NON_LVALUE_EXPR)
3258 && TREE_CODE (t) == COND_EXPR
3259 && TREE_CODE (TREE_OPERAND (t, 1)) == code
3260 && TREE_CODE (TREE_OPERAND (t, 2)) == code
3261 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0))
3262 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 2), 0)))
3263 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t))
3264 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)))
3265 && TYPE_PRECISION (TREE_TYPE (t)) <= BITS_PER_WORD))
3266 t = build1 (code, type,
3267 build (COND_EXPR,
3268 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 1), 0)),
3269 TREE_OPERAND (t, 0),
3270 TREE_OPERAND (TREE_OPERAND (t, 1), 0),
3271 TREE_OPERAND (TREE_OPERAND (t, 2), 0)));
3272 return t;
3273 }
3274 else if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
3275 return fold (build (COND_EXPR, type, arg0,
3276 fold (build1 (code, type, integer_one_node)),
3277 fold (build1 (code, type, integer_zero_node))));
3278 }
3279 else if (TREE_CODE_CLASS (code) == '2'
3280 || TREE_CODE_CLASS (code) == '<')
3281 {
3282 if (TREE_CODE (arg1) == COMPOUND_EXPR)
3283 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3284 fold (build (code, type,
3285 arg0, TREE_OPERAND (arg1, 1))));
3286 else if (TREE_CODE (arg1) == COND_EXPR
3287 || TREE_CODE_CLASS (TREE_CODE (arg1)) == '<')
3288 {
3289 tree test, true_value, false_value;
3290
3291 if (TREE_CODE (arg1) == COND_EXPR)
3292 {
3293 test = TREE_OPERAND (arg1, 0);
3294 true_value = TREE_OPERAND (arg1, 1);
3295 false_value = TREE_OPERAND (arg1, 2);
3296 }
3297 else
3298 {
3299 tree testtype = TREE_TYPE (arg1);
3300 test = arg1;
3301 true_value = convert (testtype, integer_one_node);
3302 false_value = convert (testtype, integer_zero_node);
3303 }
3304
3305 /* If ARG0 is complex we want to make sure we only evaluate
3306 it once. Though this is only required if it is volatile, it
3307 might be more efficient even if it is not. However, if we
3308 succeed in folding one part to a constant, we do not need
3309 to make this SAVE_EXPR. Since we do this optimization
3310 primarily to see if we do end up with constant and this
3311 SAVE_EXPR interferes with later optimizations, suppressing
3312 it when we can is important. */
3313
3314 if (TREE_CODE (arg0) != SAVE_EXPR
3315 && ((TREE_CODE (arg0) != VAR_DECL
3316 && TREE_CODE (arg0) != PARM_DECL)
3317 || TREE_SIDE_EFFECTS (arg0)))
3318 {
3319 tree lhs = fold (build (code, type, arg0, true_value));
3320 tree rhs = fold (build (code, type, arg0, false_value));
3321
3322 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs))
3323 return fold (build (COND_EXPR, type, test, lhs, rhs));
3324
3325 arg0 = save_expr (arg0);
3326 }
3327
3328 test = fold (build (COND_EXPR, type, test,
3329 fold (build (code, type, arg0, true_value)),
3330 fold (build (code, type, arg0, false_value))));
3331 if (TREE_CODE (arg0) == SAVE_EXPR)
3332 return build (COMPOUND_EXPR, type,
3333 convert (void_type_node, arg0),
3334 strip_compound_expr (test, arg0));
3335 else
3336 return convert (type, test);
3337 }
3338
3339 else if (TREE_CODE (arg0) == COMPOUND_EXPR)
3340 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3341 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3342 else if (TREE_CODE (arg0) == COND_EXPR
3343 || TREE_CODE_CLASS (TREE_CODE (arg0)) == '<')
3344 {
3345 tree test, true_value, false_value;
3346
3347 if (TREE_CODE (arg0) == COND_EXPR)
3348 {
3349 test = TREE_OPERAND (arg0, 0);
3350 true_value = TREE_OPERAND (arg0, 1);
3351 false_value = TREE_OPERAND (arg0, 2);
3352 }
3353 else
3354 {
3355 tree testtype = TREE_TYPE (arg0);
3356 test = arg0;
3357 true_value = convert (testtype, integer_one_node);
3358 false_value = convert (testtype, integer_zero_node);
3359 }
3360
3361 if (TREE_CODE (arg1) != SAVE_EXPR
3362 && ((TREE_CODE (arg1) != VAR_DECL
3363 && TREE_CODE (arg1) != PARM_DECL)
3364 || TREE_SIDE_EFFECTS (arg1)))
3365 {
3366 tree lhs = fold (build (code, type, true_value, arg1));
3367 tree rhs = fold (build (code, type, false_value, arg1));
3368
3369 if (TREE_CONSTANT (lhs) || TREE_CONSTANT (rhs)
3370 || TREE_CONSTANT (arg1))
3371 return fold (build (COND_EXPR, type, test, lhs, rhs));
3372
3373 arg1 = save_expr (arg1);
3374 }
3375
3376 test = fold (build (COND_EXPR, type, test,
3377 fold (build (code, type, true_value, arg1)),
3378 fold (build (code, type, false_value, arg1))));
3379 if (TREE_CODE (arg1) == SAVE_EXPR)
3380 return build (COMPOUND_EXPR, type,
3381 convert (void_type_node, arg1),
3382 strip_compound_expr (test, arg1));
3383 else
3384 return convert (type, test);
3385 }
3386 }
3387 else if (TREE_CODE_CLASS (code) == '<'
3388 && TREE_CODE (arg0) == COMPOUND_EXPR)
3389 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
3390 fold (build (code, type, TREE_OPERAND (arg0, 1), arg1)));
3391 else if (TREE_CODE_CLASS (code) == '<'
3392 && TREE_CODE (arg1) == COMPOUND_EXPR)
3393 return build (COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
3394 fold (build (code, type, arg0, TREE_OPERAND (arg1, 1))));
3395
3396 switch (code)
3397 {
3398 case INTEGER_CST:
3399 case REAL_CST:
3400 case STRING_CST:
3401 case COMPLEX_CST:
3402 case CONSTRUCTOR:
3403 return t;
3404
3405 case CONST_DECL:
3406 return fold (DECL_INITIAL (t));
3407
3408 case NOP_EXPR:
3409 case FLOAT_EXPR:
3410 case CONVERT_EXPR:
3411 case FIX_TRUNC_EXPR:
3412 /* Other kinds of FIX are not handled properly by fold_convert. */
3413
3414 if (TREE_TYPE (TREE_OPERAND (t, 0)) == TREE_TYPE (t))
3415 return TREE_OPERAND (t, 0);
3416
3417 /* Handle cases of two conversions in a row. */
3418 if (TREE_CODE (TREE_OPERAND (t, 0)) == NOP_EXPR
3419 || TREE_CODE (TREE_OPERAND (t, 0)) == CONVERT_EXPR)
3420 {
3421 tree inside_type = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0));
3422 tree inter_type = TREE_TYPE (TREE_OPERAND (t, 0));
3423 tree final_type = TREE_TYPE (t);
3424 int inside_int = INTEGRAL_TYPE_P (inside_type);
3425 int inside_ptr = POINTER_TYPE_P (inside_type);
3426 int inside_float = FLOAT_TYPE_P (inside_type);
3427 int inside_prec = TYPE_PRECISION (inside_type);
3428 int inside_unsignedp = TREE_UNSIGNED (inside_type);
3429 int inter_int = INTEGRAL_TYPE_P (inter_type);
3430 int inter_ptr = POINTER_TYPE_P (inter_type);
3431 int inter_float = FLOAT_TYPE_P (inter_type);
3432 int inter_prec = TYPE_PRECISION (inter_type);
3433 int inter_unsignedp = TREE_UNSIGNED (inter_type);
3434 int final_int = INTEGRAL_TYPE_P (final_type);
3435 int final_ptr = POINTER_TYPE_P (final_type);
3436 int final_float = FLOAT_TYPE_P (final_type);
3437 int final_prec = TYPE_PRECISION (final_type);
3438 int final_unsignedp = TREE_UNSIGNED (final_type);
3439
3440 /* In addition to the cases of two conversions in a row
3441 handled below, if we are converting something to its own
3442 type via an object of identical or wider precision, neither
3443 conversion is needed. */
3444 if (inside_type == final_type
3445 && ((inter_int && final_int) || (inter_float && final_float))
3446 && inter_prec >= final_prec)
3447 return TREE_OPERAND (TREE_OPERAND (t, 0), 0);
3448
3449 /* Likewise, if the intermediate and final types are either both
3450 float or both integer, we don't need the middle conversion if
3451 it is wider than the final type and doesn't change the signedness
3452 (for integers). Avoid this if the final type is a pointer
3453 since then we sometimes need the inner conversion. Likewise if
3454 the outer has a precision not equal to the size of its mode. */
3455 if ((((inter_int || inter_ptr) && (inside_int || inside_ptr))
3456 || (inter_float && inside_float))
3457 && inter_prec >= inside_prec
3458 && (inter_float || inter_unsignedp == inside_unsignedp)
3459 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
3460 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
3461 && ! final_ptr)
3462 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
3463
3464 /* Two conversions in a row are not needed unless:
3465 - some conversion is floating-point (overstrict for now), or
3466 - the intermediate type is narrower than both initial and
3467 final, or
3468 - the intermediate type and innermost type differ in signedness,
3469 and the outermost type is wider than the intermediate, or
3470 - the initial type is a pointer type and the precisions of the
3471 intermediate and final types differ, or
3472 - the final type is a pointer type and the precisions of the
3473 initial and intermediate types differ. */
3474 if (! inside_float && ! inter_float && ! final_float
3475 && (inter_prec > inside_prec || inter_prec > final_prec)
3476 && ! (inside_int && inter_int
3477 && inter_unsignedp != inside_unsignedp
3478 && inter_prec < final_prec)
3479 && ((inter_unsignedp && inter_prec > inside_prec)
3480 == (final_unsignedp && final_prec > inter_prec))
3481 && ! (inside_ptr && inter_prec != final_prec)
3482 && ! (final_ptr && inside_prec != inter_prec)
3483 && ! (final_prec != GET_MODE_BITSIZE (TYPE_MODE (final_type))
3484 && TYPE_MODE (final_type) == TYPE_MODE (inter_type))
3485 && ! final_ptr)
3486 return convert (final_type, TREE_OPERAND (TREE_OPERAND (t, 0), 0));
3487 }
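
/* Illustrative examples for the rules above, not part of this file:
   assuming 32-bit int and 64-bit long, "(int) (long) i" for int I
   folds to plain "i" (an identity via a wider type), while
   "(int) (char) i" must keep the narrowing intermediate conversion. */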
3488
3489 if (TREE_CODE (TREE_OPERAND (t, 0)) == MODIFY_EXPR
3490 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t, 0), 1))
3491 /* Detect assigning a bitfield. */
3492 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == COMPONENT_REF
3493 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 1))))
3494 {
3495 /* Don't leave an assignment inside a conversion
3496 unless assigning a bitfield. */
3497 tree prev = TREE_OPERAND (t, 0);
3498 TREE_OPERAND (t, 0) = TREE_OPERAND (prev, 1);
3499 /* First do the assignment, then return converted constant. */
3500 t = build (COMPOUND_EXPR, TREE_TYPE (t), prev, fold (t));
3501 TREE_USED (t) = 1;
3502 return t;
3503 }
3504 if (!wins)
3505 {
3506 TREE_CONSTANT (t) = TREE_CONSTANT (arg0);
3507 return t;
3508 }
3509 return fold_convert (t, arg0);
3510
3511 #if 0 /* This loses on &"foo"[0]. */
3512 case ARRAY_REF:
3513 {
3514 int i;
3515
3516 /* Fold an expression like: "foo"[2] */
3517 if (TREE_CODE (arg0) == STRING_CST
3518 && TREE_CODE (arg1) == INTEGER_CST
3519 && !TREE_INT_CST_HIGH (arg1)
3520 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3521 {
3522 t = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3523 TREE_TYPE (t) = TREE_TYPE (TREE_TYPE (arg0));
3524 force_fit_type (t, 0);
3525 }
3526 }
3527 return t;
3528 #endif /* 0 */
3529
3530 case COMPONENT_REF:
3531 if (TREE_CODE (arg0) == CONSTRUCTOR)
3532 {
3533 tree m = purpose_member (arg1, CONSTRUCTOR_ELTS (arg0));
3534 if (m)
3535 t = TREE_VALUE (m);
3536 }
3537 return t;
3538
3539 case RANGE_EXPR:
3540 TREE_CONSTANT (t) = wins;
3541 return t;
3542
3543 case NEGATE_EXPR:
3544 if (wins)
3545 {
3546 if (TREE_CODE (arg0) == INTEGER_CST)
3547 {
3548 HOST_WIDE_INT low, high;
3549 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
3550 TREE_INT_CST_HIGH (arg0),
3551 &low, &high);
3552 t = build_int_2 (low, high);
3553 TREE_TYPE (t) = type;
3554 TREE_OVERFLOW (t)
3555 = (TREE_OVERFLOW (arg0)
3556 | force_fit_type (t, overflow && !TREE_UNSIGNED (type)));
3557 TREE_CONSTANT_OVERFLOW (t)
3558 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
3559 }
3560 else if (TREE_CODE (arg0) == REAL_CST)
3561 t = build_real (type, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
3562 TREE_TYPE (t) = type;
3563 }
3564 else if (TREE_CODE (arg0) == NEGATE_EXPR)
3565 return TREE_OPERAND (arg0, 0);
3566
3567 /* Convert - (a - b) to (b - a) for non-floating-point. */
3568 else if (TREE_CODE (arg0) == MINUS_EXPR && ! FLOAT_TYPE_P (type))
3569 return build (MINUS_EXPR, type, TREE_OPERAND (arg0, 1),
3570 TREE_OPERAND (arg0, 0));
3571
3572 return t;
3573
3574 case ABS_EXPR:
3575 if (wins)
3576 {
3577 if (TREE_CODE (arg0) == INTEGER_CST)
3578 {
3579 if (! TREE_UNSIGNED (type)
3580 && TREE_INT_CST_HIGH (arg0) < 0)
3581 {
3582 HOST_WIDE_INT low, high;
3583 int overflow = neg_double (TREE_INT_CST_LOW (arg0),
3584 TREE_INT_CST_HIGH (arg0),
3585 &low, &high);
3586 t = build_int_2 (low, high);
3587 TREE_TYPE (t) = type;
3588 TREE_OVERFLOW (t)
3589 = (TREE_OVERFLOW (arg0)
3590 | force_fit_type (t, overflow));
3591 TREE_CONSTANT_OVERFLOW (t)
3592 = TREE_OVERFLOW (t) | TREE_CONSTANT_OVERFLOW (arg0);
3593 }
3594 }
3595 else if (TREE_CODE (arg0) == REAL_CST)
3596 {
3597 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
3598 t = build_real (type,
3599 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0)));
3600 }
3601 TREE_TYPE (t) = type;
3602 }
3603 else if (TREE_CODE (arg0) == ABS_EXPR || TREE_CODE (arg0) == NEGATE_EXPR)
3604 return build1 (ABS_EXPR, type, TREE_OPERAND (arg0, 0));
3605 return t;
3606
3607 case CONJ_EXPR:
3608 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
3609 return arg0;
3610 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
3611 return build (COMPLEX_EXPR, TREE_TYPE (arg0),
3612 TREE_OPERAND (arg0, 0),
3613 fold (build1 (NEGATE_EXPR,
3614 TREE_TYPE (TREE_TYPE (arg0)),
3615 TREE_OPERAND (arg0, 1))));
3616 else if (TREE_CODE (arg0) == COMPLEX_CST)
3617 return build_complex (TREE_OPERAND (arg0, 0),
3618 fold (build1 (NEGATE_EXPR,
3619 TREE_TYPE (TREE_TYPE (arg0)),
3620 TREE_OPERAND (arg0, 1))));
3621 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
3622 return fold (build (TREE_CODE (arg0), type,
3623 fold (build1 (CONJ_EXPR, type,
3624 TREE_OPERAND (arg0, 0))),
3625 fold (build1 (CONJ_EXPR,
3626 type, TREE_OPERAND (arg0, 1)))));
3627 else if (TREE_CODE (arg0) == CONJ_EXPR)
3628 return TREE_OPERAND (arg0, 0);
3629 return t;
3630
3631 case BIT_NOT_EXPR:
3632 if (wins)
3633 {
3634 if (TREE_CODE (arg0) == INTEGER_CST)
3635 t = build_int_2 (~ TREE_INT_CST_LOW (arg0),
3636 ~ TREE_INT_CST_HIGH (arg0));
3637 TREE_TYPE (t) = type;
3638 force_fit_type (t, 0);
3639 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg0);
3640 TREE_CONSTANT_OVERFLOW (t) = TREE_CONSTANT_OVERFLOW (arg0);
3641 }
3642 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
3643 return TREE_OPERAND (arg0, 0);
3644 return t;
3645
3646 case PLUS_EXPR:
3647 /* A + (-B) -> A - B */
3648 if (TREE_CODE (arg1) == NEGATE_EXPR)
3649 return fold (build (MINUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
3650 else if (! FLOAT_TYPE_P (type))
3651 {
3652 if (integer_zerop (arg1))
3653 return non_lvalue (convert (type, arg0));
3654
3655 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
3656 with a constant, and the two constants have no bits in common,
3657 we should treat this as a BIT_IOR_EXPR since this may produce more
3658 simplifications. */
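         /* For example, (a & 0xf0) + (b & 0x0f) is handled as
            (a & 0xf0) | (b & 0x0f), since the disjoint masks allow
            no carries between the two terms.  */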
3659 if (TREE_CODE (arg0) == BIT_AND_EXPR
3660 && TREE_CODE (arg1) == BIT_AND_EXPR
3661 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3662 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3663 && integer_zerop (const_binop (BIT_AND_EXPR,
3664 TREE_OPERAND (arg0, 1),
3665 TREE_OPERAND (arg1, 1), 0)))
3666 {
3667 code = BIT_IOR_EXPR;
3668 goto bit_ior;
3669 }
3670
3671 /* (A * C) + (B * C) -> (A+B) * C. Since we are most concerned
3672 about the case where C is a constant, just try one of the
3673 four possibilities. */
3674
3675 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
3676 && operand_equal_p (TREE_OPERAND (arg0, 1),
3677 TREE_OPERAND (arg1, 1), 0))
3678 return fold (build (MULT_EXPR, type,
3679 fold (build (PLUS_EXPR, type,
3680 TREE_OPERAND (arg0, 0),
3681 TREE_OPERAND (arg1, 0))),
3682 TREE_OPERAND (arg0, 1)));
3683 }
3684 /* In IEEE floating point, x+0 may not equal x. */
3685 else if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3686 || flag_fast_math)
3687 && real_zerop (arg1))
3688 return non_lvalue (convert (type, arg0));
3689 associate:
3690 /* In most languages, we can't associate operations on floats
3691 through parentheses. Rather than remember where the parentheses
3692 were, we don't associate floats at all. It shouldn't matter much.
3693 However, associating multiplications is only very slightly
3694 inaccurate, so do that if -ffast-math is specified. */
3695 if (FLOAT_TYPE_P (type)
3696 && ! (flag_fast_math && code == MULT_EXPR))
3697 goto binary;
3698
3699 /* The varsign == -1 cases happen only for addition and subtraction.
3700 It says that the arg that was split was really CON minus VAR.
3701 The rest of the code applies to all associative operations. */
3702 if (!wins)
3703 {
3704 tree var, con;
3705 int varsign;
3706
3707 if (split_tree (arg0, code, &var, &con, &varsign))
3708 {
3709 if (varsign == -1)
3710 {
3711 /* EXPR is (CON-VAR) +- ARG1. */
3712 /* If it is + and VAR==ARG1, return just CONST. */
3713 if (code == PLUS_EXPR && operand_equal_p (var, arg1, 0))
3714 return convert (TREE_TYPE (t), con);
3715
3716 /* If ARG0 is a constant, don't change things around;
3717 instead keep all the constant computations together. */
3718
3719 if (TREE_CONSTANT (arg0))
3720 return t;
3721
3722 /* Otherwise return (CON +- ARG1) - VAR. */
3723 t = build (MINUS_EXPR, type,
3724 fold (build (code, type, con, arg1)), var);
3725 }
3726 else
3727 {
3728 /* EXPR is (VAR+CON) +- ARG1. */
3729 /* If it is - and VAR==ARG1, return just CONST. */
3730 if (code == MINUS_EXPR && operand_equal_p (var, arg1, 0))
3731 return convert (TREE_TYPE (t), con);
3732
3733 /* If ARG0 is a constant, don't change things around;
3734 instead keep all the constant computations together. */
3735
3736 if (TREE_CONSTANT (arg0))
3737 return t;
3738
3739 /* Otherwise return VAR +- (ARG1 +- CON). */
3740 tem = fold (build (code, type, arg1, con));
3741 t = build (code, type, var, tem);
3742
3743 if (integer_zerop (tem)
3744 && (code == PLUS_EXPR || code == MINUS_EXPR))
3745 return convert (type, var);
3746 /* If we have x +/- (c - d) [c an explicit integer]
3747 change it to x -/+ (d - c) since if d is relocatable
3748 then the latter can be a single immediate insn
3749 and the former cannot. */
3750 if (TREE_CODE (tem) == MINUS_EXPR
3751 && TREE_CODE (TREE_OPERAND (tem, 0)) == INTEGER_CST)
3752 {
3753 tree tem1 = TREE_OPERAND (tem, 1);
3754 TREE_OPERAND (tem, 1) = TREE_OPERAND (tem, 0);
3755 TREE_OPERAND (tem, 0) = tem1;
3756 TREE_SET_CODE (t,
3757 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
3758 }
3759 }
3760 return t;
3761 }
3762
3763 if (split_tree (arg1, code, &var, &con, &varsign))
3764 {
3765 if (TREE_CONSTANT (arg1))
3766 return t;
3767
3768 if (varsign == -1)
3769 TREE_SET_CODE (t,
3770 (code == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR));
3771
3772 /* EXPR is ARG0 +- (CON +- VAR). */
3773 if (TREE_CODE (t) == MINUS_EXPR
3774 && operand_equal_p (var, arg0, 0))
3775 {
3776 /* If VAR and ARG0 cancel, return just CON or -CON. */
3777 if (code == PLUS_EXPR)
3778 return convert (TREE_TYPE (t), con);
3779 return fold (build1 (NEGATE_EXPR, TREE_TYPE (t),
3780 convert (TREE_TYPE (t), con)));
3781 }
3782
3783 t = build (TREE_CODE (t), type,
3784 fold (build (code, TREE_TYPE (t), arg0, con)), var);
3785
3786 if (integer_zerop (TREE_OPERAND (t, 0))
3787 && TREE_CODE (t) == PLUS_EXPR)
3788 return convert (TREE_TYPE (t), var);
3789 return t;
3790 }
3791 }
3792 binary:
3793 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
3794 if (TREE_CODE (arg1) == REAL_CST)
3795 return t;
3796 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
3797 if (wins)
3798 t1 = const_binop (code, arg0, arg1, 0);
3799 if (t1 != NULL_TREE)
3800 {
3801 /* The return value should always have
3802 the same type as the original expression. */
3803 TREE_TYPE (t1) = TREE_TYPE (t);
3804 return t1;
3805 }
3806 return t;
3807
3808 case MINUS_EXPR:
3809 if (! FLOAT_TYPE_P (type))
3810 {
3811 if (! wins && integer_zerop (arg0))
3812 return build1 (NEGATE_EXPR, type, arg1);
3813 if (integer_zerop (arg1))
3814 return non_lvalue (convert (type, arg0));
3815
3816 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
3817 about the case where C is a constant, just try one of the
3818 four possibilities. */
3819
3820 if (TREE_CODE (arg0) == MULT_EXPR && TREE_CODE (arg1) == MULT_EXPR
3821 && operand_equal_p (TREE_OPERAND (arg0, 1),
3822 TREE_OPERAND (arg1, 1), 0))
3823 return fold (build (MULT_EXPR, type,
3824 fold (build (MINUS_EXPR, type,
3825 TREE_OPERAND (arg0, 0),
3826 TREE_OPERAND (arg1, 0))),
3827 TREE_OPERAND (arg0, 1)));
3828 }
3829 /* Convert A - (-B) to A + B. */
3830 else if (TREE_CODE (arg1) == NEGATE_EXPR)
3831 return fold (build (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0)));
3832
3833 else if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3834 || flag_fast_math)
3835 {
3836 /* Except with IEEE floating point, 0-x equals -x. */
3837 if (! wins && real_zerop (arg0))
3838 return build1 (NEGATE_EXPR, type, arg1);
3839 /* Except with IEEE floating point, x-0 equals x. */
3840 if (real_zerop (arg1))
3841 return non_lvalue (convert (type, arg0));
3842 }
3843
3844 /* Fold &x - &x. This can happen from &x.foo - &x.
3845 This is unsafe for certain floats even in non-IEEE formats.
3846 In IEEE, it is unsafe because it does wrong for NaNs.
3847 Also note that operand_equal_p is always false if an operand
3848 is volatile. */
3849
3850 if ((! FLOAT_TYPE_P (type) || flag_fast_math)
3851 && operand_equal_p (arg0, arg1, 0))
3852 return convert (type, integer_zero_node);
3853
3854 goto associate;
3855
3856 case MULT_EXPR:
3857 if (! FLOAT_TYPE_P (type))
3858 {
3859 if (integer_zerop (arg1))
3860 return omit_one_operand (type, arg1, arg0);
3861 if (integer_onep (arg1))
3862 return non_lvalue (convert (type, arg0));
3863
3864 /* ((A / C) * C) is A if the division is an
3865 EXACT_DIV_EXPR. Since C is normally a constant,
3866 just check for one of the four possibilities. */
3867
3868 if (TREE_CODE (arg0) == EXACT_DIV_EXPR
3869 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
3870 return TREE_OPERAND (arg0, 0);
3871
3872 /* (a * (1 << b)) is (a << b) */
3873 if (TREE_CODE (arg1) == LSHIFT_EXPR
3874 && integer_onep (TREE_OPERAND (arg1, 0)))
3875 return fold (build (LSHIFT_EXPR, type, arg0,
3876 TREE_OPERAND (arg1, 1)));
3877 if (TREE_CODE (arg0) == LSHIFT_EXPR
3878 && integer_onep (TREE_OPERAND (arg0, 0)))
3879 return fold (build (LSHIFT_EXPR, type, arg1,
3880 TREE_OPERAND (arg0, 1)));
3881 }
3882 else
3883 {
3884 /* x*0 is 0, except for IEEE floating point. */
3885 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3886 || flag_fast_math)
3887 && real_zerop (arg1))
3888 return omit_one_operand (type, arg1, arg0);
3889 /* In IEEE floating point, x*1 is not equivalent to x for snans.
3890 However, ANSI says we can drop signals,
3891 so we can do this anyway. */
3892 if (real_onep (arg1))
3893 return non_lvalue (convert (type, arg0));
3894 /* x*2 is x+x */
3895 if (! wins && real_twop (arg1))
3896 {
3897 tree arg = save_expr (arg0);
3898 return build (PLUS_EXPR, type, arg, arg);
3899 }
3900 }
3901 goto associate;
3902
3903 case BIT_IOR_EXPR:
3904 bit_ior:
3905 if (integer_all_onesp (arg1))
3906 return omit_one_operand (type, arg1, arg0);
3907 if (integer_zerop (arg1))
3908 return non_lvalue (convert (type, arg0));
3909 t1 = distribute_bit_expr (code, type, arg0, arg1);
3910 if (t1 != NULL_TREE)
3911 return t1;
3912
3913 /* If A is unsigned and C1+C2 is the size of A, then
3914 (A << C1) | (A >> C2) is a rotate of A by C1 bits. */
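     /* For example, with a 32-bit unsigned A,
        (A << 3) | (A >> 29) becomes A rotated left by 3 bits.  */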
3915
3916 if ((TREE_CODE (arg0) == RSHIFT_EXPR
3917 || TREE_CODE (arg0) == LSHIFT_EXPR)
3918 && (TREE_CODE (arg1) == RSHIFT_EXPR
3919 || TREE_CODE (arg1) == LSHIFT_EXPR)
3920 && TREE_CODE (arg0) != TREE_CODE (arg1)
3921 && operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1,0), 0)
3922 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0)))
3923 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
3924 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
3925 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
3926 && TREE_INT_CST_HIGH (TREE_OPERAND (arg1, 1)) == 0
3927 && ((TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
3928 + TREE_INT_CST_LOW (TREE_OPERAND (arg1, 1)))
3929 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
3930 return build (LROTATE_EXPR, type, TREE_OPERAND (arg0, 0),
3931 TREE_CODE (arg0) == LSHIFT_EXPR
3932 ? TREE_OPERAND (arg0, 1) : TREE_OPERAND (arg1, 1));
3933
3934 goto associate;
3935
3936 case BIT_XOR_EXPR:
3937 if (integer_zerop (arg1))
3938 return non_lvalue (convert (type, arg0));
3939 if (integer_all_onesp (arg1))
3940 return fold (build1 (BIT_NOT_EXPR, type, arg0));
3941 goto associate;
3942
3943 case BIT_AND_EXPR:
3944 bit_and:
3945 if (integer_all_onesp (arg1))
3946 return non_lvalue (convert (type, arg0));
3947 if (integer_zerop (arg1))
3948 return omit_one_operand (type, arg1, arg0);
3949 t1 = distribute_bit_expr (code, type, arg0, arg1);
3950 if (t1 != NULL_TREE)
3951 return t1;
3952 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
3953 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == NOP_EXPR
3954 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0))))
3955 {
3956 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)));
3957 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
3958 && (~TREE_INT_CST_LOW (arg0)
3959 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
3960 return build1 (NOP_EXPR, type, TREE_OPERAND (arg1, 0));
3961 }
3962 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
3963 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
3964 {
3965 int prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
3966 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
3967 && (~TREE_INT_CST_LOW (arg1)
3968 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
3969 return build1 (NOP_EXPR, type, TREE_OPERAND (arg0, 0));
3970 }
3971 goto associate;
3972
3973 case BIT_ANDTC_EXPR:
3974 if (integer_all_onesp (arg0))
3975 return non_lvalue (convert (type, arg1));
3976 if (integer_zerop (arg0))
3977 return omit_one_operand (type, arg0, arg1);
3978 if (TREE_CODE (arg1) == INTEGER_CST)
3979 {
3980 arg1 = fold (build1 (BIT_NOT_EXPR, type, arg1));
3981 code = BIT_AND_EXPR;
3982 goto bit_and;
3983 }
3984 goto binary;
3985
3986 case RDIV_EXPR:
3987 /* In most cases, do nothing with a divide by zero. */
3988 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3989 #ifndef REAL_INFINITY
3990 if (TREE_CODE (arg1) == REAL_CST && real_zerop (arg1))
3991 return t;
3992 #endif
3993 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3994
3995 /* In IEEE floating point, x/1 is not equivalent to x for snans.
3996 However, ANSI says we can drop signals, so we can do this anyway. */
3997 if (real_onep (arg1))
3998 return non_lvalue (convert (type, arg0));
3999
4000 /* If ARG1 is a constant, we can convert this to a multiply by the
4001 reciprocal. This does not have the same rounding properties,
4002 so only do this if -ffast-math. We can actually always safely
4003 do it if ARG1 is a power of two, but it's hard to tell if it is
4004 or not in a portable manner. */
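      /* For example, with -ffast-math, X / 5.0 becomes X * (1.0 / 5.0),
         with the reciprocal folded to a constant by const_binop.  */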
4005 if (TREE_CODE (arg1) == REAL_CST && flag_fast_math
4006 && 0 != (tem = const_binop (code, build_real (type, dconst1),
4007 arg1, 0)))
4008 return fold (build (MULT_EXPR, type, arg0, tem));
4009
4010 goto binary;
4011
4012 case TRUNC_DIV_EXPR:
4013 case ROUND_DIV_EXPR:
4014 case FLOOR_DIV_EXPR:
4015 case CEIL_DIV_EXPR:
4016 case EXACT_DIV_EXPR:
4017 if (integer_onep (arg1))
4018 return non_lvalue (convert (type, arg0));
4019 if (integer_zerop (arg1))
4020 return t;
4021
4022 /* If we have ((a / C1) / C2) where both divisions are the same type, try
4023 to simplify. First see if C1 * C2 overflows or not. */
4024 if (TREE_CODE (arg0) == code && TREE_CODE (arg1) == INTEGER_CST
4025 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
4026 {
4027 tree new_divisor;
4028
4029 new_divisor = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 1), arg1, 0);
4030 tem = const_binop (FLOOR_DIV_EXPR, new_divisor, arg1, 0);
4031
4032 if (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_LOW (tem)
4033 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == TREE_INT_CST_HIGH (tem))
4034 {
4035 /* If no overflow, divide by C1*C2. */
4036 return fold (build (code, type, TREE_OPERAND (arg0, 0), new_divisor));
4037 }
4038 }
4039
4040 /* Look for ((a * C1) / C3) or (((a * C1) + C2) / C3),
4041 where C1 % C3 == 0 or C3 % C1 == 0. We can simplify these
4042 expressions, which often appear in the offsets or sizes of
4043 objects with a varying size. Only deal with positive divisors
4044 and multiplicands. If C2 is negative, we must have C2 % C3 == 0.
4045
4046 Look for NOPs and SAVE_EXPRs inside. */
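      /* For example, (a * 8 + 4) / 2 folds to a * 4 + 2 here,
         since 8 % 2 == 0 and 4 % 2 == 0.  */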
4047
4048 if (TREE_CODE (arg1) == INTEGER_CST
4049 && tree_int_cst_sgn (arg1) >= 0)
4050 {
4051 int have_save_expr = 0;
4052 tree c2 = integer_zero_node;
4053 tree xarg0 = arg0;
4054
4055 if (TREE_CODE (xarg0) == SAVE_EXPR)
4056 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
4057
4058 STRIP_NOPS (xarg0);
4059
4060 if (TREE_CODE (xarg0) == PLUS_EXPR
4061 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4062 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4063 else if (TREE_CODE (xarg0) == MINUS_EXPR
4064 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4065 /* If we are doing this computation unsigned, the negate
4066 is incorrect. */
4067 && ! TREE_UNSIGNED (type))
4068 {
4069 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4070 xarg0 = TREE_OPERAND (xarg0, 0);
4071 }
4072
4073 if (TREE_CODE (xarg0) == SAVE_EXPR)
4074 have_save_expr = 1, xarg0 = TREE_OPERAND (xarg0, 0);
4075
4076 STRIP_NOPS (xarg0);
4077
4078 if (TREE_CODE (xarg0) == MULT_EXPR
4079 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4080 && tree_int_cst_sgn (TREE_OPERAND (xarg0, 1)) >= 0
4081 && (integer_zerop (const_binop (TRUNC_MOD_EXPR,
4082 TREE_OPERAND (xarg0, 1), arg1, 1))
4083 || integer_zerop (const_binop (TRUNC_MOD_EXPR, arg1,
4084 TREE_OPERAND (xarg0, 1), 1)))
4085 && (tree_int_cst_sgn (c2) >= 0
4086 || integer_zerop (const_binop (TRUNC_MOD_EXPR, c2,
4087 arg1, 1))))
4088 {
4089 tree outer_div = integer_one_node;
4090 tree c1 = TREE_OPERAND (xarg0, 1);
4091 tree c3 = arg1;
4092
4093 /* If C3 > C1, set them equal and do a divide by
4094 C3/C1 at the end of the operation. */
4095 if (tree_int_cst_lt (c1, c3))
4096 outer_div = const_binop (code, c3, c1, 0), c3 = c1;
4097
4098 /* The result is A * (C1/C3) + (C2/C3). */
4099 t = fold (build (PLUS_EXPR, type,
4100 fold (build (MULT_EXPR, type,
4101 TREE_OPERAND (xarg0, 0),
4102 const_binop (code, c1, c3, 1))),
4103 const_binop (code, c2, c3, 1)));
4104
4105 if (! integer_onep (outer_div))
4106 t = fold (build (code, type, t, convert (type, outer_div)));
4107
4108 if (have_save_expr)
4109 t = save_expr (t);
4110
4111 return t;
4112 }
4113 }
4114
4115 goto binary;
4116
4117 case CEIL_MOD_EXPR:
4118 case FLOOR_MOD_EXPR:
4119 case ROUND_MOD_EXPR:
4120 case TRUNC_MOD_EXPR:
4121 if (integer_onep (arg1))
4122 return omit_one_operand (type, integer_zero_node, arg0);
4123 if (integer_zerop (arg1))
4124 return t;
4125
4126 /* Look for ((a * C1) % C3) or (((a * C1) + C2) % C3),
4127 where C1 % C3 == 0. Handle similarly to the division case,
4128 but don't bother with SAVE_EXPRs. */
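      /* For example, (a * 6 + 4) % 3 folds to the constant 1, since
         6 % 3 == 0 leaves only 4 % 3; omit_one_operand still evaluates
         A when it has side effects.  */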
4129
4130 if (TREE_CODE (arg1) == INTEGER_CST
4131 && ! integer_zerop (arg1))
4132 {
4133 tree c2 = integer_zero_node;
4134 tree xarg0 = arg0;
4135
4136 if (TREE_CODE (xarg0) == PLUS_EXPR
4137 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST)
4138 c2 = TREE_OPERAND (xarg0, 1), xarg0 = TREE_OPERAND (xarg0, 0);
4139 else if (TREE_CODE (xarg0) == MINUS_EXPR
4140 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4141 && ! TREE_UNSIGNED (type))
4142 {
4143 c2 = fold (build1 (NEGATE_EXPR, type, TREE_OPERAND (xarg0, 1)));
4144 xarg0 = TREE_OPERAND (xarg0, 0);
4145 }
4146
4147 STRIP_NOPS (xarg0);
4148
4149 if (TREE_CODE (xarg0) == MULT_EXPR
4150 && TREE_CODE (TREE_OPERAND (xarg0, 1)) == INTEGER_CST
4151 && integer_zerop (const_binop (TRUNC_MOD_EXPR,
4152 TREE_OPERAND (xarg0, 1),
4153 arg1, 1))
4154 && tree_int_cst_sgn (c2) >= 0)
4155 /* The result is (C2%C3). */
4156 return omit_one_operand (type, const_binop (code, c2, arg1, 1),
4157 TREE_OPERAND (xarg0, 0));
4158 }
4159
4160 goto binary;
4161
4162 case LSHIFT_EXPR:
4163 case RSHIFT_EXPR:
4164 case LROTATE_EXPR:
4165 case RROTATE_EXPR:
4166 if (integer_zerop (arg1))
4167 return non_lvalue (convert (type, arg0));
4168 /* Since a negative shift count is not well-defined,
4169 don't try to compute it in the compiler. */
4170 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
4171 return t;
4172 /* Rewrite an LROTATE_EXPR by a constant into an
4173 RROTATE_EXPR by a new constant. */
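      /* For example, in a 32-bit type, a rotate left by 5
         becomes a rotate right by 27.  */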
4174 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
4175 {
4176 TREE_SET_CODE (t, RROTATE_EXPR);
4177 code = RROTATE_EXPR;
4178 TREE_OPERAND (t, 1) = arg1
4179 = const_binop
4180 (MINUS_EXPR,
4181 convert (TREE_TYPE (arg1),
4182 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type)), 0)),
4183 arg1, 0);
4184 if (tree_int_cst_sgn (arg1) < 0)
4185 return t;
4186 }
4187
4188 /* If we have a rotate of a bit operation with the rotate count and
4189 the second operand of the bit operation both constant,
4190 permute the two operations. */
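      /* For example, ((A & C) rotated right by N) becomes
         ((A rotated right by N) & (C rotated right by N)), where the
         rotated mask folds to a constant.  */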
4191 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
4192 && (TREE_CODE (arg0) == BIT_AND_EXPR
4193 || TREE_CODE (arg0) == BIT_ANDTC_EXPR
4194 || TREE_CODE (arg0) == BIT_IOR_EXPR
4195 || TREE_CODE (arg0) == BIT_XOR_EXPR)
4196 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
4197 return fold (build (TREE_CODE (arg0), type,
4198 fold (build (code, type,
4199 TREE_OPERAND (arg0, 0), arg1)),
4200 fold (build (code, type,
4201 TREE_OPERAND (arg0, 1), arg1))));
4202
4203 /* Two consecutive rotates adding up to the width of the mode can
4204 be ignored. */
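      /* For example, in a 32-bit type, a rotate right by 10 followed
         by a rotate right by 22 is just A.  */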
4205 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
4206 && TREE_CODE (arg0) == RROTATE_EXPR
4207 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4208 && TREE_INT_CST_HIGH (arg1) == 0
4209 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
4210 && ((TREE_INT_CST_LOW (arg1)
4211 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
4212 == GET_MODE_BITSIZE (TYPE_MODE (type))))
4213 return TREE_OPERAND (arg0, 0);
4214
4215 goto binary;
4216
4217 case MIN_EXPR:
4218 if (operand_equal_p (arg0, arg1, 0))
4219 return arg0;
4220 if (INTEGRAL_TYPE_P (type)
4221 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), 1))
4222 return omit_one_operand (type, arg1, arg0);
4223 goto associate;
4224
4225 case MAX_EXPR:
4226 if (operand_equal_p (arg0, arg1, 0))
4227 return arg0;
4228 if (INTEGRAL_TYPE_P (type)
4229 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), 1))
4230 return omit_one_operand (type, arg1, arg0);
4231 goto associate;
4232
4233 case TRUTH_NOT_EXPR:
4234 /* Note that the operand of this must be an int
4235 and its values must be 0 or 1.
4236 ("true" is a fixed value perhaps depending on the language,
4237 but we don't handle values other than 1 correctly yet.) */
4238 tem = invert_truthvalue (arg0);
4239 /* Avoid infinite recursion. */
4240 if (TREE_CODE (tem) == TRUTH_NOT_EXPR)
4241 return t;
4242 return convert (type, tem);
4243
4244 case TRUTH_ANDIF_EXPR:
4245 /* Note that the operands of this must be ints
4246 and their values must be 0 or 1.
4247 ("true" is a fixed value perhaps depending on the language.) */
4248 /* If first arg is constant zero, return it. */
4249 if (integer_zerop (arg0))
4250 return arg0;
4251 case TRUTH_AND_EXPR:
4252 /* If either arg is constant true, drop it. */
4253 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4254 return non_lvalue (arg1);
4255 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4256 return non_lvalue (arg0);
4257 /* If second arg is constant zero, result is zero, but first arg
4258 must be evaluated. */
4259 if (integer_zerop (arg1))
4260 return omit_one_operand (type, arg1, arg0);
4261
4262 truth_andor:
4263 /* We only do these simplifications if we are optimizing. */
4264 if (!optimize)
4265 return t;
4266
4267 /* Check for things like (A || B) && (A || C). We can convert this
4268 to A || (B && C). Note that either operator can be any of the four
4269 truth and/or operations and the transformation will still be
4270 valid. Also note that we only care about order for the
4271 ANDIF and ORIF operators. */
4272 if (TREE_CODE (arg0) == TREE_CODE (arg1)
4273 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
4274 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
4275 || TREE_CODE (arg0) == TRUTH_AND_EXPR
4276 || TREE_CODE (arg0) == TRUTH_OR_EXPR))
4277 {
4278 tree a00 = TREE_OPERAND (arg0, 0);
4279 tree a01 = TREE_OPERAND (arg0, 1);
4280 tree a10 = TREE_OPERAND (arg1, 0);
4281 tree a11 = TREE_OPERAND (arg1, 1);
4282 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
4283 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
4284 && (code == TRUTH_AND_EXPR
4285 || code == TRUTH_OR_EXPR));
4286
4287 if (operand_equal_p (a00, a10, 0))
4288 return fold (build (TREE_CODE (arg0), type, a00,
4289 fold (build (code, type, a01, a11))));
4290 else if (commutative && operand_equal_p (a00, a11, 0))
4291 return fold (build (TREE_CODE (arg0), type, a00,
4292 fold (build (code, type, a01, a10))));
4293 else if (commutative && operand_equal_p (a01, a10, 0))
4294 return fold (build (TREE_CODE (arg0), type, a01,
4295 fold (build (code, type, a00, a11))));
4296
4297 /* This case is tricky because we must either have commutative
4298 operators or else A10 must not have side-effects. */
4299
4300 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
4301 && operand_equal_p (a01, a11, 0))
4302 return fold (build (TREE_CODE (arg0), type,
4303 fold (build (code, type, a00, a10)),
4304 a01));
4305 }
4306
4307 /* Check for the possibility of merging component references. If our
4308 lhs is another similar operation, try to merge its rhs with our
4309 rhs. Then try to merge our lhs and rhs. */
4310 if (TREE_CODE (arg0) == code
4311 && 0 != (tem = fold_truthop (code, type,
4312 TREE_OPERAND (arg0, 1), arg1)))
4313 return fold (build (code, type, TREE_OPERAND (arg0, 0), tem));
4314
4315 if ((tem = fold_truthop (code, type, arg0, arg1)) != 0)
4316 return tem;
4317
4318 return t;
4319
4320 case TRUTH_ORIF_EXPR:
4321 /* Note that the operands of this must be ints
4322 and their values must be 0 or true.
4323 ("true" is a fixed value perhaps depending on the language.) */
4324 /* If first arg is constant true, return it. */
4325 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
4326 return arg0;
4327 case TRUTH_OR_EXPR:
4328 /* If either arg is constant zero, drop it. */
4329 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
4330 return non_lvalue (arg1);
4331 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1))
4332 return non_lvalue (arg0);
4333 /* If second arg is constant true, result is true, but we must
4334 evaluate first arg. */
4335 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
4336 return omit_one_operand (type, arg1, arg0);
4337 goto truth_andor;
4338
4339 case TRUTH_XOR_EXPR:
4340 /* If either arg is constant zero, drop it. */
4341 if (integer_zerop (arg0))
4342 return non_lvalue (arg1);
4343 if (integer_zerop (arg1))
4344 return non_lvalue (arg0);
4345 /* If either arg is constant true, this is a logical inversion. */
4346 if (integer_onep (arg0))
4347 return non_lvalue (invert_truthvalue (arg1));
4348 if (integer_onep (arg1))
4349 return non_lvalue (invert_truthvalue (arg0));
4350 return t;
4351
4352 case EQ_EXPR:
4353 case NE_EXPR:
4354 case LT_EXPR:
4355 case GT_EXPR:
4356 case LE_EXPR:
4357 case GE_EXPR:
4358 /* If one arg is a constant integer, put it last. */
4359 if (TREE_CODE (arg0) == INTEGER_CST
4360 && TREE_CODE (arg1) != INTEGER_CST)
4361 {
4362 TREE_OPERAND (t, 0) = arg1;
4363 TREE_OPERAND (t, 1) = arg0;
4364 arg0 = TREE_OPERAND (t, 0);
4365 arg1 = TREE_OPERAND (t, 1);
4366 code = swap_tree_comparison (code);
4367 TREE_SET_CODE (t, code);
4368 }
4369
4370 /* Convert foo++ == CONST into ++foo == CONST + INCR.
4371 First, see if one arg is constant; find the constant arg
4372 and the other one. */
4373 {
4374 tree constop = 0, varop;
4375 int constopnum = -1;
4376
4377 if (TREE_CONSTANT (arg1))
4378 constopnum = 1, constop = arg1, varop = arg0;
4379 if (TREE_CONSTANT (arg0))
4380 constopnum = 0, constop = arg0, varop = arg1;
4381
4382 if (constop && TREE_CODE (varop) == POSTINCREMENT_EXPR)
4383 {
4384 /* This optimization is invalid for ordered comparisons
4385 if CONST+INCR overflows or if foo+incr might overflow.
4386 This optimization is invalid for floating point due to rounding.
4387 For pointer types we assume overflow doesn't happen. */
4388 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
4389 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
4390 && (code == EQ_EXPR || code == NE_EXPR)))
4391 {
4392 tree newconst
4393 = fold (build (PLUS_EXPR, TREE_TYPE (varop),
4394 constop, TREE_OPERAND (varop, 1)));
4395 TREE_SET_CODE (varop, PREINCREMENT_EXPR);
4396
4397 t = build (code, type, TREE_OPERAND (t, 0),
4398 TREE_OPERAND (t, 1));
4399 TREE_OPERAND (t, constopnum) = newconst;
4400 return t;
4401 }
4402 }
4403 else if (constop && TREE_CODE (varop) == POSTDECREMENT_EXPR)
4404 {
4405 if (TREE_CODE (TREE_TYPE (varop)) == POINTER_TYPE
4406 || (! FLOAT_TYPE_P (TREE_TYPE (varop))
4407 && (code == EQ_EXPR || code == NE_EXPR)))
4408 {
4409 tree newconst
4410 = fold (build (MINUS_EXPR, TREE_TYPE (varop),
4411 constop, TREE_OPERAND (varop, 1)));
4412 TREE_SET_CODE (varop, PREDECREMENT_EXPR);
4413 t = build (code, type, TREE_OPERAND (t, 0),
4414 TREE_OPERAND (t, 1));
4415 TREE_OPERAND (t, constopnum) = newconst;
4416 return t;
4417 }
4418 }
4419 }
4420
4421 /* Change X >= CST to X > (CST - 1) if CST is positive. */
4422 if (TREE_CODE (arg1) == INTEGER_CST
4423 && TREE_CODE (arg0) != INTEGER_CST
4424 && tree_int_cst_sgn (arg1) > 0)
4425 {
4426 switch (TREE_CODE (t))
4427 {
4428 case GE_EXPR:
4429 code = GT_EXPR;
4430 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
4431 t = build (code, type, TREE_OPERAND (t, 0), arg1);
4432 break;
4433
4434 case LT_EXPR:
4435 code = LE_EXPR;
4436 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node, 0);
4437 t = build (code, type, TREE_OPERAND (t, 0), arg1);
4438 break;
4439 }
4440 }
4441
4442 /* If this is an EQ or NE comparison with zero and ARG0 is
4443 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
4444 two operations, but the latter can be done in one less insn
4445 on machines that have only two-operand insns or on which a
4446 constant cannot be the first operand. */
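      /* For example, ((1 << n) & b) != 0 becomes ((b >> n) & 1) != 0.  */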
4447 if (integer_zerop (arg1) && (code == EQ_EXPR || code == NE_EXPR)
4448 && TREE_CODE (arg0) == BIT_AND_EXPR)
4449 {
4450 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == LSHIFT_EXPR
4451 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 0), 0)))
4452 return
4453 fold (build (code, type,
4454 build (BIT_AND_EXPR, TREE_TYPE (arg0),
4455 build (RSHIFT_EXPR,
4456 TREE_TYPE (TREE_OPERAND (arg0, 0)),
4457 TREE_OPERAND (arg0, 1),
4458 TREE_OPERAND (TREE_OPERAND (arg0, 0), 1)),
4459 convert (TREE_TYPE (arg0),
4460 integer_one_node)),
4461 arg1));
4462 else if (TREE_CODE (TREE_OPERAND (arg0, 1)) == LSHIFT_EXPR
4463 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0, 1), 0)))
4464 return
4465 fold (build (code, type,
4466 build (BIT_AND_EXPR, TREE_TYPE (arg0),
4467 build (RSHIFT_EXPR,
4468 TREE_TYPE (TREE_OPERAND (arg0, 1)),
4469 TREE_OPERAND (arg0, 0),
4470 TREE_OPERAND (TREE_OPERAND (arg0, 1), 1)),
4471 convert (TREE_TYPE (arg0),
4472 integer_one_node)),
4473 arg1));
4474 }
4475
4476 /* If this is an NE or EQ comparison of zero against the result of a
4477 signed MOD operation whose second operand is a power of 2, make
4478 the MOD operation unsigned since it is simpler and equivalent. */
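      /* For example, for signed x, x % 4 == 0 becomes
         (unsigned) x % 4 == 0; an unsigned modulus by a power of 2
         is a simple mask, with no sign correction needed.  */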
4479 if ((code == NE_EXPR || code == EQ_EXPR)
4480 && integer_zerop (arg1)
4481 && ! TREE_UNSIGNED (TREE_TYPE (arg0))
4482 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
4483 || TREE_CODE (arg0) == CEIL_MOD_EXPR
4484 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
4485 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
4486 && integer_pow2p (TREE_OPERAND (arg0, 1)))
4487 {
4488 tree newtype = unsigned_type (TREE_TYPE (arg0));
4489 tree newmod = build (TREE_CODE (arg0), newtype,
4490 convert (newtype, TREE_OPERAND (arg0, 0)),
4491 convert (newtype, TREE_OPERAND (arg0, 1)));
4492
4493 return build (code, type, newmod, convert (newtype, arg1));
4494 }
4495
4496 /* If this is an NE comparison of zero with an AND of one, remove the
4497 comparison since the AND will give the correct value. */
4498 if (code == NE_EXPR && integer_zerop (arg1)
4499 && TREE_CODE (arg0) == BIT_AND_EXPR
4500 && integer_onep (TREE_OPERAND (arg0, 1)))
4501 return convert (type, arg0);
4502
4503 /* If we have (A & C) == C where C is a power of 2, convert this into
4504 (A & C) != 0. Similarly for NE_EXPR. */
4505 if ((code == EQ_EXPR || code == NE_EXPR)
4506 && TREE_CODE (arg0) == BIT_AND_EXPR
4507 && integer_pow2p (TREE_OPERAND (arg0, 1))
4508 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
4509 return build (code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
4510 arg0, integer_zero_node);
4511
4512 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
4513 and similarly for >= into !=. */
4514 if ((code == LT_EXPR || code == GE_EXPR)
4515 && TREE_UNSIGNED (TREE_TYPE (arg0))
4516 && TREE_CODE (arg1) == LSHIFT_EXPR
4517 && integer_onep (TREE_OPERAND (arg1, 0)))
4518 return build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
4519 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
4520 TREE_OPERAND (arg1, 1)),
4521 convert (TREE_TYPE (arg0), integer_zero_node));
4522
4523 else if ((code == LT_EXPR || code == GE_EXPR)
4524 && TREE_UNSIGNED (TREE_TYPE (arg0))
4525 && (TREE_CODE (arg1) == NOP_EXPR
4526 || TREE_CODE (arg1) == CONVERT_EXPR)
4527 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
4528 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
4529 return
4530 build (code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
4531 convert (TREE_TYPE (arg0),
4532 build (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
4533 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1))),
4534 convert (TREE_TYPE (arg0), integer_zero_node));
4535
4536 /* Simplify comparison of something with itself. (For IEEE
4537 floating-point, we can only do some of these simplifications.) */
4538 if (operand_equal_p (arg0, arg1, 0))
4539 {
4540 switch (code)
4541 {
4542 case EQ_EXPR:
4543 case GE_EXPR:
4544 case LE_EXPR:
4545 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
4546 {
4547 t = build_int_2 (1, 0);
4548 TREE_TYPE (t) = type;
4549 return t;
4550 }
4551 code = EQ_EXPR;
4552 TREE_SET_CODE (t, code);
4553 break;
4554
4555 case NE_EXPR:
4556 /* For NE, we can only do this simplification if integer. */
4557 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
4558 break;
4559 /* ... fall through ... */
4560 case GT_EXPR:
4561 case LT_EXPR:
4562 t = build_int_2 (0, 0);
4563 TREE_TYPE (t) = type;
4564 return t;
4565 }
4566 }
4567
4568 /* An unsigned comparison against 0 can be simplified. */
4569 if (integer_zerop (arg1)
4570 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
4571 || TREE_CODE (TREE_TYPE (arg1)) == POINTER_TYPE)
4572 && TREE_UNSIGNED (TREE_TYPE (arg1)))
4573 {
4574 switch (TREE_CODE (t))
4575 {
4576 case GT_EXPR:
4577 code = NE_EXPR;
4578 TREE_SET_CODE (t, NE_EXPR);
4579 break;
4580 case LE_EXPR:
4581 code = EQ_EXPR;
4582 TREE_SET_CODE (t, EQ_EXPR);
4583 break;
4584 case GE_EXPR:
4585 return omit_one_operand (type,
4586 convert (type, integer_one_node),
4587 arg0);
4588 case LT_EXPR:
4589 return omit_one_operand (type,
4590 convert (type, integer_zero_node),
4591 arg0);
4592 }
4593 }
4594
4595 /* If we are comparing an expression that just has comparisons
4596 of two integer values, arithmetic expressions of those comparisons,
4597 and constants, we can simplify it. There are only three cases
4598 to check: the two values can either be equal, the first can be
4599 greater, or the second can be greater. Fold the expression for
4600 those three values. Since each value must be 0 or 1, we have
4601 eight possibilities, each of which corresponds to the constant 0
4602 or 1 or one of the six possible comparisons.
4603
4604 This handles common cases like (a > b) == 0 but also handles
4605 expressions like ((x > y) - (y > x)) > 0, which supposedly
4606 occur in macroized code. */
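      /* For example, ((x > y) - (y > x)) > 0 evaluates to 1, 0, and 0
         as x is greater than, equal to, or less than y; that selects
         mask 4 below, so the expression folds to x > y.  */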
4607
4608 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
4609 {
4610 tree cval1 = 0, cval2 = 0;
4611 int save_p = 0;
4612
4613 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
4614 /* Don't handle degenerate cases here; they should already
4615 have been handled anyway. */
4616 && cval1 != 0 && cval2 != 0
4617 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
4618 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
4619 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
4620 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
4621 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
4622 {
4623 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
4624 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
4625
4626 /* We can't just pass T to eval_subst in case cval1 or cval2
4627 was the same as ARG1. */
4628
4629 tree high_result
4630 = fold (build (code, type,
4631 eval_subst (arg0, cval1, maxval, cval2, minval),
4632 arg1));
4633 tree equal_result
4634 = fold (build (code, type,
4635 eval_subst (arg0, cval1, maxval, cval2, maxval),
4636 arg1));
4637 tree low_result
4638 = fold (build (code, type,
4639 eval_subst (arg0, cval1, minval, cval2, maxval),
4640 arg1));
4641
4642 /* All three of these results should be 0 or 1. Confirm they
4643 are. Then use those values to select the proper code
4644 to use. */
4645
4646 if ((integer_zerop (high_result)
4647 || integer_onep (high_result))
4648 && (integer_zerop (equal_result)
4649 || integer_onep (equal_result))
4650 && (integer_zerop (low_result)
4651 || integer_onep (low_result)))
4652 {
4653 /* Make a 3-bit mask with the high-order bit being the
4654 value for `>', the next for `=', and the low for `<'. */
4655 switch ((integer_onep (high_result) * 4)
4656 + (integer_onep (equal_result) * 2)
4657 + integer_onep (low_result))
4658 {
4659 case 0:
4660 /* Always false. */
4661 return omit_one_operand (type, integer_zero_node, arg0);
4662 case 1:
4663 code = LT_EXPR;
4664 break;
4665 case 2:
4666 code = EQ_EXPR;
4667 break;
4668 case 3:
4669 code = LE_EXPR;
4670 break;
4671 case 4:
4672 code = GT_EXPR;
4673 break;
4674 case 5:
4675 code = NE_EXPR;
4676 break;
4677 case 6:
4678 code = GE_EXPR;
4679 break;
4680 case 7:
4681 /* Always true. */
4682 return omit_one_operand (type, integer_one_node, arg0);
4683 }
4684
4685 t = build (code, type, cval1, cval2);
4686 if (save_p)
4687 return save_expr (t);
4688 else
4689 return fold (t);
4690 }
4691 }
4692 }
4693
4694 /* If this is a comparison of a field, we may be able to simplify it. */
4695 if ((TREE_CODE (arg0) == COMPONENT_REF
4696 || TREE_CODE (arg0) == BIT_FIELD_REF)
4697 && (code == EQ_EXPR || code == NE_EXPR)
4698 /* Handle the constant case even without -O
4699 to make sure the warnings are given. */
4700 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
4701 {
4702 t1 = optimize_bit_field_compare (code, type, arg0, arg1);
4703 return t1 ? t1 : t;
4704 }
4705
4706 /* If this is a comparison of complex values and either or both
4707 sides are a COMPLEX_EXPR, it is best to split up the comparisons
4708 and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR. This
4709 may prevent needless evaluations. */
4710 if ((code == EQ_EXPR || code == NE_EXPR)
4711 && TREE_CODE (TREE_TYPE (arg0)) == COMPLEX_TYPE
4712 && (TREE_CODE (arg0) == COMPLEX_EXPR
4713 || TREE_CODE (arg1) == COMPLEX_EXPR))
4714 {
4715 tree subtype = TREE_TYPE (TREE_TYPE (arg0));
4716 tree real0 = fold (build1 (REALPART_EXPR, subtype, arg0));
4717 tree imag0 = fold (build1 (IMAGPART_EXPR, subtype, arg0));
4718 tree real1 = fold (build1 (REALPART_EXPR, subtype, arg1));
4719 tree imag1 = fold (build1 (IMAGPART_EXPR, subtype, arg1));
4720
4721 return fold (build ((code == EQ_EXPR ? TRUTH_ANDIF_EXPR
4722 : TRUTH_ORIF_EXPR),
4723 type,
4724 fold (build (code, type, real0, real1)),
4725 fold (build (code, type, imag0, imag1))));
4726 }
4727
4728 /* From here on, the only cases we handle are when the result is
4729 known to be a constant.
4730
4731 To compute GT, swap the arguments and do LT.
4732 To compute GE, do LT and invert the result.
4733 To compute LE, swap the arguments, do LT and invert the result.
4734 To compute NE, do EQ and invert the result.
4735
4736 Therefore, the code below must handle only EQ and LT. */
4737
4738 if (code == LE_EXPR || code == GT_EXPR)
4739 {
4740 tem = arg0, arg0 = arg1, arg1 = tem;
4741 code = swap_tree_comparison (code);
4742 }
4743
4744 /* Note that it is safe to invert for real values here because we
4745 will check below in the one case that it matters. */
4746
4747 invert = 0;
4748 if (code == NE_EXPR || code == GE_EXPR)
4749 {
4750 invert = 1;
4751 code = invert_tree_comparison (code);
4752 }
4753
4754 /* Compute a result for LT or EQ if args permit;
4755 otherwise return T. */
4756 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
4757 {
4758 if (code == EQ_EXPR)
4759 t1 = build_int_2 ((TREE_INT_CST_LOW (arg0)
4760 == TREE_INT_CST_LOW (arg1))
4761 && (TREE_INT_CST_HIGH (arg0)
4762 == TREE_INT_CST_HIGH (arg1)),
4763 0);
4764 else
4765 t1 = build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0))
4766 ? INT_CST_LT_UNSIGNED (arg0, arg1)
4767 : INT_CST_LT (arg0, arg1)),
4768 0);
4769 }
4770
4771 /* Assume a nonexplicit constant cannot equal an explicit one,
4772 since such code would be undefined anyway.
4773 Exception: on sysvr4, using #pragma weak,
4774 a label can come out as 0. */
4775 else if (TREE_CODE (arg1) == INTEGER_CST
4776 && !integer_zerop (arg1)
4777 && TREE_CONSTANT (arg0)
4778 && TREE_CODE (arg0) == ADDR_EXPR
4779 && code == EQ_EXPR)
4780 t1 = build_int_2 (0, 0);
4781
4782 /* Two real constants can be compared explicitly. */
4783 else if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
4784 {
4785 /* If either operand is a NaN, the result is false with two
4786 exceptions: First, an NE_EXPR is true on NaNs, but that case
4787 is already handled correctly since we will be inverting the
4788 result for NE_EXPR. Second, if we had inverted a LE_EXPR
4789 or a GE_EXPR into a LT_EXPR, we must return true so that it
4790 will be inverted into false. */
4791
4792 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
4793 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1)))
4794 t1 = build_int_2 (invert && code == LT_EXPR, 0);
4795
4796 else if (code == EQ_EXPR)
4797 t1 = build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0),
4798 TREE_REAL_CST (arg1)),
4799 0);
4800 else
4801 t1 = build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0),
4802 TREE_REAL_CST (arg1)),
4803 0);
4804 }
4805
4806 if (t1 == NULL_TREE)
4807 return t;
4808
4809 if (invert)
4810 TREE_INT_CST_LOW (t1) ^= 1;
4811
4812 TREE_TYPE (t1) = type;
4813 return t1;
4814
4815 case COND_EXPR:
4816 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
4817 so all simple results must be passed through pedantic_non_lvalue. */
4818 if (TREE_CODE (arg0) == INTEGER_CST)
4819 return pedantic_non_lvalue
4820 (TREE_OPERAND (t, (integer_zerop (arg0) ? 2 : 1)));
4821 else if (operand_equal_p (arg1, TREE_OPERAND (expr, 2), 0))
4822 return pedantic_omit_one_operand (type, arg1, arg0);
4823
4824 /* If the second operand is zero, invert the comparison and swap
4825 the second and third operands. Likewise if the second operand
4826 is constant and the third is not, or if the third operand is
4827 equivalent to the first operand of the comparison. */
4828
4829 if (integer_zerop (arg1)
4830 || (TREE_CONSTANT (arg1) && ! TREE_CONSTANT (TREE_OPERAND (t, 2)))
4831 || (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4832 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
4833 TREE_OPERAND (t, 2),
4834 TREE_OPERAND (arg0, 1))))
4835 {
4836 /* See if this can be inverted. If it can't, possibly because
4837 it was a floating-point inequality comparison, don't do
4838 anything. */
4839 tem = invert_truthvalue (arg0);
4840
4841 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
4842 {
4843 t = build (code, type, tem,
4844 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
4845 arg0 = tem;
4846 arg1 = TREE_OPERAND (t, 2);
4847 STRIP_NOPS (arg1);
4848 }
4849 }
4850
4851 /* If we have A op B ? A : C, we may be able to convert this to a
4852 simpler expression, depending on the operation and the values
4853 of B and C. IEEE floating point prevents this though,
4854 because A or B might be -0.0 or a NaN. */
4855
4856 if (TREE_CODE_CLASS (TREE_CODE (arg0)) == '<'
4857 && (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
4858 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 0)))
4859 || flag_fast_math)
4860 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
4861 arg1, TREE_OPERAND (arg0, 1)))
4862 {
4863 tree arg2 = TREE_OPERAND (t, 2);
4864 enum tree_code comp_code = TREE_CODE (arg0);
4865
4866 STRIP_NOPS (arg2);
4867
4868 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
4869 depending on the comparison operation. */
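      /* For example, X >= 0 ? X : -X folds to abs (X),
         and X < 0 ? X : -X folds to - abs (X).  */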
4870 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0, 1)))
4871 ? real_zerop (TREE_OPERAND (arg0, 1))
4872 : integer_zerop (TREE_OPERAND (arg0, 1)))
4873 && TREE_CODE (arg2) == NEGATE_EXPR
4874 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4875 switch (comp_code)
4876 {
4877 case EQ_EXPR:
4878 return pedantic_non_lvalue
4879 (fold (build1 (NEGATE_EXPR, type, arg1)));
4880 case NE_EXPR:
4881 return pedantic_non_lvalue (convert (type, arg1));
4882 case GE_EXPR:
4883 case GT_EXPR:
4884 return pedantic_non_lvalue
4885 (convert (type, fold (build1 (ABS_EXPR,
4886 TREE_TYPE (arg1), arg1))));
4887 case LE_EXPR:
4888 case LT_EXPR:
4889 return pedantic_non_lvalue
4890 (fold (build1 (NEGATE_EXPR, type,
4891 convert (type,
4892 fold (build1 (ABS_EXPR,
4893 TREE_TYPE (arg1),
4894 arg1))))));
4895 }
4896
4897 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
4898 always zero. */
4899
4900 if (integer_zerop (TREE_OPERAND (arg0, 1)) && integer_zerop (arg2))
4901 {
4902 if (comp_code == NE_EXPR)
4903 return pedantic_non_lvalue (convert (type, arg1));
4904 else if (comp_code == EQ_EXPR)
4905 return pedantic_non_lvalue (convert (type, integer_zero_node));
4906 }
4907
4908 /* If this is A op B ? A : B, this is either A, B, min (A, B),
4909 or max (A, B), depending on the operation. */
4910
4911 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0, 1),
4912 arg2, TREE_OPERAND (arg0, 0)))
4913 {
4914 tree comp_op0 = TREE_OPERAND (arg0, 0);
4915 tree comp_op1 = TREE_OPERAND (arg0, 1);
4916 tree comp_type = TREE_TYPE (comp_op0);
4917
4918 switch (comp_code)
4919 {
4920 case EQ_EXPR:
4921 return pedantic_non_lvalue (convert (type, arg2));
4922 case NE_EXPR:
4923 return pedantic_non_lvalue (convert (type, arg1));
4924 case LE_EXPR:
4925 case LT_EXPR:
4926 return pedantic_non_lvalue
4927 (convert (type, (fold (build (MIN_EXPR, comp_type,
4928 comp_op0, comp_op1)))));
4929 case GE_EXPR:
4930 case GT_EXPR:
4931 return pedantic_non_lvalue
4932 (convert (type, fold (build (MAX_EXPR, comp_type,
4933 comp_op0, comp_op1))));
4934 }
4935 }
4936
4937 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4938 we might still be able to simplify this. For example,
4939 if C1 is one less or one more than C2, this might have started
4940 out as a MIN or MAX and been transformed by this function.
4941 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
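      /* For example, A < 3 ? A : 2 folds to min (A, 2),
         the LT case below with C1 == C2 + 1.  */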
4942
4943 if (INTEGRAL_TYPE_P (type)
4944 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
4945 && TREE_CODE (arg2) == INTEGER_CST)
4946 switch (comp_code)
4947 {
4948 case EQ_EXPR:
4949 /* We can replace A with C1 in this case. */
4950 arg1 = convert (type, TREE_OPERAND (arg0, 1));
4951 t = build (code, type, TREE_OPERAND (t, 0), arg1,
4952 TREE_OPERAND (t, 2));
4953 break;
4954
4955 case LT_EXPR:
4956 /* If C1 is C2 + 1, this is min(A, C2). */
4957 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
4958 && operand_equal_p (TREE_OPERAND (arg0, 1),
4959 const_binop (PLUS_EXPR, arg2,
4960 integer_one_node, 0), 1))
4961 return pedantic_non_lvalue
4962 (fold (build (MIN_EXPR, type, arg1, arg2)));
4963 break;
4964
4965 case LE_EXPR:
4966 /* If C1 is C2 - 1, this is min(A, C2). */
4967 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
4968 && operand_equal_p (TREE_OPERAND (arg0, 1),
4969 const_binop (MINUS_EXPR, arg2,
4970 integer_one_node, 0), 1))
4971 return pedantic_non_lvalue
4972 (fold (build (MIN_EXPR, type, arg1, arg2)));
4973 break;
4974
4975 case GT_EXPR:
4976 /* If C1 is C2 - 1, this is max(A, C2). */
4977 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type), 1)
4978 && operand_equal_p (TREE_OPERAND (arg0, 1),
4979 const_binop (MINUS_EXPR, arg2,
4980 integer_one_node, 0), 1))
4981 return pedantic_non_lvalue
4982 (fold (build (MAX_EXPR, type, arg1, arg2)));
4983 break;
4984
4985 case GE_EXPR:
4986 /* If C1 is C2 + 1, this is max(A, C2). */
4987 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type), 1)
4988 && operand_equal_p (TREE_OPERAND (arg0, 1),
4989 const_binop (PLUS_EXPR, arg2,
4990 integer_one_node, 0), 1))
4991 return pedantic_non_lvalue
4992 (fold (build (MAX_EXPR, type, arg1, arg2)));
4993 break;
4994 }
4995 }
4996
4997 /* If the second operand is simpler than the third, swap them
4998 since that produces better jump optimization results. */
4999 if ((TREE_CONSTANT (arg1) || TREE_CODE_CLASS (TREE_CODE (arg1)) == 'd'
5000 || TREE_CODE (arg1) == SAVE_EXPR)
5001 && ! (TREE_CONSTANT (TREE_OPERAND (t, 2))
5002 || TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t, 2))) == 'd'
5003 || TREE_CODE (TREE_OPERAND (t, 2)) == SAVE_EXPR))
5004 {
5005 /* See if this can be inverted. If it can't, possibly because
5006 it was a floating-point inequality comparison, don't do
5007 anything. */
5008 tem = invert_truthvalue (arg0);
5009
5010 if (TREE_CODE (tem) != TRUTH_NOT_EXPR)
5011 {
5012 t = build (code, type, tem,
5013 TREE_OPERAND (t, 2), TREE_OPERAND (t, 1));
5014 arg0 = tem;
5015 arg1 = TREE_OPERAND (t, 2);
5016 STRIP_NOPS (arg1);
5017 }
5018 }
5019
5020 /* Convert A ? 1 : 0 to simply A. */
5021 if (integer_onep (TREE_OPERAND (t, 1))
5022 && integer_zerop (TREE_OPERAND (t, 2))
5023 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
5024 call to fold will try to move the conversion inside
5025 a COND, which will recurse. In that case, the COND_EXPR
5026 is probably the best choice, so leave it alone. */
5027 && type == TREE_TYPE (arg0))
5028 return pedantic_non_lvalue (arg0);
5029
5030 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
5031 operation is simply A & 2. */
5032
5033 if (integer_zerop (TREE_OPERAND (t, 2))
5034 && TREE_CODE (arg0) == NE_EXPR
5035 && integer_zerop (TREE_OPERAND (arg0, 1))
5036 && integer_pow2p (arg1)
5037 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
5038 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
5039 arg1, 1))
5040 return pedantic_non_lvalue (convert (type, TREE_OPERAND (arg0, 0)));
5041
5042 return t;
5043
5044 case COMPOUND_EXPR:
5045 /* When pedantic, a compound expression can be neither an lvalue
5046 nor an integer constant expression. */
5047 if (TREE_SIDE_EFFECTS (arg0) || pedantic)
5048 return t;
5049 /* Don't let (0, 0) be null pointer constant. */
5050 if (integer_zerop (arg1))
5051 return non_lvalue (arg1);
5052 return arg1;
5053
5054 case COMPLEX_EXPR:
5055 if (wins)
5056 return build_complex (arg0, arg1);
5057 return t;
5058
5059 case REALPART_EXPR:
5060 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5061 return t;
5062 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5063 return omit_one_operand (type, TREE_OPERAND (arg0, 0),
5064 TREE_OPERAND (arg0, 1));
5065 else if (TREE_CODE (arg0) == COMPLEX_CST)
5066 return TREE_REALPART (arg0);
5067 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5068 return fold (build (TREE_CODE (arg0), type,
5069 fold (build1 (REALPART_EXPR, type,
5070 TREE_OPERAND (arg0, 0))),
5071 fold (build1 (REALPART_EXPR,
5072 type, TREE_OPERAND (arg0, 1)))));
5073 return t;
5074
5075 case IMAGPART_EXPR:
5076 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
5077 return convert (type, integer_zero_node);
5078 else if (TREE_CODE (arg0) == COMPLEX_EXPR)
5079 return omit_one_operand (type, TREE_OPERAND (arg0, 1),
5080 TREE_OPERAND (arg0, 0));
5081 else if (TREE_CODE (arg0) == COMPLEX_CST)
5082 return TREE_IMAGPART (arg0);
5083 else if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
5084 return fold (build (TREE_CODE (arg0), type,
5085 fold (build1 (IMAGPART_EXPR, type,
5086 TREE_OPERAND (arg0, 0))),
5087 fold (build1 (IMAGPART_EXPR, type,
5088 TREE_OPERAND (arg0, 1)))));
5089 return t;
5090
5091 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
5092 appropriate. */
5093 case CLEANUP_POINT_EXPR:
5094 if (! TREE_SIDE_EFFECTS (arg0))
5095 return convert (type, arg0);
5096
5097 {
5098 enum tree_code code0 = TREE_CODE (arg0);
5099 int kind0 = TREE_CODE_CLASS (code0);
5100 tree arg00 = TREE_OPERAND (arg0, 0);
5101 tree arg01;
5102
5103 if (kind0 == '1' || code0 == TRUTH_NOT_EXPR)
5104 return fold (build1 (code0, type,
5105 fold (build1 (CLEANUP_POINT_EXPR,
5106 TREE_TYPE (arg00), arg00))));
5107
5108 if (kind0 == '<' || kind0 == '2'
5109 || code0 == TRUTH_ANDIF_EXPR || code0 == TRUTH_ORIF_EXPR
5110 || code0 == TRUTH_AND_EXPR || code0 == TRUTH_OR_EXPR
5111 || code0 == TRUTH_XOR_EXPR)
5112 {
5113 arg01 = TREE_OPERAND (arg0, 1);
5114
5115 if (! TREE_SIDE_EFFECTS (arg00))
5116 return fold (build (code0, type, arg00,
5117 fold (build1 (CLEANUP_POINT_EXPR,
5118 TREE_TYPE (arg01), arg01))));
5119
5120 if (! TREE_SIDE_EFFECTS (arg01))
5121 return fold (build (code0, type,
5122 fold (build1 (CLEANUP_POINT_EXPR,
5123 TREE_TYPE (arg00), arg00)),
5124 arg01));
5125 }
5126
5127 return t;
5128 }
5129
5130 default:
5131 return t;
5132 } /* switch (code) */
5133 }