1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 88, 92, 93, 94, 1995 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 /*@@ This file should be rewritten to use an arbitrary precision
22 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
23 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
24 @@ The routines that translate from the ap rep should
25 @@ warn if precision et al. is lost.
26 @@ This would also make life easier when this technology is used
27 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int and size_binop.
32 fold takes a tree as argument and returns a simplified tree.
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'. */
47 /* Handle floating overflow for `const_binop'. */
48 static jmp_buf float_error
;
50 static void encode
PROTO((HOST_WIDE_INT
*, HOST_WIDE_INT
, HOST_WIDE_INT
));
51 static void decode
PROTO((HOST_WIDE_INT
*, HOST_WIDE_INT
*, HOST_WIDE_INT
*));
52 int div_and_round_double
PROTO((enum tree_code
, int, HOST_WIDE_INT
,
53 HOST_WIDE_INT
, HOST_WIDE_INT
,
54 HOST_WIDE_INT
, HOST_WIDE_INT
*,
55 HOST_WIDE_INT
*, HOST_WIDE_INT
*,
57 static int split_tree
PROTO((tree
, enum tree_code
, tree
*, tree
*, int *));
58 static tree const_binop
PROTO((enum tree_code
, tree
, tree
, int));
59 static tree fold_convert
PROTO((tree
, tree
));
60 static enum tree_code invert_tree_comparison
PROTO((enum tree_code
));
61 static enum tree_code swap_tree_comparison
PROTO((enum tree_code
));
62 static int truth_value_p
PROTO((enum tree_code
));
63 static int operand_equal_for_comparison_p
PROTO((tree
, tree
, tree
));
64 static int twoval_comparison_p
PROTO((tree
, tree
*, tree
*, int *));
65 static tree eval_subst
PROTO((tree
, tree
, tree
, tree
, tree
));
66 static tree omit_one_operand
PROTO((tree
, tree
, tree
));
67 static tree pedantic_omit_one_operand
PROTO((tree
, tree
, tree
));
68 static tree distribute_bit_expr
PROTO((enum tree_code
, tree
, tree
, tree
));
69 static tree make_bit_field_ref
PROTO((tree
, tree
, int, int, int));
70 static tree optimize_bit_field_compare
PROTO((enum tree_code
, tree
,
72 static tree decode_field_reference
PROTO((tree
, int *, int *,
73 enum machine_mode
*, int *,
74 int *, tree
*, tree
*));
75 static int all_ones_mask_p
PROTO((tree
, int));
76 static int simple_operand_p
PROTO((tree
));
77 static tree range_test
PROTO((enum tree_code
, tree
, enum tree_code
,
78 enum tree_code
, tree
, tree
, tree
));
79 static tree unextend
PROTO((tree
, int, int, tree
));
80 static tree fold_truthop
PROTO((enum tree_code
, tree
, tree
, tree
));
81 static tree strip_compound_expr
PROTO((tree
, tree
));
/* Yield nonzero if a signed left shift of A by B bits overflows.  */
#define left_shift_overflows(a, b) ((a) != ((a) << (b)) >> (b))

/* Suppose A1 + B1 = SUM1, using 2's complement arithmetic ignoring overflow.
   Suppose A, B and SUM have the same respective signs as A1, B1, and SUM1.
   Then this yields nonzero if overflow occurred during the addition.
   Overflow occurs if A and B have the same sign, but A and SUM differ in sign.
   Use `^' to test whether signs differ, and `< 0' to isolate the sign.  */
#define overflow_sum_sign(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)

/* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
   We do that by representing the two-word integer in 4 words, with only
   HOST_BITS_PER_WIDE_INT/2 bits stored in each word, as a positive number.  */

/* The low half of X, as a half-word-sized positive value.  */
#define LOWPART(x) \
  ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT/2)) - 1))
/* The high half of X, shifted down into the low bits.  */
#define HIGHPART(x) \
  ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT/2)
/* The radix of the half-word representation: one more than the largest
   value a half-word can hold.  */
#define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT/2)
107 /* Unpack a two-word integer into 4 words.
108 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
109 WORDS points to the array of HOST_WIDE_INTs. */
112 encode (words
, low
, hi
)
113 HOST_WIDE_INT
*words
;
114 HOST_WIDE_INT low
, hi
;
116 words
[0] = LOWPART (low
);
117 words
[1] = HIGHPART (low
);
118 words
[2] = LOWPART (hi
);
119 words
[3] = HIGHPART (hi
);
122 /* Pack an array of 4 words into a two-word integer.
123 WORDS points to the array of words.
124 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
127 decode (words
, low
, hi
)
128 HOST_WIDE_INT
*words
;
129 HOST_WIDE_INT
*low
, *hi
;
131 *low
= words
[0] | words
[1] * BASE
;
132 *hi
= words
[2] | words
[3] * BASE
;
135 /* Make the integer constant T valid for its type
136 by setting to 0 or 1 all the bits in the constant
137 that don't belong in the type.
138 Yield 1 if a signed overflow occurs, 0 otherwise.
139 If OVERFLOW is nonzero, a signed overflow has already occurred
140 in calculating T, so propagate it.
142 Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
146 force_fit_type (t
, overflow
)
150 HOST_WIDE_INT low
, high
;
153 if (TREE_CODE (t
) == REAL_CST
)
155 #ifdef CHECK_FLOAT_VALUE
156 CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t
)), TREE_REAL_CST (t
),
162 else if (TREE_CODE (t
) != INTEGER_CST
)
165 low
= TREE_INT_CST_LOW (t
);
166 high
= TREE_INT_CST_HIGH (t
);
168 if (TREE_CODE (TREE_TYPE (t
)) == POINTER_TYPE
)
171 prec
= TYPE_PRECISION (TREE_TYPE (t
));
173 /* First clear all bits that are beyond the type's precision. */
175 if (prec
== 2 * HOST_BITS_PER_WIDE_INT
)
177 else if (prec
> HOST_BITS_PER_WIDE_INT
)
179 TREE_INT_CST_HIGH (t
)
180 &= ~((HOST_WIDE_INT
) (-1) << (prec
- HOST_BITS_PER_WIDE_INT
));
184 TREE_INT_CST_HIGH (t
) = 0;
185 if (prec
< HOST_BITS_PER_WIDE_INT
)
186 TREE_INT_CST_LOW (t
) &= ~((HOST_WIDE_INT
) (-1) << prec
);
189 /* Unsigned types do not suffer sign extension or overflow. */
190 if (TREE_UNSIGNED (TREE_TYPE (t
)))
193 /* If the value's sign bit is set, extend the sign. */
194 if (prec
!= 2 * HOST_BITS_PER_WIDE_INT
195 && (prec
> HOST_BITS_PER_WIDE_INT
196 ? (TREE_INT_CST_HIGH (t
)
197 & ((HOST_WIDE_INT
) 1 << (prec
- HOST_BITS_PER_WIDE_INT
- 1)))
198 : TREE_INT_CST_LOW (t
) & ((HOST_WIDE_INT
) 1 << (prec
- 1))))
200 /* Value is negative:
201 set to 1 all the bits that are outside this type's precision. */
202 if (prec
> HOST_BITS_PER_WIDE_INT
)
204 TREE_INT_CST_HIGH (t
)
205 |= ((HOST_WIDE_INT
) (-1) << (prec
- HOST_BITS_PER_WIDE_INT
));
209 TREE_INT_CST_HIGH (t
) = -1;
210 if (prec
< HOST_BITS_PER_WIDE_INT
)
211 TREE_INT_CST_LOW (t
) |= ((HOST_WIDE_INT
) (-1) << prec
);
215 /* Yield nonzero if signed overflow occurred. */
217 ((overflow
| (low
^ TREE_INT_CST_LOW (t
)) | (high
^ TREE_INT_CST_HIGH (t
)))
221 /* Add two doubleword integers with doubleword result.
222 Each argument is given as two `HOST_WIDE_INT' pieces.
223 One argument is L1 and H1; the other, L2 and H2.
224 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
227 add_double (l1
, h1
, l2
, h2
, lv
, hv
)
228 HOST_WIDE_INT l1
, h1
, l2
, h2
;
229 HOST_WIDE_INT
*lv
, *hv
;
234 h
= h1
+ h2
+ ((unsigned HOST_WIDE_INT
) l
< l1
);
238 return overflow_sum_sign (h1
, h2
, h
);
241 /* Negate a doubleword integer with doubleword result.
242 Return nonzero if the operation overflows, assuming it's signed.
243 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
244 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
247 neg_double (l1
, h1
, lv
, hv
)
248 HOST_WIDE_INT l1
, h1
;
249 HOST_WIDE_INT
*lv
, *hv
;
255 return (*hv
& h1
) < 0;
265 /* Multiply two doubleword integers with doubleword result.
266 Return nonzero if the operation overflows, assuming it's signed.
267 Each argument is given as two `HOST_WIDE_INT' pieces.
268 One argument is L1 and H1; the other, L2 and H2.
269 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
272 mul_double (l1
, h1
, l2
, h2
, lv
, hv
)
273 HOST_WIDE_INT l1
, h1
, l2
, h2
;
274 HOST_WIDE_INT
*lv
, *hv
;
276 HOST_WIDE_INT arg1
[4];
277 HOST_WIDE_INT arg2
[4];
278 HOST_WIDE_INT prod
[4 * 2];
279 register unsigned HOST_WIDE_INT carry
;
280 register int i
, j
, k
;
281 HOST_WIDE_INT toplow
, tophigh
, neglow
, neghigh
;
283 encode (arg1
, l1
, h1
);
284 encode (arg2
, l2
, h2
);
286 bzero ((char *) prod
, sizeof prod
);
288 for (i
= 0; i
< 4; i
++)
291 for (j
= 0; j
< 4; j
++)
294 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
295 carry
+= arg1
[i
] * arg2
[j
];
296 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
298 prod
[k
] = LOWPART (carry
);
299 carry
= HIGHPART (carry
);
304 decode (prod
, lv
, hv
); /* This ignores prod[4] through prod[4*2-1] */
306 /* Check for overflow by calculating the top half of the answer in full;
307 it should agree with the low half's sign bit. */
308 decode (prod
+4, &toplow
, &tophigh
);
311 neg_double (l2
, h2
, &neglow
, &neghigh
);
312 add_double (neglow
, neghigh
, toplow
, tophigh
, &toplow
, &tophigh
);
316 neg_double (l1
, h1
, &neglow
, &neghigh
);
317 add_double (neglow
, neghigh
, toplow
, tophigh
, &toplow
, &tophigh
);
319 return (*hv
< 0 ? ~(toplow
& tophigh
) : toplow
| tophigh
) != 0;
322 /* Shift the doubleword integer in L1, H1 left by COUNT places
323 keeping only PREC bits of result.
324 Shift right if COUNT is negative.
325 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
326 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
329 lshift_double (l1
, h1
, count
, prec
, lv
, hv
, arith
)
330 HOST_WIDE_INT l1
, h1
, count
;
332 HOST_WIDE_INT
*lv
, *hv
;
337 rshift_double (l1
, h1
, - count
, prec
, lv
, hv
, arith
);
341 #ifdef SHIFT_COUNT_TRUNCATED
342 if (SHIFT_COUNT_TRUNCATED
)
346 if (count
>= HOST_BITS_PER_WIDE_INT
)
348 *hv
= (unsigned HOST_WIDE_INT
) l1
<< count
- HOST_BITS_PER_WIDE_INT
;
353 *hv
= (((unsigned HOST_WIDE_INT
) h1
<< count
)
354 | ((unsigned HOST_WIDE_INT
) l1
>> HOST_BITS_PER_WIDE_INT
- count
- 1 >> 1));
355 *lv
= (unsigned HOST_WIDE_INT
) l1
<< count
;
359 /* Shift the doubleword integer in L1, H1 right by COUNT places
360 keeping only PREC bits of result. COUNT must be positive.
361 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
362 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
365 rshift_double (l1
, h1
, count
, prec
, lv
, hv
, arith
)
366 HOST_WIDE_INT l1
, h1
, count
;
368 HOST_WIDE_INT
*lv
, *hv
;
371 unsigned HOST_WIDE_INT signmask
;
373 ? -((unsigned HOST_WIDE_INT
) h1
>> (HOST_BITS_PER_WIDE_INT
- 1))
376 #ifdef SHIFT_COUNT_TRUNCATED
377 if (SHIFT_COUNT_TRUNCATED
)
381 if (count
>= HOST_BITS_PER_WIDE_INT
)
384 *lv
= ((signmask
<< 2 * HOST_BITS_PER_WIDE_INT
- count
- 1 << 1)
385 | ((unsigned HOST_WIDE_INT
) h1
>> count
- HOST_BITS_PER_WIDE_INT
));
389 *lv
= (((unsigned HOST_WIDE_INT
) l1
>> count
)
390 | ((unsigned HOST_WIDE_INT
) h1
<< HOST_BITS_PER_WIDE_INT
- count
- 1 << 1));
391 *hv
= ((signmask
<< HOST_BITS_PER_WIDE_INT
- count
)
392 | ((unsigned HOST_WIDE_INT
) h1
>> count
));
396 /* Rotate the doubleword integer in L1, H1 left by COUNT places
397 keeping only PREC bits of result.
398 Rotate right if COUNT is negative.
399 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
402 lrotate_double (l1
, h1
, count
, prec
, lv
, hv
)
403 HOST_WIDE_INT l1
, h1
, count
;
405 HOST_WIDE_INT
*lv
, *hv
;
407 HOST_WIDE_INT s1l
, s1h
, s2l
, s2h
;
413 lshift_double (l1
, h1
, count
, prec
, &s1l
, &s1h
, 0);
414 rshift_double (l1
, h1
, prec
- count
, prec
, &s2l
, &s2h
, 0);
419 /* Rotate the doubleword integer in L1, H1 left by COUNT places
420 keeping only PREC bits of result. COUNT must be positive.
421 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
424 rrotate_double (l1
, h1
, count
, prec
, lv
, hv
)
425 HOST_WIDE_INT l1
, h1
, count
;
427 HOST_WIDE_INT
*lv
, *hv
;
429 HOST_WIDE_INT s1l
, s1h
, s2l
, s2h
;
435 rshift_double (l1
, h1
, count
, prec
, &s1l
, &s1h
, 0);
436 lshift_double (l1
, h1
, prec
- count
, prec
, &s2l
, &s2h
, 0);
441 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
442 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
443 CODE is a tree code for a kind of division, one of
444 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
446 It controls how the quotient is rounded to a integer.
447 Return nonzero if the operation overflows.
448 UNS nonzero says do unsigned division. */
451 div_and_round_double (code
, uns
,
452 lnum_orig
, hnum_orig
, lden_orig
, hden_orig
,
453 lquo
, hquo
, lrem
, hrem
)
456 HOST_WIDE_INT lnum_orig
, hnum_orig
; /* num == numerator == dividend */
457 HOST_WIDE_INT lden_orig
, hden_orig
; /* den == denominator == divisor */
458 HOST_WIDE_INT
*lquo
, *hquo
, *lrem
, *hrem
;
461 HOST_WIDE_INT num
[4 + 1]; /* extra element for scaling. */
462 HOST_WIDE_INT den
[4], quo
[4];
464 unsigned HOST_WIDE_INT work
;
465 register unsigned HOST_WIDE_INT carry
= 0;
466 HOST_WIDE_INT lnum
= lnum_orig
;
467 HOST_WIDE_INT hnum
= hnum_orig
;
468 HOST_WIDE_INT lden
= lden_orig
;
469 HOST_WIDE_INT hden
= hden_orig
;
472 if ((hden
== 0) && (lden
== 0))
475 /* calculate quotient sign and convert operands to unsigned. */
481 /* (minimum integer) / (-1) is the only overflow case. */
482 if (neg_double (lnum
, hnum
, &lnum
, &hnum
) && (lden
& hden
) == -1)
488 neg_double (lden
, hden
, &lden
, &hden
);
492 if (hnum
== 0 && hden
== 0)
493 { /* single precision */
495 /* This unsigned division rounds toward zero. */
496 *lquo
= lnum
/ (unsigned HOST_WIDE_INT
) lden
;
501 { /* trivial case: dividend < divisor */
502 /* hden != 0 already checked. */
509 bzero ((char *) quo
, sizeof quo
);
511 bzero ((char *) num
, sizeof num
); /* to zero 9th element */
512 bzero ((char *) den
, sizeof den
);
514 encode (num
, lnum
, hnum
);
515 encode (den
, lden
, hden
);
517 /* Special code for when the divisor < BASE. */
518 if (hden
== 0 && lden
< BASE
)
520 /* hnum != 0 already checked. */
521 for (i
= 4 - 1; i
>= 0; i
--)
523 work
= num
[i
] + carry
* BASE
;
524 quo
[i
] = work
/ (unsigned HOST_WIDE_INT
) lden
;
525 carry
= work
% (unsigned HOST_WIDE_INT
) lden
;
530 /* Full double precision division,
531 with thanks to Don Knuth's "Seminumerical Algorithms". */
532 int num_hi_sig
, den_hi_sig
;
533 unsigned HOST_WIDE_INT quo_est
, scale
;
535 /* Find the highest non-zero divisor digit. */
536 for (i
= 4 - 1; ; i
--)
542 /* Insure that the first digit of the divisor is at least BASE/2.
543 This is required by the quotient digit estimation algorithm. */
545 scale
= BASE
/ (den
[den_hi_sig
] + 1);
546 if (scale
> 1) { /* scale divisor and dividend */
548 for (i
= 0; i
<= 4 - 1; i
++) {
549 work
= (num
[i
] * scale
) + carry
;
550 num
[i
] = LOWPART (work
);
551 carry
= HIGHPART (work
);
554 for (i
= 0; i
<= 4 - 1; i
++) {
555 work
= (den
[i
] * scale
) + carry
;
556 den
[i
] = LOWPART (work
);
557 carry
= HIGHPART (work
);
558 if (den
[i
] != 0) den_hi_sig
= i
;
565 for (i
= num_hi_sig
- den_hi_sig
- 1; i
>= 0; i
--) {
566 /* guess the next quotient digit, quo_est, by dividing the first
567 two remaining dividend digits by the high order quotient digit.
568 quo_est is never low and is at most 2 high. */
569 unsigned HOST_WIDE_INT tmp
;
571 num_hi_sig
= i
+ den_hi_sig
+ 1;
572 work
= num
[num_hi_sig
] * BASE
+ num
[num_hi_sig
- 1];
573 if (num
[num_hi_sig
] != den
[den_hi_sig
])
574 quo_est
= work
/ den
[den_hi_sig
];
578 /* refine quo_est so it's usually correct, and at most one high. */
579 tmp
= work
- quo_est
* den
[den_hi_sig
];
581 && den
[den_hi_sig
- 1] * quo_est
> (tmp
* BASE
+ num
[num_hi_sig
- 2]))
584 /* Try QUO_EST as the quotient digit, by multiplying the
585 divisor by QUO_EST and subtracting from the remaining dividend.
586 Keep in mind that QUO_EST is the I - 1st digit. */
589 for (j
= 0; j
<= den_hi_sig
; j
++)
591 work
= quo_est
* den
[j
] + carry
;
592 carry
= HIGHPART (work
);
593 work
= num
[i
+ j
] - LOWPART (work
);
594 num
[i
+ j
] = LOWPART (work
);
595 carry
+= HIGHPART (work
) != 0;
598 /* if quo_est was high by one, then num[i] went negative and
599 we need to correct things. */
601 if (num
[num_hi_sig
] < carry
)
604 carry
= 0; /* add divisor back in */
605 for (j
= 0; j
<= den_hi_sig
; j
++)
607 work
= num
[i
+ j
] + den
[j
] + carry
;
608 carry
= HIGHPART (work
);
609 num
[i
+ j
] = LOWPART (work
);
611 num
[num_hi_sig
] += carry
;
614 /* store the quotient digit. */
619 decode (quo
, lquo
, hquo
);
622 /* if result is negative, make it so. */
624 neg_double (*lquo
, *hquo
, lquo
, hquo
);
626 /* compute trial remainder: rem = num - (quo * den) */
627 mul_double (*lquo
, *hquo
, lden_orig
, hden_orig
, lrem
, hrem
);
628 neg_double (*lrem
, *hrem
, lrem
, hrem
);
629 add_double (lnum_orig
, hnum_orig
, *lrem
, *hrem
, lrem
, hrem
);
634 case TRUNC_MOD_EXPR
: /* round toward zero */
635 case EXACT_DIV_EXPR
: /* for this one, it shouldn't matter */
639 case FLOOR_MOD_EXPR
: /* round toward negative infinity */
640 if (quo_neg
&& (*lrem
!= 0 || *hrem
!= 0)) /* ratio < 0 && rem != 0 */
643 add_double (*lquo
, *hquo
, (HOST_WIDE_INT
) -1, (HOST_WIDE_INT
) -1,
646 else return overflow
;
650 case CEIL_MOD_EXPR
: /* round toward positive infinity */
651 if (!quo_neg
&& (*lrem
!= 0 || *hrem
!= 0)) /* ratio > 0 && rem != 0 */
653 add_double (*lquo
, *hquo
, (HOST_WIDE_INT
) 1, (HOST_WIDE_INT
) 0,
656 else return overflow
;
660 case ROUND_MOD_EXPR
: /* round to closest integer */
662 HOST_WIDE_INT labs_rem
= *lrem
, habs_rem
= *hrem
;
663 HOST_WIDE_INT labs_den
= lden
, habs_den
= hden
, ltwice
, htwice
;
665 /* get absolute values */
666 if (*hrem
< 0) neg_double (*lrem
, *hrem
, &labs_rem
, &habs_rem
);
667 if (hden
< 0) neg_double (lden
, hden
, &labs_den
, &habs_den
);
669 /* if (2 * abs (lrem) >= abs (lden)) */
670 mul_double ((HOST_WIDE_INT
) 2, (HOST_WIDE_INT
) 0,
671 labs_rem
, habs_rem
, <wice
, &htwice
);
672 if (((unsigned HOST_WIDE_INT
) habs_den
673 < (unsigned HOST_WIDE_INT
) htwice
)
674 || (((unsigned HOST_WIDE_INT
) habs_den
675 == (unsigned HOST_WIDE_INT
) htwice
)
676 && ((HOST_WIDE_INT
unsigned) labs_den
677 < (unsigned HOST_WIDE_INT
) ltwice
)))
681 add_double (*lquo
, *hquo
,
682 (HOST_WIDE_INT
) -1, (HOST_WIDE_INT
) -1, lquo
, hquo
);
685 add_double (*lquo
, *hquo
, (HOST_WIDE_INT
) 1, (HOST_WIDE_INT
) 0,
688 else return overflow
;
696 /* compute true remainder: rem = num - (quo * den) */
697 mul_double (*lquo
, *hquo
, lden_orig
, hden_orig
, lrem
, hrem
);
698 neg_double (*lrem
, *hrem
, lrem
, hrem
);
699 add_double (lnum_orig
, hnum_orig
, *lrem
, *hrem
, lrem
, hrem
);
703 #ifndef REAL_ARITHMETIC
704 /* Effectively truncate a real value to represent the nearest possible value
705 in a narrower mode. The result is actually represented in the same data
706 type as the argument, but its value is usually different.
708 A trap may occur during the FP operations and it is the responsibility
709 of the calling function to have a handler established. */
712 real_value_truncate (mode
, arg
)
713 enum machine_mode mode
;
716 return REAL_VALUE_TRUNCATE (mode
, arg
);
719 #if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
721 /* Check for infinity in an IEEE double precision number. */
727 /* The IEEE 64-bit double format. */
732 unsigned exponent
: 11;
733 unsigned mantissa1
: 20;
738 unsigned mantissa1
: 20;
739 unsigned exponent
: 11;
745 if (u
.big_endian
.sign
== 1)
748 return (u
.big_endian
.exponent
== 2047
749 && u
.big_endian
.mantissa1
== 0
750 && u
.big_endian
.mantissa2
== 0);
755 return (u
.little_endian
.exponent
== 2047
756 && u
.little_endian
.mantissa1
== 0
757 && u
.little_endian
.mantissa2
== 0);
761 /* Check whether an IEEE double precision number is a NaN. */
767 /* The IEEE 64-bit double format. */
772 unsigned exponent
: 11;
773 unsigned mantissa1
: 20;
778 unsigned mantissa1
: 20;
779 unsigned exponent
: 11;
785 if (u
.big_endian
.sign
== 1)
788 return (u
.big_endian
.exponent
== 2047
789 && (u
.big_endian
.mantissa1
!= 0
790 || u
.big_endian
.mantissa2
!= 0));
795 return (u
.little_endian
.exponent
== 2047
796 && (u
.little_endian
.mantissa1
!= 0
797 || u
.little_endian
.mantissa2
!= 0));
801 /* Check for a negative IEEE double precision number. */
807 /* The IEEE 64-bit double format. */
812 unsigned exponent
: 11;
813 unsigned mantissa1
: 20;
818 unsigned mantissa1
: 20;
819 unsigned exponent
: 11;
825 if (u
.big_endian
.sign
== 1)
828 return u
.big_endian
.sign
;
833 return u
.little_endian
.sign
;
836 #else /* Target not IEEE */
838 /* Let's assume other float formats don't have infinity.
839 (This can be overridden by redefining REAL_VALUE_ISINF.) */
847 /* Let's assume other float formats don't have NaNs.
848 (This can be overridden by redefining REAL_VALUE_ISNAN.) */
856 /* Let's assume other float formats don't have minus zero.
857 (This can be overridden by redefining REAL_VALUE_NEGATIVE.) */
864 #endif /* Target not IEEE */
865 #endif /* no REAL_ARITHMETIC */
867 /* Split a tree IN into a constant and a variable part
868 that could be combined with CODE to make IN.
869 CODE must be a commutative arithmetic operation.
870 Store the constant part into *CONP and the variable in &VARP.
871 Return 1 if this was done; zero means the tree IN did not decompose
874 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.
875 Therefore, we must tell the caller whether the variable part
876 was subtracted. We do this by storing 1 or -1 into *VARSIGNP.
877 The value stored is the coefficient for the variable term.
878 The constant term we return should always be added;
879 we negate it if necessary. */
882 split_tree (in
, code
, varp
, conp
, varsignp
)
888 register tree outtype
= TREE_TYPE (in
);
892 /* Strip any conversions that don't change the machine mode. */
893 while ((TREE_CODE (in
) == NOP_EXPR
894 || TREE_CODE (in
) == CONVERT_EXPR
)
895 && (TYPE_MODE (TREE_TYPE (in
))
896 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (in
, 0)))))
897 in
= TREE_OPERAND (in
, 0);
899 if (TREE_CODE (in
) == code
900 || (! FLOAT_TYPE_P (TREE_TYPE (in
))
901 /* We can associate addition and subtraction together
902 (even though the C standard doesn't say so)
903 for integers because the value is not affected.
904 For reals, the value might be affected, so we can't. */
905 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
906 || (code
== MINUS_EXPR
&& TREE_CODE (in
) == PLUS_EXPR
))))
908 enum tree_code code
= TREE_CODE (TREE_OPERAND (in
, 0));
909 if (code
== INTEGER_CST
)
911 *conp
= TREE_OPERAND (in
, 0);
912 *varp
= TREE_OPERAND (in
, 1);
913 if (TYPE_MODE (TREE_TYPE (*varp
)) != TYPE_MODE (outtype
)
914 && TREE_TYPE (*varp
) != outtype
)
915 *varp
= convert (outtype
, *varp
);
916 *varsignp
= (TREE_CODE (in
) == MINUS_EXPR
) ? -1 : 1;
919 if (TREE_CONSTANT (TREE_OPERAND (in
, 1)))
921 *conp
= TREE_OPERAND (in
, 1);
922 *varp
= TREE_OPERAND (in
, 0);
924 if (TYPE_MODE (TREE_TYPE (*varp
)) != TYPE_MODE (outtype
)
925 && TREE_TYPE (*varp
) != outtype
)
926 *varp
= convert (outtype
, *varp
);
927 if (TREE_CODE (in
) == MINUS_EXPR
)
929 /* If operation is subtraction and constant is second,
930 must negate it to get an additive constant.
931 And this cannot be done unless it is a manifest constant.
932 It could also be the address of a static variable.
933 We cannot negate that, so give up. */
934 if (TREE_CODE (*conp
) == INTEGER_CST
)
935 /* Subtracting from integer_zero_node loses for long long. */
936 *conp
= fold (build1 (NEGATE_EXPR
, TREE_TYPE (*conp
), *conp
));
942 if (TREE_CONSTANT (TREE_OPERAND (in
, 0)))
944 *conp
= TREE_OPERAND (in
, 0);
945 *varp
= TREE_OPERAND (in
, 1);
946 if (TYPE_MODE (TREE_TYPE (*varp
)) != TYPE_MODE (outtype
)
947 && TREE_TYPE (*varp
) != outtype
)
948 *varp
= convert (outtype
, *varp
);
949 *varsignp
= (TREE_CODE (in
) == MINUS_EXPR
) ? -1 : 1;
956 /* Combine two constants NUM and ARG2 under operation CODE
957 to produce a new constant.
958 We assume ARG1 and ARG2 have the same data type,
959 or at least are the same kind of constant and the same machine mode.
961 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
964 const_binop (code
, arg1
, arg2
, notrunc
)
966 register tree arg1
, arg2
;
969 if (TREE_CODE (arg1
) == INTEGER_CST
)
971 register HOST_WIDE_INT int1l
= TREE_INT_CST_LOW (arg1
);
972 register HOST_WIDE_INT int1h
= TREE_INT_CST_HIGH (arg1
);
973 HOST_WIDE_INT int2l
= TREE_INT_CST_LOW (arg2
);
974 HOST_WIDE_INT int2h
= TREE_INT_CST_HIGH (arg2
);
975 HOST_WIDE_INT low
, hi
;
976 HOST_WIDE_INT garbagel
, garbageh
;
978 int uns
= TREE_UNSIGNED (TREE_TYPE (arg1
));
984 t
= build_int_2 (int1l
| int2l
, int1h
| int2h
);
988 t
= build_int_2 (int1l
^ int2l
, int1h
^ int2h
);
992 t
= build_int_2 (int1l
& int2l
, int1h
& int2h
);
996 t
= build_int_2 (int1l
& ~int2l
, int1h
& ~int2h
);
1002 /* It's unclear from the C standard whether shifts can overflow.
1003 The following code ignores overflow; perhaps a C standard
1004 interpretation ruling is needed. */
1005 lshift_double (int1l
, int1h
, int2l
,
1006 TYPE_PRECISION (TREE_TYPE (arg1
)),
1009 t
= build_int_2 (low
, hi
);
1010 TREE_TYPE (t
) = TREE_TYPE (arg1
);
1012 force_fit_type (t
, 0);
1013 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1014 TREE_CONSTANT_OVERFLOW (t
)
1015 = TREE_CONSTANT_OVERFLOW (arg1
) | TREE_CONSTANT_OVERFLOW (arg2
);
1021 lrotate_double (int1l
, int1h
, int2l
,
1022 TYPE_PRECISION (TREE_TYPE (arg1
)),
1024 t
= build_int_2 (low
, hi
);
1031 if ((unsigned HOST_WIDE_INT
) int2l
< int1l
)
1034 overflow
= int2h
< hi
;
1036 t
= build_int_2 (int2l
, int2h
);
1042 if ((unsigned HOST_WIDE_INT
) int1l
< int2l
)
1045 overflow
= int1h
< hi
;
1047 t
= build_int_2 (int1l
, int1h
);
1050 overflow
= add_double (int1l
, int1h
, int2l
, int2h
, &low
, &hi
);
1051 t
= build_int_2 (low
, hi
);
1055 if (int2h
== 0 && int2l
== 0)
1057 t
= build_int_2 (int1l
, int1h
);
1060 neg_double (int2l
, int2h
, &low
, &hi
);
1061 add_double (int1l
, int1h
, low
, hi
, &low
, &hi
);
1062 overflow
= overflow_sum_sign (hi
, int2h
, int1h
);
1063 t
= build_int_2 (low
, hi
);
1067 overflow
= mul_double (int1l
, int1h
, int2l
, int2h
, &low
, &hi
);
1068 t
= build_int_2 (low
, hi
);
1071 case TRUNC_DIV_EXPR
:
1072 case FLOOR_DIV_EXPR
: case CEIL_DIV_EXPR
:
1073 case EXACT_DIV_EXPR
:
1074 /* This is a shortcut for a common special case.
1075 It reduces the number of tree nodes generated
1077 if (int2h
== 0 && int2l
> 0
1078 && TREE_TYPE (arg1
) == sizetype
1079 && int1h
== 0 && int1l
>= 0)
1081 if (code
== CEIL_DIV_EXPR
)
1083 return size_int (int1l
/ int2l
);
1085 case ROUND_DIV_EXPR
:
1086 if (int2h
== 0 && int2l
== 1)
1088 t
= build_int_2 (int1l
, int1h
);
1091 if (int1l
== int2l
&& int1h
== int2h
)
1093 if ((int1l
| int1h
) == 0)
1095 t
= build_int_2 (1, 0);
1098 overflow
= div_and_round_double (code
, uns
,
1099 int1l
, int1h
, int2l
, int2h
,
1100 &low
, &hi
, &garbagel
, &garbageh
);
1101 t
= build_int_2 (low
, hi
);
1104 case TRUNC_MOD_EXPR
: case ROUND_MOD_EXPR
:
1105 case FLOOR_MOD_EXPR
: case CEIL_MOD_EXPR
:
1106 overflow
= div_and_round_double (code
, uns
,
1107 int1l
, int1h
, int2l
, int2h
,
1108 &garbagel
, &garbageh
, &low
, &hi
);
1109 t
= build_int_2 (low
, hi
);
1116 low
= (((unsigned HOST_WIDE_INT
) int1h
1117 < (unsigned HOST_WIDE_INT
) int2h
)
1118 || (((unsigned HOST_WIDE_INT
) int1h
1119 == (unsigned HOST_WIDE_INT
) int2h
)
1120 && ((unsigned HOST_WIDE_INT
) int1l
1121 < (unsigned HOST_WIDE_INT
) int2l
)));
1125 low
= ((int1h
< int2h
)
1126 || ((int1h
== int2h
)
1127 && ((unsigned HOST_WIDE_INT
) int1l
1128 < (unsigned HOST_WIDE_INT
) int2l
)));
1130 if (low
== (code
== MIN_EXPR
))
1131 t
= build_int_2 (int1l
, int1h
);
1133 t
= build_int_2 (int2l
, int2h
);
1140 TREE_TYPE (t
) = TREE_TYPE (arg1
);
1142 = ((notrunc
? !uns
&& overflow
: force_fit_type (t
, overflow
&& !uns
))
1143 | TREE_OVERFLOW (arg1
)
1144 | TREE_OVERFLOW (arg2
));
1145 TREE_CONSTANT_OVERFLOW (t
) = (TREE_OVERFLOW (t
)
1146 | TREE_CONSTANT_OVERFLOW (arg1
)
1147 | TREE_CONSTANT_OVERFLOW (arg2
));
1150 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1151 if (TREE_CODE (arg1
) == REAL_CST
)
1156 REAL_VALUE_TYPE value
;
1159 d1
= TREE_REAL_CST (arg1
);
1160 d2
= TREE_REAL_CST (arg2
);
1162 /* If either operand is a NaN, just return it. Otherwise, set up
1163 for floating-point trap; we return an overflow. */
1164 if (REAL_VALUE_ISNAN (d1
))
1166 else if (REAL_VALUE_ISNAN (d2
))
1168 else if (setjmp (float_error
))
1170 t
= copy_node (arg1
);
1175 set_float_handler (float_error
);
1177 #ifdef REAL_ARITHMETIC
1178 REAL_ARITHMETIC (value
, code
, d1
, d2
);
1195 #ifndef REAL_INFINITY
1204 value
= MIN (d1
, d2
);
1208 value
= MAX (d1
, d2
);
1214 #endif /* no REAL_ARITHMETIC */
1215 t
= build_real (TREE_TYPE (arg1
),
1216 real_value_truncate (TYPE_MODE (TREE_TYPE (arg1
)), value
));
1218 set_float_handler (NULL_PTR
);
1221 = (force_fit_type (t
, overflow
)
1222 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
));
1223 TREE_CONSTANT_OVERFLOW (t
)
1225 | TREE_CONSTANT_OVERFLOW (arg1
)
1226 | TREE_CONSTANT_OVERFLOW (arg2
);
1229 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1230 if (TREE_CODE (arg1
) == COMPLEX_CST
)
1232 register tree r1
= TREE_REALPART (arg1
);
1233 register tree i1
= TREE_IMAGPART (arg1
);
1234 register tree r2
= TREE_REALPART (arg2
);
1235 register tree i2
= TREE_IMAGPART (arg2
);
1241 t
= build_complex (const_binop (PLUS_EXPR
, r1
, r2
, notrunc
),
1242 const_binop (PLUS_EXPR
, i1
, i2
, notrunc
));
1246 t
= build_complex (const_binop (MINUS_EXPR
, r1
, r2
, notrunc
),
1247 const_binop (MINUS_EXPR
, i1
, i2
, notrunc
));
1251 t
= build_complex (const_binop (MINUS_EXPR
,
1252 const_binop (MULT_EXPR
,
1254 const_binop (MULT_EXPR
,
1257 const_binop (PLUS_EXPR
,
1258 const_binop (MULT_EXPR
,
1260 const_binop (MULT_EXPR
,
1267 register tree magsquared
1268 = const_binop (PLUS_EXPR
,
1269 const_binop (MULT_EXPR
, r2
, r2
, notrunc
),
1270 const_binop (MULT_EXPR
, i2
, i2
, notrunc
),
1274 (const_binop (INTEGRAL_TYPE_P (TREE_TYPE (r1
))
1275 ? TRUNC_DIV_EXPR
: RDIV_EXPR
,
1276 const_binop (PLUS_EXPR
,
1277 const_binop (MULT_EXPR
, r1
, r2
,
1279 const_binop (MULT_EXPR
, i1
, i2
,
1282 magsquared
, notrunc
),
1283 const_binop (INTEGRAL_TYPE_P (TREE_TYPE (r1
))
1284 ? TRUNC_DIV_EXPR
: RDIV_EXPR
,
1285 const_binop (MINUS_EXPR
,
1286 const_binop (MULT_EXPR
, i1
, r2
,
1288 const_binop (MULT_EXPR
, r1
, i2
,
1291 magsquared
, notrunc
));
1298 TREE_TYPE (t
) = TREE_TYPE (arg1
);
1304 /* Return an INTEGER_CST with value V and type from `sizetype'. */
1308 unsigned HOST_WIDE_INT number
;
1311 /* Type-size nodes already made for small sizes. */
1312 static tree size_table
[2*HOST_BITS_PER_WIDE_INT
+ 1];
1314 if (number
< 2*HOST_BITS_PER_WIDE_INT
+ 1
1315 && size_table
[number
] != 0)
1316 return size_table
[number
];
1317 if (number
< 2*HOST_BITS_PER_WIDE_INT
+ 1)
1319 push_obstacks_nochange ();
1320 /* Make this a permanent node. */
1321 end_temporary_allocation ();
1322 t
= build_int_2 (number
, 0);
1323 TREE_TYPE (t
) = sizetype
;
1324 size_table
[number
] = t
;
1329 t
= build_int_2 (number
, 0);
1330 TREE_TYPE (t
) = sizetype
;
1335 /* Combine operands OP1 and OP2 with arithmetic operation CODE.
1336 CODE is a tree code. Data type is taken from `sizetype',
1337 If the operands are constant, so is the result. */
1340 size_binop (code
, arg0
, arg1
)
1341 enum tree_code code
;
1344 /* Handle the special case of two integer constants faster. */
1345 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
1347 /* And some specific cases even faster than that. */
1348 if (code
== PLUS_EXPR
1349 && TREE_INT_CST_LOW (arg0
) == 0
1350 && TREE_INT_CST_HIGH (arg0
) == 0)
1352 if (code
== MINUS_EXPR
1353 && TREE_INT_CST_LOW (arg1
) == 0
1354 && TREE_INT_CST_HIGH (arg1
) == 0)
1356 if (code
== MULT_EXPR
1357 && TREE_INT_CST_LOW (arg0
) == 1
1358 && TREE_INT_CST_HIGH (arg0
) == 0)
1360 /* Handle general case of two integer constants. */
1361 return const_binop (code
, arg0
, arg1
, 0);
1364 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1365 return error_mark_node
;
1367 return fold (build (code
, sizetype
, arg0
, arg1
));
1370 /* Given T, a tree representing type conversion of ARG1, a constant,
1371 return a constant tree representing the result of conversion. */
1374 fold_convert (t
, arg1
)
1378 register tree type
= TREE_TYPE (t
);
1381 if (TREE_CODE (type
) == POINTER_TYPE
|| INTEGRAL_TYPE_P (type
))
1383 if (TREE_CODE (arg1
) == INTEGER_CST
)
1385 /* If we would build a constant wider than GCC supports,
1386 leave the conversion unfolded. */
1387 if (TYPE_PRECISION (type
) > 2 * HOST_BITS_PER_WIDE_INT
)
1390 /* Given an integer constant, make new constant with new type,
1391 appropriately sign-extended or truncated. */
1392 t
= build_int_2 (TREE_INT_CST_LOW (arg1
),
1393 TREE_INT_CST_HIGH (arg1
));
1394 TREE_TYPE (t
) = type
;
1395 /* Indicate an overflow if (1) ARG1 already overflowed,
1396 or (2) force_fit_type indicates an overflow.
1397 Tell force_fit_type that an overflow has already occurred
1398 if ARG1 is a too-large unsigned value and T is signed. */
1400 = (TREE_OVERFLOW (arg1
)
1401 | force_fit_type (t
,
1402 (TREE_INT_CST_HIGH (arg1
) < 0
1403 & (TREE_UNSIGNED (type
)
1404 < TREE_UNSIGNED (TREE_TYPE (arg1
))))));
1405 TREE_CONSTANT_OVERFLOW (t
)
1406 = TREE_OVERFLOW (t
) | TREE_CONSTANT_OVERFLOW (arg1
);
1408 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1409 else if (TREE_CODE (arg1
) == REAL_CST
)
1411 /* Don't initialize these, use assignments.
1412 Initialized local aggregates don't work on old compilers. */
1417 x
= TREE_REAL_CST (arg1
);
1418 l
= real_value_from_int_cst (TYPE_MIN_VALUE (type
));
1419 u
= real_value_from_int_cst (TYPE_MAX_VALUE (type
));
1420 /* See if X will be in range after truncation towards 0.
1421 To compensate for truncation, move the bounds away from 0,
1422 but reject if X exactly equals the adjusted bounds. */
1423 #ifdef REAL_ARITHMETIC
1424 REAL_ARITHMETIC (l
, MINUS_EXPR
, l
, dconst1
);
1425 REAL_ARITHMETIC (u
, PLUS_EXPR
, u
, dconst1
);
1430 /* If X is a NaN, use zero instead and show we have an overflow.
1431 Otherwise, range check. */
1432 if (REAL_VALUE_ISNAN (x
))
1433 overflow
= 1, x
= dconst0
;
1434 else if (! (REAL_VALUES_LESS (l
, x
) && REAL_VALUES_LESS (x
, u
)))
1437 #ifndef REAL_ARITHMETIC
1439 HOST_WIDE_INT low
, high
;
1440 HOST_WIDE_INT half_word
1441 = (HOST_WIDE_INT
) 1 << (HOST_BITS_PER_WIDE_INT
/ 2);
1446 high
= (HOST_WIDE_INT
) (x
/ half_word
/ half_word
);
1447 x
-= (REAL_VALUE_TYPE
) high
* half_word
* half_word
;
1448 if (x
>= (REAL_VALUE_TYPE
) half_word
* half_word
/ 2)
1450 low
= x
- (REAL_VALUE_TYPE
) half_word
* half_word
/ 2;
1451 low
|= (HOST_WIDE_INT
) -1 << (HOST_BITS_PER_WIDE_INT
- 1);
1454 low
= (HOST_WIDE_INT
) x
;
1455 if (TREE_REAL_CST (arg1
) < 0)
1456 neg_double (low
, high
, &low
, &high
);
1457 t
= build_int_2 (low
, high
);
1461 HOST_WIDE_INT low
, high
;
1462 REAL_VALUE_TO_INT (&low
, &high
, x
);
1463 t
= build_int_2 (low
, high
);
1466 TREE_TYPE (t
) = type
;
1468 = TREE_OVERFLOW (arg1
) | force_fit_type (t
, overflow
);
1469 TREE_CONSTANT_OVERFLOW (t
)
1470 = TREE_OVERFLOW (t
) | TREE_CONSTANT_OVERFLOW (arg1
);
1472 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1473 TREE_TYPE (t
) = type
;
1475 else if (TREE_CODE (type
) == REAL_TYPE
)
1477 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1478 if (TREE_CODE (arg1
) == INTEGER_CST
)
1479 return build_real_from_int_cst (type
, arg1
);
1480 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1481 if (TREE_CODE (arg1
) == REAL_CST
)
1483 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
)))
1486 TREE_TYPE (arg1
) = type
;
1489 else if (setjmp (float_error
))
1492 t
= copy_node (arg1
);
1495 set_float_handler (float_error
);
1497 t
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
1498 TREE_REAL_CST (arg1
)));
1499 set_float_handler (NULL_PTR
);
1503 = TREE_OVERFLOW (arg1
) | force_fit_type (t
, overflow
);
1504 TREE_CONSTANT_OVERFLOW (t
)
1505 = TREE_OVERFLOW (t
) | TREE_CONSTANT_OVERFLOW (arg1
);
1509 TREE_CONSTANT (t
) = 1;
1513 /* Return an expr equal to X but certainly not valid as an lvalue.
1514 Also make sure it is not valid as an null pointer constant. */
1522 /* These things are certainly not lvalues. */
1523 if (TREE_CODE (x
) == NON_LVALUE_EXPR
1524 || TREE_CODE (x
) == INTEGER_CST
1525 || TREE_CODE (x
) == REAL_CST
1526 || TREE_CODE (x
) == STRING_CST
1527 || TREE_CODE (x
) == ADDR_EXPR
)
1529 if (TREE_CODE (x
) == INTEGER_CST
&& integer_zerop (x
))
1531 /* Use NOP_EXPR instead of NON_LVALUE_EXPR
1532 so convert_for_assignment won't strip it.
1533 This is so this 0 won't be treated as a null pointer constant. */
1534 result
= build1 (NOP_EXPR
, TREE_TYPE (x
), x
);
1535 TREE_CONSTANT (result
) = TREE_CONSTANT (x
);
1541 result
= build1 (NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
1542 TREE_CONSTANT (result
) = TREE_CONSTANT (x
);
1546 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1547 Zero means allow extended lvalues. */
1549 int pedantic_lvalues
;
1551 /* When pedantic, return an expr equal to X but certainly not valid as a
1552 pedantic lvalue. Otherwise, return X. */
1555 pedantic_non_lvalue (x
)
1558 if (pedantic_lvalues
)
1559 return non_lvalue (x
);
1564 /* Given a tree comparison code, return the code that is the logical inverse
1565 of the given code. It is not safe to do this for floating-point
1566 comparisons, except for NE_EXPR and EQ_EXPR. */
1568 static enum tree_code
1569 invert_tree_comparison (code
)
1570 enum tree_code code
;
1591 /* Similar, but return the comparison that results if the operands are
1592 swapped. This is safe for floating-point. */
1594 static enum tree_code
1595 swap_tree_comparison (code
)
1596 enum tree_code code
;
1616 /* Return nonzero if CODE is a tree code that represents a truth value. */
1619 truth_value_p (code
)
1620 enum tree_code code
;
1622 return (TREE_CODE_CLASS (code
) == '<'
1623 || code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
1624 || code
== TRUTH_OR_EXPR
|| code
== TRUTH_ORIF_EXPR
1625 || code
== TRUTH_XOR_EXPR
|| code
== TRUTH_NOT_EXPR
);
1628 /* Return nonzero if two operands are necessarily equal.
1629 If ONLY_CONST is non-zero, only return non-zero for constants.
1630 This function tests whether the operands are indistinguishable;
1631 it does not test whether they are equal using C's == operation.
1632 The distinction is important for IEEE floating point, because
1633 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1634 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
1637 operand_equal_p (arg0
, arg1
, only_const
)
1641 /* If both types don't have the same signedness, then we can't consider
1642 them equal. We must check this before the STRIP_NOPS calls
1643 because they may change the signedness of the arguments. */
1644 if (TREE_UNSIGNED (TREE_TYPE (arg0
)) != TREE_UNSIGNED (TREE_TYPE (arg1
)))
1650 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1651 We don't care about side effects in that case because the SAVE_EXPR
1652 takes care of that for us. */
1653 if (TREE_CODE (arg0
) == SAVE_EXPR
&& arg0
== arg1
)
1654 return ! only_const
;
1656 if (TREE_SIDE_EFFECTS (arg0
) || TREE_SIDE_EFFECTS (arg1
))
1659 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
1660 && TREE_CODE (arg0
) == ADDR_EXPR
1661 && TREE_OPERAND (arg0
, 0) == TREE_OPERAND (arg1
, 0))
1664 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
1665 && TREE_CODE (arg0
) == INTEGER_CST
1666 && TREE_INT_CST_LOW (arg0
) == TREE_INT_CST_LOW (arg1
)
1667 && TREE_INT_CST_HIGH (arg0
) == TREE_INT_CST_HIGH (arg1
))
1670 /* Detect when real constants are equal. */
1671 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
1672 && TREE_CODE (arg0
) == REAL_CST
)
1673 return !bcmp ((char *) &TREE_REAL_CST (arg0
),
1674 (char *) &TREE_REAL_CST (arg1
),
1675 sizeof (REAL_VALUE_TYPE
));
1683 if (TREE_CODE (arg0
) != TREE_CODE (arg1
))
1685 /* This is needed for conversions and for COMPONENT_REF.
1686 Might as well play it safe and always test this. */
1687 if (TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
1690 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
1693 /* Two conversions are equal only if signedness and modes match. */
1694 if ((TREE_CODE (arg0
) == NOP_EXPR
|| TREE_CODE (arg0
) == CONVERT_EXPR
)
1695 && (TREE_UNSIGNED (TREE_TYPE (arg0
))
1696 != TREE_UNSIGNED (TREE_TYPE (arg1
))))
1699 return operand_equal_p (TREE_OPERAND (arg0
, 0),
1700 TREE_OPERAND (arg1
, 0), 0);
1704 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
1705 TREE_OPERAND (arg1
, 0), 0)
1706 && operand_equal_p (TREE_OPERAND (arg0
, 1),
1707 TREE_OPERAND (arg1
, 1), 0));
1710 switch (TREE_CODE (arg0
))
1713 return operand_equal_p (TREE_OPERAND (arg0
, 0),
1714 TREE_OPERAND (arg1
, 0), 0);
1718 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
1719 TREE_OPERAND (arg1
, 0), 0)
1720 && operand_equal_p (TREE_OPERAND (arg0
, 1),
1721 TREE_OPERAND (arg1
, 1), 0));
1724 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
1725 TREE_OPERAND (arg1
, 0), 0)
1726 && operand_equal_p (TREE_OPERAND (arg0
, 1),
1727 TREE_OPERAND (arg1
, 1), 0)
1728 && operand_equal_p (TREE_OPERAND (arg0
, 2),
1729 TREE_OPERAND (arg1
, 2), 0));
1737 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1738 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1740 When in doubt, return 0. */
1743 operand_equal_for_comparison_p (arg0
, arg1
, other
)
1747 int unsignedp1
, unsignedpo
;
1748 tree primarg1
, primother
;
1749 unsigned correct_width
;
1751 if (operand_equal_p (arg0
, arg1
, 0))
1754 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
1755 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
1758 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
1759 actual comparison operand, ARG0.
1761 First throw away any conversions to wider types
1762 already present in the operands. */
1764 primarg1
= get_narrower (arg1
, &unsignedp1
);
1765 primother
= get_narrower (other
, &unsignedpo
);
1767 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
1768 if (unsignedp1
== unsignedpo
1769 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
1770 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
1772 tree type
= TREE_TYPE (arg0
);
1774 /* Make sure shorter operand is extended the right way
1775 to match the longer operand. */
1776 primarg1
= convert (signed_or_unsigned_type (unsignedp1
,
1777 TREE_TYPE (primarg1
)),
1780 if (operand_equal_p (arg0
, convert (type
, primarg1
), 0))
1787 /* See if ARG is an expression that is either a comparison or is performing
1788 arithmetic on comparisons. The comparisons must only be comparing
1789 two different values, which will be stored in *CVAL1 and *CVAL2; if
1790 they are non-zero it means that some operands have already been found.
1791 No variables may be used anywhere else in the expression except in the
1792 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
1793 the expression and save_expr needs to be called with CVAL1 and CVAL2.
1795 If this is true, return 1. Otherwise, return zero. */
1798 twoval_comparison_p (arg
, cval1
, cval2
, save_p
)
1800 tree
*cval1
, *cval2
;
1803 enum tree_code code
= TREE_CODE (arg
);
1804 char class = TREE_CODE_CLASS (code
);
1806 /* We can handle some of the 'e' cases here. */
1807 if (class == 'e' && code
== TRUTH_NOT_EXPR
)
1809 else if (class == 'e'
1810 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
1811 || code
== COMPOUND_EXPR
))
1814 /* ??? Disable this since the SAVE_EXPR might already be in use outside
1815 the expression. There may be no way to make this work, but it needs
1816 to be looked at again for 2.6. */
1818 else if (class == 'e' && code
== SAVE_EXPR
&& SAVE_EXPR_RTL (arg
) == 0)
1820 /* If we've already found a CVAL1 or CVAL2, this expression is
1821 two complex to handle. */
1822 if (*cval1
|| *cval2
)
1833 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
1836 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
1837 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
1838 cval1
, cval2
, save_p
));
1844 if (code
== COND_EXPR
)
1845 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
1846 cval1
, cval2
, save_p
)
1847 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
1848 cval1
, cval2
, save_p
)
1849 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
1850 cval1
, cval2
, save_p
));
1854 /* First see if we can handle the first operand, then the second. For
1855 the second operand, we know *CVAL1 can't be zero. It must be that
1856 one side of the comparison is each of the values; test for the
1857 case where this isn't true by failing if the two operands
1860 if (operand_equal_p (TREE_OPERAND (arg
, 0),
1861 TREE_OPERAND (arg
, 1), 0))
1865 *cval1
= TREE_OPERAND (arg
, 0);
1866 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
1868 else if (*cval2
== 0)
1869 *cval2
= TREE_OPERAND (arg
, 0);
1870 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
1875 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
1877 else if (*cval2
== 0)
1878 *cval2
= TREE_OPERAND (arg
, 1);
1879 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
1890 /* ARG is a tree that is known to contain just arithmetic operations and
1891 comparisons. Evaluate the operations in the tree substituting NEW0 for
1892 any occurrence of OLD0 as an operand of a comparison and likewise for
1896 eval_subst (arg
, old0
, new0
, old1
, new1
)
1898 tree old0
, new0
, old1
, new1
;
1900 tree type
= TREE_TYPE (arg
);
1901 enum tree_code code
= TREE_CODE (arg
);
1902 char class = TREE_CODE_CLASS (code
);
1904 /* We can handle some of the 'e' cases here. */
1905 if (class == 'e' && code
== TRUTH_NOT_EXPR
)
1907 else if (class == 'e'
1908 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
1914 return fold (build1 (code
, type
,
1915 eval_subst (TREE_OPERAND (arg
, 0),
1916 old0
, new0
, old1
, new1
)));
1919 return fold (build (code
, type
,
1920 eval_subst (TREE_OPERAND (arg
, 0),
1921 old0
, new0
, old1
, new1
),
1922 eval_subst (TREE_OPERAND (arg
, 1),
1923 old0
, new0
, old1
, new1
)));
1929 return eval_subst (TREE_OPERAND (arg
, 0), old0
, new0
, old1
, new1
);
1932 return eval_subst (TREE_OPERAND (arg
, 1), old0
, new0
, old1
, new1
);
1935 return fold (build (code
, type
,
1936 eval_subst (TREE_OPERAND (arg
, 0),
1937 old0
, new0
, old1
, new1
),
1938 eval_subst (TREE_OPERAND (arg
, 1),
1939 old0
, new0
, old1
, new1
),
1940 eval_subst (TREE_OPERAND (arg
, 2),
1941 old0
, new0
, old1
, new1
)));
1946 tree arg0
= TREE_OPERAND (arg
, 0);
1947 tree arg1
= TREE_OPERAND (arg
, 1);
1949 /* We need to check both for exact equality and tree equality. The
1950 former will be true if the operand has a side-effect. In that
1951 case, we know the operand occurred exactly once. */
1953 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
1955 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
1958 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
1960 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
1963 return fold (build (code
, type
, arg0
, arg1
));
1970 /* Return a tree for the case when the result of an expression is RESULT
1971 converted to TYPE and OMITTED was previously an operand of the expression
1972 but is now not needed (e.g., we folded OMITTED * 0).
1974 If OMITTED has side effects, we must evaluate it. Otherwise, just do
1975 the conversion of RESULT to TYPE. */
1978 omit_one_operand (type
, result
, omitted
)
1979 tree type
, result
, omitted
;
1981 tree t
= convert (type
, result
);
1983 if (TREE_SIDE_EFFECTS (omitted
))
1984 return build (COMPOUND_EXPR
, type
, omitted
, t
);
1986 return non_lvalue (t
);
1989 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
1992 pedantic_omit_one_operand (type
, result
, omitted
)
1993 tree type
, result
, omitted
;
1995 tree t
= convert (type
, result
);
1997 if (TREE_SIDE_EFFECTS (omitted
))
1998 return build (COMPOUND_EXPR
, type
, omitted
, t
);
2000 return pedantic_non_lvalue (t
);
2005 /* Return a simplified tree node for the truth-negation of ARG. This
2006 never alters ARG itself. We assume that ARG is an operation that
2007 returns a truth value (0 or 1). */
2010 invert_truthvalue (arg
)
2013 tree type
= TREE_TYPE (arg
);
2014 enum tree_code code
= TREE_CODE (arg
);
2016 if (code
== ERROR_MARK
)
2019 /* If this is a comparison, we can simply invert it, except for
2020 floating-point non-equality comparisons, in which case we just
2021 enclose a TRUTH_NOT_EXPR around what we have. */
2023 if (TREE_CODE_CLASS (code
) == '<')
2025 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg
, 0)))
2026 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
2027 return build1 (TRUTH_NOT_EXPR
, type
, arg
);
2029 return build (invert_tree_comparison (code
), type
,
2030 TREE_OPERAND (arg
, 0), TREE_OPERAND (arg
, 1));
2036 return convert (type
, build_int_2 (TREE_INT_CST_LOW (arg
) == 0
2037 && TREE_INT_CST_HIGH (arg
) == 0, 0));
2039 case TRUTH_AND_EXPR
:
2040 return build (TRUTH_OR_EXPR
, type
,
2041 invert_truthvalue (TREE_OPERAND (arg
, 0)),
2042 invert_truthvalue (TREE_OPERAND (arg
, 1)));
2045 return build (TRUTH_AND_EXPR
, type
,
2046 invert_truthvalue (TREE_OPERAND (arg
, 0)),
2047 invert_truthvalue (TREE_OPERAND (arg
, 1)));
2049 case TRUTH_XOR_EXPR
:
2050 /* Here we can invert either operand. We invert the first operand
2051 unless the second operand is a TRUTH_NOT_EXPR in which case our
2052 result is the XOR of the first operand with the inside of the
2053 negation of the second operand. */
2055 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
2056 return build (TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
2057 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
2059 return build (TRUTH_XOR_EXPR
, type
,
2060 invert_truthvalue (TREE_OPERAND (arg
, 0)),
2061 TREE_OPERAND (arg
, 1));
2063 case TRUTH_ANDIF_EXPR
:
2064 return build (TRUTH_ORIF_EXPR
, type
,
2065 invert_truthvalue (TREE_OPERAND (arg
, 0)),
2066 invert_truthvalue (TREE_OPERAND (arg
, 1)));
2068 case TRUTH_ORIF_EXPR
:
2069 return build (TRUTH_ANDIF_EXPR
, type
,
2070 invert_truthvalue (TREE_OPERAND (arg
, 0)),
2071 invert_truthvalue (TREE_OPERAND (arg
, 1)));
2073 case TRUTH_NOT_EXPR
:
2074 return TREE_OPERAND (arg
, 0);
2077 return build (COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
2078 invert_truthvalue (TREE_OPERAND (arg
, 1)),
2079 invert_truthvalue (TREE_OPERAND (arg
, 2)));
2082 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg
, 0),
2083 invert_truthvalue (TREE_OPERAND (arg
, 1)));
2085 case NON_LVALUE_EXPR
:
2086 return invert_truthvalue (TREE_OPERAND (arg
, 0));
2091 return build1 (TREE_CODE (arg
), type
,
2092 invert_truthvalue (TREE_OPERAND (arg
, 0)));
2095 if (!integer_onep (TREE_OPERAND (arg
, 1)))
2097 return build (EQ_EXPR
, type
, arg
, convert (type
, integer_zero_node
));
2100 return build1 (TRUTH_NOT_EXPR
, type
, arg
);
2102 case CLEANUP_POINT_EXPR
:
2103 return build1 (CLEANUP_POINT_EXPR
, type
,
2104 invert_truthvalue (TREE_OPERAND (arg
, 0)));
2106 if (TREE_CODE (TREE_TYPE (arg
)) != BOOLEAN_TYPE
)
2108 return build1 (TRUTH_NOT_EXPR
, type
, arg
);
2111 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2112 operands are another bit-wise operation with a common input. If so,
2113 distribute the bit operations to save an operation and possibly two if
2114 constants are involved. For example, convert
2115 (A | B) & (A | C) into A | (B & C)
2116 Further simplification will occur if B and C are constants.
2118 If this optimization cannot be done, 0 will be returned. */
2121 distribute_bit_expr (code
, type
, arg0
, arg1
)
2122 enum tree_code code
;
2129 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2130 || TREE_CODE (arg0
) == code
2131 || (TREE_CODE (arg0
) != BIT_AND_EXPR
2132 && TREE_CODE (arg0
) != BIT_IOR_EXPR
))
2135 if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0), 0))
2137 common
= TREE_OPERAND (arg0
, 0);
2138 left
= TREE_OPERAND (arg0
, 1);
2139 right
= TREE_OPERAND (arg1
, 1);
2141 else if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1), 0))
2143 common
= TREE_OPERAND (arg0
, 0);
2144 left
= TREE_OPERAND (arg0
, 1);
2145 right
= TREE_OPERAND (arg1
, 0);
2147 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 0), 0))
2149 common
= TREE_OPERAND (arg0
, 1);
2150 left
= TREE_OPERAND (arg0
, 0);
2151 right
= TREE_OPERAND (arg1
, 1);
2153 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 1), 0))
2155 common
= TREE_OPERAND (arg0
, 1);
2156 left
= TREE_OPERAND (arg0
, 0);
2157 right
= TREE_OPERAND (arg1
, 0);
2162 return fold (build (TREE_CODE (arg0
), type
, common
,
2163 fold (build (code
, type
, left
, right
))));
2166 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2167 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2170 make_bit_field_ref (inner
, type
, bitsize
, bitpos
, unsignedp
)
2173 int bitsize
, bitpos
;
2176 tree result
= build (BIT_FIELD_REF
, type
, inner
,
2177 size_int (bitsize
), size_int (bitpos
));
2179 TREE_UNSIGNED (result
) = unsignedp
;
2184 /* Optimize a bit-field compare.
2186 There are two cases: First is a compare against a constant and the
2187 second is a comparison of two items where the fields are at the same
2188 bit position relative to the start of a chunk (byte, halfword, word)
2189 large enough to contain it. In these cases we can avoid the shift
2190 implicit in bitfield extractions.
2192 For constants, we emit a compare of the shifted constant with the
2193 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2194 compared. For two fields at the same position, we do the ANDs with the
2195 similar mask and compare the result of the ANDs.
2197 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2198 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2199 are the left and right operands of the comparison, respectively.
2201 If the optimization described above can be done, we return the resulting
2202 tree. Otherwise we return zero. */
2205 optimize_bit_field_compare (code
, compare_type
, lhs
, rhs
)
2206 enum tree_code code
;
2210 int lbitpos
, lbitsize
, rbitpos
, rbitsize
;
2211 int lnbitpos
, lnbitsize
, rnbitpos
, rnbitsize
;
2212 tree type
= TREE_TYPE (lhs
);
2213 tree signed_type
, unsigned_type
;
2214 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
2215 enum machine_mode lmode
, rmode
, lnmode
, rnmode
;
2216 int lunsignedp
, runsignedp
;
2217 int lvolatilep
= 0, rvolatilep
= 0;
2218 tree linner
, rinner
;
2222 /* Get all the information about the extractions being done. If the bit size
2223 if the same as the size of the underlying object, we aren't doing an
2224 extraction at all and so can do nothing. */
2225 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
2226 &lunsignedp
, &lvolatilep
);
2227 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
2233 /* If this is not a constant, we can only do something if bit positions,
2234 sizes, and signedness are the same. */
2235 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
,
2236 &rmode
, &runsignedp
, &rvolatilep
);
2238 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
2239 || lunsignedp
!= runsignedp
|| offset
!= 0)
2243 /* See if we can find a mode to refer to this field. We should be able to,
2244 but fail if we can't. */
2245 lnmode
= get_best_mode (lbitsize
, lbitpos
,
2246 TYPE_ALIGN (TREE_TYPE (linner
)), word_mode
,
2248 if (lnmode
== VOIDmode
)
2251 /* Set signed and unsigned types of the precision of this mode for the
2253 signed_type
= type_for_mode (lnmode
, 0);
2254 unsigned_type
= type_for_mode (lnmode
, 1);
2258 rnmode
= get_best_mode (rbitsize
, rbitpos
,
2259 TYPE_ALIGN (TREE_TYPE (rinner
)), word_mode
,
2261 if (rnmode
== VOIDmode
)
2265 /* Compute the bit position and size for the new reference and our offset
2266 within it. If the new reference is the same size as the original, we
2267 won't optimize anything, so return zero. */
2268 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
2269 lnbitpos
= lbitpos
& ~ (lnbitsize
- 1);
2270 lbitpos
-= lnbitpos
;
2271 if (lnbitsize
== lbitsize
)
2276 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
2277 rnbitpos
= rbitpos
& ~ (rnbitsize
- 1);
2278 rbitpos
-= rnbitpos
;
2279 if (rnbitsize
== rbitsize
)
2283 if (BYTES_BIG_ENDIAN
)
2284 lbitpos
= lnbitsize
- lbitsize
- lbitpos
;
2286 /* Make the mask to be used against the extracted field. */
2287 mask
= build_int_2 (~0, ~0);
2288 TREE_TYPE (mask
) = unsigned_type
;
2289 force_fit_type (mask
, 0);
2290 mask
= convert (unsigned_type
, mask
);
2291 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (lnbitsize
- lbitsize
), 0);
2292 mask
= const_binop (RSHIFT_EXPR
, mask
,
2293 size_int (lnbitsize
- lbitsize
- lbitpos
), 0);
2296 /* If not comparing with constant, just rework the comparison
2298 return build (code
, compare_type
,
2299 build (BIT_AND_EXPR
, unsigned_type
,
2300 make_bit_field_ref (linner
, unsigned_type
,
2301 lnbitsize
, lnbitpos
, 1),
2303 build (BIT_AND_EXPR
, unsigned_type
,
2304 make_bit_field_ref (rinner
, unsigned_type
,
2305 rnbitsize
, rnbitpos
, 1),
2308 /* Otherwise, we are handling the constant case. See if the constant is too
2309 big for the field. Warn and return a tree of for 0 (false) if so. We do
2310 this not only for its own sake, but to avoid having to test for this
2311 error case below. If we didn't, we might generate wrong code.
2313 For unsigned fields, the constant shifted right by the field length should
2314 be all zero. For signed fields, the high-order bits should agree with
2319 if (! integer_zerop (const_binop (RSHIFT_EXPR
,
2320 convert (unsigned_type
, rhs
),
2321 size_int (lbitsize
), 0)))
2323 warning ("comparison is always %s due to width of bitfield",
2324 code
== NE_EXPR
? "one" : "zero");
2325 return convert (compare_type
,
2327 ? integer_one_node
: integer_zero_node
));
2332 tree tem
= const_binop (RSHIFT_EXPR
, convert (signed_type
, rhs
),
2333 size_int (lbitsize
- 1), 0);
2334 if (! integer_zerop (tem
) && ! integer_all_onesp (tem
))
2336 warning ("comparison is always %s due to width of bitfield",
2337 code
== NE_EXPR
? "one" : "zero");
2338 return convert (compare_type
,
2340 ? integer_one_node
: integer_zero_node
));
2344 /* Single-bit compares should always be against zero. */
2345 if (lbitsize
== 1 && ! integer_zerop (rhs
))
2347 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
2348 rhs
= convert (type
, integer_zero_node
);
2351 /* Make a new bitfield reference, shift the constant over the
2352 appropriate number of bits and mask it with the computed mask
2353 (in case this was a signed field). If we changed it, make a new one. */
2354 lhs
= make_bit_field_ref (linner
, unsigned_type
, lnbitsize
, lnbitpos
, 1);
2357 TREE_SIDE_EFFECTS (lhs
) = 1;
2358 TREE_THIS_VOLATILE (lhs
) = 1;
2361 rhs
= fold (const_binop (BIT_AND_EXPR
,
2362 const_binop (LSHIFT_EXPR
,
2363 convert (unsigned_type
, rhs
),
2364 size_int (lbitpos
), 0),
2367 return build (code
, compare_type
,
2368 build (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
),
2372 /* Subroutine for fold_truthop: decode a field reference.
2374 If EXP is a comparison reference, we return the innermost reference.
2376 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2377 set to the starting bit number.
2379 If the innermost field can be completely contained in a mode-sized
2380 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2382 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
2383 otherwise it is not changed.
2385 *PUNSIGNEDP is set to the signedness of the field.
2387 *PMASK is set to the mask used. This is either contained in a
2388 BIT_AND_EXPR or derived from the width of the field.
2390 *PAND_MASK is set the the mask found in a BIT_AND_EXPR, if any.
2392 Return 0 if this is not a component reference or is one that we can't
2393 do anything with. */
2396 decode_field_reference (exp
, pbitsize
, pbitpos
, pmode
, punsignedp
,
2397 pvolatilep
, pmask
, pand_mask
)
2399 int *pbitsize
, *pbitpos
;
2400 enum machine_mode
*pmode
;
2401 int *punsignedp
, *pvolatilep
;
2406 tree mask
, inner
, offset
;
2410 /* All the optimizations using this function assume integer fields.
2411 There are problems with FP fields since the type_for_size call
2412 below can fail for, e.g., XFmode. */
2413 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
2418 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
2420 and_mask
= TREE_OPERAND (exp
, 1);
2421 exp
= TREE_OPERAND (exp
, 0);
2422 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
2423 if (TREE_CODE (and_mask
) != INTEGER_CST
)
2428 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
2429 punsignedp
, pvolatilep
);
2430 if ((inner
== exp
&& and_mask
== 0)
2431 || *pbitsize
< 0 || offset
!= 0)
2434 /* Compute the mask to access the bitfield. */
2435 unsigned_type
= type_for_size (*pbitsize
, 1);
2436 precision
= TYPE_PRECISION (unsigned_type
);
2438 mask
= build_int_2 (~0, ~0);
2439 TREE_TYPE (mask
) = unsigned_type
;
2440 force_fit_type (mask
, 0);
2441 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
), 0);
2442 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
), 0);
2444 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2446 mask
= fold (build (BIT_AND_EXPR
, unsigned_type
,
2447 convert (unsigned_type
, and_mask
), mask
));
2450 *pand_mask
= and_mask
;
2454 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2458 all_ones_mask_p (mask
, size
)
2462 tree type
= TREE_TYPE (mask
);
2463 int precision
= TYPE_PRECISION (type
);
2466 tmask
= build_int_2 (~0, ~0);
2467 TREE_TYPE (tmask
) = signed_type (type
);
2468 force_fit_type (tmask
, 0);
2470 tree_int_cst_equal (mask
,
2471 const_binop (RSHIFT_EXPR
,
2472 const_binop (LSHIFT_EXPR
, tmask
,
2473 size_int (precision
- size
),
2475 size_int (precision
- size
), 0));
2478 /* Subroutine for fold_truthop: determine if an operand is simple enough
2479 to be evaluated unconditionally. */
2482 simple_operand_p (exp
)
2485 /* Strip any conversions that don't change the machine mode. */
2486 while ((TREE_CODE (exp
) == NOP_EXPR
2487 || TREE_CODE (exp
) == CONVERT_EXPR
)
2488 && (TYPE_MODE (TREE_TYPE (exp
))
2489 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
2490 exp
= TREE_OPERAND (exp
, 0);
2492 return (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'c'
2493 || (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'd'
2494 && ! TREE_ADDRESSABLE (exp
)
2495 && ! TREE_THIS_VOLATILE (exp
)
2496 && ! DECL_NONLOCAL (exp
)
2497 /* Don't regard global variables as simple. They may be
2498 allocated in ways unknown to the compiler (shared memory,
2499 #pragma weak, etc). */
2500 && ! TREE_PUBLIC (exp
)
2501 && ! DECL_EXTERNAL (exp
)
2502 /* Loading a static variable is unduly expensive, but global
2503 registers aren't expensive. */
2504 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
2507 /* Subroutine for fold_truthop: try to optimize a range test.
2509 For example, "i >= 2 && i =< 9" can be done as "(unsigned) (i - 2) <= 7".
2511 JCODE is the logical combination of the two terms. It is TRUTH_AND_EXPR
2512 (representing TRUTH_ANDIF_EXPR and TRUTH_AND_EXPR) or TRUTH_OR_EXPR
2513 (representing TRUTH_ORIF_EXPR and TRUTH_OR_EXPR). TYPE is the type of
2516 VAR is the value being tested. LO_CODE and HI_CODE are the comparison
2517 operators comparing VAR to LO_CST and HI_CST. LO_CST is known to be no
2518 larger than HI_CST (they may be equal).
2520 We return the simplified tree or 0 if no optimization is possible. */
2523 range_test (jcode
, type
, lo_code
, hi_code
, var
, lo_cst
, hi_cst
)
2524 enum tree_code jcode
, lo_code
, hi_code
;
2525 tree type
, var
, lo_cst
, hi_cst
;
2528 enum tree_code rcode
;
2530 /* See if this is a range test and normalize the constant terms. */
2532 if (jcode
== TRUTH_AND_EXPR
)
2537 /* See if we have VAR != CST && VAR != CST+1. */
2538 if (! (hi_code
== NE_EXPR
2539 && TREE_INT_CST_LOW (hi_cst
) - TREE_INT_CST_LOW (lo_cst
) == 1
2540 && tree_int_cst_equal (integer_one_node
,
2541 const_binop (MINUS_EXPR
,
2542 hi_cst
, lo_cst
, 0))))
2550 if (hi_code
== LT_EXPR
)
2551 hi_cst
= const_binop (MINUS_EXPR
, hi_cst
, integer_one_node
, 0);
2552 else if (hi_code
!= LE_EXPR
)
2555 if (lo_code
== GT_EXPR
)
2556 lo_cst
= const_binop (PLUS_EXPR
, lo_cst
, integer_one_node
, 0);
2558 /* We now have VAR >= LO_CST && VAR <= HI_CST. */
2571 /* See if we have VAR == CST || VAR == CST+1. */
2572 if (! (hi_code
== EQ_EXPR
2573 && TREE_INT_CST_LOW (hi_cst
) - TREE_INT_CST_LOW (lo_cst
) == 1
2574 && tree_int_cst_equal (integer_one_node
,
2575 const_binop (MINUS_EXPR
,
2576 hi_cst
, lo_cst
, 0))))
2584 if (hi_code
== GE_EXPR
)
2585 hi_cst
= const_binop (MINUS_EXPR
, hi_cst
, integer_one_node
, 0);
2586 else if (hi_code
!= GT_EXPR
)
2589 if (lo_code
== LE_EXPR
)
2590 lo_cst
= const_binop (PLUS_EXPR
, lo_cst
, integer_one_node
, 0);
2592 /* We now have VAR < LO_CST || VAR > HI_CST. */
2601 /* When normalizing, it is possible to both increment the smaller constant
2602 and decrement the larger constant. See if they are still ordered. */
2603 if (tree_int_cst_lt (hi_cst
, lo_cst
))
2606 /* Fail if VAR isn't an integer. */
2607 utype
= TREE_TYPE (var
);
2608 if (! INTEGRAL_TYPE_P (utype
))
2611 /* The range test is invalid if subtracting the two constants results
2612 in overflow. This can happen in traditional mode. */
2613 if (! int_fits_type_p (hi_cst
, TREE_TYPE (var
))
2614 || ! int_fits_type_p (lo_cst
, TREE_TYPE (var
)))
2617 if (! TREE_UNSIGNED (utype
))
2619 utype
= unsigned_type (utype
);
2620 var
= convert (utype
, var
);
2621 lo_cst
= convert (utype
, lo_cst
);
2622 hi_cst
= convert (utype
, hi_cst
);
2625 return fold (convert (type
,
2626 build (rcode
, utype
,
2627 build (MINUS_EXPR
, utype
, var
, lo_cst
),
2628 const_binop (MINUS_EXPR
, hi_cst
, lo_cst
, 0))));
2631 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
2632 bit value. Arrange things so the extra bits will be set to zero if and
2633 only if C is signed-extended to its full width. If MASK is nonzero,
2634 it is an INTEGER_CST that should be AND'ed with the extra bits. */
2637 unextend (c
, p
, unsignedp
, mask
)
2643 tree type
= TREE_TYPE (c
);
2644 int modesize
= GET_MODE_BITSIZE (TYPE_MODE (type
));
2647 if (p
== modesize
|| unsignedp
)
2650 if (TREE_UNSIGNED (type
))
2651 c
= convert (signed_type (type
), c
);
2653 /* We work by getting just the sign bit into the low-order bit, then
2654 into the high-order bit, then sign-extend. We then XOR that value
2656 temp
= const_binop (RSHIFT_EXPR
, c
, size_int (p
- 1), 0);
2657 temp
= const_binop (BIT_AND_EXPR
, temp
, size_int (1), 0);
2658 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1), 0);
2659 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1), 0);
2661 temp
= const_binop (BIT_AND_EXPR
, temp
, convert (TREE_TYPE (c
), mask
), 0);
2663 return convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
, 0));
2666 /* Find ways of folding logical expressions of LHS and RHS:
2667 Try to merge two comparisons to the same innermost item.
2668 Look for range tests like "ch >= '0' && ch <= '9'".
2669 Look for combinations of simple terms on machines with expensive branches
2670 and evaluate the RHS unconditionally.
2672 For example, if we have p->a == 2 && p->b == 4 and we can make an
2673 object large enough to span both A and B, we can do this with a comparison
2674 against the object ANDed with the a mask.
2676 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
2677 operations to do this with one comparison.
2679 We check for both normal comparisons and the BIT_AND_EXPRs made this by
2680 function and the one above.
2682 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
2683 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
2685 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
2688 We return the simplified tree or 0 if no optimization is possible. */
2691 fold_truthop (code
, truth_type
, lhs
, rhs
)
2692 enum tree_code code
;
2693 tree truth_type
, lhs
, rhs
;
2695 /* If this is the "or" of two comparisons, we can do something if we
2696 the comparisons are NE_EXPR. If this is the "and", we can do something
2697 if the comparisons are EQ_EXPR. I.e.,
2698 (a->b == 2 && a->c == 4) can become (a->new == NEW).
2700 WANTED_CODE is this operation code. For single bit fields, we can
2701 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
2702 comparison for one-bit fields. */
2704 enum tree_code wanted_code
;
2705 enum tree_code lcode
, rcode
;
2706 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
2707 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
2708 int ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
2709 int rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
2710 int xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
2711 int lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
2712 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
2713 enum machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
2714 enum machine_mode lnmode
, rnmode
;
2715 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
2716 tree ll_and_mask
, lr_and_mask
, rl_and_mask
, rr_and_mask
;
2717 tree l_const
, r_const
;
2719 int first_bit
, end_bit
;
2722 /* Start by getting the comparison codes and seeing if this looks like
2723 a range test. Fail if anything is volatile. If one operand is a
2724 BIT_AND_EXPR with the constant one, treat it as if it were surrounded
2727 if (TREE_SIDE_EFFECTS (lhs
)
2728 || TREE_SIDE_EFFECTS (rhs
))
2731 lcode
= TREE_CODE (lhs
);
2732 rcode
= TREE_CODE (rhs
);
2734 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
2735 lcode
= NE_EXPR
, lhs
= build (NE_EXPR
, truth_type
, lhs
, integer_zero_node
);
2737 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
2738 rcode
= NE_EXPR
, rhs
= build (NE_EXPR
, truth_type
, rhs
, integer_zero_node
);
2740 if (TREE_CODE_CLASS (lcode
) != '<'
2741 || TREE_CODE_CLASS (rcode
) != '<')
2744 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
2745 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
2747 ll_arg
= TREE_OPERAND (lhs
, 0);
2748 lr_arg
= TREE_OPERAND (lhs
, 1);
2749 rl_arg
= TREE_OPERAND (rhs
, 0);
2750 rr_arg
= TREE_OPERAND (rhs
, 1);
2752 if (TREE_CODE (lr_arg
) == INTEGER_CST
2753 && TREE_CODE (rr_arg
) == INTEGER_CST
2754 && operand_equal_p (ll_arg
, rl_arg
, 0))
2756 if (tree_int_cst_lt (lr_arg
, rr_arg
))
2757 result
= range_test (code
, truth_type
, lcode
, rcode
,
2758 ll_arg
, lr_arg
, rr_arg
);
2760 result
= range_test (code
, truth_type
, rcode
, lcode
,
2761 ll_arg
, rr_arg
, lr_arg
);
2763 /* If this isn't a range test, it also isn't a comparison that
2764 can be merged. However, it wins to evaluate the RHS unconditionally
2765 on machines with expensive branches. */
2767 if (result
== 0 && BRANCH_COST
>= 2)
2769 if (TREE_CODE (ll_arg
) != VAR_DECL
2770 && TREE_CODE (ll_arg
) != PARM_DECL
)
2772 /* Avoid evaluating the variable part twice. */
2773 ll_arg
= save_expr (ll_arg
);
2774 lhs
= build (lcode
, TREE_TYPE (lhs
), ll_arg
, lr_arg
);
2775 rhs
= build (rcode
, TREE_TYPE (rhs
), ll_arg
, rr_arg
);
2777 return build (code
, truth_type
, lhs
, rhs
);
2782 /* If the RHS can be evaluated unconditionally and its operands are
2783 simple, it wins to evaluate the RHS unconditionally on machines
2784 with expensive branches. In this case, this isn't a comparison
2785 that can be merged. */
2787 /* @@ I'm not sure it wins on the m88110 to do this if the comparisons
2788 are with zero (tmw). */
2790 if (BRANCH_COST
>= 2
2791 && INTEGRAL_TYPE_P (TREE_TYPE (rhs
))
2792 && simple_operand_p (rl_arg
)
2793 && simple_operand_p (rr_arg
))
2794 return build (code
, truth_type
, lhs
, rhs
);
2796 /* See if the comparisons can be merged. Then get all the parameters for
2799 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
2800 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
2804 ll_inner
= decode_field_reference (ll_arg
,
2805 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
2806 &ll_unsignedp
, &volatilep
, &ll_mask
,
2808 lr_inner
= decode_field_reference (lr_arg
,
2809 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
2810 &lr_unsignedp
, &volatilep
, &lr_mask
,
2812 rl_inner
= decode_field_reference (rl_arg
,
2813 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
2814 &rl_unsignedp
, &volatilep
, &rl_mask
,
2816 rr_inner
= decode_field_reference (rr_arg
,
2817 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
2818 &rr_unsignedp
, &volatilep
, &rr_mask
,
2821 /* It must be true that the inner operation on the lhs of each
2822 comparison must be the same if we are to be able to do anything.
2823 Then see if we have constants. If not, the same must be true for
2825 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
2826 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
2829 if (TREE_CODE (lr_arg
) == INTEGER_CST
2830 && TREE_CODE (rr_arg
) == INTEGER_CST
)
2831 l_const
= lr_arg
, r_const
= rr_arg
;
2832 else if (lr_inner
== 0 || rr_inner
== 0
2833 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
2836 l_const
= r_const
= 0;
2838 /* If either comparison code is not correct for our logical operation,
2839 fail. However, we can convert a one-bit comparison against zero into
2840 the opposite comparison against that bit being set in the field. */
2842 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
2843 if (lcode
!= wanted_code
)
2845 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
2851 if (rcode
!= wanted_code
)
2853 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
2859 /* See if we can find a mode that contains both fields being compared on
2860 the left. If we can't, fail. Otherwise, update all constants and masks
2861 to be relative to a field of that size. */
2862 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
2863 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
2864 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
,
2865 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
2867 if (lnmode
== VOIDmode
)
2870 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
2871 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
2872 type
= type_for_size (lnbitsize
, 1);
2873 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
2875 if (BYTES_BIG_ENDIAN
)
2877 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
2878 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
2881 ll_mask
= const_binop (LSHIFT_EXPR
, convert (type
, ll_mask
),
2882 size_int (xll_bitpos
), 0);
2883 rl_mask
= const_binop (LSHIFT_EXPR
, convert (type
, rl_mask
),
2884 size_int (xrl_bitpos
), 0);
2888 l_const
= convert (type
, l_const
);
2889 l_const
= unextend (l_const
, ll_bitsize
, ll_unsignedp
, ll_and_mask
);
2890 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
), 0);
2891 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
2892 fold (build1 (BIT_NOT_EXPR
,
2896 warning ("comparison is always %s",
2897 wanted_code
== NE_EXPR
? "one" : "zero");
2899 return convert (truth_type
,
2900 wanted_code
== NE_EXPR
2901 ? integer_one_node
: integer_zero_node
);
2906 r_const
= convert (type
, r_const
);
2907 r_const
= unextend (r_const
, rl_bitsize
, rl_unsignedp
, rl_and_mask
);
2908 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
), 0);
2909 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
2910 fold (build1 (BIT_NOT_EXPR
,
2914 warning ("comparison is always %s",
2915 wanted_code
== NE_EXPR
? "one" : "zero");
2917 return convert (truth_type
,
2918 wanted_code
== NE_EXPR
2919 ? integer_one_node
: integer_zero_node
);
2923 /* If the right sides are not constant, do the same for it. Also,
2924 disallow this optimization if a size or signedness mismatch occurs
2925 between the left and right sides. */
2928 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
2929 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
2930 /* Make sure the two fields on the right
2931 correspond to the left without being swapped. */
2932 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
2935 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
2936 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
2937 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
,
2938 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
2940 if (rnmode
== VOIDmode
)
2943 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
2944 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
2945 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
2947 if (BYTES_BIG_ENDIAN
)
2949 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
2950 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
2953 lr_mask
= const_binop (LSHIFT_EXPR
, convert (type
, lr_mask
),
2954 size_int (xlr_bitpos
), 0);
2955 rr_mask
= const_binop (LSHIFT_EXPR
, convert (type
, rr_mask
),
2956 size_int (xrr_bitpos
), 0);
2958 /* Make a mask that corresponds to both fields being compared.
2959 Do this for both items being compared. If the masks agree,
2960 we can do this by masking both and comparing the masked
2962 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
, 0);
2963 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
, 0);
2964 if (operand_equal_p (ll_mask
, lr_mask
, 0) && lnbitsize
== rnbitsize
)
2966 lhs
= make_bit_field_ref (ll_inner
, type
, lnbitsize
, lnbitpos
,
2967 ll_unsignedp
|| rl_unsignedp
);
2968 rhs
= make_bit_field_ref (lr_inner
, type
, rnbitsize
, rnbitpos
,
2969 lr_unsignedp
|| rr_unsignedp
);
2970 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
2972 lhs
= build (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
2973 rhs
= build (BIT_AND_EXPR
, type
, rhs
, ll_mask
);
2975 return build (wanted_code
, truth_type
, lhs
, rhs
);
2978 /* There is still another way we can do something: If both pairs of
2979 fields being compared are adjacent, we may be able to make a wider
2980 field containing them both. */
2981 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
2982 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
2983 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
2984 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
2985 return build (wanted_code
, truth_type
,
2986 make_bit_field_ref (ll_inner
, type
,
2987 ll_bitsize
+ rl_bitsize
,
2988 MIN (ll_bitpos
, rl_bitpos
),
2990 make_bit_field_ref (lr_inner
, type
,
2991 lr_bitsize
+ rr_bitsize
,
2992 MIN (lr_bitpos
, rr_bitpos
),
2998 /* Handle the case of comparisons with constants. If there is something in
2999 common between the masks, those bits of the constants must be the same.
3000 If not, the condition is always false. Test for this to avoid generating
3001 incorrect code below. */
3002 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
, 0);
3003 if (! integer_zerop (result
)
3004 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
, 0),
3005 const_binop (BIT_AND_EXPR
, result
, r_const
, 0)) != 1)
3007 if (wanted_code
== NE_EXPR
)
3009 warning ("`or' of unmatched not-equal tests is always 1");
3010 return convert (truth_type
, integer_one_node
);
3014 warning ("`and' of mutually exclusive equal-tests is always zero");
3015 return convert (truth_type
, integer_zero_node
);
3019 /* Construct the expression we will return. First get the component
3020 reference we will make. Unless the mask is all ones the width of
3021 that field, perform the mask operation. Then compare with the
3023 result
= make_bit_field_ref (ll_inner
, type
, lnbitsize
, lnbitpos
,
3024 ll_unsignedp
|| rl_unsignedp
);
3026 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
, 0);
3027 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
3028 result
= build (BIT_AND_EXPR
, type
, result
, ll_mask
);
3030 return build (wanted_code
, truth_type
, result
,
3031 const_binop (BIT_IOR_EXPR
, l_const
, r_const
, 0));
3034 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
3035 S, a SAVE_EXPR, return the expression actually being evaluated. Note
3036 that we may sometimes modify the tree. */
3039 strip_compound_expr (t
, s
)
3043 tree type
= TREE_TYPE (t
);
3044 enum tree_code code
= TREE_CODE (t
);
3046 /* See if this is the COMPOUND_EXPR we want to eliminate. */
3047 if (code
== COMPOUND_EXPR
&& TREE_CODE (TREE_OPERAND (t
, 0)) == CONVERT_EXPR
3048 && TREE_OPERAND (TREE_OPERAND (t
, 0), 0) == s
)
3049 return TREE_OPERAND (t
, 1);
3051 /* See if this is a COND_EXPR or a simple arithmetic operator. We
3052 don't bother handling any other types. */
3053 else if (code
== COND_EXPR
)
3055 TREE_OPERAND (t
, 0) = strip_compound_expr (TREE_OPERAND (t
, 0), s
);
3056 TREE_OPERAND (t
, 1) = strip_compound_expr (TREE_OPERAND (t
, 1), s
);
3057 TREE_OPERAND (t
, 2) = strip_compound_expr (TREE_OPERAND (t
, 2), s
);
3059 else if (TREE_CODE_CLASS (code
) == '1')
3060 TREE_OPERAND (t
, 0) = strip_compound_expr (TREE_OPERAND (t
, 0), s
);
3061 else if (TREE_CODE_CLASS (code
) == '<'
3062 || TREE_CODE_CLASS (code
) == '2')
3064 TREE_OPERAND (t
, 0) = strip_compound_expr (TREE_OPERAND (t
, 0), s
);
3065 TREE_OPERAND (t
, 1) = strip_compound_expr (TREE_OPERAND (t
, 1), s
);
3071 /* Perform constant folding and related simplification of EXPR.
3072 The related simplifications include x*1 => x, x*0 => 0, etc.,
3073 and application of the associative law.
3074 NOP_EXPR conversions may be removed freely (as long as we
3075 are careful not to change the C type of the overall expression)
3076 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
3077 but we can constant-fold them if they have constant operands. */
3083 register tree t
= expr
;
3084 tree t1
= NULL_TREE
;
3086 tree type
= TREE_TYPE (expr
);
3087 register tree arg0
, arg1
;
3088 register enum tree_code code
= TREE_CODE (t
);
3092 /* WINS will be nonzero when the switch is done
3093 if all operands are constant. */
3097 /* Don't try to process an RTL_EXPR since its operands aren't trees. */
3098 if (code
== RTL_EXPR
)
3101 /* Return right away if already constant. */
3102 if (TREE_CONSTANT (t
))
3104 if (code
== CONST_DECL
)
3105 return DECL_INITIAL (t
);
3109 kind
= TREE_CODE_CLASS (code
);
3110 if (code
== NOP_EXPR
|| code
== FLOAT_EXPR
|| code
== CONVERT_EXPR
)
3114 /* Special case for conversion ops that can have fixed point args. */
3115 arg0
= TREE_OPERAND (t
, 0);
3117 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
3119 STRIP_TYPE_NOPS (arg0
);
3121 if (arg0
!= 0 && TREE_CODE (arg0
) == COMPLEX_CST
)
3122 subop
= TREE_REALPART (arg0
);
3126 if (subop
!= 0 && TREE_CODE (subop
) != INTEGER_CST
3127 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3128 && TREE_CODE (subop
) != REAL_CST
3129 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3131 /* Note that TREE_CONSTANT isn't enough:
3132 static var addresses are constant but we can't
3133 do arithmetic on them. */
3136 else if (kind
== 'e' || kind
== '<'
3137 || kind
== '1' || kind
== '2' || kind
== 'r')
3139 register int len
= tree_code_length
[(int) code
];
3141 for (i
= 0; i
< len
; i
++)
3143 tree op
= TREE_OPERAND (t
, i
);
3147 continue; /* Valid for CALL_EXPR, at least. */
3149 if (kind
== '<' || code
== RSHIFT_EXPR
)
3151 /* Signedness matters here. Perhaps we can refine this
3153 STRIP_TYPE_NOPS (op
);
3157 /* Strip any conversions that don't change the mode. */
3161 if (TREE_CODE (op
) == COMPLEX_CST
)
3162 subop
= TREE_REALPART (op
);
3166 if (TREE_CODE (subop
) != INTEGER_CST
3167 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3168 && TREE_CODE (subop
) != REAL_CST
3169 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3171 /* Note that TREE_CONSTANT isn't enough:
3172 static var addresses are constant but we can't
3173 do arithmetic on them. */
3183 /* If this is a commutative operation, and ARG0 is a constant, move it
3184 to ARG1 to reduce the number of tests below. */
3185 if ((code
== PLUS_EXPR
|| code
== MULT_EXPR
|| code
== MIN_EXPR
3186 || code
== MAX_EXPR
|| code
== BIT_IOR_EXPR
|| code
== BIT_XOR_EXPR
3187 || code
== BIT_AND_EXPR
)
3188 && (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
))
3190 tem
= arg0
; arg0
= arg1
; arg1
= tem
;
3192 tem
= TREE_OPERAND (t
, 0); TREE_OPERAND (t
, 0) = TREE_OPERAND (t
, 1);
3193 TREE_OPERAND (t
, 1) = tem
;
3196 /* Now WINS is set as described above,
3197 ARG0 is the first operand of EXPR,
3198 and ARG1 is the second operand (if it has more than one operand).
3200 First check for cases where an arithmetic operation is applied to a
3201 compound, conditional, or comparison operation. Push the arithmetic
3202 operation inside the compound or conditional to see if any folding
3203 can then be done. Convert comparison to conditional for this purpose.
3204 The also optimizes non-constant cases that used to be done in
3207 Before we do that, see if this is a BIT_AND_EXPR or a BIT_OR_EXPR,
3208 one of the operands is a comparison and the other is a comparison, a
3209 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
3210 code below would make the expression more complex. Change it to a
3211 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
3212 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
3214 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
3215 || code
== EQ_EXPR
|| code
== NE_EXPR
)
3216 && ((truth_value_p (TREE_CODE (arg0
))
3217 && (truth_value_p (TREE_CODE (arg1
))
3218 || (TREE_CODE (arg1
) == BIT_AND_EXPR
3219 && integer_onep (TREE_OPERAND (arg1
, 1)))))
3220 || (truth_value_p (TREE_CODE (arg1
))
3221 && (truth_value_p (TREE_CODE (arg0
))
3222 || (TREE_CODE (arg0
) == BIT_AND_EXPR
3223 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
3225 t
= fold (build (code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
3226 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
3230 if (code
== EQ_EXPR
)
3231 t
= invert_truthvalue (t
);
3236 if (TREE_CODE_CLASS (code
) == '1')
3238 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
3239 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
3240 fold (build1 (code
, type
, TREE_OPERAND (arg0
, 1))));
3241 else if (TREE_CODE (arg0
) == COND_EXPR
)
3243 t
= fold (build (COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
3244 fold (build1 (code
, type
, TREE_OPERAND (arg0
, 1))),
3245 fold (build1 (code
, type
, TREE_OPERAND (arg0
, 2)))));
3247 /* If this was a conversion, and all we did was to move into
3248 inside the COND_EXPR, bring it back out. But leave it if
3249 it is a conversion from integer to integer and the
3250 result precision is no wider than a word since such a
3251 conversion is cheap and may be optimized away by combine,
3252 while it couldn't if it were outside the COND_EXPR. Then return
3253 so we don't get into an infinite recursion loop taking the
3254 conversion out and then back in. */
3256 if ((code
== NOP_EXPR
|| code
== CONVERT_EXPR
3257 || code
== NON_LVALUE_EXPR
)
3258 && TREE_CODE (t
) == COND_EXPR
3259 && TREE_CODE (TREE_OPERAND (t
, 1)) == code
3260 && TREE_CODE (TREE_OPERAND (t
, 2)) == code
3261 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0))
3262 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 2), 0)))
3263 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t
))
3264 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0)))
3265 && TYPE_PRECISION (TREE_TYPE (t
)) <= BITS_PER_WORD
))
3266 t
= build1 (code
, type
,
3268 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0)),
3269 TREE_OPERAND (t
, 0),
3270 TREE_OPERAND (TREE_OPERAND (t
, 1), 0),
3271 TREE_OPERAND (TREE_OPERAND (t
, 2), 0)));
3274 else if (TREE_CODE_CLASS (TREE_CODE (arg0
)) == '<')
3275 return fold (build (COND_EXPR
, type
, arg0
,
3276 fold (build1 (code
, type
, integer_one_node
)),
3277 fold (build1 (code
, type
, integer_zero_node
))));
3279 else if (TREE_CODE_CLASS (code
) == '2'
3280 || TREE_CODE_CLASS (code
) == '<')
3282 if (TREE_CODE (arg1
) == COMPOUND_EXPR
)
3283 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
3284 fold (build (code
, type
,
3285 arg0
, TREE_OPERAND (arg1
, 1))));
3286 else if (TREE_CODE (arg1
) == COND_EXPR
3287 || TREE_CODE_CLASS (TREE_CODE (arg1
)) == '<')
3289 tree test
, true_value
, false_value
;
3291 if (TREE_CODE (arg1
) == COND_EXPR
)
3293 test
= TREE_OPERAND (arg1
, 0);
3294 true_value
= TREE_OPERAND (arg1
, 1);
3295 false_value
= TREE_OPERAND (arg1
, 2);
3299 tree testtype
= TREE_TYPE (arg1
);
3301 true_value
= convert (testtype
, integer_one_node
);
3302 false_value
= convert (testtype
, integer_zero_node
);
3305 /* If ARG0 is complex we want to make sure we only evaluate
3306 it once. Though this is only required if it is volatile, it
3307 might be more efficient even if it is not. However, if we
3308 succeed in folding one part to a constant, we do not need
3309 to make this SAVE_EXPR. Since we do this optimization
3310 primarily to see if we do end up with constant and this
3311 SAVE_EXPR interferes with later optimizations, suppressing
3312 it when we can is important. */
3314 if (TREE_CODE (arg0
) != SAVE_EXPR
3315 && ((TREE_CODE (arg0
) != VAR_DECL
3316 && TREE_CODE (arg0
) != PARM_DECL
)
3317 || TREE_SIDE_EFFECTS (arg0
)))
3319 tree lhs
= fold (build (code
, type
, arg0
, true_value
));
3320 tree rhs
= fold (build (code
, type
, arg0
, false_value
));
3322 if (TREE_CONSTANT (lhs
) || TREE_CONSTANT (rhs
))
3323 return fold (build (COND_EXPR
, type
, test
, lhs
, rhs
));
3325 arg0
= save_expr (arg0
);
3328 test
= fold (build (COND_EXPR
, type
, test
,
3329 fold (build (code
, type
, arg0
, true_value
)),
3330 fold (build (code
, type
, arg0
, false_value
))));
3331 if (TREE_CODE (arg0
) == SAVE_EXPR
)
3332 return build (COMPOUND_EXPR
, type
,
3333 convert (void_type_node
, arg0
),
3334 strip_compound_expr (test
, arg0
));
3336 return convert (type
, test
);
3339 else if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
3340 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
3341 fold (build (code
, type
, TREE_OPERAND (arg0
, 1), arg1
)));
3342 else if (TREE_CODE (arg0
) == COND_EXPR
3343 || TREE_CODE_CLASS (TREE_CODE (arg0
)) == '<')
3345 tree test
, true_value
, false_value
;
3347 if (TREE_CODE (arg0
) == COND_EXPR
)
3349 test
= TREE_OPERAND (arg0
, 0);
3350 true_value
= TREE_OPERAND (arg0
, 1);
3351 false_value
= TREE_OPERAND (arg0
, 2);
3355 tree testtype
= TREE_TYPE (arg0
);
3357 true_value
= convert (testtype
, integer_one_node
);
3358 false_value
= convert (testtype
, integer_zero_node
);
3361 if (TREE_CODE (arg1
) != SAVE_EXPR
3362 && ((TREE_CODE (arg1
) != VAR_DECL
3363 && TREE_CODE (arg1
) != PARM_DECL
)
3364 || TREE_SIDE_EFFECTS (arg1
)))
3366 tree lhs
= fold (build (code
, type
, true_value
, arg1
));
3367 tree rhs
= fold (build (code
, type
, false_value
, arg1
));
3369 if (TREE_CONSTANT (lhs
) || TREE_CONSTANT (rhs
)
3370 || TREE_CONSTANT (arg1
))
3371 return fold (build (COND_EXPR
, type
, test
, lhs
, rhs
));
3373 arg1
= save_expr (arg1
);
3376 test
= fold (build (COND_EXPR
, type
, test
,
3377 fold (build (code
, type
, true_value
, arg1
)),
3378 fold (build (code
, type
, false_value
, arg1
))));
3379 if (TREE_CODE (arg1
) == SAVE_EXPR
)
3380 return build (COMPOUND_EXPR
, type
,
3381 convert (void_type_node
, arg1
),
3382 strip_compound_expr (test
, arg1
));
3384 return convert (type
, test
);
3387 else if (TREE_CODE_CLASS (code
) == '<'
3388 && TREE_CODE (arg0
) == COMPOUND_EXPR
)
3389 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
3390 fold (build (code
, type
, TREE_OPERAND (arg0
, 1), arg1
)));
3391 else if (TREE_CODE_CLASS (code
) == '<'
3392 && TREE_CODE (arg1
) == COMPOUND_EXPR
)
3393 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
3394 fold (build (code
, type
, arg0
, TREE_OPERAND (arg1
, 1))));
3406 return fold (DECL_INITIAL (t
));
3411 case FIX_TRUNC_EXPR
:
3412 /* Other kinds of FIX are not handled properly by fold_convert. */
3414 if (TREE_TYPE (TREE_OPERAND (t
, 0)) == TREE_TYPE (t
))
3415 return TREE_OPERAND (t
, 0);
3417 /* Handle cases of two conversions in a row. */
3418 if (TREE_CODE (TREE_OPERAND (t
, 0)) == NOP_EXPR
3419 || TREE_CODE (TREE_OPERAND (t
, 0)) == CONVERT_EXPR
)
3421 tree inside_type
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
3422 tree inter_type
= TREE_TYPE (TREE_OPERAND (t
, 0));
3423 tree final_type
= TREE_TYPE (t
);
3424 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
3425 int inside_ptr
= POINTER_TYPE_P (inside_type
);
3426 int inside_float
= FLOAT_TYPE_P (inside_type
);
3427 int inside_prec
= TYPE_PRECISION (inside_type
);
3428 int inside_unsignedp
= TREE_UNSIGNED (inside_type
);
3429 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
3430 int inter_ptr
= POINTER_TYPE_P (inter_type
);
3431 int inter_float
= FLOAT_TYPE_P (inter_type
);
3432 int inter_prec
= TYPE_PRECISION (inter_type
);
3433 int inter_unsignedp
= TREE_UNSIGNED (inter_type
);
3434 int final_int
= INTEGRAL_TYPE_P (final_type
);
3435 int final_ptr
= POINTER_TYPE_P (final_type
);
3436 int final_float
= FLOAT_TYPE_P (final_type
);
3437 int final_prec
= TYPE_PRECISION (final_type
);
3438 int final_unsignedp
= TREE_UNSIGNED (final_type
);
3440 /* In addition to the cases of two conversions in a row
3441 handled below, if we are converting something to its own
3442 type via an object of identical or wider precision, neither
3443 conversion is needed. */
3444 if (inside_type
== final_type
3445 && ((inter_int
&& final_int
) || (inter_float
&& final_float
))
3446 && inter_prec
>= final_prec
)
3447 return TREE_OPERAND (TREE_OPERAND (t
, 0), 0);
3449 /* Likewise, if the intermediate and final types are either both
3450 float or both integer, we don't need the middle conversion if
3451 it is wider than the final type and doesn't change the signedness
3452 (for integers). Avoid this if the final type is a pointer
3453 since then we sometimes need the inner conversion. Likewise if
3454 the outer has a precision not equal to the size of its mode. */
3455 if ((((inter_int
|| inter_ptr
) && (inside_int
|| inside_ptr
))
3456 || (inter_float
&& inside_float
))
3457 && inter_prec
>= inside_prec
3458 && (inter_float
|| inter_unsignedp
== inside_unsignedp
)
3459 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (final_type
))
3460 && TYPE_MODE (final_type
) == TYPE_MODE (inter_type
))
3462 return convert (final_type
, TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
3464 /* Two conversions in a row are not needed unless:
3465 - some conversion is floating-point (overstrict for now), or
3466 - the intermediate type is narrower than both initial and
3468 - the intermediate type and innermost type differ in signedness,
3469 and the outermost type is wider than the intermediate, or
3470 - the initial type is a pointer type and the precisions of the
3471 intermediate and final types differ, or
3472 - the final type is a pointer type and the precisions of the
3473 initial and intermediate types differ. */
3474 if (! inside_float
&& ! inter_float
&& ! final_float
3475 && (inter_prec
> inside_prec
|| inter_prec
> final_prec
)
3476 && ! (inside_int
&& inter_int
3477 && inter_unsignedp
!= inside_unsignedp
3478 && inter_prec
< final_prec
)
3479 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
3480 == (final_unsignedp
&& final_prec
> inter_prec
))
3481 && ! (inside_ptr
&& inter_prec
!= final_prec
)
3482 && ! (final_ptr
&& inside_prec
!= inter_prec
)
3483 && ! (final_prec
!= GET_MODE_BITSIZE (TYPE_MODE (final_type
))
3484 && TYPE_MODE (final_type
) == TYPE_MODE (inter_type
))
3486 return convert (final_type
, TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
3489 if (TREE_CODE (TREE_OPERAND (t
, 0)) == MODIFY_EXPR
3490 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t
, 0), 1))
3491 /* Detect assigning a bitfield. */
3492 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0)) == COMPONENT_REF
3493 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t
, 0), 0), 1))))
3495 /* Don't leave an assignment inside a conversion
3496 unless assigning a bitfield. */
3497 tree prev
= TREE_OPERAND (t
, 0);
3498 TREE_OPERAND (t
, 0) = TREE_OPERAND (prev
, 1);
3499 /* First do the assignment, then return converted constant. */
3500 t
= build (COMPOUND_EXPR
, TREE_TYPE (t
), prev
, fold (t
));
3506 TREE_CONSTANT (t
) = TREE_CONSTANT (arg0
);
3509 return fold_convert (t
, arg0
);
3511 #if 0 /* This loses on &"foo"[0]. */
3516 /* Fold an expression like: "foo"[2] */
3517 if (TREE_CODE (arg0
) == STRING_CST
3518 && TREE_CODE (arg1
) == INTEGER_CST
3519 && !TREE_INT_CST_HIGH (arg1
)
3520 && (i
= TREE_INT_CST_LOW (arg1
)) < TREE_STRING_LENGTH (arg0
))
3522 t
= build_int_2 (TREE_STRING_POINTER (arg0
)[i
], 0);
3523 TREE_TYPE (t
) = TREE_TYPE (TREE_TYPE (arg0
));
3524 force_fit_type (t
, 0);
3531 if (TREE_CODE (arg0
) == CONSTRUCTOR
)
3533 tree m
= purpose_member (arg1
, CONSTRUCTOR_ELTS (arg0
));
3540 TREE_CONSTANT (t
) = wins
;
3546 if (TREE_CODE (arg0
) == INTEGER_CST
)
3548 HOST_WIDE_INT low
, high
;
3549 int overflow
= neg_double (TREE_INT_CST_LOW (arg0
),
3550 TREE_INT_CST_HIGH (arg0
),
3552 t
= build_int_2 (low
, high
);
3553 TREE_TYPE (t
) = type
;
3555 = (TREE_OVERFLOW (arg0
)
3556 | force_fit_type (t
, overflow
&& !TREE_UNSIGNED (type
)));
3557 TREE_CONSTANT_OVERFLOW (t
)
3558 = TREE_OVERFLOW (t
) | TREE_CONSTANT_OVERFLOW (arg0
);
3560 else if (TREE_CODE (arg0
) == REAL_CST
)
3561 t
= build_real (type
, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0
)));
3562 TREE_TYPE (t
) = type
;
3564 else if (TREE_CODE (arg0
) == NEGATE_EXPR
)
3565 return TREE_OPERAND (arg0
, 0);
3567 /* Convert - (a - b) to (b - a) for non-floating-point. */
3568 else if (TREE_CODE (arg0
) == MINUS_EXPR
&& ! FLOAT_TYPE_P (type
))
3569 return build (MINUS_EXPR
, type
, TREE_OPERAND (arg0
, 1),
3570 TREE_OPERAND (arg0
, 0));
3577 if (TREE_CODE (arg0
) == INTEGER_CST
)
3579 if (! TREE_UNSIGNED (type
)
3580 && TREE_INT_CST_HIGH (arg0
) < 0)
3582 HOST_WIDE_INT low
, high
;
3583 int overflow
= neg_double (TREE_INT_CST_LOW (arg0
),
3584 TREE_INT_CST_HIGH (arg0
),
3586 t
= build_int_2 (low
, high
);
3587 TREE_TYPE (t
) = type
;
3589 = (TREE_OVERFLOW (arg0
)
3590 | force_fit_type (t
, overflow
));
3591 TREE_CONSTANT_OVERFLOW (t
)
3592 = TREE_OVERFLOW (t
) | TREE_CONSTANT_OVERFLOW (arg0
);
3595 else if (TREE_CODE (arg0
) == REAL_CST
)
3597 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
3598 t
= build_real (type
,
3599 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0
)));
3601 TREE_TYPE (t
) = type
;
3603 else if (TREE_CODE (arg0
) == ABS_EXPR
|| TREE_CODE (arg0
) == NEGATE_EXPR
)
3604 return build1 (ABS_EXPR
, type
, TREE_OPERAND (arg0
, 0));
3608 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
3610 else if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
3611 return build (COMPLEX_EXPR
, TREE_TYPE (arg0
),
3612 TREE_OPERAND (arg0
, 0),
3613 fold (build1 (NEGATE_EXPR
,
3614 TREE_TYPE (TREE_TYPE (arg0
)),
3615 TREE_OPERAND (arg0
, 1))));
3616 else if (TREE_CODE (arg0
) == COMPLEX_CST
)
3617 return build_complex (TREE_OPERAND (arg0
, 0),
3618 fold (build1 (NEGATE_EXPR
,
3619 TREE_TYPE (TREE_TYPE (arg0
)),
3620 TREE_OPERAND (arg0
, 1))));
3621 else if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
3622 return fold (build (TREE_CODE (arg0
), type
,
3623 fold (build1 (CONJ_EXPR
, type
,
3624 TREE_OPERAND (arg0
, 0))),
3625 fold (build1 (CONJ_EXPR
,
3626 type
, TREE_OPERAND (arg0
, 1)))));
3627 else if (TREE_CODE (arg0
) == CONJ_EXPR
)
3628 return TREE_OPERAND (arg0
, 0);
3634 if (TREE_CODE (arg0
) == INTEGER_CST
)
3635 t
= build_int_2 (~ TREE_INT_CST_LOW (arg0
),
3636 ~ TREE_INT_CST_HIGH (arg0
));
3637 TREE_TYPE (t
) = type
;
3638 force_fit_type (t
, 0);
3639 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg0
);
3640 TREE_CONSTANT_OVERFLOW (t
) = TREE_CONSTANT_OVERFLOW (arg0
);
3642 else if (TREE_CODE (arg0
) == BIT_NOT_EXPR
)
3643 return TREE_OPERAND (arg0
, 0);
3647 /* A + (-B) -> A - B */
3648 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
3649 return fold (build (MINUS_EXPR
, type
, arg0
, TREE_OPERAND (arg1
, 0)));
3650 else if (! FLOAT_TYPE_P (type
))
3652 if (integer_zerop (arg1
))
3653 return non_lvalue (convert (type
, arg0
));
3655 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
3656 with a constant, and the two constants have no bits in common,
3657 we should treat this as a BIT_IOR_EXPR since this may produce more
3659 if (TREE_CODE (arg0
) == BIT_AND_EXPR
3660 && TREE_CODE (arg1
) == BIT_AND_EXPR
3661 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
3662 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
3663 && integer_zerop (const_binop (BIT_AND_EXPR
,
3664 TREE_OPERAND (arg0
, 1),
3665 TREE_OPERAND (arg1
, 1), 0)))
3667 code
= BIT_IOR_EXPR
;
3671 /* (A * C) + (B * C) -> (A+B) * C. Since we are most concerned
3672 about the case where C is a constant, just try one of the
3673 four possibilities. */
3675 if (TREE_CODE (arg0
) == MULT_EXPR
&& TREE_CODE (arg1
) == MULT_EXPR
3676 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3677 TREE_OPERAND (arg1
, 1), 0))
3678 return fold (build (MULT_EXPR
, type
,
3679 fold (build (PLUS_EXPR
, type
,
3680 TREE_OPERAND (arg0
, 0),
3681 TREE_OPERAND (arg1
, 0))),
3682 TREE_OPERAND (arg0
, 1)));
3684 /* In IEEE floating point, x+0 may not equal x. */
3685 else if ((TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
3687 && real_zerop (arg1
))
3688 return non_lvalue (convert (type
, arg0
));
3690 /* In most languages, can't associate operations on floats
3691 through parentheses. Rather than remember where the parentheses
3692 were, we don't associate floats at all. It shouldn't matter much.
3693 However, associating multiplications is only very slightly
3694 inaccurate, so do that if -ffast-math is specified. */
3695 if (FLOAT_TYPE_P (type
)
3696 && ! (flag_fast_math
&& code
== MULT_EXPR
))
3699 /* The varsign == -1 cases happen only for addition and subtraction.
3700 It says that the arg that was split was really CON minus VAR.
3701 The rest of the code applies to all associative operations. */
3707 if (split_tree (arg0
, code
, &var
, &con
, &varsign
))
3711 /* EXPR is (CON-VAR) +- ARG1. */
3712 /* If it is + and VAR==ARG1, return just CONST. */
3713 if (code
== PLUS_EXPR
&& operand_equal_p (var
, arg1
, 0))
3714 return convert (TREE_TYPE (t
), con
);
3716 /* If ARG0 is a constant, don't change things around;
3717 instead keep all the constant computations together. */
3719 if (TREE_CONSTANT (arg0
))
3722 /* Otherwise return (CON +- ARG1) - VAR. */
3723 t
= build (MINUS_EXPR
, type
,
3724 fold (build (code
, type
, con
, arg1
)), var
);
3728 /* EXPR is (VAR+CON) +- ARG1. */
3729 /* If it is - and VAR==ARG1, return just CONST. */
3730 if (code
== MINUS_EXPR
&& operand_equal_p (var
, arg1
, 0))
3731 return convert (TREE_TYPE (t
), con
);
3733 /* If ARG0 is a constant, don't change things around;
3734 instead keep all the constant computations together. */
3736 if (TREE_CONSTANT (arg0
))
3739 /* Otherwise return VAR +- (ARG1 +- CON). */
3740 tem
= fold (build (code
, type
, arg1
, con
));
3741 t
= build (code
, type
, var
, tem
);
3743 if (integer_zerop (tem
)
3744 && (code
== PLUS_EXPR
|| code
== MINUS_EXPR
))
3745 return convert (type
, var
);
3746 /* If we have x +/- (c - d) [c an explicit integer]
3747 change it to x -/+ (d - c) since if d is relocatable
3748 then the latter can be a single immediate insn
3749 and the former cannot. */
3750 if (TREE_CODE (tem
) == MINUS_EXPR
3751 && TREE_CODE (TREE_OPERAND (tem
, 0)) == INTEGER_CST
)
3753 tree tem1
= TREE_OPERAND (tem
, 1);
3754 TREE_OPERAND (tem
, 1) = TREE_OPERAND (tem
, 0);
3755 TREE_OPERAND (tem
, 0) = tem1
;
3757 (code
== PLUS_EXPR
? MINUS_EXPR
: PLUS_EXPR
));
3763 if (split_tree (arg1
, code
, &var
, &con
, &varsign
))
3765 if (TREE_CONSTANT (arg1
))
3770 (code
== PLUS_EXPR
? MINUS_EXPR
: PLUS_EXPR
));
3772 /* EXPR is ARG0 +- (CON +- VAR). */
3773 if (TREE_CODE (t
) == MINUS_EXPR
3774 && operand_equal_p (var
, arg0
, 0))
3776 /* If VAR and ARG0 cancel, return just CON or -CON. */
3777 if (code
== PLUS_EXPR
)
3778 return convert (TREE_TYPE (t
), con
);
3779 return fold (build1 (NEGATE_EXPR
, TREE_TYPE (t
),
3780 convert (TREE_TYPE (t
), con
)));
3783 t
= build (TREE_CODE (t
), type
,
3784 fold (build (code
, TREE_TYPE (t
), arg0
, con
)), var
);
3786 if (integer_zerop (TREE_OPERAND (t
, 0))
3787 && TREE_CODE (t
) == PLUS_EXPR
)
3788 return convert (TREE_TYPE (t
), var
);
3793 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
3794 if (TREE_CODE (arg1
) == REAL_CST
)
3796 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
3798 t1
= const_binop (code
, arg0
, arg1
, 0);
3799 if (t1
!= NULL_TREE
)
3801 /* The return value should always have
3802 the same type as the original expression. */
3803 TREE_TYPE (t1
) = TREE_TYPE (t
);
3809 if (! FLOAT_TYPE_P (type
))
3811 if (! wins
&& integer_zerop (arg0
))
3812 return build1 (NEGATE_EXPR
, type
, arg1
);
3813 if (integer_zerop (arg1
))
3814 return non_lvalue (convert (type
, arg0
));
3816 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
3817 about the case where C is a constant, just try one of the
3818 four possibilities. */
3820 if (TREE_CODE (arg0
) == MULT_EXPR
&& TREE_CODE (arg1
) == MULT_EXPR
3821 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3822 TREE_OPERAND (arg1
, 1), 0))
3823 return fold (build (MULT_EXPR
, type
,
3824 fold (build (MINUS_EXPR
, type
,
3825 TREE_OPERAND (arg0
, 0),
3826 TREE_OPERAND (arg1
, 0))),
3827 TREE_OPERAND (arg0
, 1)));
3829 /* Convert A - (-B) to A + B. */
3830 else if (TREE_CODE (arg1
) == NEGATE_EXPR
)
3831 return fold (build (PLUS_EXPR
, type
, arg0
, TREE_OPERAND (arg1
, 0)));
3833 else if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
3836 /* Except with IEEE floating point, 0-x equals -x. */
3837 if (! wins
&& real_zerop (arg0
))
3838 return build1 (NEGATE_EXPR
, type
, arg1
);
3839 /* Except with IEEE floating point, x-0 equals x. */
3840 if (real_zerop (arg1
))
3841 return non_lvalue (convert (type
, arg0
));
3844 /* Fold &x - &x. This can happen from &x.foo - &x.
3845 This is unsafe for certain floats even in non-IEEE formats.
3846 In IEEE, it is unsafe because it does wrong for NaNs.
3847 Also note that operand_equal_p is always false if an operand
3850 if ((! FLOAT_TYPE_P (type
) || flag_fast_math
)
3851 && operand_equal_p (arg0
, arg1
, 0))
3852 return convert (type
, integer_zero_node
);
3857 if (! FLOAT_TYPE_P (type
))
3859 if (integer_zerop (arg1
))
3860 return omit_one_operand (type
, arg1
, arg0
);
3861 if (integer_onep (arg1
))
3862 return non_lvalue (convert (type
, arg0
));
3864 /* ((A / C) * C) is A if the division is an
3865 EXACT_DIV_EXPR. Since C is normally a constant,
3866 just check for one of the four possibilities. */
3868 if (TREE_CODE (arg0
) == EXACT_DIV_EXPR
3869 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
3870 return TREE_OPERAND (arg0
, 0);
3872 /* (a * (1 << b)) is (a << b) */
3873 if (TREE_CODE (arg1
) == LSHIFT_EXPR
3874 && integer_onep (TREE_OPERAND (arg1
, 0)))
3875 return fold (build (LSHIFT_EXPR
, type
, arg0
,
3876 TREE_OPERAND (arg1
, 1)));
3877 if (TREE_CODE (arg0
) == LSHIFT_EXPR
3878 && integer_onep (TREE_OPERAND (arg0
, 0)))
3879 return fold (build (LSHIFT_EXPR
, type
, arg1
,
3880 TREE_OPERAND (arg0
, 1)));
3884 /* x*0 is 0, except for IEEE floating point. */
3885 if ((TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
3887 && real_zerop (arg1
))
3888 return omit_one_operand (type
, arg1
, arg0
);
3889 /* In IEEE floating point, x*1 is not equivalent to x for snans.
3890 However, ANSI says we can drop signals,
3891 so we can do this anyway. */
3892 if (real_onep (arg1
))
3893 return non_lvalue (convert (type
, arg0
));
3895 if (! wins
&& real_twop (arg1
))
3897 tree arg
= save_expr (arg0
);
3898 return build (PLUS_EXPR
, type
, arg
, arg
);
3905 if (integer_all_onesp (arg1
))
3906 return omit_one_operand (type
, arg1
, arg0
);
3907 if (integer_zerop (arg1
))
3908 return non_lvalue (convert (type
, arg0
));
3909 t1
= distribute_bit_expr (code
, type
, arg0
, arg1
);
3910 if (t1
!= NULL_TREE
)
3913 /* (a << C1) | (a >> C2) if A is unsigned and C1+C2 is the size of A
3914 is a rotate of A by C1 bits. */
3916 if ((TREE_CODE (arg0
) == RSHIFT_EXPR
3917 || TREE_CODE (arg0
) == LSHIFT_EXPR
)
3918 && (TREE_CODE (arg1
) == RSHIFT_EXPR
3919 || TREE_CODE (arg1
) == LSHIFT_EXPR
)
3920 && TREE_CODE (arg0
) != TREE_CODE (arg1
)
3921 && operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
,0), 0)
3922 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0)))
3923 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
3924 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
3925 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1)) == 0
3926 && TREE_INT_CST_HIGH (TREE_OPERAND (arg1
, 1)) == 0
3927 && ((TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1))
3928 + TREE_INT_CST_LOW (TREE_OPERAND (arg1
, 1)))
3929 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
3930 return build (LROTATE_EXPR
, type
, TREE_OPERAND (arg0
, 0),
3931 TREE_CODE (arg0
) == LSHIFT_EXPR
3932 ? TREE_OPERAND (arg0
, 1) : TREE_OPERAND (arg1
, 1));
3937 if (integer_zerop (arg1
))
3938 return non_lvalue (convert (type
, arg0
));
3939 if (integer_all_onesp (arg1
))
3940 return fold (build1 (BIT_NOT_EXPR
, type
, arg0
));
3945 if (integer_all_onesp (arg1
))
3946 return non_lvalue (convert (type
, arg0
));
3947 if (integer_zerop (arg1
))
3948 return omit_one_operand (type
, arg1
, arg0
);
3949 t1
= distribute_bit_expr (code
, type
, arg0
, arg1
);
3950 if (t1
!= NULL_TREE
)
3952 /* Simplify ((int)c & 0x377) into (int)c, if c is unsigned char. */
3953 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == NOP_EXPR
3954 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
3956 int prec
= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1
, 0)));
3957 if (prec
< BITS_PER_WORD
&& prec
< HOST_BITS_PER_WIDE_INT
3958 && (~TREE_INT_CST_LOW (arg0
)
3959 & (((HOST_WIDE_INT
) 1 << prec
) - 1)) == 0)
3960 return build1 (NOP_EXPR
, type
, TREE_OPERAND (arg1
, 0));
3962 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
3963 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
3965 int prec
= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
3966 if (prec
< BITS_PER_WORD
&& prec
< HOST_BITS_PER_WIDE_INT
3967 && (~TREE_INT_CST_LOW (arg1
)
3968 & (((HOST_WIDE_INT
) 1 << prec
) - 1)) == 0)
3969 return build1 (NOP_EXPR
, type
, TREE_OPERAND (arg0
, 0));
3973 case BIT_ANDTC_EXPR
:
3974 if (integer_all_onesp (arg0
))
3975 return non_lvalue (convert (type
, arg1
));
3976 if (integer_zerop (arg0
))
3977 return omit_one_operand (type
, arg0
, arg1
);
3978 if (TREE_CODE (arg1
) == INTEGER_CST
)
3980 arg1
= fold (build1 (BIT_NOT_EXPR
, type
, arg1
));
3981 code
= BIT_AND_EXPR
;
3987 /* In most cases, do nothing with a divide by zero. */
3988 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3989 #ifndef REAL_INFINITY
3990 if (TREE_CODE (arg1
) == REAL_CST
&& real_zerop (arg1
))
3993 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3995 /* In IEEE floating point, x/1 is not equivalent to x for snans.
3996 However, ANSI says we can drop signals, so we can do this anyway. */
3997 if (real_onep (arg1
))
3998 return non_lvalue (convert (type
, arg0
));
4000 /* If ARG1 is a constant, we can convert this to a multiply by the
4001 reciprocal. This does not have the same rounding properties,
4002 so only do this if -ffast-math. We can actually always safely
4003 do it if ARG1 is a power of two, but it's hard to tell if it is
4004 or not in a portable manner. */
4005 if (TREE_CODE (arg1
) == REAL_CST
&& flag_fast_math
4006 && 0 != (tem
= const_binop (code
, build_real (type
, dconst1
),
4008 return fold (build (MULT_EXPR
, type
, arg0
, tem
));
4012 case TRUNC_DIV_EXPR
:
4013 case ROUND_DIV_EXPR
:
4014 case FLOOR_DIV_EXPR
:
4016 case EXACT_DIV_EXPR
:
4017 if (integer_onep (arg1
))
4018 return non_lvalue (convert (type
, arg0
));
4019 if (integer_zerop (arg1
))
4022 /* If we have ((a / C1) / C2) where both division are the same type, try
4023 to simplify. First see if C1 * C2 overflows or not. */
4024 if (TREE_CODE (arg0
) == code
&& TREE_CODE (arg1
) == INTEGER_CST
4025 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
4029 new_divisor
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 1), arg1
, 0);
4030 tem
= const_binop (FLOOR_DIV_EXPR
, new_divisor
, arg1
, 0);
4032 if (TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)) == TREE_INT_CST_LOW (tem
)
4033 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1)) == TREE_INT_CST_HIGH (tem
))
4035 /* If no overflow, divide by C1*C2. */
4036 return fold (build (code
, type
, TREE_OPERAND (arg0
, 0), new_divisor
));
4040 /* Look for ((a * C1) / C3) or (((a * C1) + C2) / C3),
4041 where C1 % C3 == 0 or C3 % C1 == 0. We can simplify these
4042 expressions, which often appear in the offsets or sizes of
4043 objects with a varying size. Only deal with positive divisors
4044 and multiplicands. If C2 is negative, we must have C2 % C3 == 0.
4046 Look for NOPs and SAVE_EXPRs inside. */
4048 if (TREE_CODE (arg1
) == INTEGER_CST
4049 && tree_int_cst_sgn (arg1
) >= 0)
4051 int have_save_expr
= 0;
4052 tree c2
= integer_zero_node
;
4055 if (TREE_CODE (xarg0
) == SAVE_EXPR
)
4056 have_save_expr
= 1, xarg0
= TREE_OPERAND (xarg0
, 0);
4060 if (TREE_CODE (xarg0
) == PLUS_EXPR
4061 && TREE_CODE (TREE_OPERAND (xarg0
, 1)) == INTEGER_CST
)
4062 c2
= TREE_OPERAND (xarg0
, 1), xarg0
= TREE_OPERAND (xarg0
, 0);
4063 else if (TREE_CODE (xarg0
) == MINUS_EXPR
4064 && TREE_CODE (TREE_OPERAND (xarg0
, 1)) == INTEGER_CST
4065 /* If we are doing this computation unsigned, the negate
4067 && ! TREE_UNSIGNED (type
))
4069 c2
= fold (build1 (NEGATE_EXPR
, type
, TREE_OPERAND (xarg0
, 1)));
4070 xarg0
= TREE_OPERAND (xarg0
, 0);
4073 if (TREE_CODE (xarg0
) == SAVE_EXPR
)
4074 have_save_expr
= 1, xarg0
= TREE_OPERAND (xarg0
, 0);
4078 if (TREE_CODE (xarg0
) == MULT_EXPR
4079 && TREE_CODE (TREE_OPERAND (xarg0
, 1)) == INTEGER_CST
4080 && tree_int_cst_sgn (TREE_OPERAND (xarg0
, 1)) >= 0
4081 && (integer_zerop (const_binop (TRUNC_MOD_EXPR
,
4082 TREE_OPERAND (xarg0
, 1), arg1
, 1))
4083 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, arg1
,
4084 TREE_OPERAND (xarg0
, 1), 1)))
4085 && (tree_int_cst_sgn (c2
) >= 0
4086 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, c2
,
4089 tree outer_div
= integer_one_node
;
4090 tree c1
= TREE_OPERAND (xarg0
, 1);
4093 /* If C3 > C1, set them equal and do a divide by
4094 C3/C1 at the end of the operation. */
4095 if (tree_int_cst_lt (c1
, c3
))
4096 outer_div
= const_binop (code
, c3
, c1
, 0), c3
= c1
;
4098 /* The result is A * (C1/C3) + (C2/C3). */
4099 t
= fold (build (PLUS_EXPR
, type
,
4100 fold (build (MULT_EXPR
, type
,
4101 TREE_OPERAND (xarg0
, 0),
4102 const_binop (code
, c1
, c3
, 1))),
4103 const_binop (code
, c2
, c3
, 1)));
4105 if (! integer_onep (outer_div
))
4106 t
= fold (build (code
, type
, t
, convert (type
, outer_div
)));
4118 case FLOOR_MOD_EXPR
:
4119 case ROUND_MOD_EXPR
:
4120 case TRUNC_MOD_EXPR
:
4121 if (integer_onep (arg1
))
4122 return omit_one_operand (type
, integer_zero_node
, arg0
);
4123 if (integer_zerop (arg1
))
4126 /* Look for ((a * C1) % C3) or (((a * C1) + C2) % C3),
4127 where C1 % C3 == 0. Handle similarly to the division case,
4128 but don't bother with SAVE_EXPRs. */
4130 if (TREE_CODE (arg1
) == INTEGER_CST
4131 && ! integer_zerop (arg1
))
4133 tree c2
= integer_zero_node
;
4136 if (TREE_CODE (xarg0
) == PLUS_EXPR
4137 && TREE_CODE (TREE_OPERAND (xarg0
, 1)) == INTEGER_CST
)
4138 c2
= TREE_OPERAND (xarg0
, 1), xarg0
= TREE_OPERAND (xarg0
, 0);
4139 else if (TREE_CODE (xarg0
) == MINUS_EXPR
4140 && TREE_CODE (TREE_OPERAND (xarg0
, 1)) == INTEGER_CST
4141 && ! TREE_UNSIGNED (type
))
4143 c2
= fold (build1 (NEGATE_EXPR
, type
, TREE_OPERAND (xarg0
, 1)));
4144 xarg0
= TREE_OPERAND (xarg0
, 0);
4149 if (TREE_CODE (xarg0
) == MULT_EXPR
4150 && TREE_CODE (TREE_OPERAND (xarg0
, 1)) == INTEGER_CST
4151 && integer_zerop (const_binop (TRUNC_MOD_EXPR
,
4152 TREE_OPERAND (xarg0
, 1),
4154 && tree_int_cst_sgn (c2
) >= 0)
4155 /* The result is (C2%C3). */
4156 return omit_one_operand (type
, const_binop (code
, c2
, arg1
, 1),
4157 TREE_OPERAND (xarg0
, 0));
4166 if (integer_zerop (arg1
))
4167 return non_lvalue (convert (type
, arg0
));
4168 /* Since negative shift count is not well-defined,
4169 don't try to compute it in the compiler. */
4170 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
4172 /* Rewrite an LROTATE_EXPR by a constant into an
4173 RROTATE_EXPR by a new constant. */
4174 if (code
== LROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
)
4176 TREE_SET_CODE (t
, RROTATE_EXPR
);
4177 code
= RROTATE_EXPR
;
4178 TREE_OPERAND (t
, 1) = arg1
4181 convert (TREE_TYPE (arg1
),
4182 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type
)), 0)),
4184 if (tree_int_cst_sgn (arg1
) < 0)
4188 /* If we have a rotate of a bit operation with the rotate count and
4189 the second operand of the bit operation both constant,
4190 permute the two operations. */
4191 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
4192 && (TREE_CODE (arg0
) == BIT_AND_EXPR
4193 || TREE_CODE (arg0
) == BIT_ANDTC_EXPR
4194 || TREE_CODE (arg0
) == BIT_IOR_EXPR
4195 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
4196 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
4197 return fold (build (TREE_CODE (arg0
), type
,
4198 fold (build (code
, type
,
4199 TREE_OPERAND (arg0
, 0), arg1
)),
4200 fold (build (code
, type
,
4201 TREE_OPERAND (arg0
, 1), arg1
))));
4203 /* Two consecutive rotates adding up to the width of the mode can
4205 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
4206 && TREE_CODE (arg0
) == RROTATE_EXPR
4207 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
4208 && TREE_INT_CST_HIGH (arg1
) == 0
4209 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1)) == 0
4210 && ((TREE_INT_CST_LOW (arg1
)
4211 + TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)))
4212 == GET_MODE_BITSIZE (TYPE_MODE (type
))))
4213 return TREE_OPERAND (arg0
, 0);
4218 if (operand_equal_p (arg0
, arg1
, 0))
4220 if (INTEGRAL_TYPE_P (type
)
4221 && operand_equal_p (arg1
, TYPE_MIN_VALUE (type
), 1))
4222 return omit_one_operand (type
, arg1
, arg0
);
4226 if (operand_equal_p (arg0
, arg1
, 0))
4228 if (INTEGRAL_TYPE_P (type
)
4229 && operand_equal_p (arg1
, TYPE_MAX_VALUE (type
), 1))
4230 return omit_one_operand (type
, arg1
, arg0
);
4233 case TRUTH_NOT_EXPR
:
4234 /* Note that the operand of this must be an int
4235 and its values must be 0 or 1.
4236 ("true" is a fixed value perhaps depending on the language,
4237 but we don't handle values other than 1 correctly yet.) */
4238 tem
= invert_truthvalue (arg0
);
4239 /* Avoid infinite recursion. */
4240 if (TREE_CODE (tem
) == TRUTH_NOT_EXPR
)
4242 return convert (type
, tem
);
4244 case TRUTH_ANDIF_EXPR
:
4245 /* Note that the operands of this must be ints
4246 and their values must be 0 or 1.
4247 ("true" is a fixed value perhaps depending on the language.) */
4248 /* If first arg is constant zero, return it. */
4249 if (integer_zerop (arg0
))
4251 case TRUTH_AND_EXPR
:
4252 /* If either arg is constant true, drop it. */
4253 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
4254 return non_lvalue (arg1
);
4255 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
4256 return non_lvalue (arg0
);
4257 /* If second arg is constant zero, result is zero, but first arg
4258 must be evaluated. */
4259 if (integer_zerop (arg1
))
4260 return omit_one_operand (type
, arg1
, arg0
);
4263 /* We only do these simplifications if we are optimizing. */
4267 /* Check for things like (A || B) && (A || C). We can convert this
4268 to A || (B && C). Note that either operator can be any of the four
4269 truth and/or operations and the transformation will still be
4270 valid. Also note that we only care about order for the
4271 ANDIF and ORIF operators. */
4272 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
4273 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
4274 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
4275 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
4276 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
))
4278 tree a00
= TREE_OPERAND (arg0
, 0);
4279 tree a01
= TREE_OPERAND (arg0
, 1);
4280 tree a10
= TREE_OPERAND (arg1
, 0);
4281 tree a11
= TREE_OPERAND (arg1
, 1);
4282 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
4283 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
4284 && (code
== TRUTH_AND_EXPR
4285 || code
== TRUTH_OR_EXPR
));
4287 if (operand_equal_p (a00
, a10
, 0))
4288 return fold (build (TREE_CODE (arg0
), type
, a00
,
4289 fold (build (code
, type
, a01
, a11
))));
4290 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
4291 return fold (build (TREE_CODE (arg0
), type
, a00
,
4292 fold (build (code
, type
, a01
, a10
))));
4293 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
4294 return fold (build (TREE_CODE (arg0
), type
, a01
,
4295 fold (build (code
, type
, a00
, a11
))));
4297 /* This case if tricky because we must either have commutative
4298 operators or else A10 must not have side-effects. */
4300 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
4301 && operand_equal_p (a01
, a11
, 0))
4302 return fold (build (TREE_CODE (arg0
), type
,
4303 fold (build (code
, type
, a00
, a10
)),
4307 /* Check for the possibility of merging component references. If our
4308 lhs is another similar operation, try to merge its rhs with our
4309 rhs. Then try to merge our lhs and rhs. */
4310 if (TREE_CODE (arg0
) == code
4311 && 0 != (tem
= fold_truthop (code
, type
,
4312 TREE_OPERAND (arg0
, 1), arg1
)))
4313 return fold (build (code
, type
, TREE_OPERAND (arg0
, 0), tem
));
4315 if ((tem
= fold_truthop (code
, type
, arg0
, arg1
)) != 0)
4320 case TRUTH_ORIF_EXPR
:
4321 /* Note that the operands of this must be ints
4322 and their values must be 0 or true.
4323 ("true" is a fixed value perhaps depending on the language.) */
4324 /* If first arg is constant true, return it. */
4325 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
4328 /* If either arg is constant zero, drop it. */
4329 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
4330 return non_lvalue (arg1
);
4331 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
))
4332 return non_lvalue (arg0
);
4333 /* If second arg is constant true, result is true, but we must
4334 evaluate first arg. */
4335 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
4336 return omit_one_operand (type
, arg1
, arg0
);
4339 case TRUTH_XOR_EXPR
:
4340 /* If either arg is constant zero, drop it. */
4341 if (integer_zerop (arg0
))
4342 return non_lvalue (arg1
);
4343 if (integer_zerop (arg1
))
4344 return non_lvalue (arg0
);
4345 /* If either arg is constant true, this is a logical inversion. */
4346 if (integer_onep (arg0
))
4347 return non_lvalue (invert_truthvalue (arg1
));
4348 if (integer_onep (arg1
))
4349 return non_lvalue (invert_truthvalue (arg0
));
4358 /* If one arg is a constant integer, put it last. */
4359 if (TREE_CODE (arg0
) == INTEGER_CST
4360 && TREE_CODE (arg1
) != INTEGER_CST
)
4362 TREE_OPERAND (t
, 0) = arg1
;
4363 TREE_OPERAND (t
, 1) = arg0
;
4364 arg0
= TREE_OPERAND (t
, 0);
4365 arg1
= TREE_OPERAND (t
, 1);
4366 code
= swap_tree_comparison (code
);
4367 TREE_SET_CODE (t
, code
);
4370 /* Convert foo++ == CONST into ++foo == CONST + INCR.
4371 First, see if one arg is constant; find the constant arg
4372 and the other one. */
4374 tree constop
= 0, varop
;
4375 int constopnum
= -1;
4377 if (TREE_CONSTANT (arg1
))
4378 constopnum
= 1, constop
= arg1
, varop
= arg0
;
4379 if (TREE_CONSTANT (arg0
))
4380 constopnum
= 0, constop
= arg0
, varop
= arg1
;
4382 if (constop
&& TREE_CODE (varop
) == POSTINCREMENT_EXPR
)
4384 /* This optimization is invalid for ordered comparisons
4385 if CONST+INCR overflows or if foo+incr might overflow.
4386 This optimization is invalid for floating point due to rounding.
4387 For pointer types we assume overflow doesn't happen. */
4388 if (TREE_CODE (TREE_TYPE (varop
)) == POINTER_TYPE
4389 || (! FLOAT_TYPE_P (TREE_TYPE (varop
))
4390 && (code
== EQ_EXPR
|| code
== NE_EXPR
)))
4393 = fold (build (PLUS_EXPR
, TREE_TYPE (varop
),
4394 constop
, TREE_OPERAND (varop
, 1)));
4395 TREE_SET_CODE (varop
, PREINCREMENT_EXPR
);
4397 t
= build (code
, type
, TREE_OPERAND (t
, 0),
4398 TREE_OPERAND (t
, 1));
4399 TREE_OPERAND (t
, constopnum
) = newconst
;
4403 else if (constop
&& TREE_CODE (varop
) == POSTDECREMENT_EXPR
)
4405 if (TREE_CODE (TREE_TYPE (varop
)) == POINTER_TYPE
4406 || (! FLOAT_TYPE_P (TREE_TYPE (varop
))
4407 && (code
== EQ_EXPR
|| code
== NE_EXPR
)))
4410 = fold (build (MINUS_EXPR
, TREE_TYPE (varop
),
4411 constop
, TREE_OPERAND (varop
, 1)));
4412 TREE_SET_CODE (varop
, PREDECREMENT_EXPR
);
4413 t
= build (code
, type
, TREE_OPERAND (t
, 0),
4414 TREE_OPERAND (t
, 1));
4415 TREE_OPERAND (t
, constopnum
) = newconst
;
4421 /* Change X >= CST to X > (CST - 1) if CST is positive. */
4422 if (TREE_CODE (arg1
) == INTEGER_CST
4423 && TREE_CODE (arg0
) != INTEGER_CST
4424 && tree_int_cst_sgn (arg1
) > 0)
4426 switch (TREE_CODE (t
))
4430 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
4431 t
= build (code
, type
, TREE_OPERAND (t
, 0), arg1
);
4436 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
4437 t
= build (code
, type
, TREE_OPERAND (t
, 0), arg1
);
4442 /* If this is an EQ or NE comparison with zero and ARG0 is
4443 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
4444 two operations, but the latter can be done in one less insn
4445 on machines that have only two-operand insns or on which a
4446 constant cannot be the first operand. */
4447 if (integer_zerop (arg1
) && (code
== EQ_EXPR
|| code
== NE_EXPR
)
4448 && TREE_CODE (arg0
) == BIT_AND_EXPR
)
4450 if (TREE_CODE (TREE_OPERAND (arg0
, 0)) == LSHIFT_EXPR
4451 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0)))
4453 fold (build (code
, type
,
4454 build (BIT_AND_EXPR
, TREE_TYPE (arg0
),
4456 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
4457 TREE_OPERAND (arg0
, 1),
4458 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1)),
4459 convert (TREE_TYPE (arg0
),
4462 else if (TREE_CODE (TREE_OPERAND (arg0
, 1)) == LSHIFT_EXPR
4463 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0
, 1), 0)))
4465 fold (build (code
, type
,
4466 build (BIT_AND_EXPR
, TREE_TYPE (arg0
),
4468 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
4469 TREE_OPERAND (arg0
, 0),
4470 TREE_OPERAND (TREE_OPERAND (arg0
, 1), 1)),
4471 convert (TREE_TYPE (arg0
),
4476 /* If this is an NE or EQ comparison of zero against the result of a
4477 signed MOD operation whose second operand is a power of 2, make
4478 the MOD operation unsigned since it is simpler and equivalent. */
4479 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
4480 && integer_zerop (arg1
)
4481 && ! TREE_UNSIGNED (TREE_TYPE (arg0
))
4482 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
4483 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
4484 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
4485 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
4486 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
4488 tree newtype
= unsigned_type (TREE_TYPE (arg0
));
4489 tree newmod
= build (TREE_CODE (arg0
), newtype
,
4490 convert (newtype
, TREE_OPERAND (arg0
, 0)),
4491 convert (newtype
, TREE_OPERAND (arg0
, 1)));
4493 return build (code
, type
, newmod
, convert (newtype
, arg1
));
4496 /* If this is an NE comparison of zero with an AND of one, remove the
4497 comparison since the AND will give the correct value. */
4498 if (code
== NE_EXPR
&& integer_zerop (arg1
)
4499 && TREE_CODE (arg0
) == BIT_AND_EXPR
4500 && integer_onep (TREE_OPERAND (arg0
, 1)))
4501 return convert (type
, arg0
);
4503 /* If we have (A & C) == C where C is a power of 2, convert this into
4504 (A & C) != 0. Similarly for NE_EXPR. */
4505 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
4506 && TREE_CODE (arg0
) == BIT_AND_EXPR
4507 && integer_pow2p (TREE_OPERAND (arg0
, 1))
4508 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
4509 return build (code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
4510 arg0
, integer_zero_node
);
4512 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
4513 and similarly for >= into !=. */
4514 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
4515 && TREE_UNSIGNED (TREE_TYPE (arg0
))
4516 && TREE_CODE (arg1
) == LSHIFT_EXPR
4517 && integer_onep (TREE_OPERAND (arg1
, 0)))
4518 return build (code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
4519 build (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
4520 TREE_OPERAND (arg1
, 1)),
4521 convert (TREE_TYPE (arg0
), integer_zero_node
));
4523 else if ((code
== LT_EXPR
|| code
== GE_EXPR
)
4524 && TREE_UNSIGNED (TREE_TYPE (arg0
))
4525 && (TREE_CODE (arg1
) == NOP_EXPR
4526 || TREE_CODE (arg1
) == CONVERT_EXPR
)
4527 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
4528 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
4530 build (code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
4531 convert (TREE_TYPE (arg0
),
4532 build (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
4533 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1))),
4534 convert (TREE_TYPE (arg0
), integer_zero_node
));
4536 /* Simplify comparison of something with itself. (For IEEE
4537 floating-point, we can only do some of these simplifications.) */
4538 if (operand_equal_p (arg0
, arg1
, 0))
4545 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
4547 t
= build_int_2 (1, 0);
4548 TREE_TYPE (t
) = type
;
4552 TREE_SET_CODE (t
, code
);
4556 /* For NE, we can only do this simplification if integer. */
4557 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
4559 /* ... fall through ... */
4562 t
= build_int_2 (0, 0);
4563 TREE_TYPE (t
) = type
;
4568 /* An unsigned comparison against 0 can be simplified. */
4569 if (integer_zerop (arg1
)
4570 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
4571 || TREE_CODE (TREE_TYPE (arg1
)) == POINTER_TYPE
)
4572 && TREE_UNSIGNED (TREE_TYPE (arg1
)))
4574 switch (TREE_CODE (t
))
4578 TREE_SET_CODE (t
, NE_EXPR
);
4582 TREE_SET_CODE (t
, EQ_EXPR
);
4585 return omit_one_operand (type
,
4586 convert (type
, integer_one_node
),
4589 return omit_one_operand (type
,
4590 convert (type
, integer_zero_node
),
4595 /* If we are comparing an expression that just has comparisons
4596 of two integer values, arithmetic expressions of those comparisons,
4597 and constants, we can simplify it. There are only three cases
4598 to check: the two values can either be equal, the first can be
4599 greater, or the second can be greater. Fold the expression for
4600 those three values. Since each value must be 0 or 1, we have
4601 eight possibilities, each of which corresponds to the constant 0
4602 or 1 or one of the six possible comparisons.
4604 This handles common cases like (a > b) == 0 but also handles
4605 expressions like ((x > y) - (y > x)) > 0, which supposedly
4606 occur in macroized code. */
4608 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
4610 tree cval1
= 0, cval2
= 0;
4613 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
4614 /* Don't handle degenerate cases here; they should already
4615 have been handled anyway. */
4616 && cval1
!= 0 && cval2
!= 0
4617 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
4618 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
4619 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
4620 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
4621 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
4623 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
4624 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
4626 /* We can't just pass T to eval_subst in case cval1 or cval2
4627 was the same as ARG1. */
4630 = fold (build (code
, type
,
4631 eval_subst (arg0
, cval1
, maxval
, cval2
, minval
),
4634 = fold (build (code
, type
,
4635 eval_subst (arg0
, cval1
, maxval
, cval2
, maxval
),
4638 = fold (build (code
, type
,
4639 eval_subst (arg0
, cval1
, minval
, cval2
, maxval
),
4642 /* All three of these results should be 0 or 1. Confirm they
4643 are. Then use those values to select the proper code
4646 if ((integer_zerop (high_result
)
4647 || integer_onep (high_result
))
4648 && (integer_zerop (equal_result
)
4649 || integer_onep (equal_result
))
4650 && (integer_zerop (low_result
)
4651 || integer_onep (low_result
)))
4653 /* Make a 3-bit mask with the high-order bit being the
4654 value for `>', the next for '=', and the low for '<'. */
4655 switch ((integer_onep (high_result
) * 4)
4656 + (integer_onep (equal_result
) * 2)
4657 + integer_onep (low_result
))
4661 return omit_one_operand (type
, integer_zero_node
, arg0
);
4682 return omit_one_operand (type
, integer_one_node
, arg0
);
4685 t
= build (code
, type
, cval1
, cval2
);
4687 return save_expr (t
);
4694 /* If this is a comparison of a field, we may be able to simplify it. */
4695 if ((TREE_CODE (arg0
) == COMPONENT_REF
4696 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
4697 && (code
== EQ_EXPR
|| code
== NE_EXPR
)
4698 /* Handle the constant case even without -O
4699 to make sure the warnings are given. */
4700 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
4702 t1
= optimize_bit_field_compare (code
, type
, arg0
, arg1
);
4706 /* If this is a comparison of complex values and either or both
4707 sizes are a COMPLEX_EXPR, it is best to split up the comparisons
4708 and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR. This
4709 may prevent needless evaluations. */
4710 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
4711 && TREE_CODE (TREE_TYPE (arg0
)) == COMPLEX_TYPE
4712 && (TREE_CODE (arg0
) == COMPLEX_EXPR
4713 || TREE_CODE (arg1
) == COMPLEX_EXPR
))
4715 tree subtype
= TREE_TYPE (TREE_TYPE (arg0
));
4716 tree real0
= fold (build1 (REALPART_EXPR
, subtype
, arg0
));
4717 tree imag0
= fold (build1 (IMAGPART_EXPR
, subtype
, arg0
));
4718 tree real1
= fold (build1 (REALPART_EXPR
, subtype
, arg1
));
4719 tree imag1
= fold (build1 (IMAGPART_EXPR
, subtype
, arg1
));
4721 return fold (build ((code
== EQ_EXPR
? TRUTH_ANDIF_EXPR
4724 fold (build (code
, type
, real0
, real1
)),
4725 fold (build (code
, type
, imag0
, imag1
))));
4728 /* From here on, the only cases we handle are when the result is
4729 known to be a constant.
4731 To compute GT, swap the arguments and do LT.
4732 To compute GE, do LT and invert the result.
4733 To compute LE, swap the arguments, do LT and invert the result.
4734 To compute NE, do EQ and invert the result.
4736 Therefore, the code below must handle only EQ and LT. */
4738 if (code
== LE_EXPR
|| code
== GT_EXPR
)
4740 tem
= arg0
, arg0
= arg1
, arg1
= tem
;
4741 code
= swap_tree_comparison (code
);
4744 /* Note that it is safe to invert for real values here because we
4745 will check below in the one case that it matters. */
4748 if (code
== NE_EXPR
|| code
== GE_EXPR
)
4751 code
= invert_tree_comparison (code
);
4754 /* Compute a result for LT or EQ if args permit;
4755 otherwise return T. */
4756 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
4758 if (code
== EQ_EXPR
)
4759 t1
= build_int_2 ((TREE_INT_CST_LOW (arg0
)
4760 == TREE_INT_CST_LOW (arg1
))
4761 && (TREE_INT_CST_HIGH (arg0
)
4762 == TREE_INT_CST_HIGH (arg1
)),
4765 t1
= build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0
))
4766 ? INT_CST_LT_UNSIGNED (arg0
, arg1
)
4767 : INT_CST_LT (arg0
, arg1
)),
4771 /* Assume a nonexplicit constant cannot equal an explicit one,
4772 since such code would be undefined anyway.
4773 Exception: on sysvr4, using #pragma weak,
4774 a label can come out as 0. */
4775 else if (TREE_CODE (arg1
) == INTEGER_CST
4776 && !integer_zerop (arg1
)
4777 && TREE_CONSTANT (arg0
)
4778 && TREE_CODE (arg0
) == ADDR_EXPR
4780 t1
= build_int_2 (0, 0);
4782 /* Two real constants can be compared explicitly. */
4783 else if (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
4785 /* If either operand is a NaN, the result is false with two
4786 exceptions: First, an NE_EXPR is true on NaNs, but that case
4787 is already handled correctly since we will be inverting the
4788 result for NE_EXPR. Second, if we had inverted a LE_EXPR
4789 or a GE_EXPR into a LT_EXPR, we must return true so that it
4790 will be inverted into false. */
4792 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0
))
4793 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
)))
4794 t1
= build_int_2 (invert
&& code
== LT_EXPR
, 0);
4796 else if (code
== EQ_EXPR
)
4797 t1
= build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0
),
4798 TREE_REAL_CST (arg1
)),
4801 t1
= build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0
),
4802 TREE_REAL_CST (arg1
)),
4806 if (t1
== NULL_TREE
)
4810 TREE_INT_CST_LOW (t1
) ^= 1;
4812 TREE_TYPE (t1
) = type
;
4816 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
4817 so all simple results must be passed through pedantic_non_lvalue. */
4818 if (TREE_CODE (arg0
) == INTEGER_CST
)
4819 return pedantic_non_lvalue
4820 (TREE_OPERAND (t
, (integer_zerop (arg0
) ? 2 : 1)));
4821 else if (operand_equal_p (arg1
, TREE_OPERAND (expr
, 2), 0))
4822 return pedantic_omit_one_operand (type
, arg1
, arg0
);
4824 /* If the second operand is zero, invert the comparison and swap
4825 the second and third operands. Likewise if the second operand
4826 is constant and the third is not or if the third operand is
4827 equivalent to the first operand of the comparison. */
4829 if (integer_zerop (arg1
)
4830 || (TREE_CONSTANT (arg1
) && ! TREE_CONSTANT (TREE_OPERAND (t
, 2)))
4831 || (TREE_CODE_CLASS (TREE_CODE (arg0
)) == '<'
4832 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
4833 TREE_OPERAND (t
, 2),
4834 TREE_OPERAND (arg0
, 1))))
4836 /* See if this can be inverted. If it can't, possibly because
4837 it was a floating-point inequality comparison, don't do
4839 tem
= invert_truthvalue (arg0
);
4841 if (TREE_CODE (tem
) != TRUTH_NOT_EXPR
)
4843 t
= build (code
, type
, tem
,
4844 TREE_OPERAND (t
, 2), TREE_OPERAND (t
, 1));
4846 arg1
= TREE_OPERAND (t
, 2);
4851 /* If we have A op B ? A : C, we may be able to convert this to a
4852 simpler expression, depending on the operation and the values
4853 of B and C. IEEE floating point prevents this though,
4854 because A or B might be -0.0 or a NaN. */
4856 if (TREE_CODE_CLASS (TREE_CODE (arg0
)) == '<'
4857 && (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
4858 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0
, 0)))
4860 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
4861 arg1
, TREE_OPERAND (arg0
, 1)))
4863 tree arg2
= TREE_OPERAND (t
, 2);
4864 enum tree_code comp_code
= TREE_CODE (arg0
);
4868 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
4869 depending on the comparison operation. */
4870 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0
, 1)))
4871 ? real_zerop (TREE_OPERAND (arg0
, 1))
4872 : integer_zerop (TREE_OPERAND (arg0
, 1)))
4873 && TREE_CODE (arg2
) == NEGATE_EXPR
4874 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4878 return pedantic_non_lvalue
4879 (fold (build1 (NEGATE_EXPR
, type
, arg1
)));
4881 return pedantic_non_lvalue (convert (type
, arg1
));
4884 return pedantic_non_lvalue
4885 (convert (type
, fold (build1 (ABS_EXPR
,
4886 TREE_TYPE (arg1
), arg1
))));
4889 return pedantic_non_lvalue
4890 (fold (build1 (NEGATE_EXPR
, type
,
4892 fold (build1 (ABS_EXPR
,
4897 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
4900 if (integer_zerop (TREE_OPERAND (arg0
, 1)) && integer_zerop (arg2
))
4902 if (comp_code
== NE_EXPR
)
4903 return pedantic_non_lvalue (convert (type
, arg1
));
4904 else if (comp_code
== EQ_EXPR
)
4905 return pedantic_non_lvalue (convert (type
, integer_zero_node
));
4908 /* If this is A op B ? A : B, this is either A, B, min (A, B),
4909 or max (A, B), depending on the operation. */
4911 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 1),
4912 arg2
, TREE_OPERAND (arg0
, 0)))
4914 tree comp_op0
= TREE_OPERAND (arg0
, 0);
4915 tree comp_op1
= TREE_OPERAND (arg0
, 1);
4916 tree comp_type
= TREE_TYPE (comp_op0
);
4921 return pedantic_non_lvalue (convert (type
, arg2
));
4923 return pedantic_non_lvalue (convert (type
, arg1
));
4926 return pedantic_non_lvalue
4927 (convert (type
, (fold (build (MIN_EXPR
, comp_type
,
4928 comp_op0
, comp_op1
)))));
4931 return pedantic_non_lvalue
4932 (convert (type
, fold (build (MAX_EXPR
, comp_type
,
4933 comp_op0
, comp_op1
))));
4937 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4938 we might still be able to simplify this. For example,
4939 if C1 is one less or one more than C2, this might have started
4940 out as a MIN or MAX and been transformed by this function.
4941 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4943 if (INTEGRAL_TYPE_P (type
)
4944 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
4945 && TREE_CODE (arg2
) == INTEGER_CST
)
4949 /* We can replace A with C1 in this case. */
4950 arg1
= convert (type
, TREE_OPERAND (arg0
, 1));
4951 t
= build (code
, type
, TREE_OPERAND (t
, 0), arg1
,
4952 TREE_OPERAND (t
, 2));
4956 /* If C1 is C2 + 1, this is min(A, C2). */
4957 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
), 1)
4958 && operand_equal_p (TREE_OPERAND (arg0
, 1),
4959 const_binop (PLUS_EXPR
, arg2
,
4960 integer_one_node
, 0), 1))
4961 return pedantic_non_lvalue
4962 (fold (build (MIN_EXPR
, type
, arg1
, arg2
)));
4966 /* If C1 is C2 - 1, this is min(A, C2). */
4967 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
), 1)
4968 && operand_equal_p (TREE_OPERAND (arg0
, 1),
4969 const_binop (MINUS_EXPR
, arg2
,
4970 integer_one_node
, 0), 1))
4971 return pedantic_non_lvalue
4972 (fold (build (MIN_EXPR
, type
, arg1
, arg2
)));
4976 /* If C1 is C2 - 1, this is max(A, C2). */
4977 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
), 1)
4978 && operand_equal_p (TREE_OPERAND (arg0
, 1),
4979 const_binop (MINUS_EXPR
, arg2
,
4980 integer_one_node
, 0), 1))
4981 return pedantic_non_lvalue
4982 (fold (build (MAX_EXPR
, type
, arg1
, arg2
)));
4986 /* If C1 is C2 + 1, this is max(A, C2). */
4987 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
), 1)
4988 && operand_equal_p (TREE_OPERAND (arg0
, 1),
4989 const_binop (PLUS_EXPR
, arg2
,
4990 integer_one_node
, 0), 1))
4991 return pedantic_non_lvalue
4992 (fold (build (MAX_EXPR
, type
, arg1
, arg2
)));
4997 /* If the second operand is simpler than the third, swap them
4998 since that produces better jump optimization results. */
4999 if ((TREE_CONSTANT (arg1
) || TREE_CODE_CLASS (TREE_CODE (arg1
)) == 'd'
5000 || TREE_CODE (arg1
) == SAVE_EXPR
)
5001 && ! (TREE_CONSTANT (TREE_OPERAND (t
, 2))
5002 || TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t
, 2))) == 'd'
5003 || TREE_CODE (TREE_OPERAND (t
, 2)) == SAVE_EXPR
))
5005 /* See if this can be inverted. If it can't, possibly because
5006 it was a floating-point inequality comparison, don't do
5008 tem
= invert_truthvalue (arg0
);
5010 if (TREE_CODE (tem
) != TRUTH_NOT_EXPR
)
5012 t
= build (code
, type
, tem
,
5013 TREE_OPERAND (t
, 2), TREE_OPERAND (t
, 1));
5015 arg1
= TREE_OPERAND (t
, 2);
5020 /* Convert A ? 1 : 0 to simply A. */
5021 if (integer_onep (TREE_OPERAND (t
, 1))
5022 && integer_zerop (TREE_OPERAND (t
, 2))
5023 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
5024 call to fold will try to move the conversion inside
5025 a COND, which will recurse. In that case, the COND_EXPR
5026 is probably the best choice, so leave it alone. */
5027 && type
== TREE_TYPE (arg0
))
5028 return pedantic_non_lvalue (arg0
);
5030 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
5031 operation is simply A & 2. */
5033 if (integer_zerop (TREE_OPERAND (t
, 2))
5034 && TREE_CODE (arg0
) == NE_EXPR
5035 && integer_zerop (TREE_OPERAND (arg0
, 1))
5036 && integer_pow2p (arg1
)
5037 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
5038 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
5040 return pedantic_non_lvalue (convert (type
, TREE_OPERAND (arg0
, 0)));
5045 /* When pedantic, a compound expression can be neither an lvalue
5046 nor an integer constant expression. */
5047 if (TREE_SIDE_EFFECTS (arg0
) || pedantic
)
5049 /* Don't let (0, 0) be null pointer constant. */
5050 if (integer_zerop (arg1
))
5051 return non_lvalue (arg1
);
5056 return build_complex (arg0
, arg1
);
5060 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
5062 else if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
5063 return omit_one_operand (type
, TREE_OPERAND (arg0
, 0),
5064 TREE_OPERAND (arg0
, 1));
5065 else if (TREE_CODE (arg0
) == COMPLEX_CST
)
5066 return TREE_REALPART (arg0
);
5067 else if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
5068 return fold (build (TREE_CODE (arg0
), type
,
5069 fold (build1 (REALPART_EXPR
, type
,
5070 TREE_OPERAND (arg0
, 0))),
5071 fold (build1 (REALPART_EXPR
,
5072 type
, TREE_OPERAND (arg0
, 1)))));
5076 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
5077 return convert (type
, integer_zero_node
);
5078 else if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
5079 return omit_one_operand (type
, TREE_OPERAND (arg0
, 1),
5080 TREE_OPERAND (arg0
, 0));
5081 else if (TREE_CODE (arg0
) == COMPLEX_CST
)
5082 return TREE_IMAGPART (arg0
);
5083 else if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
5084 return fold (build (TREE_CODE (arg0
), type
,
5085 fold (build1 (IMAGPART_EXPR
, type
,
5086 TREE_OPERAND (arg0
, 0))),
5087 fold (build1 (IMAGPART_EXPR
, type
,
5088 TREE_OPERAND (arg0
, 1)))));
5091 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
5093 case CLEANUP_POINT_EXPR
:
5094 if (! TREE_SIDE_EFFECTS (arg0
))
5095 return convert (type
, arg0
);
5098 enum tree_code code0
= TREE_CODE (arg0
);
5099 int kind0
= TREE_CODE_CLASS (code0
);
5100 tree arg00
= TREE_OPERAND (arg0
, 0);
5103 if (kind0
== '1' || code0
== TRUTH_NOT_EXPR
)
5104 return fold (build1 (code0
, type
,
5105 fold (build1 (CLEANUP_POINT_EXPR
,
5106 TREE_TYPE (arg00
), arg00
))));
5108 if (kind0
== '<' || kind0
== '2'
5109 || code0
== TRUTH_ANDIF_EXPR
|| code0
== TRUTH_ORIF_EXPR
5110 || code0
== TRUTH_AND_EXPR
|| code0
== TRUTH_OR_EXPR
5111 || code0
== TRUTH_XOR_EXPR
)
5113 arg01
= TREE_OPERAND (arg0
, 1);
5115 if (! TREE_SIDE_EFFECTS (arg00
))
5116 return fold (build (code0
, type
, arg00
,
5117 fold (build1 (CLEANUP_POINT_EXPR
,
5118 TREE_TYPE (arg01
), arg01
))));
5120 if (! TREE_SIDE_EFFECTS (arg01
))
5121 return fold (build (code0
, type
,
5122 fold (build1 (CLEANUP_POINT_EXPR
,
5123 TREE_TYPE (arg00
), arg00
)),
5132 } /* switch (code) */