1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987, 88, 92, 93, 94, 1995 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
21 /*@@ This file should be rewritten to use an arbitrary precision
22 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
23 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
24 @@ The routines that translate from the ap rep should
25 @@ warn if precision et. al. is lost.
26 @@ This would also make life easier when this technology is used
27 @@ for cross-compilers. */
30 /* The entry points in this file are fold, size_int and size_binop.
32 fold takes a tree as argument and returns a simplified tree.
34 size_binop takes a tree code for an arithmetic operation
35 and two operands that are trees, and produces a tree for the
36 result, assuming the type comes from `sizetype'.
38 size_int takes an integer value, and creates a tree constant
39 with type from `sizetype'. */
47 /* Handle floating overflow for `const_binop'. */
48 static jmp_buf float_error
;
50 static void encode
PROTO((HOST_WIDE_INT
*, HOST_WIDE_INT
, HOST_WIDE_INT
));
51 static void decode
PROTO((HOST_WIDE_INT
*, HOST_WIDE_INT
*, HOST_WIDE_INT
*));
52 int div_and_round_double
PROTO((enum tree_code
, int, HOST_WIDE_INT
,
53 HOST_WIDE_INT
, HOST_WIDE_INT
,
54 HOST_WIDE_INT
, HOST_WIDE_INT
*,
55 HOST_WIDE_INT
*, HOST_WIDE_INT
*,
57 static int split_tree
PROTO((tree
, enum tree_code
, tree
*, tree
*, int *));
58 static tree const_binop
PROTO((enum tree_code
, tree
, tree
, int));
59 static tree fold_convert
PROTO((tree
, tree
));
60 static enum tree_code invert_tree_comparison
PROTO((enum tree_code
));
61 static enum tree_code swap_tree_comparison
PROTO((enum tree_code
));
62 static int truth_value_p
PROTO((enum tree_code
));
63 static int operand_equal_for_comparison_p
PROTO((tree
, tree
, tree
));
64 static int twoval_comparison_p
PROTO((tree
, tree
*, tree
*, int *));
65 static tree eval_subst
PROTO((tree
, tree
, tree
, tree
, tree
));
66 static tree omit_one_operand
PROTO((tree
, tree
, tree
));
67 static tree pedantic_omit_one_operand
PROTO((tree
, tree
, tree
));
68 static tree distribute_bit_expr
PROTO((enum tree_code
, tree
, tree
, tree
));
69 static tree make_bit_field_ref
PROTO((tree
, tree
, int, int, int));
70 static tree optimize_bit_field_compare
PROTO((enum tree_code
, tree
,
72 static tree decode_field_reference
PROTO((tree
, int *, int *,
73 enum machine_mode
*, int *,
75 static int all_ones_mask_p
PROTO((tree
, int));
76 static int simple_operand_p
PROTO((tree
));
77 static tree range_test
PROTO((enum tree_code
, tree
, enum tree_code
,
78 enum tree_code
, tree
, tree
, tree
));
79 static tree unextend
PROTO((tree
, int, int));
80 static tree fold_truthop
PROTO((enum tree_code
, tree
, tree
, tree
));
81 static tree strip_compound_expr
PROTO((tree
, tree
));
87 /* Yield nonzero if a signed left shift of A by B bits overflows. */
88 #define left_shift_overflows(a, b) ((a) != ((a) << (b)) >> (b))
90 /* Suppose A1 + B1 = SUM1, using 2's complement arithmetic ignoring overflow.
91 Suppose A, B and SUM have the same respective signs as A1, B1, and SUM1.
92 Then this yields nonzero if overflow occurred during the addition.
93 Overflow occurs if A and B have the same sign, but A and SUM differ in sign.
94 Use `^' to test whether signs differ, and `< 0' to isolate the sign. */
95 #define overflow_sum_sign(a, b, sum) ((~((a) ^ (b)) & ((a) ^ (sum))) < 0)
97 /* To do constant folding on INTEGER_CST nodes requires two-word arithmetic.
98 We do that by representing the two-word integer in 4 words, with only
99 HOST_BITS_PER_WIDE_INT/2 bits stored in each word, as a positive number. */
102 ((x) & (((unsigned HOST_WIDE_INT) 1 << (HOST_BITS_PER_WIDE_INT/2)) - 1))
103 #define HIGHPART(x) \
104 ((unsigned HOST_WIDE_INT) (x) >> HOST_BITS_PER_WIDE_INT/2)
105 #define BASE ((unsigned HOST_WIDE_INT) 1 << HOST_BITS_PER_WIDE_INT/2)
107 /* Unpack a two-word integer into 4 words.
108 LOW and HI are the integer, as two `HOST_WIDE_INT' pieces.
109 WORDS points to the array of HOST_WIDE_INTs. */
112 encode (words
, low
, hi
)
113 HOST_WIDE_INT
*words
;
114 HOST_WIDE_INT low
, hi
;
116 words
[0] = LOWPART (low
);
117 words
[1] = HIGHPART (low
);
118 words
[2] = LOWPART (hi
);
119 words
[3] = HIGHPART (hi
);
122 /* Pack an array of 4 words into a two-word integer.
123 WORDS points to the array of words.
124 The integer is stored into *LOW and *HI as two `HOST_WIDE_INT' pieces. */
127 decode (words
, low
, hi
)
128 HOST_WIDE_INT
*words
;
129 HOST_WIDE_INT
*low
, *hi
;
131 *low
= words
[0] | words
[1] * BASE
;
132 *hi
= words
[2] | words
[3] * BASE
;
135 /* Make the integer constant T valid for its type
136 by setting to 0 or 1 all the bits in the constant
137 that don't belong in the type.
138 Yield 1 if a signed overflow occurs, 0 otherwise.
139 If OVERFLOW is nonzero, a signed overflow has already occurred
140 in calculating T, so propagate it.
142 Make the real constant T valid for its type by calling CHECK_FLOAT_VALUE,
146 force_fit_type (t
, overflow
)
150 HOST_WIDE_INT low
, high
;
153 if (TREE_CODE (t
) == REAL_CST
)
155 #ifdef CHECK_FLOAT_VALUE
156 CHECK_FLOAT_VALUE (TYPE_MODE (TREE_TYPE (t
)), TREE_REAL_CST (t
),
162 else if (TREE_CODE (t
) != INTEGER_CST
)
165 low
= TREE_INT_CST_LOW (t
);
166 high
= TREE_INT_CST_HIGH (t
);
168 if (TREE_CODE (TREE_TYPE (t
)) == POINTER_TYPE
)
171 prec
= TYPE_PRECISION (TREE_TYPE (t
));
173 /* First clear all bits that are beyond the type's precision. */
175 if (prec
== 2 * HOST_BITS_PER_WIDE_INT
)
177 else if (prec
> HOST_BITS_PER_WIDE_INT
)
179 TREE_INT_CST_HIGH (t
)
180 &= ~((HOST_WIDE_INT
) (-1) << (prec
- HOST_BITS_PER_WIDE_INT
));
184 TREE_INT_CST_HIGH (t
) = 0;
185 if (prec
< HOST_BITS_PER_WIDE_INT
)
186 TREE_INT_CST_LOW (t
) &= ~((HOST_WIDE_INT
) (-1) << prec
);
189 /* Unsigned types do not suffer sign extension or overflow. */
190 if (TREE_UNSIGNED (TREE_TYPE (t
)))
193 /* If the value's sign bit is set, extend the sign. */
194 if (prec
!= 2 * HOST_BITS_PER_WIDE_INT
195 && (prec
> HOST_BITS_PER_WIDE_INT
196 ? (TREE_INT_CST_HIGH (t
)
197 & ((HOST_WIDE_INT
) 1 << (prec
- HOST_BITS_PER_WIDE_INT
- 1)))
198 : TREE_INT_CST_LOW (t
) & ((HOST_WIDE_INT
) 1 << (prec
- 1))))
200 /* Value is negative:
201 set to 1 all the bits that are outside this type's precision. */
202 if (prec
> HOST_BITS_PER_WIDE_INT
)
204 TREE_INT_CST_HIGH (t
)
205 |= ((HOST_WIDE_INT
) (-1) << (prec
- HOST_BITS_PER_WIDE_INT
));
209 TREE_INT_CST_HIGH (t
) = -1;
210 if (prec
< HOST_BITS_PER_WIDE_INT
)
211 TREE_INT_CST_LOW (t
) |= ((HOST_WIDE_INT
) (-1) << prec
);
215 /* Yield nonzero if signed overflow occurred. */
217 ((overflow
| (low
^ TREE_INT_CST_LOW (t
)) | (high
^ TREE_INT_CST_HIGH (t
)))
221 /* Add two doubleword integers with doubleword result.
222 Each argument is given as two `HOST_WIDE_INT' pieces.
223 One argument is L1 and H1; the other, L2 and H2.
224 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
227 add_double (l1
, h1
, l2
, h2
, lv
, hv
)
228 HOST_WIDE_INT l1
, h1
, l2
, h2
;
229 HOST_WIDE_INT
*lv
, *hv
;
234 h
= h1
+ h2
+ ((unsigned HOST_WIDE_INT
) l
< l1
);
238 return overflow_sum_sign (h1
, h2
, h
);
241 /* Negate a doubleword integer with doubleword result.
242 Return nonzero if the operation overflows, assuming it's signed.
243 The argument is given as two `HOST_WIDE_INT' pieces in L1 and H1.
244 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
247 neg_double (l1
, h1
, lv
, hv
)
248 HOST_WIDE_INT l1
, h1
;
249 HOST_WIDE_INT
*lv
, *hv
;
255 return (*hv
& h1
) < 0;
265 /* Multiply two doubleword integers with doubleword result.
266 Return nonzero if the operation overflows, assuming it's signed.
267 Each argument is given as two `HOST_WIDE_INT' pieces.
268 One argument is L1 and H1; the other, L2 and H2.
269 The value is stored as two `HOST_WIDE_INT' pieces in *LV and *HV. */
272 mul_double (l1
, h1
, l2
, h2
, lv
, hv
)
273 HOST_WIDE_INT l1
, h1
, l2
, h2
;
274 HOST_WIDE_INT
*lv
, *hv
;
276 HOST_WIDE_INT arg1
[4];
277 HOST_WIDE_INT arg2
[4];
278 HOST_WIDE_INT prod
[4 * 2];
279 register unsigned HOST_WIDE_INT carry
;
280 register int i
, j
, k
;
281 HOST_WIDE_INT toplow
, tophigh
, neglow
, neghigh
;
283 encode (arg1
, l1
, h1
);
284 encode (arg2
, l2
, h2
);
286 bzero ((char *) prod
, sizeof prod
);
288 for (i
= 0; i
< 4; i
++)
291 for (j
= 0; j
< 4; j
++)
294 /* This product is <= 0xFFFE0001, the sum <= 0xFFFF0000. */
295 carry
+= arg1
[i
] * arg2
[j
];
296 /* Since prod[p] < 0xFFFF, this sum <= 0xFFFFFFFF. */
298 prod
[k
] = LOWPART (carry
);
299 carry
= HIGHPART (carry
);
304 decode (prod
, lv
, hv
); /* This ignores prod[4] through prod[4*2-1] */
306 /* Check for overflow by calculating the top half of the answer in full;
307 it should agree with the low half's sign bit. */
308 decode (prod
+4, &toplow
, &tophigh
);
311 neg_double (l2
, h2
, &neglow
, &neghigh
);
312 add_double (neglow
, neghigh
, toplow
, tophigh
, &toplow
, &tophigh
);
316 neg_double (l1
, h1
, &neglow
, &neghigh
);
317 add_double (neglow
, neghigh
, toplow
, tophigh
, &toplow
, &tophigh
);
319 return (*hv
< 0 ? ~(toplow
& tophigh
) : toplow
| tophigh
) != 0;
322 /* Shift the doubleword integer in L1, H1 left by COUNT places
323 keeping only PREC bits of result.
324 Shift right if COUNT is negative.
325 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
326 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
329 lshift_double (l1
, h1
, count
, prec
, lv
, hv
, arith
)
330 HOST_WIDE_INT l1
, h1
, count
;
332 HOST_WIDE_INT
*lv
, *hv
;
337 rshift_double (l1
, h1
, - count
, prec
, lv
, hv
, arith
);
341 #ifdef SHIFT_COUNT_TRUNCATED
342 if (SHIFT_COUNT_TRUNCATED
)
346 if (count
>= HOST_BITS_PER_WIDE_INT
)
348 *hv
= (unsigned HOST_WIDE_INT
) l1
<< count
- HOST_BITS_PER_WIDE_INT
;
353 *hv
= (((unsigned HOST_WIDE_INT
) h1
<< count
)
354 | ((unsigned HOST_WIDE_INT
) l1
>> HOST_BITS_PER_WIDE_INT
- count
- 1 >> 1));
355 *lv
= (unsigned HOST_WIDE_INT
) l1
<< count
;
359 /* Shift the doubleword integer in L1, H1 right by COUNT places
360 keeping only PREC bits of result. COUNT must be positive.
361 ARITH nonzero specifies arithmetic shifting; otherwise use logical shift.
362 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
365 rshift_double (l1
, h1
, count
, prec
, lv
, hv
, arith
)
366 HOST_WIDE_INT l1
, h1
, count
;
368 HOST_WIDE_INT
*lv
, *hv
;
371 unsigned HOST_WIDE_INT signmask
;
373 ? -((unsigned HOST_WIDE_INT
) h1
>> (HOST_BITS_PER_WIDE_INT
- 1))
376 #ifdef SHIFT_COUNT_TRUNCATED
377 if (SHIFT_COUNT_TRUNCATED
)
381 if (count
>= HOST_BITS_PER_WIDE_INT
)
384 *lv
= ((signmask
<< 2 * HOST_BITS_PER_WIDE_INT
- count
- 1 << 1)
385 | ((unsigned HOST_WIDE_INT
) h1
>> count
- HOST_BITS_PER_WIDE_INT
));
389 *lv
= (((unsigned HOST_WIDE_INT
) l1
>> count
)
390 | ((unsigned HOST_WIDE_INT
) h1
<< HOST_BITS_PER_WIDE_INT
- count
- 1 << 1));
391 *hv
= ((signmask
<< HOST_BITS_PER_WIDE_INT
- count
)
392 | ((unsigned HOST_WIDE_INT
) h1
>> count
));
396 /* Rotate the doubleword integer in L1, H1 left by COUNT places
397 keeping only PREC bits of result.
398 Rotate right if COUNT is negative.
399 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
402 lrotate_double (l1
, h1
, count
, prec
, lv
, hv
)
403 HOST_WIDE_INT l1
, h1
, count
;
405 HOST_WIDE_INT
*lv
, *hv
;
407 HOST_WIDE_INT s1l
, s1h
, s2l
, s2h
;
413 lshift_double (l1
, h1
, count
, prec
, &s1l
, &s1h
, 0);
414 rshift_double (l1
, h1
, prec
- count
, prec
, &s2l
, &s2h
, 0);
419 /* Rotate the doubleword integer in L1, H1 left by COUNT places
420 keeping only PREC bits of result. COUNT must be positive.
421 Store the value as two `HOST_WIDE_INT' pieces in *LV and *HV. */
424 rrotate_double (l1
, h1
, count
, prec
, lv
, hv
)
425 HOST_WIDE_INT l1
, h1
, count
;
427 HOST_WIDE_INT
*lv
, *hv
;
429 HOST_WIDE_INT s1l
, s1h
, s2l
, s2h
;
435 rshift_double (l1
, h1
, count
, prec
, &s1l
, &s1h
, 0);
436 lshift_double (l1
, h1
, prec
- count
, prec
, &s2l
, &s2h
, 0);
441 /* Divide doubleword integer LNUM, HNUM by doubleword integer LDEN, HDEN
442 for a quotient (stored in *LQUO, *HQUO) and remainder (in *LREM, *HREM).
443 CODE is a tree code for a kind of division, one of
444 TRUNC_DIV_EXPR, FLOOR_DIV_EXPR, CEIL_DIV_EXPR, ROUND_DIV_EXPR
446 It controls how the quotient is rounded to a integer.
447 Return nonzero if the operation overflows.
448 UNS nonzero says do unsigned division. */
451 div_and_round_double (code
, uns
,
452 lnum_orig
, hnum_orig
, lden_orig
, hden_orig
,
453 lquo
, hquo
, lrem
, hrem
)
456 HOST_WIDE_INT lnum_orig
, hnum_orig
; /* num == numerator == dividend */
457 HOST_WIDE_INT lden_orig
, hden_orig
; /* den == denominator == divisor */
458 HOST_WIDE_INT
*lquo
, *hquo
, *lrem
, *hrem
;
461 HOST_WIDE_INT num
[4 + 1]; /* extra element for scaling. */
462 HOST_WIDE_INT den
[4], quo
[4];
464 unsigned HOST_WIDE_INT work
;
465 register int carry
= 0;
466 HOST_WIDE_INT lnum
= lnum_orig
;
467 HOST_WIDE_INT hnum
= hnum_orig
;
468 HOST_WIDE_INT lden
= lden_orig
;
469 HOST_WIDE_INT hden
= hden_orig
;
472 if ((hden
== 0) && (lden
== 0))
475 /* calculate quotient sign and convert operands to unsigned. */
481 /* (minimum integer) / (-1) is the only overflow case. */
482 if (neg_double (lnum
, hnum
, &lnum
, &hnum
) && (lden
& hden
) == -1)
488 neg_double (lden
, hden
, &lden
, &hden
);
492 if (hnum
== 0 && hden
== 0)
493 { /* single precision */
495 /* This unsigned division rounds toward zero. */
496 *lquo
= lnum
/ (unsigned HOST_WIDE_INT
) lden
;
501 { /* trivial case: dividend < divisor */
502 /* hden != 0 already checked. */
509 bzero ((char *) quo
, sizeof quo
);
511 bzero ((char *) num
, sizeof num
); /* to zero 9th element */
512 bzero ((char *) den
, sizeof den
);
514 encode (num
, lnum
, hnum
);
515 encode (den
, lden
, hden
);
517 /* Special code for when the divisor < BASE. */
518 if (hden
== 0 && lden
< BASE
)
520 /* hnum != 0 already checked. */
521 for (i
= 4 - 1; i
>= 0; i
--)
523 work
= num
[i
] + carry
* BASE
;
524 quo
[i
] = work
/ (unsigned HOST_WIDE_INT
) lden
;
525 carry
= work
% (unsigned HOST_WIDE_INT
) lden
;
530 /* Full double precision division,
531 with thanks to Don Knuth's "Seminumerical Algorithms". */
532 int quo_est
, scale
, num_hi_sig
, den_hi_sig
;
534 /* Find the highest non-zero divisor digit. */
535 for (i
= 4 - 1; ; i
--)
541 /* Insure that the first digit of the divisor is at least BASE/2.
542 This is required by the quotient digit estimation algorithm. */
544 scale
= BASE
/ (den
[den_hi_sig
] + 1);
545 if (scale
> 1) { /* scale divisor and dividend */
547 for (i
= 0; i
<= 4 - 1; i
++) {
548 work
= (num
[i
] * scale
) + carry
;
549 num
[i
] = LOWPART (work
);
550 carry
= HIGHPART (work
);
553 for (i
= 0; i
<= 4 - 1; i
++) {
554 work
= (den
[i
] * scale
) + carry
;
555 den
[i
] = LOWPART (work
);
556 carry
= HIGHPART (work
);
557 if (den
[i
] != 0) den_hi_sig
= i
;
564 for (i
= num_hi_sig
- den_hi_sig
- 1; i
>= 0; i
--) {
565 /* guess the next quotient digit, quo_est, by dividing the first
566 two remaining dividend digits by the high order quotient digit.
567 quo_est is never low and is at most 2 high. */
568 unsigned HOST_WIDE_INT tmp
;
570 num_hi_sig
= i
+ den_hi_sig
+ 1;
571 work
= num
[num_hi_sig
] * BASE
+ num
[num_hi_sig
- 1];
572 if (num
[num_hi_sig
] != den
[den_hi_sig
])
573 quo_est
= work
/ den
[den_hi_sig
];
577 /* refine quo_est so it's usually correct, and at most one high. */
578 tmp
= work
- quo_est
* den
[den_hi_sig
];
580 && den
[den_hi_sig
- 1] * quo_est
> (tmp
* BASE
+ num
[num_hi_sig
- 2]))
583 /* Try QUO_EST as the quotient digit, by multiplying the
584 divisor by QUO_EST and subtracting from the remaining dividend.
585 Keep in mind that QUO_EST is the I - 1st digit. */
588 for (j
= 0; j
<= den_hi_sig
; j
++)
590 work
= quo_est
* den
[j
] + carry
;
591 carry
= HIGHPART (work
);
592 work
= num
[i
+ j
] - LOWPART (work
);
593 num
[i
+ j
] = LOWPART (work
);
594 carry
+= HIGHPART (work
) != 0;
597 /* if quo_est was high by one, then num[i] went negative and
598 we need to correct things. */
600 if (num
[num_hi_sig
] < carry
)
603 carry
= 0; /* add divisor back in */
604 for (j
= 0; j
<= den_hi_sig
; j
++)
606 work
= num
[i
+ j
] + den
[j
] + carry
;
607 carry
= HIGHPART (work
);
608 num
[i
+ j
] = LOWPART (work
);
610 num
[num_hi_sig
] += carry
;
613 /* store the quotient digit. */
618 decode (quo
, lquo
, hquo
);
621 /* if result is negative, make it so. */
623 neg_double (*lquo
, *hquo
, lquo
, hquo
);
625 /* compute trial remainder: rem = num - (quo * den) */
626 mul_double (*lquo
, *hquo
, lden_orig
, hden_orig
, lrem
, hrem
);
627 neg_double (*lrem
, *hrem
, lrem
, hrem
);
628 add_double (lnum_orig
, hnum_orig
, *lrem
, *hrem
, lrem
, hrem
);
633 case TRUNC_MOD_EXPR
: /* round toward zero */
634 case EXACT_DIV_EXPR
: /* for this one, it shouldn't matter */
638 case FLOOR_MOD_EXPR
: /* round toward negative infinity */
639 if (quo_neg
&& (*lrem
!= 0 || *hrem
!= 0)) /* ratio < 0 && rem != 0 */
642 add_double (*lquo
, *hquo
, (HOST_WIDE_INT
) -1, (HOST_WIDE_INT
) -1,
645 else return overflow
;
649 case CEIL_MOD_EXPR
: /* round toward positive infinity */
650 if (!quo_neg
&& (*lrem
!= 0 || *hrem
!= 0)) /* ratio > 0 && rem != 0 */
652 add_double (*lquo
, *hquo
, (HOST_WIDE_INT
) 1, (HOST_WIDE_INT
) 0,
655 else return overflow
;
659 case ROUND_MOD_EXPR
: /* round to closest integer */
661 HOST_WIDE_INT labs_rem
= *lrem
, habs_rem
= *hrem
;
662 HOST_WIDE_INT labs_den
= lden
, habs_den
= hden
, ltwice
, htwice
;
664 /* get absolute values */
665 if (*hrem
< 0) neg_double (*lrem
, *hrem
, &labs_rem
, &habs_rem
);
666 if (hden
< 0) neg_double (lden
, hden
, &labs_den
, &habs_den
);
668 /* if (2 * abs (lrem) >= abs (lden)) */
669 mul_double ((HOST_WIDE_INT
) 2, (HOST_WIDE_INT
) 0,
670 labs_rem
, habs_rem
, <wice
, &htwice
);
671 if (((unsigned HOST_WIDE_INT
) habs_den
672 < (unsigned HOST_WIDE_INT
) htwice
)
673 || (((unsigned HOST_WIDE_INT
) habs_den
674 == (unsigned HOST_WIDE_INT
) htwice
)
675 && ((HOST_WIDE_INT
unsigned) labs_den
676 < (unsigned HOST_WIDE_INT
) ltwice
)))
680 add_double (*lquo
, *hquo
,
681 (HOST_WIDE_INT
) -1, (HOST_WIDE_INT
) -1, lquo
, hquo
);
684 add_double (*lquo
, *hquo
, (HOST_WIDE_INT
) 1, (HOST_WIDE_INT
) 0,
687 else return overflow
;
695 /* compute true remainder: rem = num - (quo * den) */
696 mul_double (*lquo
, *hquo
, lden_orig
, hden_orig
, lrem
, hrem
);
697 neg_double (*lrem
, *hrem
, lrem
, hrem
);
698 add_double (lnum_orig
, hnum_orig
, *lrem
, *hrem
, lrem
, hrem
);
702 #ifndef REAL_ARITHMETIC
703 /* Effectively truncate a real value to represent the nearest possible value
704 in a narrower mode. The result is actually represented in the same data
705 type as the argument, but its value is usually different.
707 A trap may occur during the FP operations and it is the responsibility
708 of the calling function to have a handler established. */
711 real_value_truncate (mode
, arg
)
712 enum machine_mode mode
;
715 return REAL_VALUE_TRUNCATE (mode
, arg
);
718 #if TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
720 /* Check for infinity in an IEEE double precision number. */
726 /* The IEEE 64-bit double format. */
731 unsigned exponent
: 11;
732 unsigned mantissa1
: 20;
737 unsigned mantissa1
: 20;
738 unsigned exponent
: 11;
744 if (u
.big_endian
.sign
== 1)
747 return (u
.big_endian
.exponent
== 2047
748 && u
.big_endian
.mantissa1
== 0
749 && u
.big_endian
.mantissa2
== 0);
754 return (u
.little_endian
.exponent
== 2047
755 && u
.little_endian
.mantissa1
== 0
756 && u
.little_endian
.mantissa2
== 0);
760 /* Check whether an IEEE double precision number is a NaN. */
766 /* The IEEE 64-bit double format. */
771 unsigned exponent
: 11;
772 unsigned mantissa1
: 20;
777 unsigned mantissa1
: 20;
778 unsigned exponent
: 11;
784 if (u
.big_endian
.sign
== 1)
787 return (u
.big_endian
.exponent
== 2047
788 && (u
.big_endian
.mantissa1
!= 0
789 || u
.big_endian
.mantissa2
!= 0));
794 return (u
.little_endian
.exponent
== 2047
795 && (u
.little_endian
.mantissa1
!= 0
796 || u
.little_endian
.mantissa2
!= 0));
800 /* Check for a negative IEEE double precision number. */
806 /* The IEEE 64-bit double format. */
811 unsigned exponent
: 11;
812 unsigned mantissa1
: 20;
817 unsigned mantissa1
: 20;
818 unsigned exponent
: 11;
824 if (u
.big_endian
.sign
== 1)
827 return u
.big_endian
.sign
;
832 return u
.little_endian
.sign
;
835 #else /* Target not IEEE */
837 /* Let's assume other float formats don't have infinity.
838 (This can be overridden by redefining REAL_VALUE_ISINF.) */
846 /* Let's assume other float formats don't have NaNs.
847 (This can be overridden by redefining REAL_VALUE_ISNAN.) */
855 /* Let's assume other float formats don't have minus zero.
856 (This can be overridden by redefining REAL_VALUE_NEGATIVE.) */
863 #endif /* Target not IEEE */
864 #endif /* no REAL_ARITHMETIC */
866 /* Split a tree IN into a constant and a variable part
867 that could be combined with CODE to make IN.
868 CODE must be a commutative arithmetic operation.
869 Store the constant part into *CONP and the variable in &VARP.
870 Return 1 if this was done; zero means the tree IN did not decompose
873 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.
874 Therefore, we must tell the caller whether the variable part
875 was subtracted. We do this by storing 1 or -1 into *VARSIGNP.
876 The value stored is the coefficient for the variable term.
877 The constant term we return should always be added;
878 we negate it if necessary. */
881 split_tree (in
, code
, varp
, conp
, varsignp
)
887 register tree outtype
= TREE_TYPE (in
);
891 /* Strip any conversions that don't change the machine mode. */
892 while ((TREE_CODE (in
) == NOP_EXPR
893 || TREE_CODE (in
) == CONVERT_EXPR
)
894 && (TYPE_MODE (TREE_TYPE (in
))
895 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (in
, 0)))))
896 in
= TREE_OPERAND (in
, 0);
898 if (TREE_CODE (in
) == code
899 || (! FLOAT_TYPE_P (TREE_TYPE (in
))
900 /* We can associate addition and subtraction together
901 (even though the C standard doesn't say so)
902 for integers because the value is not affected.
903 For reals, the value might be affected, so we can't. */
904 && ((code
== PLUS_EXPR
&& TREE_CODE (in
) == MINUS_EXPR
)
905 || (code
== MINUS_EXPR
&& TREE_CODE (in
) == PLUS_EXPR
))))
907 enum tree_code code
= TREE_CODE (TREE_OPERAND (in
, 0));
908 if (code
== INTEGER_CST
)
910 *conp
= TREE_OPERAND (in
, 0);
911 *varp
= TREE_OPERAND (in
, 1);
912 if (TYPE_MODE (TREE_TYPE (*varp
)) != TYPE_MODE (outtype
)
913 && TREE_TYPE (*varp
) != outtype
)
914 *varp
= convert (outtype
, *varp
);
915 *varsignp
= (TREE_CODE (in
) == MINUS_EXPR
) ? -1 : 1;
918 if (TREE_CONSTANT (TREE_OPERAND (in
, 1)))
920 *conp
= TREE_OPERAND (in
, 1);
921 *varp
= TREE_OPERAND (in
, 0);
923 if (TYPE_MODE (TREE_TYPE (*varp
)) != TYPE_MODE (outtype
)
924 && TREE_TYPE (*varp
) != outtype
)
925 *varp
= convert (outtype
, *varp
);
926 if (TREE_CODE (in
) == MINUS_EXPR
)
928 /* If operation is subtraction and constant is second,
929 must negate it to get an additive constant.
930 And this cannot be done unless it is a manifest constant.
931 It could also be the address of a static variable.
932 We cannot negate that, so give up. */
933 if (TREE_CODE (*conp
) == INTEGER_CST
)
934 /* Subtracting from integer_zero_node loses for long long. */
935 *conp
= fold (build1 (NEGATE_EXPR
, TREE_TYPE (*conp
), *conp
));
941 if (TREE_CONSTANT (TREE_OPERAND (in
, 0)))
943 *conp
= TREE_OPERAND (in
, 0);
944 *varp
= TREE_OPERAND (in
, 1);
945 if (TYPE_MODE (TREE_TYPE (*varp
)) != TYPE_MODE (outtype
)
946 && TREE_TYPE (*varp
) != outtype
)
947 *varp
= convert (outtype
, *varp
);
948 *varsignp
= (TREE_CODE (in
) == MINUS_EXPR
) ? -1 : 1;
955 /* Combine two constants NUM and ARG2 under operation CODE
956 to produce a new constant.
957 We assume ARG1 and ARG2 have the same data type,
958 or at least are the same kind of constant and the same machine mode.
960 If NOTRUNC is nonzero, do not truncate the result to fit the data type. */
963 const_binop (code
, arg1
, arg2
, notrunc
)
965 register tree arg1
, arg2
;
968 if (TREE_CODE (arg1
) == INTEGER_CST
)
970 register HOST_WIDE_INT int1l
= TREE_INT_CST_LOW (arg1
);
971 register HOST_WIDE_INT int1h
= TREE_INT_CST_HIGH (arg1
);
972 HOST_WIDE_INT int2l
= TREE_INT_CST_LOW (arg2
);
973 HOST_WIDE_INT int2h
= TREE_INT_CST_HIGH (arg2
);
974 HOST_WIDE_INT low
, hi
;
975 HOST_WIDE_INT garbagel
, garbageh
;
977 int uns
= TREE_UNSIGNED (TREE_TYPE (arg1
));
983 t
= build_int_2 (int1l
| int2l
, int1h
| int2h
);
987 t
= build_int_2 (int1l
^ int2l
, int1h
^ int2h
);
991 t
= build_int_2 (int1l
& int2l
, int1h
& int2h
);
995 t
= build_int_2 (int1l
& ~int2l
, int1h
& ~int2h
);
1001 /* It's unclear from the C standard whether shifts can overflow.
1002 The following code ignores overflow; perhaps a C standard
1003 interpretation ruling is needed. */
1004 lshift_double (int1l
, int1h
, int2l
,
1005 TYPE_PRECISION (TREE_TYPE (arg1
)),
1008 t
= build_int_2 (low
, hi
);
1009 TREE_TYPE (t
) = TREE_TYPE (arg1
);
1011 force_fit_type (t
, 0);
1012 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
);
1013 TREE_CONSTANT_OVERFLOW (t
)
1014 = TREE_CONSTANT_OVERFLOW (arg1
) | TREE_CONSTANT_OVERFLOW (arg2
);
1020 lrotate_double (int1l
, int1h
, int2l
,
1021 TYPE_PRECISION (TREE_TYPE (arg1
)),
1023 t
= build_int_2 (low
, hi
);
1030 if ((unsigned HOST_WIDE_INT
) int2l
< int1l
)
1033 overflow
= int2h
< hi
;
1035 t
= build_int_2 (int2l
, int2h
);
1041 if ((unsigned HOST_WIDE_INT
) int1l
< int2l
)
1044 overflow
= int1h
< hi
;
1046 t
= build_int_2 (int1l
, int1h
);
1049 overflow
= add_double (int1l
, int1h
, int2l
, int2h
, &low
, &hi
);
1050 t
= build_int_2 (low
, hi
);
1054 if (int2h
== 0 && int2l
== 0)
1056 t
= build_int_2 (int1l
, int1h
);
1059 neg_double (int2l
, int2h
, &low
, &hi
);
1060 add_double (int1l
, int1h
, low
, hi
, &low
, &hi
);
1061 overflow
= overflow_sum_sign (hi
, int2h
, int1h
);
1062 t
= build_int_2 (low
, hi
);
1066 overflow
= mul_double (int1l
, int1h
, int2l
, int2h
, &low
, &hi
);
1067 t
= build_int_2 (low
, hi
);
1070 case TRUNC_DIV_EXPR
:
1071 case FLOOR_DIV_EXPR
: case CEIL_DIV_EXPR
:
1072 case EXACT_DIV_EXPR
:
1073 /* This is a shortcut for a common special case.
1074 It reduces the number of tree nodes generated
1076 if (int2h
== 0 && int2l
> 0
1077 && TREE_TYPE (arg1
) == sizetype
1078 && int1h
== 0 && int1l
>= 0)
1080 if (code
== CEIL_DIV_EXPR
)
1082 return size_int (int1l
/ int2l
);
1084 case ROUND_DIV_EXPR
:
1085 if (int2h
== 0 && int2l
== 1)
1087 t
= build_int_2 (int1l
, int1h
);
1090 if (int1l
== int2l
&& int1h
== int2h
)
1092 if ((int1l
| int1h
) == 0)
1094 t
= build_int_2 (1, 0);
1097 overflow
= div_and_round_double (code
, uns
,
1098 int1l
, int1h
, int2l
, int2h
,
1099 &low
, &hi
, &garbagel
, &garbageh
);
1100 t
= build_int_2 (low
, hi
);
1103 case TRUNC_MOD_EXPR
: case ROUND_MOD_EXPR
:
1104 case FLOOR_MOD_EXPR
: case CEIL_MOD_EXPR
:
1105 overflow
= div_and_round_double (code
, uns
,
1106 int1l
, int1h
, int2l
, int2h
,
1107 &garbagel
, &garbageh
, &low
, &hi
);
1108 t
= build_int_2 (low
, hi
);
1115 low
= (((unsigned HOST_WIDE_INT
) int1h
1116 < (unsigned HOST_WIDE_INT
) int2h
)
1117 || (((unsigned HOST_WIDE_INT
) int1h
1118 == (unsigned HOST_WIDE_INT
) int2h
)
1119 && ((unsigned HOST_WIDE_INT
) int1l
1120 < (unsigned HOST_WIDE_INT
) int2l
)));
1124 low
= ((int1h
< int2h
)
1125 || ((int1h
== int2h
)
1126 && ((unsigned HOST_WIDE_INT
) int1l
1127 < (unsigned HOST_WIDE_INT
) int2l
)));
1129 if (low
== (code
== MIN_EXPR
))
1130 t
= build_int_2 (int1l
, int1h
);
1132 t
= build_int_2 (int2l
, int2h
);
1139 TREE_TYPE (t
) = TREE_TYPE (arg1
);
1141 = ((notrunc
? !uns
&& overflow
: force_fit_type (t
, overflow
&& !uns
))
1142 | TREE_OVERFLOW (arg1
)
1143 | TREE_OVERFLOW (arg2
));
1144 TREE_CONSTANT_OVERFLOW (t
) = (TREE_OVERFLOW (t
)
1145 | TREE_CONSTANT_OVERFLOW (arg1
)
1146 | TREE_CONSTANT_OVERFLOW (arg2
));
1149 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1150 if (TREE_CODE (arg1
) == REAL_CST
)
1155 REAL_VALUE_TYPE value
;
1158 d1
= TREE_REAL_CST (arg1
);
1159 d2
= TREE_REAL_CST (arg2
);
1161 /* If either operand is a NaN, just return it. Otherwise, set up
1162 for floating-point trap; we return an overflow. */
1163 if (REAL_VALUE_ISNAN (d1
))
1165 else if (REAL_VALUE_ISNAN (d2
))
1167 else if (setjmp (float_error
))
1169 t
= copy_node (arg1
);
1174 set_float_handler (float_error
);
1176 #ifdef REAL_ARITHMETIC
1177 REAL_ARITHMETIC (value
, code
, d1
, d2
);
1194 #ifndef REAL_INFINITY
1203 value
= MIN (d1
, d2
);
1207 value
= MAX (d1
, d2
);
1213 #endif /* no REAL_ARITHMETIC */
1214 t
= build_real (TREE_TYPE (arg1
),
1215 real_value_truncate (TYPE_MODE (TREE_TYPE (arg1
)), value
));
1217 set_float_handler (NULL_PTR
);
1220 = (force_fit_type (t
, overflow
)
1221 | TREE_OVERFLOW (arg1
) | TREE_OVERFLOW (arg2
));
1222 TREE_CONSTANT_OVERFLOW (t
)
1224 | TREE_CONSTANT_OVERFLOW (arg1
)
1225 | TREE_CONSTANT_OVERFLOW (arg2
);
1228 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1229 if (TREE_CODE (arg1
) == COMPLEX_CST
)
1231 register tree r1
= TREE_REALPART (arg1
);
1232 register tree i1
= TREE_IMAGPART (arg1
);
1233 register tree r2
= TREE_REALPART (arg2
);
1234 register tree i2
= TREE_IMAGPART (arg2
);
1240 t
= build_complex (const_binop (PLUS_EXPR
, r1
, r2
, notrunc
),
1241 const_binop (PLUS_EXPR
, i1
, i2
, notrunc
));
1245 t
= build_complex (const_binop (MINUS_EXPR
, r1
, r2
, notrunc
),
1246 const_binop (MINUS_EXPR
, i1
, i2
, notrunc
));
1250 t
= build_complex (const_binop (MINUS_EXPR
,
1251 const_binop (MULT_EXPR
,
1253 const_binop (MULT_EXPR
,
1256 const_binop (PLUS_EXPR
,
1257 const_binop (MULT_EXPR
,
1259 const_binop (MULT_EXPR
,
1266 register tree magsquared
1267 = const_binop (PLUS_EXPR
,
1268 const_binop (MULT_EXPR
, r2
, r2
, notrunc
),
1269 const_binop (MULT_EXPR
, i2
, i2
, notrunc
),
1273 (const_binop (INTEGRAL_TYPE_P (TREE_TYPE (r1
))
1274 ? TRUNC_DIV_EXPR
: RDIV_EXPR
,
1275 const_binop (PLUS_EXPR
,
1276 const_binop (MULT_EXPR
, r1
, r2
,
1278 const_binop (MULT_EXPR
, i1
, i2
,
1281 magsquared
, notrunc
),
1282 const_binop (INTEGRAL_TYPE_P (TREE_TYPE (r1
))
1283 ? TRUNC_DIV_EXPR
: RDIV_EXPR
,
1284 const_binop (MINUS_EXPR
,
1285 const_binop (MULT_EXPR
, i1
, r2
,
1287 const_binop (MULT_EXPR
, r1
, i2
,
1290 magsquared
, notrunc
));
1297 TREE_TYPE (t
) = TREE_TYPE (arg1
);
1303 /* Return an INTEGER_CST with value V and type from `sizetype'. */
1307 unsigned HOST_WIDE_INT number
;
1310 /* Type-size nodes already made for small sizes. */
1311 static tree size_table
[2*HOST_BITS_PER_WIDE_INT
+ 1];
1313 if (number
< 2*HOST_BITS_PER_WIDE_INT
+ 1
1314 && size_table
[number
] != 0)
1315 return size_table
[number
];
1316 if (number
< 2*HOST_BITS_PER_WIDE_INT
+ 1)
1318 push_obstacks_nochange ();
1319 /* Make this a permanent node. */
1320 end_temporary_allocation ();
1321 t
= build_int_2 (number
, 0);
1322 TREE_TYPE (t
) = sizetype
;
1323 size_table
[number
] = t
;
1328 t
= build_int_2 (number
, 0);
1329 TREE_TYPE (t
) = sizetype
;
1334 /* Combine operands OP1 and OP2 with arithmetic operation CODE.
1335 CODE is a tree code. Data type is taken from `sizetype',
1336 If the operands are constant, so is the result. */
1339 size_binop (code
, arg0
, arg1
)
1340 enum tree_code code
;
1343 /* Handle the special case of two integer constants faster. */
1344 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
1346 /* And some specific cases even faster than that. */
1347 if (code
== PLUS_EXPR
1348 && TREE_INT_CST_LOW (arg0
) == 0
1349 && TREE_INT_CST_HIGH (arg0
) == 0)
1351 if (code
== MINUS_EXPR
1352 && TREE_INT_CST_LOW (arg1
) == 0
1353 && TREE_INT_CST_HIGH (arg1
) == 0)
1355 if (code
== MULT_EXPR
1356 && TREE_INT_CST_LOW (arg0
) == 1
1357 && TREE_INT_CST_HIGH (arg0
) == 0)
1359 /* Handle general case of two integer constants. */
1360 return const_binop (code
, arg0
, arg1
, 1);
1363 if (arg0
== error_mark_node
|| arg1
== error_mark_node
)
1364 return error_mark_node
;
1366 return fold (build (code
, sizetype
, arg0
, arg1
));
1369 /* Given T, a tree representing type conversion of ARG1, a constant,
1370 return a constant tree representing the result of conversion. */
1373 fold_convert (t
, arg1
)
1377 register tree type
= TREE_TYPE (t
);
1380 if (TREE_CODE (type
) == POINTER_TYPE
|| INTEGRAL_TYPE_P (type
))
1382 if (TREE_CODE (arg1
) == INTEGER_CST
)
1384 /* If we would build a constant wider than GCC supports,
1385 leave the conversion unfolded. */
1386 if (TYPE_PRECISION (type
) > 2 * HOST_BITS_PER_WIDE_INT
)
1389 /* Given an integer constant, make new constant with new type,
1390 appropriately sign-extended or truncated. */
1391 t
= build_int_2 (TREE_INT_CST_LOW (arg1
),
1392 TREE_INT_CST_HIGH (arg1
));
1393 TREE_TYPE (t
) = type
;
1394 /* Indicate an overflow if (1) ARG1 already overflowed,
1395 or (2) force_fit_type indicates an overflow.
1396 Tell force_fit_type that an overflow has already occurred
1397 if ARG1 is a too-large unsigned value and T is signed. */
1399 = (TREE_OVERFLOW (arg1
)
1400 | force_fit_type (t
,
1401 (TREE_INT_CST_HIGH (arg1
) < 0
1402 & (TREE_UNSIGNED (type
)
1403 < TREE_UNSIGNED (TREE_TYPE (arg1
))))));
1404 TREE_CONSTANT_OVERFLOW (t
)
1405 = TREE_OVERFLOW (t
) | TREE_CONSTANT_OVERFLOW (arg1
);
1407 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1408 else if (TREE_CODE (arg1
) == REAL_CST
)
1410 /* Don't initialize these, use assignments.
1411 Initialized local aggregates don't work on old compilers. */
1416 x
= TREE_REAL_CST (arg1
);
1417 l
= real_value_from_int_cst (TYPE_MIN_VALUE (type
));
1418 u
= real_value_from_int_cst (TYPE_MAX_VALUE (type
));
1419 /* See if X will be in range after truncation towards 0.
1420 To compensate for truncation, move the bounds away from 0,
1421 but reject if X exactly equals the adjusted bounds. */
1422 #ifdef REAL_ARITHMETIC
1423 REAL_ARITHMETIC (l
, MINUS_EXPR
, l
, dconst1
);
1424 REAL_ARITHMETIC (u
, PLUS_EXPR
, u
, dconst1
);
1429 /* If X is a NaN, use zero instead and show we have an overflow.
1430 Otherwise, range check. */
1431 if (REAL_VALUE_ISNAN (x
))
1432 overflow
= 1, x
= dconst0
;
1433 else if (! (REAL_VALUES_LESS (l
, x
) && REAL_VALUES_LESS (x
, u
)))
1436 #ifndef REAL_ARITHMETIC
1438 HOST_WIDE_INT low
, high
;
1439 HOST_WIDE_INT half_word
1440 = (HOST_WIDE_INT
) 1 << (HOST_BITS_PER_WIDE_INT
/ 2);
1445 high
= (HOST_WIDE_INT
) (x
/ half_word
/ half_word
);
1446 x
-= (REAL_VALUE_TYPE
) high
* half_word
* half_word
;
1447 if (x
>= (REAL_VALUE_TYPE
) half_word
* half_word
/ 2)
1449 low
= x
- (REAL_VALUE_TYPE
) half_word
* half_word
/ 2;
1450 low
|= (HOST_WIDE_INT
) -1 << (HOST_BITS_PER_WIDE_INT
- 1);
1453 low
= (HOST_WIDE_INT
) x
;
1454 if (TREE_REAL_CST (arg1
) < 0)
1455 neg_double (low
, high
, &low
, &high
);
1456 t
= build_int_2 (low
, high
);
1460 HOST_WIDE_INT low
, high
;
1461 REAL_VALUE_TO_INT (&low
, &high
, x
);
1462 t
= build_int_2 (low
, high
);
1465 TREE_TYPE (t
) = type
;
1467 = TREE_OVERFLOW (arg1
) | force_fit_type (t
, overflow
);
1468 TREE_CONSTANT_OVERFLOW (t
)
1469 = TREE_OVERFLOW (t
) | TREE_CONSTANT_OVERFLOW (arg1
);
1471 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1472 TREE_TYPE (t
) = type
;
1474 else if (TREE_CODE (type
) == REAL_TYPE
)
1476 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
1477 if (TREE_CODE (arg1
) == INTEGER_CST
)
1478 return build_real_from_int_cst (type
, arg1
);
1479 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
1480 if (TREE_CODE (arg1
) == REAL_CST
)
1482 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
)))
1484 else if (setjmp (float_error
))
1487 t
= copy_node (arg1
);
1490 set_float_handler (float_error
);
1492 t
= build_real (type
, real_value_truncate (TYPE_MODE (type
),
1493 TREE_REAL_CST (arg1
)));
1494 set_float_handler (NULL_PTR
);
1498 = TREE_OVERFLOW (arg1
) | force_fit_type (t
, overflow
);
1499 TREE_CONSTANT_OVERFLOW (t
)
1500 = TREE_OVERFLOW (t
) | TREE_CONSTANT_OVERFLOW (arg1
);
1504 TREE_CONSTANT (t
) = 1;
1508 /* Return an expr equal to X but certainly not valid as an lvalue.
1509 Also make sure it is not valid as an null pointer constant. */
1517 /* These things are certainly not lvalues. */
1518 if (TREE_CODE (x
) == NON_LVALUE_EXPR
1519 || TREE_CODE (x
) == INTEGER_CST
1520 || TREE_CODE (x
) == REAL_CST
1521 || TREE_CODE (x
) == STRING_CST
1522 || TREE_CODE (x
) == ADDR_EXPR
)
1524 if (TREE_CODE (x
) == INTEGER_CST
&& integer_zerop (x
))
1526 /* Use NOP_EXPR instead of NON_LVALUE_EXPR
1527 so convert_for_assignment won't strip it.
1528 This is so this 0 won't be treated as a null pointer constant. */
1529 result
= build1 (NOP_EXPR
, TREE_TYPE (x
), x
);
1530 TREE_CONSTANT (result
) = TREE_CONSTANT (x
);
1536 result
= build1 (NON_LVALUE_EXPR
, TREE_TYPE (x
), x
);
1537 TREE_CONSTANT (result
) = TREE_CONSTANT (x
);
1541 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
1542 Zero means allow extended lvalues. */
1544 int pedantic_lvalues
;
1546 /* When pedantic, return an expr equal to X but certainly not valid as a
1547 pedantic lvalue. Otherwise, return X. */
1550 pedantic_non_lvalue (x
)
1553 if (pedantic_lvalues
)
1554 return non_lvalue (x
);
1559 /* Given a tree comparison code, return the code that is the logical inverse
1560 of the given code. It is not safe to do this for floating-point
1561 comparisons, except for NE_EXPR and EQ_EXPR. */
1563 static enum tree_code
1564 invert_tree_comparison (code
)
1565 enum tree_code code
;
1586 /* Similar, but return the comparison that results if the operands are
1587 swapped. This is safe for floating-point. */
1589 static enum tree_code
1590 swap_tree_comparison (code
)
1591 enum tree_code code
;
1611 /* Return nonzero if CODE is a tree code that represents a truth value. */
1614 truth_value_p (code
)
1615 enum tree_code code
;
1617 return (TREE_CODE_CLASS (code
) == '<'
1618 || code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
1619 || code
== TRUTH_OR_EXPR
|| code
== TRUTH_ORIF_EXPR
1620 || code
== TRUTH_XOR_EXPR
|| code
== TRUTH_NOT_EXPR
);
1623 /* Return nonzero if two operands are necessarily equal.
1624 If ONLY_CONST is non-zero, only return non-zero for constants.
1625 This function tests whether the operands are indistinguishable;
1626 it does not test whether they are equal using C's == operation.
1627 The distinction is important for IEEE floating point, because
1628 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
1629 (2) two NaNs may be indistinguishable, but NaN!=NaN. */
1632 operand_equal_p (arg0
, arg1
, only_const
)
1636 /* If both types don't have the same signedness, then we can't consider
1637 them equal. We must check this before the STRIP_NOPS calls
1638 because they may change the signedness of the arguments. */
1639 if (TREE_UNSIGNED (TREE_TYPE (arg0
)) != TREE_UNSIGNED (TREE_TYPE (arg1
)))
1645 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
1646 We don't care about side effects in that case because the SAVE_EXPR
1647 takes care of that for us. */
1648 if (TREE_CODE (arg0
) == SAVE_EXPR
&& arg0
== arg1
)
1649 return ! only_const
;
1651 if (TREE_SIDE_EFFECTS (arg0
) || TREE_SIDE_EFFECTS (arg1
))
1654 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
1655 && TREE_CODE (arg0
) == ADDR_EXPR
1656 && TREE_OPERAND (arg0
, 0) == TREE_OPERAND (arg1
, 0))
1659 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
1660 && TREE_CODE (arg0
) == INTEGER_CST
1661 && TREE_INT_CST_LOW (arg0
) == TREE_INT_CST_LOW (arg1
)
1662 && TREE_INT_CST_HIGH (arg0
) == TREE_INT_CST_HIGH (arg1
))
1665 /* Detect when real constants are equal. */
1666 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
1667 && TREE_CODE (arg0
) == REAL_CST
)
1668 return !bcmp ((char *) &TREE_REAL_CST (arg0
),
1669 (char *) &TREE_REAL_CST (arg1
),
1670 sizeof (REAL_VALUE_TYPE
));
1678 if (TREE_CODE (arg0
) != TREE_CODE (arg1
))
1680 /* This is needed for conversions and for COMPONENT_REF.
1681 Might as well play it safe and always test this. */
1682 if (TYPE_MODE (TREE_TYPE (arg0
)) != TYPE_MODE (TREE_TYPE (arg1
)))
1685 switch (TREE_CODE_CLASS (TREE_CODE (arg0
)))
1688 /* Two conversions are equal only if signedness and modes match. */
1689 if ((TREE_CODE (arg0
) == NOP_EXPR
|| TREE_CODE (arg0
) == CONVERT_EXPR
)
1690 && (TREE_UNSIGNED (TREE_TYPE (arg0
))
1691 != TREE_UNSIGNED (TREE_TYPE (arg1
))))
1694 return operand_equal_p (TREE_OPERAND (arg0
, 0),
1695 TREE_OPERAND (arg1
, 0), 0);
1699 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
1700 TREE_OPERAND (arg1
, 0), 0)
1701 && operand_equal_p (TREE_OPERAND (arg0
, 1),
1702 TREE_OPERAND (arg1
, 1), 0));
1705 switch (TREE_CODE (arg0
))
1708 return operand_equal_p (TREE_OPERAND (arg0
, 0),
1709 TREE_OPERAND (arg1
, 0), 0);
1713 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
1714 TREE_OPERAND (arg1
, 0), 0)
1715 && operand_equal_p (TREE_OPERAND (arg0
, 1),
1716 TREE_OPERAND (arg1
, 1), 0));
1719 return (operand_equal_p (TREE_OPERAND (arg0
, 0),
1720 TREE_OPERAND (arg1
, 0), 0)
1721 && operand_equal_p (TREE_OPERAND (arg0
, 1),
1722 TREE_OPERAND (arg1
, 1), 0)
1723 && operand_equal_p (TREE_OPERAND (arg0
, 2),
1724 TREE_OPERAND (arg1
, 2), 0));
1732 /* Similar to operand_equal_p, but see if ARG0 might have been made by
1733 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
1735 When in doubt, return 0. */
1738 operand_equal_for_comparison_p (arg0
, arg1
, other
)
1742 int unsignedp1
, unsignedpo
;
1743 tree primarg1
, primother
;
1744 unsigned correct_width
;
1746 if (operand_equal_p (arg0
, arg1
, 0))
1749 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
))
1750 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1
)))
1753 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
1754 actual comparison operand, ARG0.
1756 First throw away any conversions to wider types
1757 already present in the operands. */
1759 primarg1
= get_narrower (arg1
, &unsignedp1
);
1760 primother
= get_narrower (other
, &unsignedpo
);
1762 correct_width
= TYPE_PRECISION (TREE_TYPE (arg1
));
1763 if (unsignedp1
== unsignedpo
1764 && TYPE_PRECISION (TREE_TYPE (primarg1
)) < correct_width
1765 && TYPE_PRECISION (TREE_TYPE (primother
)) < correct_width
)
1767 tree type
= TREE_TYPE (arg0
);
1769 /* Make sure shorter operand is extended the right way
1770 to match the longer operand. */
1771 primarg1
= convert (signed_or_unsigned_type (unsignedp1
,
1772 TREE_TYPE (primarg1
)),
1775 if (operand_equal_p (arg0
, convert (type
, primarg1
), 0))
1782 /* See if ARG is an expression that is either a comparison or is performing
1783 arithmetic on comparisons. The comparisons must only be comparing
1784 two different values, which will be stored in *CVAL1 and *CVAL2; if
1785 they are non-zero it means that some operands have already been found.
1786 No variables may be used anywhere else in the expression except in the
1787 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
1788 the expression and save_expr needs to be called with CVAL1 and CVAL2.
1790 If this is true, return 1. Otherwise, return zero. */
1793 twoval_comparison_p (arg
, cval1
, cval2
, save_p
)
1795 tree
*cval1
, *cval2
;
1798 enum tree_code code
= TREE_CODE (arg
);
1799 char class = TREE_CODE_CLASS (code
);
1801 /* We can handle some of the 'e' cases here. */
1802 if (class == 'e' && code
== TRUTH_NOT_EXPR
)
1804 else if (class == 'e'
1805 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
1806 || code
== COMPOUND_EXPR
))
1809 /* ??? Disable this since the SAVE_EXPR might already be in use outside
1810 the expression. There may be no way to make this work, but it needs
1811 to be looked at again for 2.6. */
1813 else if (class == 'e' && code
== SAVE_EXPR
&& SAVE_EXPR_RTL (arg
) == 0)
1815 /* If we've already found a CVAL1 or CVAL2, this expression is
1816 two complex to handle. */
1817 if (*cval1
|| *cval2
)
1828 return twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
);
1831 return (twoval_comparison_p (TREE_OPERAND (arg
, 0), cval1
, cval2
, save_p
)
1832 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
1833 cval1
, cval2
, save_p
));
1839 if (code
== COND_EXPR
)
1840 return (twoval_comparison_p (TREE_OPERAND (arg
, 0),
1841 cval1
, cval2
, save_p
)
1842 && twoval_comparison_p (TREE_OPERAND (arg
, 1),
1843 cval1
, cval2
, save_p
)
1844 && twoval_comparison_p (TREE_OPERAND (arg
, 2),
1845 cval1
, cval2
, save_p
));
1849 /* First see if we can handle the first operand, then the second. For
1850 the second operand, we know *CVAL1 can't be zero. It must be that
1851 one side of the comparison is each of the values; test for the
1852 case where this isn't true by failing if the two operands
1855 if (operand_equal_p (TREE_OPERAND (arg
, 0),
1856 TREE_OPERAND (arg
, 1), 0))
1860 *cval1
= TREE_OPERAND (arg
, 0);
1861 else if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 0), 0))
1863 else if (*cval2
== 0)
1864 *cval2
= TREE_OPERAND (arg
, 0);
1865 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 0), 0))
1870 if (operand_equal_p (*cval1
, TREE_OPERAND (arg
, 1), 0))
1872 else if (*cval2
== 0)
1873 *cval2
= TREE_OPERAND (arg
, 1);
1874 else if (operand_equal_p (*cval2
, TREE_OPERAND (arg
, 1), 0))
1885 /* ARG is a tree that is known to contain just arithmetic operations and
1886 comparisons. Evaluate the operations in the tree substituting NEW0 for
1887 any occurrence of OLD0 as an operand of a comparison and likewise for
1891 eval_subst (arg
, old0
, new0
, old1
, new1
)
1893 tree old0
, new0
, old1
, new1
;
1895 tree type
= TREE_TYPE (arg
);
1896 enum tree_code code
= TREE_CODE (arg
);
1897 char class = TREE_CODE_CLASS (code
);
1899 /* We can handle some of the 'e' cases here. */
1900 if (class == 'e' && code
== TRUTH_NOT_EXPR
)
1902 else if (class == 'e'
1903 && (code
== TRUTH_ANDIF_EXPR
|| code
== TRUTH_ORIF_EXPR
))
1909 return fold (build1 (code
, type
,
1910 eval_subst (TREE_OPERAND (arg
, 0),
1911 old0
, new0
, old1
, new1
)));
1914 return fold (build (code
, type
,
1915 eval_subst (TREE_OPERAND (arg
, 0),
1916 old0
, new0
, old1
, new1
),
1917 eval_subst (TREE_OPERAND (arg
, 1),
1918 old0
, new0
, old1
, new1
)));
1924 return eval_subst (TREE_OPERAND (arg
, 0), old0
, new0
, old1
, new1
);
1927 return eval_subst (TREE_OPERAND (arg
, 1), old0
, new0
, old1
, new1
);
1930 return fold (build (code
, type
,
1931 eval_subst (TREE_OPERAND (arg
, 0),
1932 old0
, new0
, old1
, new1
),
1933 eval_subst (TREE_OPERAND (arg
, 1),
1934 old0
, new0
, old1
, new1
),
1935 eval_subst (TREE_OPERAND (arg
, 2),
1936 old0
, new0
, old1
, new1
)));
1941 tree arg0
= TREE_OPERAND (arg
, 0);
1942 tree arg1
= TREE_OPERAND (arg
, 1);
1944 /* We need to check both for exact equality and tree equality. The
1945 former will be true if the operand has a side-effect. In that
1946 case, we know the operand occurred exactly once. */
1948 if (arg0
== old0
|| operand_equal_p (arg0
, old0
, 0))
1950 else if (arg0
== old1
|| operand_equal_p (arg0
, old1
, 0))
1953 if (arg1
== old0
|| operand_equal_p (arg1
, old0
, 0))
1955 else if (arg1
== old1
|| operand_equal_p (arg1
, old1
, 0))
1958 return fold (build (code
, type
, arg0
, arg1
));
1965 /* Return a tree for the case when the result of an expression is RESULT
1966 converted to TYPE and OMITTED was previously an operand of the expression
1967 but is now not needed (e.g., we folded OMITTED * 0).
1969 If OMITTED has side effects, we must evaluate it. Otherwise, just do
1970 the conversion of RESULT to TYPE. */
1973 omit_one_operand (type
, result
, omitted
)
1974 tree type
, result
, omitted
;
1976 tree t
= convert (type
, result
);
1978 if (TREE_SIDE_EFFECTS (omitted
))
1979 return build (COMPOUND_EXPR
, type
, omitted
, t
);
1981 return non_lvalue (t
);
1984 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
1987 pedantic_omit_one_operand (type
, result
, omitted
)
1988 tree type
, result
, omitted
;
1990 tree t
= convert (type
, result
);
1992 if (TREE_SIDE_EFFECTS (omitted
))
1993 return build (COMPOUND_EXPR
, type
, omitted
, t
);
1995 return pedantic_non_lvalue (t
);
2000 /* Return a simplified tree node for the truth-negation of ARG. This
2001 never alters ARG itself. We assume that ARG is an operation that
2002 returns a truth value (0 or 1). */
2005 invert_truthvalue (arg
)
2008 tree type
= TREE_TYPE (arg
);
2009 enum tree_code code
= TREE_CODE (arg
);
2011 if (code
== ERROR_MARK
)
2014 /* If this is a comparison, we can simply invert it, except for
2015 floating-point non-equality comparisons, in which case we just
2016 enclose a TRUTH_NOT_EXPR around what we have. */
2018 if (TREE_CODE_CLASS (code
) == '<')
2020 if (FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg
, 0)))
2021 && code
!= NE_EXPR
&& code
!= EQ_EXPR
)
2022 return build1 (TRUTH_NOT_EXPR
, type
, arg
);
2024 return build (invert_tree_comparison (code
), type
,
2025 TREE_OPERAND (arg
, 0), TREE_OPERAND (arg
, 1));
2031 return convert (type
, build_int_2 (TREE_INT_CST_LOW (arg
) == 0
2032 && TREE_INT_CST_HIGH (arg
) == 0, 0));
2034 case TRUTH_AND_EXPR
:
2035 return build (TRUTH_OR_EXPR
, type
,
2036 invert_truthvalue (TREE_OPERAND (arg
, 0)),
2037 invert_truthvalue (TREE_OPERAND (arg
, 1)));
2040 return build (TRUTH_AND_EXPR
, type
,
2041 invert_truthvalue (TREE_OPERAND (arg
, 0)),
2042 invert_truthvalue (TREE_OPERAND (arg
, 1)));
2044 case TRUTH_XOR_EXPR
:
2045 /* Here we can invert either operand. We invert the first operand
2046 unless the second operand is a TRUTH_NOT_EXPR in which case our
2047 result is the XOR of the first operand with the inside of the
2048 negation of the second operand. */
2050 if (TREE_CODE (TREE_OPERAND (arg
, 1)) == TRUTH_NOT_EXPR
)
2051 return build (TRUTH_XOR_EXPR
, type
, TREE_OPERAND (arg
, 0),
2052 TREE_OPERAND (TREE_OPERAND (arg
, 1), 0));
2054 return build (TRUTH_XOR_EXPR
, type
,
2055 invert_truthvalue (TREE_OPERAND (arg
, 0)),
2056 TREE_OPERAND (arg
, 1));
2058 case TRUTH_ANDIF_EXPR
:
2059 return build (TRUTH_ORIF_EXPR
, type
,
2060 invert_truthvalue (TREE_OPERAND (arg
, 0)),
2061 invert_truthvalue (TREE_OPERAND (arg
, 1)));
2063 case TRUTH_ORIF_EXPR
:
2064 return build (TRUTH_ANDIF_EXPR
, type
,
2065 invert_truthvalue (TREE_OPERAND (arg
, 0)),
2066 invert_truthvalue (TREE_OPERAND (arg
, 1)));
2068 case TRUTH_NOT_EXPR
:
2069 return TREE_OPERAND (arg
, 0);
2072 return build (COND_EXPR
, type
, TREE_OPERAND (arg
, 0),
2073 invert_truthvalue (TREE_OPERAND (arg
, 1)),
2074 invert_truthvalue (TREE_OPERAND (arg
, 2)));
2077 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg
, 0),
2078 invert_truthvalue (TREE_OPERAND (arg
, 1)));
2080 case NON_LVALUE_EXPR
:
2081 return invert_truthvalue (TREE_OPERAND (arg
, 0));
2086 return build1 (TREE_CODE (arg
), type
,
2087 invert_truthvalue (TREE_OPERAND (arg
, 0)));
2090 if (!integer_onep (TREE_OPERAND (arg
, 1)))
2092 return build (EQ_EXPR
, type
, arg
, convert (type
, integer_zero_node
));
2095 return build1 (TRUTH_NOT_EXPR
, type
, arg
);
2097 case CLEANUP_POINT_EXPR
:
2098 return build1 (CLEANUP_POINT_EXPR
, type
,
2099 invert_truthvalue (TREE_OPERAND (arg
, 0)));
2101 if (TREE_CODE (TREE_TYPE (arg
)) != BOOLEAN_TYPE
)
2103 return build1 (TRUTH_NOT_EXPR
, type
, arg
);
2106 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
2107 operands are another bit-wise operation with a common input. If so,
2108 distribute the bit operations to save an operation and possibly two if
2109 constants are involved. For example, convert
2110 (A | B) & (A | C) into A | (B & C)
2111 Further simplification will occur if B and C are constants.
2113 If this optimization cannot be done, 0 will be returned. */
2116 distribute_bit_expr (code
, type
, arg0
, arg1
)
2117 enum tree_code code
;
2124 if (TREE_CODE (arg0
) != TREE_CODE (arg1
)
2125 || TREE_CODE (arg0
) == code
2126 || (TREE_CODE (arg0
) != BIT_AND_EXPR
2127 && TREE_CODE (arg0
) != BIT_IOR_EXPR
))
2130 if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 0), 0))
2132 common
= TREE_OPERAND (arg0
, 0);
2133 left
= TREE_OPERAND (arg0
, 1);
2134 right
= TREE_OPERAND (arg1
, 1);
2136 else if (operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
, 1), 0))
2138 common
= TREE_OPERAND (arg0
, 0);
2139 left
= TREE_OPERAND (arg0
, 1);
2140 right
= TREE_OPERAND (arg1
, 0);
2142 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 0), 0))
2144 common
= TREE_OPERAND (arg0
, 1);
2145 left
= TREE_OPERAND (arg0
, 0);
2146 right
= TREE_OPERAND (arg1
, 1);
2148 else if (operand_equal_p (TREE_OPERAND (arg0
, 1), TREE_OPERAND (arg1
, 1), 0))
2150 common
= TREE_OPERAND (arg0
, 1);
2151 left
= TREE_OPERAND (arg0
, 0);
2152 right
= TREE_OPERAND (arg1
, 0);
2157 return fold (build (TREE_CODE (arg0
), type
, common
,
2158 fold (build (code
, type
, left
, right
))));
2161 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
2162 starting at BITPOS. The field is unsigned if UNSIGNEDP is non-zero. */
2165 make_bit_field_ref (inner
, type
, bitsize
, bitpos
, unsignedp
)
2168 int bitsize
, bitpos
;
2171 tree result
= build (BIT_FIELD_REF
, type
, inner
,
2172 size_int (bitsize
), size_int (bitpos
));
2174 TREE_UNSIGNED (result
) = unsignedp
;
2179 /* Optimize a bit-field compare.
2181 There are two cases: First is a compare against a constant and the
2182 second is a comparison of two items where the fields are at the same
2183 bit position relative to the start of a chunk (byte, halfword, word)
2184 large enough to contain it. In these cases we can avoid the shift
2185 implicit in bitfield extractions.
2187 For constants, we emit a compare of the shifted constant with the
2188 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
2189 compared. For two fields at the same position, we do the ANDs with the
2190 similar mask and compare the result of the ANDs.
2192 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
2193 COMPARE_TYPE is the type of the comparison, and LHS and RHS
2194 are the left and right operands of the comparison, respectively.
2196 If the optimization described above can be done, we return the resulting
2197 tree. Otherwise we return zero. */
2200 optimize_bit_field_compare (code
, compare_type
, lhs
, rhs
)
2201 enum tree_code code
;
2205 int lbitpos
, lbitsize
, rbitpos
, rbitsize
;
2206 int lnbitpos
, lnbitsize
, rnbitpos
, rnbitsize
;
2207 tree type
= TREE_TYPE (lhs
);
2208 tree signed_type
, unsigned_type
;
2209 int const_p
= TREE_CODE (rhs
) == INTEGER_CST
;
2210 enum machine_mode lmode
, rmode
, lnmode
, rnmode
;
2211 int lunsignedp
, runsignedp
;
2212 int lvolatilep
= 0, rvolatilep
= 0;
2213 tree linner
, rinner
;
2217 /* Get all the information about the extractions being done. If the bit size
2218 if the same as the size of the underlying object, we aren't doing an
2219 extraction at all and so can do nothing. */
2220 linner
= get_inner_reference (lhs
, &lbitsize
, &lbitpos
, &offset
, &lmode
,
2221 &lunsignedp
, &lvolatilep
);
2222 if (linner
== lhs
|| lbitsize
== GET_MODE_BITSIZE (lmode
) || lbitsize
< 0
2228 /* If this is not a constant, we can only do something if bit positions,
2229 sizes, and signedness are the same. */
2230 rinner
= get_inner_reference (rhs
, &rbitsize
, &rbitpos
, &offset
,
2231 &rmode
, &runsignedp
, &rvolatilep
);
2233 if (rinner
== rhs
|| lbitpos
!= rbitpos
|| lbitsize
!= rbitsize
2234 || lunsignedp
!= runsignedp
|| offset
!= 0)
2238 /* See if we can find a mode to refer to this field. We should be able to,
2239 but fail if we can't. */
2240 lnmode
= get_best_mode (lbitsize
, lbitpos
,
2241 TYPE_ALIGN (TREE_TYPE (linner
)), word_mode
,
2243 if (lnmode
== VOIDmode
)
2246 /* Set signed and unsigned types of the precision of this mode for the
2248 signed_type
= type_for_mode (lnmode
, 0);
2249 unsigned_type
= type_for_mode (lnmode
, 1);
2253 rnmode
= get_best_mode (rbitsize
, rbitpos
,
2254 TYPE_ALIGN (TREE_TYPE (rinner
)), word_mode
,
2256 if (rnmode
== VOIDmode
)
2260 /* Compute the bit position and size for the new reference and our offset
2261 within it. If the new reference is the same size as the original, we
2262 won't optimize anything, so return zero. */
2263 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
2264 lnbitpos
= lbitpos
& ~ (lnbitsize
- 1);
2265 lbitpos
-= lnbitpos
;
2266 if (lnbitsize
== lbitsize
)
2271 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
2272 rnbitpos
= rbitpos
& ~ (rnbitsize
- 1);
2273 rbitpos
-= rnbitpos
;
2274 if (rnbitsize
== rbitsize
)
2278 if (BYTES_BIG_ENDIAN
)
2279 lbitpos
= lnbitsize
- lbitsize
- lbitpos
;
2281 /* Make the mask to be used against the extracted field. */
2282 mask
= build_int_2 (~0, ~0);
2283 TREE_TYPE (mask
) = unsigned_type
;
2284 force_fit_type (mask
, 0);
2285 mask
= convert (unsigned_type
, mask
);
2286 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (lnbitsize
- lbitsize
), 0);
2287 mask
= const_binop (RSHIFT_EXPR
, mask
,
2288 size_int (lnbitsize
- lbitsize
- lbitpos
), 0);
2291 /* If not comparing with constant, just rework the comparison
2293 return build (code
, compare_type
,
2294 build (BIT_AND_EXPR
, unsigned_type
,
2295 make_bit_field_ref (linner
, unsigned_type
,
2296 lnbitsize
, lnbitpos
, 1),
2298 build (BIT_AND_EXPR
, unsigned_type
,
2299 make_bit_field_ref (rinner
, unsigned_type
,
2300 rnbitsize
, rnbitpos
, 1),
2303 /* Otherwise, we are handling the constant case. See if the constant is too
2304 big for the field. Warn and return a tree of for 0 (false) if so. We do
2305 this not only for its own sake, but to avoid having to test for this
2306 error case below. If we didn't, we might generate wrong code.
2308 For unsigned fields, the constant shifted right by the field length should
2309 be all zero. For signed fields, the high-order bits should agree with
2314 if (! integer_zerop (const_binop (RSHIFT_EXPR
,
2315 convert (unsigned_type
, rhs
),
2316 size_int (lbitsize
), 0)))
2318 warning ("comparison is always %s due to width of bitfield",
2319 code
== NE_EXPR
? "one" : "zero");
2320 return convert (compare_type
,
2322 ? integer_one_node
: integer_zero_node
));
2327 tree tem
= const_binop (RSHIFT_EXPR
, convert (signed_type
, rhs
),
2328 size_int (lbitsize
- 1), 0);
2329 if (! integer_zerop (tem
) && ! integer_all_onesp (tem
))
2331 warning ("comparison is always %s due to width of bitfield",
2332 code
== NE_EXPR
? "one" : "zero");
2333 return convert (compare_type
,
2335 ? integer_one_node
: integer_zero_node
));
2339 /* Single-bit compares should always be against zero. */
2340 if (lbitsize
== 1 && ! integer_zerop (rhs
))
2342 code
= code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
;
2343 rhs
= convert (type
, integer_zero_node
);
2346 /* Make a new bitfield reference, shift the constant over the
2347 appropriate number of bits and mask it with the computed mask
2348 (in case this was a signed field). If we changed it, make a new one. */
2349 lhs
= make_bit_field_ref (linner
, unsigned_type
, lnbitsize
, lnbitpos
, 1);
2352 TREE_SIDE_EFFECTS (lhs
) = 1;
2353 TREE_THIS_VOLATILE (lhs
) = 1;
2356 rhs
= fold (const_binop (BIT_AND_EXPR
,
2357 const_binop (LSHIFT_EXPR
,
2358 convert (unsigned_type
, rhs
),
2359 size_int (lbitpos
), 0),
2362 return build (code
, compare_type
,
2363 build (BIT_AND_EXPR
, unsigned_type
, lhs
, mask
),
2367 /* Subroutine for fold_truthop: decode a field reference.
2369 If EXP is a comparison reference, we return the innermost reference.
2371 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
2372 set to the starting bit number.
2374 If the innermost field can be completely contained in a mode-sized
2375 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
2377 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
2378 otherwise it is not changed.
2380 *PUNSIGNEDP is set to the signedness of the field.
2382 *PMASK is set to the mask used. This is either contained in a
2383 BIT_AND_EXPR or derived from the width of the field.
2385 Return 0 if this is not a component reference or is one that we can't
2386 do anything with. */
2389 decode_field_reference (exp
, pbitsize
, pbitpos
, pmode
, punsignedp
,
2392 int *pbitsize
, *pbitpos
;
2393 enum machine_mode
*pmode
;
2394 int *punsignedp
, *pvolatilep
;
2398 tree mask
, inner
, offset
;
2402 /* All the optimizations using this function assume integer fields.
2403 There are problems with FP fields since the type_for_size call
2404 below can fail for, e.g., XFmode. */
2405 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp
)))
2410 if (TREE_CODE (exp
) == BIT_AND_EXPR
)
2412 and_mask
= TREE_OPERAND (exp
, 1);
2413 exp
= TREE_OPERAND (exp
, 0);
2414 STRIP_NOPS (exp
); STRIP_NOPS (and_mask
);
2415 if (TREE_CODE (and_mask
) != INTEGER_CST
)
2420 inner
= get_inner_reference (exp
, pbitsize
, pbitpos
, &offset
, pmode
,
2421 punsignedp
, pvolatilep
);
2422 if ((inner
== exp
&& and_mask
== 0)
2423 || *pbitsize
< 0 || offset
!= 0)
2426 /* Compute the mask to access the bitfield. */
2427 unsigned_type
= type_for_size (*pbitsize
, 1);
2428 precision
= TYPE_PRECISION (unsigned_type
);
2430 mask
= build_int_2 (~0, ~0);
2431 TREE_TYPE (mask
) = unsigned_type
;
2432 force_fit_type (mask
, 0);
2433 mask
= const_binop (LSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
), 0);
2434 mask
= const_binop (RSHIFT_EXPR
, mask
, size_int (precision
- *pbitsize
), 0);
2436 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
2438 mask
= fold (build (BIT_AND_EXPR
, unsigned_type
,
2439 convert (unsigned_type
, and_mask
), mask
));
2445 /* Return non-zero if MASK represents a mask of SIZE ones in the low-order
2449 all_ones_mask_p (mask
, size
)
2453 tree type
= TREE_TYPE (mask
);
2454 int precision
= TYPE_PRECISION (type
);
2457 tmask
= build_int_2 (~0, ~0);
2458 TREE_TYPE (tmask
) = signed_type (type
);
2459 force_fit_type (tmask
, 0);
2461 tree_int_cst_equal (mask
,
2462 const_binop (RSHIFT_EXPR
,
2463 const_binop (LSHIFT_EXPR
, tmask
,
2464 size_int (precision
- size
),
2466 size_int (precision
- size
), 0));
2469 /* Subroutine for fold_truthop: determine if an operand is simple enough
2470 to be evaluated unconditionally. */
2473 simple_operand_p (exp
)
2476 /* Strip any conversions that don't change the machine mode. */
2477 while ((TREE_CODE (exp
) == NOP_EXPR
2478 || TREE_CODE (exp
) == CONVERT_EXPR
)
2479 && (TYPE_MODE (TREE_TYPE (exp
))
2480 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)))))
2481 exp
= TREE_OPERAND (exp
, 0);
2483 return (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'c'
2484 || (TREE_CODE_CLASS (TREE_CODE (exp
)) == 'd'
2485 && ! TREE_ADDRESSABLE (exp
)
2486 && ! TREE_THIS_VOLATILE (exp
)
2487 && ! DECL_NONLOCAL (exp
)
2488 /* Don't regard global variables as simple. They may be
2489 allocated in ways unknown to the compiler (shared memory,
2490 #pragma weak, etc). */
2491 && ! TREE_PUBLIC (exp
)
2492 && ! DECL_EXTERNAL (exp
)
2493 /* Loading a static variable is unduly expensive, but global
2494 registers aren't expensive. */
2495 && (! TREE_STATIC (exp
) || DECL_REGISTER (exp
))));
2498 /* Subroutine for fold_truthop: try to optimize a range test.
2500 For example, "i >= 2 && i =< 9" can be done as "(unsigned) (i - 2) <= 7".
2502 JCODE is the logical combination of the two terms. It is TRUTH_AND_EXPR
2503 (representing TRUTH_ANDIF_EXPR and TRUTH_AND_EXPR) or TRUTH_OR_EXPR
2504 (representing TRUTH_ORIF_EXPR and TRUTH_OR_EXPR). TYPE is the type of
2507 VAR is the value being tested. LO_CODE and HI_CODE are the comparison
2508 operators comparing VAR to LO_CST and HI_CST. LO_CST is known to be no
2509 larger than HI_CST (they may be equal).
2511 We return the simplified tree or 0 if no optimization is possible. */
2514 range_test (jcode
, type
, lo_code
, hi_code
, var
, lo_cst
, hi_cst
)
2515 enum tree_code jcode
, lo_code
, hi_code
;
2516 tree type
, var
, lo_cst
, hi_cst
;
2519 enum tree_code rcode
;
2521 /* See if this is a range test and normalize the constant terms. */
2523 if (jcode
== TRUTH_AND_EXPR
)
2528 /* See if we have VAR != CST && VAR != CST+1. */
2529 if (! (hi_code
== NE_EXPR
2530 && TREE_INT_CST_LOW (hi_cst
) - TREE_INT_CST_LOW (lo_cst
) == 1
2531 && tree_int_cst_equal (integer_one_node
,
2532 const_binop (MINUS_EXPR
,
2533 hi_cst
, lo_cst
, 0))))
2541 if (hi_code
== LT_EXPR
)
2542 hi_cst
= const_binop (MINUS_EXPR
, hi_cst
, integer_one_node
, 0);
2543 else if (hi_code
!= LE_EXPR
)
2546 if (lo_code
== GT_EXPR
)
2547 lo_cst
= const_binop (PLUS_EXPR
, lo_cst
, integer_one_node
, 0);
2549 /* We now have VAR >= LO_CST && VAR <= HI_CST. */
2562 /* See if we have VAR == CST || VAR == CST+1. */
2563 if (! (hi_code
== EQ_EXPR
2564 && TREE_INT_CST_LOW (hi_cst
) - TREE_INT_CST_LOW (lo_cst
) == 1
2565 && tree_int_cst_equal (integer_one_node
,
2566 const_binop (MINUS_EXPR
,
2567 hi_cst
, lo_cst
, 0))))
2575 if (hi_code
== GE_EXPR
)
2576 hi_cst
= const_binop (MINUS_EXPR
, hi_cst
, integer_one_node
, 0);
2577 else if (hi_code
!= GT_EXPR
)
2580 if (lo_code
== LE_EXPR
)
2581 lo_cst
= const_binop (PLUS_EXPR
, lo_cst
, integer_one_node
, 0);
2583 /* We now have VAR < LO_CST || VAR > HI_CST. */
2592 /* When normalizing, it is possible to both increment the smaller constant
2593 and decrement the larger constant. See if they are still ordered. */
2594 if (tree_int_cst_lt (hi_cst
, lo_cst
))
2597 /* Fail if VAR isn't an integer. */
2598 utype
= TREE_TYPE (var
);
2599 if (! INTEGRAL_TYPE_P (utype
))
2602 /* The range test is invalid if subtracting the two constants results
2603 in overflow. This can happen in traditional mode. */
2604 if (! int_fits_type_p (hi_cst
, TREE_TYPE (var
))
2605 || ! int_fits_type_p (lo_cst
, TREE_TYPE (var
)))
2608 if (! TREE_UNSIGNED (utype
))
2610 utype
= unsigned_type (utype
);
2611 var
= convert (utype
, var
);
2612 lo_cst
= convert (utype
, lo_cst
);
2613 hi_cst
= convert (utype
, hi_cst
);
2616 return fold (convert (type
,
2617 build (rcode
, utype
,
2618 build (MINUS_EXPR
, utype
, var
, lo_cst
),
2619 const_binop (MINUS_EXPR
, hi_cst
, lo_cst
, 0))));
2622 /* Subroutine for fold_truthop: C is an INTEGER_CST interpreted as a P
2623 bit value. Arrange things so the extra bits will be set to zero if and
2624 only if C is signed-extended to its full width. */
2627 unextend (c
, p
, unsignedp
)
2632 tree type
= TREE_TYPE (c
);
2633 int modesize
= GET_MODE_BITSIZE (TYPE_MODE (type
));
2636 if (p
== modesize
|| unsignedp
)
2639 if (TREE_UNSIGNED (type
))
2640 c
= convert (signed_type (type
), c
);
2642 /* We work by getting just the sign bit into the low-order bit, then
2643 into the high-order bit, then sign-extend. We then XOR that value
2645 temp
= const_binop (RSHIFT_EXPR
, c
, size_int (p
- 1), 0);
2646 temp
= const_binop (BIT_AND_EXPR
, temp
, size_int (1), 0);
2647 temp
= const_binop (LSHIFT_EXPR
, temp
, size_int (modesize
- 1), 0);
2648 temp
= const_binop (RSHIFT_EXPR
, temp
, size_int (modesize
- p
- 1), 0);
2649 return convert (type
, const_binop (BIT_XOR_EXPR
, c
, temp
, 0));
2652 /* Find ways of folding logical expressions of LHS and RHS:
2653 Try to merge two comparisons to the same innermost item.
2654 Look for range tests like "ch >= '0' && ch <= '9'".
2655 Look for combinations of simple terms on machines with expensive branches
2656 and evaluate the RHS unconditionally.
2658 For example, if we have p->a == 2 && p->b == 4 and we can make an
2659 object large enough to span both A and B, we can do this with a comparison
2660 against the object ANDed with the a mask.
2662 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
2663 operations to do this with one comparison.
2665 We check for both normal comparisons and the BIT_AND_EXPRs made this by
2666 function and the one above.
2668 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
2669 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
2671 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
2674 We return the simplified tree or 0 if no optimization is possible. */
2677 fold_truthop (code
, truth_type
, lhs
, rhs
)
2678 enum tree_code code
;
2679 tree truth_type
, lhs
, rhs
;
2681 /* If this is the "or" of two comparisons, we can do something if we
2682 the comparisons are NE_EXPR. If this is the "and", we can do something
2683 if the comparisons are EQ_EXPR. I.e.,
2684 (a->b == 2 && a->c == 4) can become (a->new == NEW).
2686 WANTED_CODE is this operation code. For single bit fields, we can
2687 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
2688 comparison for one-bit fields. */
2690 enum tree_code wanted_code
;
2691 enum tree_code lcode
, rcode
;
2692 tree ll_arg
, lr_arg
, rl_arg
, rr_arg
;
2693 tree ll_inner
, lr_inner
, rl_inner
, rr_inner
;
2694 int ll_bitsize
, ll_bitpos
, lr_bitsize
, lr_bitpos
;
2695 int rl_bitsize
, rl_bitpos
, rr_bitsize
, rr_bitpos
;
2696 int xll_bitpos
, xlr_bitpos
, xrl_bitpos
, xrr_bitpos
;
2697 int lnbitsize
, lnbitpos
, rnbitsize
, rnbitpos
;
2698 int ll_unsignedp
, lr_unsignedp
, rl_unsignedp
, rr_unsignedp
;
2699 enum machine_mode ll_mode
, lr_mode
, rl_mode
, rr_mode
;
2700 enum machine_mode lnmode
, rnmode
;
2701 tree ll_mask
, lr_mask
, rl_mask
, rr_mask
;
2702 tree l_const
, r_const
;
2704 int first_bit
, end_bit
;
2707 /* Start by getting the comparison codes and seeing if this looks like
2708 a range test. Fail if anything is volatile. If one operand is a
2709 BIT_AND_EXPR with the constant one, treat it as if it were surrounded
2712 if (TREE_SIDE_EFFECTS (lhs
)
2713 || TREE_SIDE_EFFECTS (rhs
))
2716 lcode
= TREE_CODE (lhs
);
2717 rcode
= TREE_CODE (rhs
);
2719 if (lcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (lhs
, 1)))
2720 lcode
= NE_EXPR
, lhs
= build (NE_EXPR
, truth_type
, lhs
, integer_zero_node
);
2722 if (rcode
== BIT_AND_EXPR
&& integer_onep (TREE_OPERAND (rhs
, 1)))
2723 rcode
= NE_EXPR
, rhs
= build (NE_EXPR
, truth_type
, rhs
, integer_zero_node
);
2725 if (TREE_CODE_CLASS (lcode
) != '<'
2726 || TREE_CODE_CLASS (rcode
) != '<')
2729 code
= ((code
== TRUTH_AND_EXPR
|| code
== TRUTH_ANDIF_EXPR
)
2730 ? TRUTH_AND_EXPR
: TRUTH_OR_EXPR
);
2732 ll_arg
= TREE_OPERAND (lhs
, 0);
2733 lr_arg
= TREE_OPERAND (lhs
, 1);
2734 rl_arg
= TREE_OPERAND (rhs
, 0);
2735 rr_arg
= TREE_OPERAND (rhs
, 1);
2737 if (TREE_CODE (lr_arg
) == INTEGER_CST
2738 && TREE_CODE (rr_arg
) == INTEGER_CST
2739 && operand_equal_p (ll_arg
, rl_arg
, 0))
2741 if (tree_int_cst_lt (lr_arg
, rr_arg
))
2742 result
= range_test (code
, truth_type
, lcode
, rcode
,
2743 ll_arg
, lr_arg
, rr_arg
);
2745 result
= range_test (code
, truth_type
, rcode
, lcode
,
2746 ll_arg
, rr_arg
, lr_arg
);
2748 /* If this isn't a range test, it also isn't a comparison that
2749 can be merged. However, it wins to evaluate the RHS unconditionally
2750 on machines with expensive branches. */
2752 if (result
== 0 && BRANCH_COST
>= 2)
2754 if (TREE_CODE (ll_arg
) != VAR_DECL
2755 && TREE_CODE (ll_arg
) != PARM_DECL
)
2757 /* Avoid evaluating the variable part twice. */
2758 ll_arg
= save_expr (ll_arg
);
2759 lhs
= build (lcode
, TREE_TYPE (lhs
), ll_arg
, lr_arg
);
2760 rhs
= build (rcode
, TREE_TYPE (rhs
), ll_arg
, rr_arg
);
2762 return build (code
, truth_type
, lhs
, rhs
);
2767 /* If the RHS can be evaluated unconditionally and its operands are
2768 simple, it wins to evaluate the RHS unconditionally on machines
2769 with expensive branches. In this case, this isn't a comparison
2770 that can be merged. */
2772 /* @@ I'm not sure it wins on the m88110 to do this if the comparisons
2773 are with zero (tmw). */
2775 if (BRANCH_COST
>= 2
2776 && INTEGRAL_TYPE_P (TREE_TYPE (rhs
))
2777 && simple_operand_p (rl_arg
)
2778 && simple_operand_p (rr_arg
))
2779 return build (code
, truth_type
, lhs
, rhs
);
2781 /* See if the comparisons can be merged. Then get all the parameters for
2784 if ((lcode
!= EQ_EXPR
&& lcode
!= NE_EXPR
)
2785 || (rcode
!= EQ_EXPR
&& rcode
!= NE_EXPR
))
2789 ll_inner
= decode_field_reference (ll_arg
,
2790 &ll_bitsize
, &ll_bitpos
, &ll_mode
,
2791 &ll_unsignedp
, &volatilep
, &ll_mask
);
2792 lr_inner
= decode_field_reference (lr_arg
,
2793 &lr_bitsize
, &lr_bitpos
, &lr_mode
,
2794 &lr_unsignedp
, &volatilep
, &lr_mask
);
2795 rl_inner
= decode_field_reference (rl_arg
,
2796 &rl_bitsize
, &rl_bitpos
, &rl_mode
,
2797 &rl_unsignedp
, &volatilep
, &rl_mask
);
2798 rr_inner
= decode_field_reference (rr_arg
,
2799 &rr_bitsize
, &rr_bitpos
, &rr_mode
,
2800 &rr_unsignedp
, &volatilep
, &rr_mask
);
2802 /* It must be true that the inner operation on the lhs of each
2803 comparison must be the same if we are to be able to do anything.
2804 Then see if we have constants. If not, the same must be true for
2806 if (volatilep
|| ll_inner
== 0 || rl_inner
== 0
2807 || ! operand_equal_p (ll_inner
, rl_inner
, 0))
2810 if (TREE_CODE (lr_arg
) == INTEGER_CST
2811 && TREE_CODE (rr_arg
) == INTEGER_CST
)
2812 l_const
= lr_arg
, r_const
= rr_arg
;
2813 else if (lr_inner
== 0 || rr_inner
== 0
2814 || ! operand_equal_p (lr_inner
, rr_inner
, 0))
2817 l_const
= r_const
= 0;
2819 /* If either comparison code is not correct for our logical operation,
2820 fail. However, we can convert a one-bit comparison against zero into
2821 the opposite comparison against that bit being set in the field. */
2823 wanted_code
= (code
== TRUTH_AND_EXPR
? EQ_EXPR
: NE_EXPR
);
2824 if (lcode
!= wanted_code
)
2826 if (l_const
&& integer_zerop (l_const
) && integer_pow2p (ll_mask
))
2832 if (rcode
!= wanted_code
)
2834 if (r_const
&& integer_zerop (r_const
) && integer_pow2p (rl_mask
))
2840 /* See if we can find a mode that contains both fields being compared on
2841 the left. If we can't, fail. Otherwise, update all constants and masks
2842 to be relative to a field of that size. */
2843 first_bit
= MIN (ll_bitpos
, rl_bitpos
);
2844 end_bit
= MAX (ll_bitpos
+ ll_bitsize
, rl_bitpos
+ rl_bitsize
);
2845 lnmode
= get_best_mode (end_bit
- first_bit
, first_bit
,
2846 TYPE_ALIGN (TREE_TYPE (ll_inner
)), word_mode
,
2848 if (lnmode
== VOIDmode
)
2851 lnbitsize
= GET_MODE_BITSIZE (lnmode
);
2852 lnbitpos
= first_bit
& ~ (lnbitsize
- 1);
2853 type
= type_for_size (lnbitsize
, 1);
2854 xll_bitpos
= ll_bitpos
- lnbitpos
, xrl_bitpos
= rl_bitpos
- lnbitpos
;
2856 if (BYTES_BIG_ENDIAN
)
2858 xll_bitpos
= lnbitsize
- xll_bitpos
- ll_bitsize
;
2859 xrl_bitpos
= lnbitsize
- xrl_bitpos
- rl_bitsize
;
2862 ll_mask
= const_binop (LSHIFT_EXPR
, convert (type
, ll_mask
),
2863 size_int (xll_bitpos
), 0);
2864 rl_mask
= const_binop (LSHIFT_EXPR
, convert (type
, rl_mask
),
2865 size_int (xrl_bitpos
), 0);
2869 l_const
= convert (type
, unextend (l_const
, ll_bitsize
, ll_unsignedp
));
2870 l_const
= const_binop (LSHIFT_EXPR
, l_const
, size_int (xll_bitpos
), 0);
2871 if (! integer_zerop (const_binop (BIT_AND_EXPR
, l_const
,
2872 fold (build1 (BIT_NOT_EXPR
,
2876 warning ("comparison is always %s",
2877 wanted_code
== NE_EXPR
? "one" : "zero");
2879 return convert (truth_type
,
2880 wanted_code
== NE_EXPR
2881 ? integer_one_node
: integer_zero_node
);
2886 r_const
= convert (type
, unextend (r_const
, rl_bitsize
, rl_unsignedp
));
2887 r_const
= const_binop (LSHIFT_EXPR
, r_const
, size_int (xrl_bitpos
), 0);
2888 if (! integer_zerop (const_binop (BIT_AND_EXPR
, r_const
,
2889 fold (build1 (BIT_NOT_EXPR
,
2893 warning ("comparison is always %s",
2894 wanted_code
== NE_EXPR
? "one" : "zero");
2896 return convert (truth_type
,
2897 wanted_code
== NE_EXPR
2898 ? integer_one_node
: integer_zero_node
);
2902 /* If the right sides are not constant, do the same for it. Also,
2903 disallow this optimization if a size or signedness mismatch occurs
2904 between the left and right sides. */
2907 if (ll_bitsize
!= lr_bitsize
|| rl_bitsize
!= rr_bitsize
2908 || ll_unsignedp
!= lr_unsignedp
|| rl_unsignedp
!= rr_unsignedp
2909 /* Make sure the two fields on the right
2910 correspond to the left without being swapped. */
2911 || ll_bitpos
- rl_bitpos
!= lr_bitpos
- rr_bitpos
)
2914 first_bit
= MIN (lr_bitpos
, rr_bitpos
);
2915 end_bit
= MAX (lr_bitpos
+ lr_bitsize
, rr_bitpos
+ rr_bitsize
);
2916 rnmode
= get_best_mode (end_bit
- first_bit
, first_bit
,
2917 TYPE_ALIGN (TREE_TYPE (lr_inner
)), word_mode
,
2919 if (rnmode
== VOIDmode
)
2922 rnbitsize
= GET_MODE_BITSIZE (rnmode
);
2923 rnbitpos
= first_bit
& ~ (rnbitsize
- 1);
2924 xlr_bitpos
= lr_bitpos
- rnbitpos
, xrr_bitpos
= rr_bitpos
- rnbitpos
;
2926 if (BYTES_BIG_ENDIAN
)
2928 xlr_bitpos
= rnbitsize
- xlr_bitpos
- lr_bitsize
;
2929 xrr_bitpos
= rnbitsize
- xrr_bitpos
- rr_bitsize
;
2932 lr_mask
= const_binop (LSHIFT_EXPR
, convert (type
, lr_mask
),
2933 size_int (xlr_bitpos
), 0);
2934 rr_mask
= const_binop (LSHIFT_EXPR
, convert (type
, rr_mask
),
2935 size_int (xrr_bitpos
), 0);
2937 /* Make a mask that corresponds to both fields being compared.
2938 Do this for both items being compared. If the masks agree,
2939 we can do this by masking both and comparing the masked
2941 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
, 0);
2942 lr_mask
= const_binop (BIT_IOR_EXPR
, lr_mask
, rr_mask
, 0);
2943 if (operand_equal_p (ll_mask
, lr_mask
, 0) && lnbitsize
== rnbitsize
)
2945 lhs
= make_bit_field_ref (ll_inner
, type
, lnbitsize
, lnbitpos
,
2946 ll_unsignedp
|| rl_unsignedp
);
2947 rhs
= make_bit_field_ref (lr_inner
, type
, rnbitsize
, rnbitpos
,
2948 lr_unsignedp
|| rr_unsignedp
);
2949 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
2951 lhs
= build (BIT_AND_EXPR
, type
, lhs
, ll_mask
);
2952 rhs
= build (BIT_AND_EXPR
, type
, rhs
, ll_mask
);
2954 return build (wanted_code
, truth_type
, lhs
, rhs
);
2957 /* There is still another way we can do something: If both pairs of
2958 fields being compared are adjacent, we may be able to make a wider
2959 field containing them both. */
2960 if ((ll_bitsize
+ ll_bitpos
== rl_bitpos
2961 && lr_bitsize
+ lr_bitpos
== rr_bitpos
)
2962 || (ll_bitpos
== rl_bitpos
+ rl_bitsize
2963 && lr_bitpos
== rr_bitpos
+ rr_bitsize
))
2964 return build (wanted_code
, truth_type
,
2965 make_bit_field_ref (ll_inner
, type
,
2966 ll_bitsize
+ rl_bitsize
,
2967 MIN (ll_bitpos
, rl_bitpos
),
2969 make_bit_field_ref (lr_inner
, type
,
2970 lr_bitsize
+ rr_bitsize
,
2971 MIN (lr_bitpos
, rr_bitpos
),
2977 /* Handle the case of comparisons with constants. If there is something in
2978 common between the masks, those bits of the constants must be the same.
2979 If not, the condition is always false. Test for this to avoid generating
2980 incorrect code below. */
2981 result
= const_binop (BIT_AND_EXPR
, ll_mask
, rl_mask
, 0);
2982 if (! integer_zerop (result
)
2983 && simple_cst_equal (const_binop (BIT_AND_EXPR
, result
, l_const
, 0),
2984 const_binop (BIT_AND_EXPR
, result
, r_const
, 0)) != 1)
2986 if (wanted_code
== NE_EXPR
)
2988 warning ("`or' of unmatched not-equal tests is always 1");
2989 return convert (truth_type
, integer_one_node
);
2993 warning ("`and' of mutually exclusive equal-tests is always zero");
2994 return convert (truth_type
, integer_zero_node
);
2998 /* Construct the expression we will return. First get the component
2999 reference we will make. Unless the mask is all ones the width of
3000 that field, perform the mask operation. Then compare with the
3002 result
= make_bit_field_ref (ll_inner
, type
, lnbitsize
, lnbitpos
,
3003 ll_unsignedp
|| rl_unsignedp
);
3005 ll_mask
= const_binop (BIT_IOR_EXPR
, ll_mask
, rl_mask
, 0);
3006 if (! all_ones_mask_p (ll_mask
, lnbitsize
))
3007 result
= build (BIT_AND_EXPR
, type
, result
, ll_mask
);
3009 return build (wanted_code
, truth_type
, result
,
3010 const_binop (BIT_IOR_EXPR
, l_const
, r_const
, 0));
3013 /* If T contains a COMPOUND_EXPR which was inserted merely to evaluate
3014 S, a SAVE_EXPR, return the expression actually being evaluated. Note
3015 that we may sometimes modify the tree. */
3018 strip_compound_expr (t
, s
)
3022 tree type
= TREE_TYPE (t
);
3023 enum tree_code code
= TREE_CODE (t
);
3025 /* See if this is the COMPOUND_EXPR we want to eliminate. */
3026 if (code
== COMPOUND_EXPR
&& TREE_CODE (TREE_OPERAND (t
, 0)) == CONVERT_EXPR
3027 && TREE_OPERAND (TREE_OPERAND (t
, 0), 0) == s
)
3028 return TREE_OPERAND (t
, 1);
3030 /* See if this is a COND_EXPR or a simple arithmetic operator. We
3031 don't bother handling any other types. */
3032 else if (code
== COND_EXPR
)
3034 TREE_OPERAND (t
, 0) = strip_compound_expr (TREE_OPERAND (t
, 0), s
);
3035 TREE_OPERAND (t
, 1) = strip_compound_expr (TREE_OPERAND (t
, 1), s
);
3036 TREE_OPERAND (t
, 2) = strip_compound_expr (TREE_OPERAND (t
, 2), s
);
3038 else if (TREE_CODE_CLASS (code
) == '1')
3039 TREE_OPERAND (t
, 0) = strip_compound_expr (TREE_OPERAND (t
, 0), s
);
3040 else if (TREE_CODE_CLASS (code
) == '<'
3041 || TREE_CODE_CLASS (code
) == '2')
3043 TREE_OPERAND (t
, 0) = strip_compound_expr (TREE_OPERAND (t
, 0), s
);
3044 TREE_OPERAND (t
, 1) = strip_compound_expr (TREE_OPERAND (t
, 1), s
);
3050 /* Perform constant folding and related simplification of EXPR.
3051 The related simplifications include x*1 => x, x*0 => 0, etc.,
3052 and application of the associative law.
3053 NOP_EXPR conversions may be removed freely (as long as we
3054 are careful not to change the C type of the overall expression)
3055 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
3056 but we can constant-fold them if they have constant operands. */
3062 register tree t
= expr
;
3063 tree t1
= NULL_TREE
;
3065 tree type
= TREE_TYPE (expr
);
3066 register tree arg0
, arg1
;
3067 register enum tree_code code
= TREE_CODE (t
);
3071 /* WINS will be nonzero when the switch is done
3072 if all operands are constant. */
3076 /* Don't try to process an RTL_EXPR since its operands aren't trees. */
3077 if (code
== RTL_EXPR
)
3080 /* Return right away if already constant. */
3081 if (TREE_CONSTANT (t
))
3083 if (code
== CONST_DECL
)
3084 return DECL_INITIAL (t
);
3088 kind
= TREE_CODE_CLASS (code
);
3089 if (code
== NOP_EXPR
|| code
== FLOAT_EXPR
|| code
== CONVERT_EXPR
)
3093 /* Special case for conversion ops that can have fixed point args. */
3094 arg0
= TREE_OPERAND (t
, 0);
3096 /* Don't use STRIP_NOPS, because signedness of argument type matters. */
3098 STRIP_TYPE_NOPS (arg0
);
3100 if (arg0
!= 0 && TREE_CODE (arg0
) == COMPLEX_CST
)
3101 subop
= TREE_REALPART (arg0
);
3105 if (subop
!= 0 && TREE_CODE (subop
) != INTEGER_CST
3106 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3107 && TREE_CODE (subop
) != REAL_CST
3108 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3110 /* Note that TREE_CONSTANT isn't enough:
3111 static var addresses are constant but we can't
3112 do arithmetic on them. */
3115 else if (kind
== 'e' || kind
== '<'
3116 || kind
== '1' || kind
== '2' || kind
== 'r')
3118 register int len
= tree_code_length
[(int) code
];
3120 for (i
= 0; i
< len
; i
++)
3122 tree op
= TREE_OPERAND (t
, i
);
3126 continue; /* Valid for CALL_EXPR, at least. */
3128 if (kind
== '<' || code
== RSHIFT_EXPR
)
3130 /* Signedness matters here. Perhaps we can refine this
3132 STRIP_TYPE_NOPS (op
);
3136 /* Strip any conversions that don't change the mode. */
3140 if (TREE_CODE (op
) == COMPLEX_CST
)
3141 subop
= TREE_REALPART (op
);
3145 if (TREE_CODE (subop
) != INTEGER_CST
3146 #if ! defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3147 && TREE_CODE (subop
) != REAL_CST
3148 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3150 /* Note that TREE_CONSTANT isn't enough:
3151 static var addresses are constant but we can't
3152 do arithmetic on them. */
3162 /* If this is a commutative operation, and ARG0 is a constant, move it
3163 to ARG1 to reduce the number of tests below. */
3164 if ((code
== PLUS_EXPR
|| code
== MULT_EXPR
|| code
== MIN_EXPR
3165 || code
== MAX_EXPR
|| code
== BIT_IOR_EXPR
|| code
== BIT_XOR_EXPR
3166 || code
== BIT_AND_EXPR
)
3167 && (TREE_CODE (arg0
) == INTEGER_CST
|| TREE_CODE (arg0
) == REAL_CST
))
3169 tem
= arg0
; arg0
= arg1
; arg1
= tem
;
3171 tem
= TREE_OPERAND (t
, 0); TREE_OPERAND (t
, 0) = TREE_OPERAND (t
, 1);
3172 TREE_OPERAND (t
, 1) = tem
;
3175 /* Now WINS is set as described above,
3176 ARG0 is the first operand of EXPR,
3177 and ARG1 is the second operand (if it has more than one operand).
3179 First check for cases where an arithmetic operation is applied to a
3180 compound, conditional, or comparison operation. Push the arithmetic
3181 operation inside the compound or conditional to see if any folding
3182 can then be done. Convert comparison to conditional for this purpose.
3183 The also optimizes non-constant cases that used to be done in
3186 Before we do that, see if this is a BIT_AND_EXPR or a BIT_OR_EXPR,
3187 one of the operands is a comparison and the other is a comparison, a
3188 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
3189 code below would make the expression more complex. Change it to a
3190 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
3191 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
3193 if ((code
== BIT_AND_EXPR
|| code
== BIT_IOR_EXPR
3194 || code
== EQ_EXPR
|| code
== NE_EXPR
)
3195 && ((truth_value_p (TREE_CODE (arg0
))
3196 && (truth_value_p (TREE_CODE (arg1
))
3197 || (TREE_CODE (arg1
) == BIT_AND_EXPR
3198 && integer_onep (TREE_OPERAND (arg1
, 1)))))
3199 || (truth_value_p (TREE_CODE (arg1
))
3200 && (truth_value_p (TREE_CODE (arg0
))
3201 || (TREE_CODE (arg0
) == BIT_AND_EXPR
3202 && integer_onep (TREE_OPERAND (arg0
, 1)))))))
3204 t
= fold (build (code
== BIT_AND_EXPR
? TRUTH_AND_EXPR
3205 : code
== BIT_IOR_EXPR
? TRUTH_OR_EXPR
3209 if (code
== EQ_EXPR
)
3210 t
= invert_truthvalue (t
);
3215 if (TREE_CODE_CLASS (code
) == '1')
3217 if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
3218 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
3219 fold (build1 (code
, type
, TREE_OPERAND (arg0
, 1))));
3220 else if (TREE_CODE (arg0
) == COND_EXPR
)
3222 t
= fold (build (COND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
3223 fold (build1 (code
, type
, TREE_OPERAND (arg0
, 1))),
3224 fold (build1 (code
, type
, TREE_OPERAND (arg0
, 2)))));
3226 /* If this was a conversion, and all we did was to move into
3227 inside the COND_EXPR, bring it back out. But leave it if
3228 it is a conversion from integer to integer and the
3229 result precision is no wider than a word since such a
3230 conversion is cheap and may be optimized away by combine,
3231 while it couldn't if it were outside the COND_EXPR. Then return
3232 so we don't get into an infinite recursion loop taking the
3233 conversion out and then back in. */
3235 if ((code
== NOP_EXPR
|| code
== CONVERT_EXPR
3236 || code
== NON_LVALUE_EXPR
)
3237 && TREE_CODE (t
) == COND_EXPR
3238 && TREE_CODE (TREE_OPERAND (t
, 1)) == code
3239 && TREE_CODE (TREE_OPERAND (t
, 2)) == code
3240 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0))
3241 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 2), 0)))
3242 && ! (INTEGRAL_TYPE_P (TREE_TYPE (t
))
3243 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0)))
3244 && TYPE_PRECISION (TREE_TYPE (t
)) <= BITS_PER_WORD
))
3245 t
= build1 (code
, type
,
3247 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 1), 0)),
3248 TREE_OPERAND (t
, 0),
3249 TREE_OPERAND (TREE_OPERAND (t
, 1), 0),
3250 TREE_OPERAND (TREE_OPERAND (t
, 2), 0)));
3253 else if (TREE_CODE_CLASS (TREE_CODE (arg0
)) == '<')
3254 return fold (build (COND_EXPR
, type
, arg0
,
3255 fold (build1 (code
, type
, integer_one_node
)),
3256 fold (build1 (code
, type
, integer_zero_node
))));
3258 else if (TREE_CODE_CLASS (code
) == '2'
3259 || TREE_CODE_CLASS (code
) == '<')
3261 if (TREE_CODE (arg1
) == COMPOUND_EXPR
)
3262 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
3263 fold (build (code
, type
,
3264 arg0
, TREE_OPERAND (arg1
, 1))));
3265 else if (TREE_CODE (arg1
) == COND_EXPR
3266 || TREE_CODE_CLASS (TREE_CODE (arg1
)) == '<')
3268 tree test
, true_value
, false_value
;
3270 if (TREE_CODE (arg1
) == COND_EXPR
)
3272 test
= TREE_OPERAND (arg1
, 0);
3273 true_value
= TREE_OPERAND (arg1
, 1);
3274 false_value
= TREE_OPERAND (arg1
, 2);
3278 tree testtype
= TREE_TYPE (arg1
);
3280 true_value
= convert (testtype
, integer_one_node
);
3281 false_value
= convert (testtype
, integer_zero_node
);
3284 /* If ARG0 is complex we want to make sure we only evaluate
3285 it once. Though this is only required if it is volatile, it
3286 might be more efficient even if it is not. However, if we
3287 succeed in folding one part to a constant, we do not need
3288 to make this SAVE_EXPR. Since we do this optimization
3289 primarily to see if we do end up with constant and this
3290 SAVE_EXPR interferes with later optimizations, suppressing
3291 it when we can is important. */
3293 if (TREE_CODE (arg0
) != SAVE_EXPR
3294 && ((TREE_CODE (arg0
) != VAR_DECL
3295 && TREE_CODE (arg0
) != PARM_DECL
)
3296 || TREE_SIDE_EFFECTS (arg0
)))
3298 tree lhs
= fold (build (code
, type
, arg0
, true_value
));
3299 tree rhs
= fold (build (code
, type
, arg0
, false_value
));
3301 if (TREE_CONSTANT (lhs
) || TREE_CONSTANT (rhs
))
3302 return fold (build (COND_EXPR
, type
, test
, lhs
, rhs
));
3304 arg0
= save_expr (arg0
);
3307 test
= fold (build (COND_EXPR
, type
, test
,
3308 fold (build (code
, type
, arg0
, true_value
)),
3309 fold (build (code
, type
, arg0
, false_value
))));
3310 if (TREE_CODE (arg0
) == SAVE_EXPR
)
3311 return build (COMPOUND_EXPR
, type
,
3312 convert (void_type_node
, arg0
),
3313 strip_compound_expr (test
, arg0
));
3315 return convert (type
, test
);
3318 else if (TREE_CODE (arg0
) == COMPOUND_EXPR
)
3319 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
3320 fold (build (code
, type
, TREE_OPERAND (arg0
, 1), arg1
)));
3321 else if (TREE_CODE (arg0
) == COND_EXPR
3322 || TREE_CODE_CLASS (TREE_CODE (arg0
)) == '<')
3324 tree test
, true_value
, false_value
;
3326 if (TREE_CODE (arg0
) == COND_EXPR
)
3328 test
= TREE_OPERAND (arg0
, 0);
3329 true_value
= TREE_OPERAND (arg0
, 1);
3330 false_value
= TREE_OPERAND (arg0
, 2);
3334 tree testtype
= TREE_TYPE (arg0
);
3336 true_value
= convert (testtype
, integer_one_node
);
3337 false_value
= convert (testtype
, integer_zero_node
);
3340 if (TREE_CODE (arg1
) != SAVE_EXPR
3341 && ((TREE_CODE (arg1
) != VAR_DECL
3342 && TREE_CODE (arg1
) != PARM_DECL
)
3343 || TREE_SIDE_EFFECTS (arg1
)))
3345 tree lhs
= fold (build (code
, type
, true_value
, arg1
));
3346 tree rhs
= fold (build (code
, type
, false_value
, arg1
));
3348 if (TREE_CONSTANT (lhs
) || TREE_CONSTANT (rhs
)
3349 || TREE_CONSTANT (arg1
))
3350 return fold (build (COND_EXPR
, type
, test
, lhs
, rhs
));
3352 arg1
= save_expr (arg1
);
3355 test
= fold (build (COND_EXPR
, type
, test
,
3356 fold (build (code
, type
, true_value
, arg1
)),
3357 fold (build (code
, type
, false_value
, arg1
))));
3358 if (TREE_CODE (arg1
) == SAVE_EXPR
)
3359 return build (COMPOUND_EXPR
, type
,
3360 convert (void_type_node
, arg1
),
3361 strip_compound_expr (test
, arg1
));
3363 return convert (type
, test
);
3366 else if (TREE_CODE_CLASS (code
) == '<'
3367 && TREE_CODE (arg0
) == COMPOUND_EXPR
)
3368 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg0
, 0),
3369 fold (build (code
, type
, TREE_OPERAND (arg0
, 1), arg1
)));
3370 else if (TREE_CODE_CLASS (code
) == '<'
3371 && TREE_CODE (arg1
) == COMPOUND_EXPR
)
3372 return build (COMPOUND_EXPR
, type
, TREE_OPERAND (arg1
, 0),
3373 fold (build (code
, type
, arg0
, TREE_OPERAND (arg1
, 1))));
3385 return fold (DECL_INITIAL (t
));
3390 case FIX_TRUNC_EXPR
:
3391 /* Other kinds of FIX are not handled properly by fold_convert. */
3393 if (TREE_TYPE (TREE_OPERAND (t
, 0)) == TREE_TYPE (t
))
3394 return TREE_OPERAND (t
, 0);
3396 /* Handle cases of two conversions in a row. */
3397 if (TREE_CODE (TREE_OPERAND (t
, 0)) == NOP_EXPR
3398 || TREE_CODE (TREE_OPERAND (t
, 0)) == CONVERT_EXPR
)
3400 tree inside_type
= TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
3401 tree inter_type
= TREE_TYPE (TREE_OPERAND (t
, 0));
3402 tree final_type
= TREE_TYPE (t
);
3403 int inside_int
= INTEGRAL_TYPE_P (inside_type
);
3404 int inside_ptr
= POINTER_TYPE_P (inside_type
);
3405 int inside_float
= FLOAT_TYPE_P (inside_type
);
3406 int inside_prec
= TYPE_PRECISION (inside_type
);
3407 int inside_unsignedp
= TREE_UNSIGNED (inside_type
);
3408 int inter_int
= INTEGRAL_TYPE_P (inter_type
);
3409 int inter_ptr
= POINTER_TYPE_P (inter_type
);
3410 int inter_float
= FLOAT_TYPE_P (inter_type
);
3411 int inter_prec
= TYPE_PRECISION (inter_type
);
3412 int inter_unsignedp
= TREE_UNSIGNED (inter_type
);
3413 int final_int
= INTEGRAL_TYPE_P (final_type
);
3414 int final_ptr
= POINTER_TYPE_P (final_type
);
3415 int final_float
= FLOAT_TYPE_P (final_type
);
3416 int final_prec
= TYPE_PRECISION (final_type
);
3417 int final_unsignedp
= TREE_UNSIGNED (final_type
);
3419 /* In addition to the cases of two conversions in a row
3420 handled below, if we are converting something to its own
3421 type via an object of identical or wider precision, neither
3422 conversion is needed. */
3423 if (inside_type
== final_type
3424 && ((inter_int
&& final_int
) || (inter_float
&& final_float
))
3425 && inter_prec
>= final_prec
)
3426 return TREE_OPERAND (TREE_OPERAND (t
, 0), 0);
3428 /* Likewise, if the intermediate and final types are either both
3429 float or both integer, we don't need the middle conversion if
3430 it is wider than the final type and doesn't change the signedness
3431 (for integers). Avoid this if the final type is a pointer
3432 since then we sometimes need the inner conversion. */
3433 if ((((inter_int
|| inter_ptr
) && (inside_int
|| inside_ptr
))
3434 || (inter_float
&& inside_float
))
3435 && inter_prec
>= inside_prec
3436 && (inter_float
|| inter_unsignedp
== inside_unsignedp
)
3438 return convert (final_type
, TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
3440 /* Two conversions in a row are not needed unless:
3441 - some conversion is floating-point (overstrict for now), or
3442 - the intermediate type is narrower than both initial and
3444 - the intermediate type and innermost type differ in signedness,
3445 and the outermost type is wider than the intermediate, or
3446 - the initial type is a pointer type and the precisions of the
3447 intermediate and final types differ, or
3448 - the final type is a pointer type and the precisions of the
3449 initial and intermediate types differ. */
3450 if (! inside_float
&& ! inter_float
&& ! final_float
3451 && (inter_prec
> inside_prec
|| inter_prec
> final_prec
)
3452 && ! (inside_int
&& inter_int
3453 && inter_unsignedp
!= inside_unsignedp
3454 && inter_prec
< final_prec
)
3455 && ((inter_unsignedp
&& inter_prec
> inside_prec
)
3456 == (final_unsignedp
&& final_prec
> inter_prec
))
3457 && ! (inside_ptr
&& inter_prec
!= final_prec
)
3458 && ! (final_ptr
&& inside_prec
!= inter_prec
))
3459 return convert (final_type
, TREE_OPERAND (TREE_OPERAND (t
, 0), 0));
3462 if (TREE_CODE (TREE_OPERAND (t
, 0)) == MODIFY_EXPR
3463 && TREE_CONSTANT (TREE_OPERAND (TREE_OPERAND (t
, 0), 1))
3464 /* Detect assigning a bitfield. */
3465 && !(TREE_CODE (TREE_OPERAND (TREE_OPERAND (t
, 0), 0)) == COMPONENT_REF
3466 && DECL_BIT_FIELD (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t
, 0), 0), 1))))
3468 /* Don't leave an assignment inside a conversion
3469 unless assigning a bitfield. */
3470 tree prev
= TREE_OPERAND (t
, 0);
3471 TREE_OPERAND (t
, 0) = TREE_OPERAND (prev
, 1);
3472 /* First do the assignment, then return converted constant. */
3473 t
= build (COMPOUND_EXPR
, TREE_TYPE (t
), prev
, fold (t
));
3479 TREE_CONSTANT (t
) = TREE_CONSTANT (arg0
);
3482 return fold_convert (t
, arg0
);
3484 #if 0 /* This loses on &"foo"[0]. */
3489 /* Fold an expression like: "foo"[2] */
3490 if (TREE_CODE (arg0
) == STRING_CST
3491 && TREE_CODE (arg1
) == INTEGER_CST
3492 && !TREE_INT_CST_HIGH (arg1
)
3493 && (i
= TREE_INT_CST_LOW (arg1
)) < TREE_STRING_LENGTH (arg0
))
3495 t
= build_int_2 (TREE_STRING_POINTER (arg0
)[i
], 0);
3496 TREE_TYPE (t
) = TREE_TYPE (TREE_TYPE (arg0
));
3497 force_fit_type (t
, 0);
3504 if (TREE_CODE (arg0
) == CONSTRUCTOR
)
3506 tree m
= purpose_member (arg1
, CONSTRUCTOR_ELTS (arg0
));
3513 TREE_CONSTANT (t
) = wins
;
3519 if (TREE_CODE (arg0
) == INTEGER_CST
)
3521 HOST_WIDE_INT low
, high
;
3522 int overflow
= neg_double (TREE_INT_CST_LOW (arg0
),
3523 TREE_INT_CST_HIGH (arg0
),
3525 t
= build_int_2 (low
, high
);
3526 TREE_TYPE (t
) = type
;
3528 = (TREE_OVERFLOW (arg0
)
3529 | force_fit_type (t
, overflow
));
3530 TREE_CONSTANT_OVERFLOW (t
)
3531 = TREE_OVERFLOW (t
) | TREE_CONSTANT_OVERFLOW (arg0
);
3533 else if (TREE_CODE (arg0
) == REAL_CST
)
3534 t
= build_real (type
, REAL_VALUE_NEGATE (TREE_REAL_CST (arg0
)));
3535 TREE_TYPE (t
) = type
;
3537 else if (TREE_CODE (arg0
) == NEGATE_EXPR
)
3538 return TREE_OPERAND (arg0
, 0);
3540 /* Convert - (a - b) to (b - a) for non-floating-point. */
3541 else if (TREE_CODE (arg0
) == MINUS_EXPR
&& ! FLOAT_TYPE_P (type
))
3542 return build (MINUS_EXPR
, type
, TREE_OPERAND (arg0
, 1),
3543 TREE_OPERAND (arg0
, 0));
3550 if (TREE_CODE (arg0
) == INTEGER_CST
)
3552 if (! TREE_UNSIGNED (type
)
3553 && TREE_INT_CST_HIGH (arg0
) < 0)
3555 HOST_WIDE_INT low
, high
;
3556 int overflow
= neg_double (TREE_INT_CST_LOW (arg0
),
3557 TREE_INT_CST_HIGH (arg0
),
3559 t
= build_int_2 (low
, high
);
3560 TREE_TYPE (t
) = type
;
3562 = (TREE_OVERFLOW (arg0
)
3563 | force_fit_type (t
, overflow
));
3564 TREE_CONSTANT_OVERFLOW (t
)
3565 = TREE_OVERFLOW (t
) | TREE_CONSTANT_OVERFLOW (arg0
);
3568 else if (TREE_CODE (arg0
) == REAL_CST
)
3570 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0
)))
3571 t
= build_real (type
,
3572 REAL_VALUE_NEGATE (TREE_REAL_CST (arg0
)));
3574 TREE_TYPE (t
) = type
;
3576 else if (TREE_CODE (arg0
) == ABS_EXPR
|| TREE_CODE (arg0
) == NEGATE_EXPR
)
3577 return build1 (ABS_EXPR
, type
, TREE_OPERAND (arg0
, 0));
3581 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
3583 else if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
3584 return build (COMPLEX_EXPR
, TREE_TYPE (arg0
),
3585 TREE_OPERAND (arg0
, 0),
3586 fold (build1 (NEGATE_EXPR
,
3587 TREE_TYPE (TREE_TYPE (arg0
)),
3588 TREE_OPERAND (arg0
, 1))));
3589 else if (TREE_CODE (arg0
) == COMPLEX_CST
)
3590 return build_complex (TREE_OPERAND (arg0
, 0),
3591 fold (build1 (NEGATE_EXPR
,
3592 TREE_TYPE (TREE_TYPE (arg0
)),
3593 TREE_OPERAND (arg0
, 1))));
3594 else if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
3595 return fold (build (TREE_CODE (arg0
), type
,
3596 fold (build1 (CONJ_EXPR
, type
,
3597 TREE_OPERAND (arg0
, 0))),
3598 fold (build1 (CONJ_EXPR
,
3599 type
, TREE_OPERAND (arg0
, 1)))));
3600 else if (TREE_CODE (arg0
) == CONJ_EXPR
)
3601 return TREE_OPERAND (arg0
, 0);
3607 if (TREE_CODE (arg0
) == INTEGER_CST
)
3608 t
= build_int_2 (~ TREE_INT_CST_LOW (arg0
),
3609 ~ TREE_INT_CST_HIGH (arg0
));
3610 TREE_TYPE (t
) = type
;
3611 force_fit_type (t
, 0);
3612 TREE_OVERFLOW (t
) = TREE_OVERFLOW (arg0
);
3613 TREE_CONSTANT_OVERFLOW (t
) = TREE_CONSTANT_OVERFLOW (arg0
);
3615 else if (TREE_CODE (arg0
) == BIT_NOT_EXPR
)
3616 return TREE_OPERAND (arg0
, 0);
3620 /* A + (-B) -> A - B */
3621 if (TREE_CODE (arg1
) == NEGATE_EXPR
)
3622 return fold (build (MINUS_EXPR
, type
, arg0
, TREE_OPERAND (arg1
, 0)));
3623 else if (! FLOAT_TYPE_P (type
))
3625 if (integer_zerop (arg1
))
3626 return non_lvalue (convert (type
, arg0
));
3628 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
3629 with a constant, and the two constants have no bits in common,
3630 we should treat this as a BIT_IOR_EXPR since this may produce more
3632 if (TREE_CODE (arg0
) == BIT_AND_EXPR
3633 && TREE_CODE (arg1
) == BIT_AND_EXPR
3634 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
3635 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
3636 && integer_zerop (const_binop (BIT_AND_EXPR
,
3637 TREE_OPERAND (arg0
, 1),
3638 TREE_OPERAND (arg1
, 1), 0)))
3640 code
= BIT_IOR_EXPR
;
3644 /* (A * C) + (B * C) -> (A+B) * C. Since we are most concerned
3645 about the case where C is a constant, just try one of the
3646 four possibilities. */
3648 if (TREE_CODE (arg0
) == MULT_EXPR
&& TREE_CODE (arg1
) == MULT_EXPR
3649 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3650 TREE_OPERAND (arg1
, 1), 0))
3651 return fold (build (MULT_EXPR
, type
,
3652 fold (build (PLUS_EXPR
, type
,
3653 TREE_OPERAND (arg0
, 0),
3654 TREE_OPERAND (arg1
, 0))),
3655 TREE_OPERAND (arg0
, 1)));
3657 /* In IEEE floating point, x+0 may not equal x. */
3658 else if ((TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
3660 && real_zerop (arg1
))
3661 return non_lvalue (convert (type
, arg0
));
3663 /* In most languages, can't associate operations on floats
3664 through parentheses. Rather than remember where the parentheses
3665 were, we don't associate floats at all. It shouldn't matter much.
3666 However, associating multiplications is only very slightly
3667 inaccurate, so do that if -ffast-math is specified. */
3668 if (FLOAT_TYPE_P (type
)
3669 && ! (flag_fast_math
&& code
== MULT_EXPR
))
3672 /* The varsign == -1 cases happen only for addition and subtraction.
3673 It says that the arg that was split was really CON minus VAR.
3674 The rest of the code applies to all associative operations. */
3680 if (split_tree (arg0
, code
, &var
, &con
, &varsign
))
3684 /* EXPR is (CON-VAR) +- ARG1. */
3685 /* If it is + and VAR==ARG1, return just CONST. */
3686 if (code
== PLUS_EXPR
&& operand_equal_p (var
, arg1
, 0))
3687 return convert (TREE_TYPE (t
), con
);
3689 /* If ARG0 is a constant, don't change things around;
3690 instead keep all the constant computations together. */
3692 if (TREE_CONSTANT (arg0
))
3695 /* Otherwise return (CON +- ARG1) - VAR. */
3696 t
= build (MINUS_EXPR
, type
,
3697 fold (build (code
, type
, con
, arg1
)), var
);
3701 /* EXPR is (VAR+CON) +- ARG1. */
3702 /* If it is - and VAR==ARG1, return just CONST. */
3703 if (code
== MINUS_EXPR
&& operand_equal_p (var
, arg1
, 0))
3704 return convert (TREE_TYPE (t
), con
);
3706 /* If ARG0 is a constant, don't change things around;
3707 instead keep all the constant computations together. */
3709 if (TREE_CONSTANT (arg0
))
3712 /* Otherwise return VAR +- (ARG1 +- CON). */
3713 tem
= fold (build (code
, type
, arg1
, con
));
3714 t
= build (code
, type
, var
, tem
);
3716 if (integer_zerop (tem
)
3717 && (code
== PLUS_EXPR
|| code
== MINUS_EXPR
))
3718 return convert (type
, var
);
3719 /* If we have x +/- (c - d) [c an explicit integer]
3720 change it to x -/+ (d - c) since if d is relocatable
3721 then the latter can be a single immediate insn
3722 and the former cannot. */
3723 if (TREE_CODE (tem
) == MINUS_EXPR
3724 && TREE_CODE (TREE_OPERAND (tem
, 0)) == INTEGER_CST
)
3726 tree tem1
= TREE_OPERAND (tem
, 1);
3727 TREE_OPERAND (tem
, 1) = TREE_OPERAND (tem
, 0);
3728 TREE_OPERAND (tem
, 0) = tem1
;
3730 (code
== PLUS_EXPR
? MINUS_EXPR
: PLUS_EXPR
));
3736 if (split_tree (arg1
, code
, &var
, &con
, &varsign
))
3738 if (TREE_CONSTANT (arg1
))
3743 (code
== PLUS_EXPR
? MINUS_EXPR
: PLUS_EXPR
));
3745 /* EXPR is ARG0 +- (CON +- VAR). */
3746 if (TREE_CODE (t
) == MINUS_EXPR
3747 && operand_equal_p (var
, arg0
, 0))
3749 /* If VAR and ARG0 cancel, return just CON or -CON. */
3750 if (code
== PLUS_EXPR
)
3751 return convert (TREE_TYPE (t
), con
);
3752 return fold (build1 (NEGATE_EXPR
, TREE_TYPE (t
),
3753 convert (TREE_TYPE (t
), con
)));
3756 t
= build (TREE_CODE (t
), type
,
3757 fold (build (code
, TREE_TYPE (t
), arg0
, con
)), var
);
3759 if (integer_zerop (TREE_OPERAND (t
, 0))
3760 && TREE_CODE (t
) == PLUS_EXPR
)
3761 return convert (TREE_TYPE (t
), var
);
3766 #if defined (REAL_IS_NOT_DOUBLE) && ! defined (REAL_ARITHMETIC)
3767 if (TREE_CODE (arg1
) == REAL_CST
)
3769 #endif /* REAL_IS_NOT_DOUBLE, and no REAL_ARITHMETIC */
3771 t1
= const_binop (code
, arg0
, arg1
, 0);
3772 if (t1
!= NULL_TREE
)
3774 /* The return value should always have
3775 the same type as the original expression. */
3776 TREE_TYPE (t1
) = TREE_TYPE (t
);
3782 if (! FLOAT_TYPE_P (type
))
3784 if (! wins
&& integer_zerop (arg0
))
3785 return build1 (NEGATE_EXPR
, type
, arg1
);
3786 if (integer_zerop (arg1
))
3787 return non_lvalue (convert (type
, arg0
));
3789 /* (A * C) - (B * C) -> (A-B) * C. Since we are most concerned
3790 about the case where C is a constant, just try one of the
3791 four possibilities. */
3793 if (TREE_CODE (arg0
) == MULT_EXPR
&& TREE_CODE (arg1
) == MULT_EXPR
3794 && operand_equal_p (TREE_OPERAND (arg0
, 1),
3795 TREE_OPERAND (arg1
, 1), 0))
3796 return fold (build (MULT_EXPR
, type
,
3797 fold (build (MINUS_EXPR
, type
,
3798 TREE_OPERAND (arg0
, 0),
3799 TREE_OPERAND (arg1
, 0))),
3800 TREE_OPERAND (arg0
, 1)));
3802 /* Convert A - (-B) to A + B. */
3803 else if (TREE_CODE (arg1
) == NEGATE_EXPR
)
3804 return fold (build (PLUS_EXPR
, type
, arg0
, TREE_OPERAND (arg1
, 0)));
3806 else if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
3809 /* Except with IEEE floating point, 0-x equals -x. */
3810 if (! wins
&& real_zerop (arg0
))
3811 return build1 (NEGATE_EXPR
, type
, arg1
);
3812 /* Except with IEEE floating point, x-0 equals x. */
3813 if (real_zerop (arg1
))
3814 return non_lvalue (convert (type
, arg0
));
3817 /* Fold &x - &x. This can happen from &x.foo - &x.
3818 This is unsafe for certain floats even in non-IEEE formats.
3819 In IEEE, it is unsafe because it does wrong for NaNs.
3820 Also note that operand_equal_p is always false if an operand
3823 if ((! FLOAT_TYPE_P (type
) || flag_fast_math
)
3824 && operand_equal_p (arg0
, arg1
, 0))
3825 return convert (type
, integer_zero_node
);
3830 if (! FLOAT_TYPE_P (type
))
3832 if (integer_zerop (arg1
))
3833 return omit_one_operand (type
, arg1
, arg0
);
3834 if (integer_onep (arg1
))
3835 return non_lvalue (convert (type
, arg0
));
3837 /* ((A / C) * C) is A if the division is an
3838 EXACT_DIV_EXPR. Since C is normally a constant,
3839 just check for one of the four possibilities. */
3841 if (TREE_CODE (arg0
) == EXACT_DIV_EXPR
3842 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
3843 return TREE_OPERAND (arg0
, 0);
3845 /* (a * (1 << b)) is (a << b) */
3846 if (TREE_CODE (arg1
) == LSHIFT_EXPR
3847 && integer_onep (TREE_OPERAND (arg1
, 0)))
3848 return fold (build (LSHIFT_EXPR
, type
, arg0
,
3849 TREE_OPERAND (arg1
, 1)));
3850 if (TREE_CODE (arg0
) == LSHIFT_EXPR
3851 && integer_onep (TREE_OPERAND (arg0
, 0)))
3852 return fold (build (LSHIFT_EXPR
, type
, arg1
,
3853 TREE_OPERAND (arg0
, 1)));
3857 /* x*0 is 0, except for IEEE floating point. */
3858 if ((TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
3860 && real_zerop (arg1
))
3861 return omit_one_operand (type
, arg1
, arg0
);
3862 /* In IEEE floating point, x*1 is not equivalent to x for snans.
3863 However, ANSI says we can drop signals,
3864 so we can do this anyway. */
3865 if (real_onep (arg1
))
3866 return non_lvalue (convert (type
, arg0
));
3868 if (! wins
&& real_twop (arg1
))
3870 tree arg
= save_expr (arg0
);
3871 return build (PLUS_EXPR
, type
, arg
, arg
);
3878 if (integer_all_onesp (arg1
))
3879 return omit_one_operand (type
, arg1
, arg0
);
3880 if (integer_zerop (arg1
))
3881 return non_lvalue (convert (type
, arg0
));
3882 t1
= distribute_bit_expr (code
, type
, arg0
, arg1
);
3883 if (t1
!= NULL_TREE
)
3886 /* (a << C1) | (a >> C2) if A is unsigned and C1+C2 is the size of A
3887 is a rotate of A by C1 bits. */
3889 if ((TREE_CODE (arg0
) == RSHIFT_EXPR
3890 || TREE_CODE (arg0
) == LSHIFT_EXPR
)
3891 && (TREE_CODE (arg1
) == RSHIFT_EXPR
3892 || TREE_CODE (arg1
) == LSHIFT_EXPR
)
3893 && TREE_CODE (arg0
) != TREE_CODE (arg1
)
3894 && operand_equal_p (TREE_OPERAND (arg0
, 0), TREE_OPERAND (arg1
,0), 0)
3895 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0)))
3896 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
3897 && TREE_CODE (TREE_OPERAND (arg1
, 1)) == INTEGER_CST
3898 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1)) == 0
3899 && TREE_INT_CST_HIGH (TREE_OPERAND (arg1
, 1)) == 0
3900 && ((TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1))
3901 + TREE_INT_CST_LOW (TREE_OPERAND (arg1
, 1)))
3902 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)))))
3903 return build (LROTATE_EXPR
, type
, TREE_OPERAND (arg0
, 0),
3904 TREE_CODE (arg0
) == LSHIFT_EXPR
3905 ? TREE_OPERAND (arg0
, 1) : TREE_OPERAND (arg1
, 1));
3910 if (integer_zerop (arg1
))
3911 return non_lvalue (convert (type
, arg0
));
3912 if (integer_all_onesp (arg1
))
3913 return fold (build1 (BIT_NOT_EXPR
, type
, arg0
));
3918 if (integer_all_onesp (arg1
))
3919 return non_lvalue (convert (type
, arg0
));
3920 if (integer_zerop (arg1
))
3921 return omit_one_operand (type
, arg1
, arg0
);
3922 t1
= distribute_bit_expr (code
, type
, arg0
, arg1
);
3923 if (t1
!= NULL_TREE
)
3925 /* Simplify ((int)c & 0x377) into (int)c, if c is unsigned char. */
3926 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == NOP_EXPR
3927 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1
, 0))))
3929 int prec
= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1
, 0)));
3930 if (prec
< BITS_PER_WORD
&& prec
< HOST_BITS_PER_WIDE_INT
3931 && (~TREE_INT_CST_LOW (arg0
)
3932 & (((HOST_WIDE_INT
) 1 << prec
) - 1)) == 0)
3933 return build1 (NOP_EXPR
, type
, TREE_OPERAND (arg1
, 0));
3935 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) == NOP_EXPR
3936 && TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0
, 0))))
3938 int prec
= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0
, 0)));
3939 if (prec
< BITS_PER_WORD
&& prec
< HOST_BITS_PER_WIDE_INT
3940 && (~TREE_INT_CST_LOW (arg1
)
3941 & (((HOST_WIDE_INT
) 1 << prec
) - 1)) == 0)
3942 return build1 (NOP_EXPR
, type
, TREE_OPERAND (arg0
, 0));
3946 case BIT_ANDTC_EXPR
:
3947 if (integer_all_onesp (arg0
))
3948 return non_lvalue (convert (type
, arg1
));
3949 if (integer_zerop (arg0
))
3950 return omit_one_operand (type
, arg0
, arg1
);
3951 if (TREE_CODE (arg1
) == INTEGER_CST
)
3953 arg1
= fold (build1 (BIT_NOT_EXPR
, type
, arg1
));
3954 code
= BIT_AND_EXPR
;
3960 /* In most cases, do nothing with a divide by zero. */
3961 #if !defined (REAL_IS_NOT_DOUBLE) || defined (REAL_ARITHMETIC)
3962 #ifndef REAL_INFINITY
3963 if (TREE_CODE (arg1
) == REAL_CST
&& real_zerop (arg1
))
3966 #endif /* not REAL_IS_NOT_DOUBLE, or REAL_ARITHMETIC */
3968 /* In IEEE floating point, x/1 is not equivalent to x for snans.
3969 However, ANSI says we can drop signals, so we can do this anyway. */
3970 if (real_onep (arg1
))
3971 return non_lvalue (convert (type
, arg0
));
3973 /* If ARG1 is a constant, we can convert this to a multiply by the
3974 reciprocal. This does not have the same rounding properties,
3975 so only do this if -ffast-math. We can actually always safely
3976 do it if ARG1 is a power of two, but it's hard to tell if it is
3977 or not in a portable manner. */
3978 if (TREE_CODE (arg1
) == REAL_CST
&& flag_fast_math
3979 && 0 != (tem
= const_binop (code
, build_real (type
, dconst1
),
3981 return fold (build (MULT_EXPR
, type
, arg0
, tem
));
3985 case TRUNC_DIV_EXPR
:
3986 case ROUND_DIV_EXPR
:
3987 case FLOOR_DIV_EXPR
:
3989 case EXACT_DIV_EXPR
:
3990 if (integer_onep (arg1
))
3991 return non_lvalue (convert (type
, arg0
));
3992 if (integer_zerop (arg1
))
3995 /* If we have ((a / C1) / C2) where both division are the same type, try
3996 to simplify. First see if C1 * C2 overflows or not. */
3997 if (TREE_CODE (arg0
) == code
&& TREE_CODE (arg1
) == INTEGER_CST
3998 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
4002 new_divisor
= const_binop (MULT_EXPR
, TREE_OPERAND (arg0
, 1), arg1
, 0);
4003 tem
= const_binop (FLOOR_DIV_EXPR
, new_divisor
, arg1
, 0);
4005 if (TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)) == TREE_INT_CST_LOW (tem
)
4006 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1)) == TREE_INT_CST_HIGH (tem
))
4008 /* If no overflow, divide by C1*C2. */
4009 return fold (build (code
, type
, TREE_OPERAND (arg0
, 0), new_divisor
));
4013 /* Look for ((a * C1) / C3) or (((a * C1) + C2) / C3),
4014 where C1 % C3 == 0 or C3 % C1 == 0. We can simplify these
4015 expressions, which often appear in the offsets or sizes of
4016 objects with a varying size. Only deal with positive divisors
4017 and multiplicands. If C2 is negative, we must have C2 % C3 == 0.
4019 Look for NOPs and SAVE_EXPRs inside. */
4021 if (TREE_CODE (arg1
) == INTEGER_CST
4022 && tree_int_cst_sgn (arg1
) >= 0)
4024 int have_save_expr
= 0;
4025 tree c2
= integer_zero_node
;
4028 if (TREE_CODE (xarg0
) == SAVE_EXPR
)
4029 have_save_expr
= 1, xarg0
= TREE_OPERAND (xarg0
, 0);
4033 if (TREE_CODE (xarg0
) == PLUS_EXPR
4034 && TREE_CODE (TREE_OPERAND (xarg0
, 1)) == INTEGER_CST
)
4035 c2
= TREE_OPERAND (xarg0
, 1), xarg0
= TREE_OPERAND (xarg0
, 0);
4036 else if (TREE_CODE (xarg0
) == MINUS_EXPR
4037 && TREE_CODE (TREE_OPERAND (xarg0
, 1)) == INTEGER_CST
4038 /* If we are doing this computation unsigned, the negate
4040 && ! TREE_UNSIGNED (type
))
4042 c2
= fold (build1 (NEGATE_EXPR
, type
, TREE_OPERAND (xarg0
, 1)));
4043 xarg0
= TREE_OPERAND (xarg0
, 0);
4046 if (TREE_CODE (xarg0
) == SAVE_EXPR
)
4047 have_save_expr
= 1, xarg0
= TREE_OPERAND (xarg0
, 0);
4051 if (TREE_CODE (xarg0
) == MULT_EXPR
4052 && TREE_CODE (TREE_OPERAND (xarg0
, 1)) == INTEGER_CST
4053 && tree_int_cst_sgn (TREE_OPERAND (xarg0
, 1)) >= 0
4054 && (integer_zerop (const_binop (TRUNC_MOD_EXPR
,
4055 TREE_OPERAND (xarg0
, 1), arg1
, 1))
4056 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, arg1
,
4057 TREE_OPERAND (xarg0
, 1), 1)))
4058 && (tree_int_cst_sgn (c2
) >= 0
4059 || integer_zerop (const_binop (TRUNC_MOD_EXPR
, c2
,
4062 tree outer_div
= integer_one_node
;
4063 tree c1
= TREE_OPERAND (xarg0
, 1);
4066 /* If C3 > C1, set them equal and do a divide by
4067 C3/C1 at the end of the operation. */
4068 if (tree_int_cst_lt (c1
, c3
))
4069 outer_div
= const_binop (code
, c3
, c1
, 0), c3
= c1
;
4071 /* The result is A * (C1/C3) + (C2/C3). */
4072 t
= fold (build (PLUS_EXPR
, type
,
4073 fold (build (MULT_EXPR
, type
,
4074 TREE_OPERAND (xarg0
, 0),
4075 const_binop (code
, c1
, c3
, 1))),
4076 const_binop (code
, c2
, c3
, 1)));
4078 if (! integer_onep (outer_div
))
4079 t
= fold (build (code
, type
, t
, convert (type
, outer_div
)));
4091 case FLOOR_MOD_EXPR
:
4092 case ROUND_MOD_EXPR
:
4093 case TRUNC_MOD_EXPR
:
4094 if (integer_onep (arg1
))
4095 return omit_one_operand (type
, integer_zero_node
, arg0
);
4096 if (integer_zerop (arg1
))
4099 /* Look for ((a * C1) % C3) or (((a * C1) + C2) % C3),
4100 where C1 % C3 == 0. Handle similarly to the division case,
4101 but don't bother with SAVE_EXPRs. */
4103 if (TREE_CODE (arg1
) == INTEGER_CST
4104 && ! integer_zerop (arg1
))
4106 tree c2
= integer_zero_node
;
4109 if (TREE_CODE (xarg0
) == PLUS_EXPR
4110 && TREE_CODE (TREE_OPERAND (xarg0
, 1)) == INTEGER_CST
)
4111 c2
= TREE_OPERAND (xarg0
, 1), xarg0
= TREE_OPERAND (xarg0
, 0);
4112 else if (TREE_CODE (xarg0
) == MINUS_EXPR
4113 && TREE_CODE (TREE_OPERAND (xarg0
, 1)) == INTEGER_CST
4114 && ! TREE_UNSIGNED (type
))
4116 c2
= fold (build1 (NEGATE_EXPR
, type
, TREE_OPERAND (xarg0
, 1)));
4117 xarg0
= TREE_OPERAND (xarg0
, 0);
4122 if (TREE_CODE (xarg0
) == MULT_EXPR
4123 && TREE_CODE (TREE_OPERAND (xarg0
, 1)) == INTEGER_CST
4124 && integer_zerop (const_binop (TRUNC_MOD_EXPR
,
4125 TREE_OPERAND (xarg0
, 1),
4127 && tree_int_cst_sgn (c2
) >= 0)
4128 /* The result is (C2%C3). */
4129 return omit_one_operand (type
, const_binop (code
, c2
, arg1
, 1),
4130 TREE_OPERAND (xarg0
, 0));
4139 if (integer_zerop (arg1
))
4140 return non_lvalue (convert (type
, arg0
));
4141 /* Since negative shift count is not well-defined,
4142 don't try to compute it in the compiler. */
4143 if (TREE_CODE (arg1
) == INTEGER_CST
&& tree_int_cst_sgn (arg1
) < 0)
4145 /* Rewrite an LROTATE_EXPR by a constant into an
4146 RROTATE_EXPR by a new constant. */
4147 if (code
== LROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
)
4149 TREE_SET_CODE (t
, RROTATE_EXPR
);
4150 code
= RROTATE_EXPR
;
4151 TREE_OPERAND (t
, 1) = arg1
4154 convert (TREE_TYPE (arg1
),
4155 build_int_2 (GET_MODE_BITSIZE (TYPE_MODE (type
)), 0)),
4157 if (tree_int_cst_sgn (arg1
) < 0)
4161 /* If we have a rotate of a bit operation with the rotate count and
4162 the second operand of the bit operation both constant,
4163 permute the two operations. */
4164 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
4165 && (TREE_CODE (arg0
) == BIT_AND_EXPR
4166 || TREE_CODE (arg0
) == BIT_ANDTC_EXPR
4167 || TREE_CODE (arg0
) == BIT_IOR_EXPR
4168 || TREE_CODE (arg0
) == BIT_XOR_EXPR
)
4169 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
)
4170 return fold (build (TREE_CODE (arg0
), type
,
4171 fold (build (code
, type
,
4172 TREE_OPERAND (arg0
, 0), arg1
)),
4173 fold (build (code
, type
,
4174 TREE_OPERAND (arg0
, 1), arg1
))));
4176 /* Two consecutive rotates adding up to the width of the mode can
4178 if (code
== RROTATE_EXPR
&& TREE_CODE (arg1
) == INTEGER_CST
4179 && TREE_CODE (arg0
) == RROTATE_EXPR
4180 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
4181 && TREE_INT_CST_HIGH (arg1
) == 0
4182 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0
, 1)) == 0
4183 && ((TREE_INT_CST_LOW (arg1
)
4184 + TREE_INT_CST_LOW (TREE_OPERAND (arg0
, 1)))
4185 == GET_MODE_BITSIZE (TYPE_MODE (type
))))
4186 return TREE_OPERAND (arg0
, 0);
4191 if (operand_equal_p (arg0
, arg1
, 0))
4193 if (INTEGRAL_TYPE_P (type
)
4194 && operand_equal_p (arg1
, TYPE_MIN_VALUE (type
), 1))
4195 return omit_one_operand (type
, arg1
, arg0
);
4199 if (operand_equal_p (arg0
, arg1
, 0))
4201 if (INTEGRAL_TYPE_P (type
)
4202 && operand_equal_p (arg1
, TYPE_MAX_VALUE (type
), 1))
4203 return omit_one_operand (type
, arg1
, arg0
);
4206 case TRUTH_NOT_EXPR
:
4207 /* Note that the operand of this must be an int
4208 and its values must be 0 or 1.
4209 ("true" is a fixed value perhaps depending on the language,
4210 but we don't handle values other than 1 correctly yet.) */
4211 tem
= invert_truthvalue (arg0
);
4212 /* Avoid infinite recursion. */
4213 if (TREE_CODE (tem
) == TRUTH_NOT_EXPR
)
4215 return convert (type
, tem
);
4217 case TRUTH_ANDIF_EXPR
:
4218 /* Note that the operands of this must be ints
4219 and their values must be 0 or 1.
4220 ("true" is a fixed value perhaps depending on the language.) */
4221 /* If first arg is constant zero, return it. */
4222 if (integer_zerop (arg0
))
4224 case TRUTH_AND_EXPR
:
4225 /* If either arg is constant true, drop it. */
4226 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
4227 return non_lvalue (arg1
);
4228 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
4229 return non_lvalue (arg0
);
4230 /* If second arg is constant zero, result is zero, but first arg
4231 must be evaluated. */
4232 if (integer_zerop (arg1
))
4233 return omit_one_operand (type
, arg1
, arg0
);
4236 /* We only do these simplifications if we are optimizing. */
4240 /* Check for things like (A || B) && (A || C). We can convert this
4241 to A || (B && C). Note that either operator can be any of the four
4242 truth and/or operations and the transformation will still be
4243 valid. Also note that we only care about order for the
4244 ANDIF and ORIF operators. */
4245 if (TREE_CODE (arg0
) == TREE_CODE (arg1
)
4246 && (TREE_CODE (arg0
) == TRUTH_ANDIF_EXPR
4247 || TREE_CODE (arg0
) == TRUTH_ORIF_EXPR
4248 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
4249 || TREE_CODE (arg0
) == TRUTH_OR_EXPR
))
4251 tree a00
= TREE_OPERAND (arg0
, 0);
4252 tree a01
= TREE_OPERAND (arg0
, 1);
4253 tree a10
= TREE_OPERAND (arg1
, 0);
4254 tree a11
= TREE_OPERAND (arg1
, 1);
4255 int commutative
= ((TREE_CODE (arg0
) == TRUTH_OR_EXPR
4256 || TREE_CODE (arg0
) == TRUTH_AND_EXPR
)
4257 && (code
== TRUTH_AND_EXPR
4258 || code
== TRUTH_OR_EXPR
));
4260 if (operand_equal_p (a00
, a10
, 0))
4261 return fold (build (TREE_CODE (arg0
), type
, a00
,
4262 fold (build (code
, type
, a01
, a11
))));
4263 else if (commutative
&& operand_equal_p (a00
, a11
, 0))
4264 return fold (build (TREE_CODE (arg0
), type
, a00
,
4265 fold (build (code
, type
, a01
, a10
))));
4266 else if (commutative
&& operand_equal_p (a01
, a10
, 0))
4267 return fold (build (TREE_CODE (arg0
), type
, a01
,
4268 fold (build (code
, type
, a00
, a11
))));
4270 /* This case is tricky because we must either have commutative
4271 operators or else A10 must not have side-effects. */
4273 else if ((commutative
|| ! TREE_SIDE_EFFECTS (a10
))
4274 && operand_equal_p (a01
, a11
, 0))
4275 return fold (build (TREE_CODE (arg0
), type
,
4276 fold (build (code
, type
, a00
, a10
)),
4280 /* Check for the possibility of merging component references. If our
4281 lhs is another similar operation, try to merge its rhs with our
4282 rhs. Then try to merge our lhs and rhs. */
4283 if (TREE_CODE (arg0
) == code
4284 && 0 != (tem
= fold_truthop (code
, type
,
4285 TREE_OPERAND (arg0
, 1), arg1
)))
4286 return fold (build (code
, type
, TREE_OPERAND (arg0
, 0), tem
));
4288 if ((tem
= fold_truthop (code
, type
, arg0
, arg1
)) != 0)
4293 case TRUTH_ORIF_EXPR
:
4294 /* Note that the operands of this must be ints
4295 and their values must be 0 or true.
4296 ("true" is a fixed value perhaps depending on the language.) */
4297 /* If first arg is constant true, return it. */
4298 if (TREE_CODE (arg0
) == INTEGER_CST
&& ! integer_zerop (arg0
))
4301 /* If either arg is constant zero, drop it. */
4302 if (TREE_CODE (arg0
) == INTEGER_CST
&& integer_zerop (arg0
))
4303 return non_lvalue (arg1
);
4304 if (TREE_CODE (arg1
) == INTEGER_CST
&& integer_zerop (arg1
))
4305 return non_lvalue (arg0
);
4306 /* If second arg is constant true, result is true, but we must
4307 evaluate first arg. */
4308 if (TREE_CODE (arg1
) == INTEGER_CST
&& ! integer_zerop (arg1
))
4309 return omit_one_operand (type
, arg1
, arg0
);
4312 case TRUTH_XOR_EXPR
:
4313 /* If either arg is constant zero, drop it. */
4314 if (integer_zerop (arg0
))
4315 return non_lvalue (arg1
);
4316 if (integer_zerop (arg1
))
4317 return non_lvalue (arg0
);
4318 /* If either arg is constant true, this is a logical inversion. */
4319 if (integer_onep (arg0
))
4320 return non_lvalue (invert_truthvalue (arg1
));
4321 if (integer_onep (arg1
))
4322 return non_lvalue (invert_truthvalue (arg0
));
4331 /* If one arg is a constant integer, put it last. */
4332 if (TREE_CODE (arg0
) == INTEGER_CST
4333 && TREE_CODE (arg1
) != INTEGER_CST
)
4335 TREE_OPERAND (t
, 0) = arg1
;
4336 TREE_OPERAND (t
, 1) = arg0
;
4337 arg0
= TREE_OPERAND (t
, 0);
4338 arg1
= TREE_OPERAND (t
, 1);
4339 code
= swap_tree_comparison (code
);
4340 TREE_SET_CODE (t
, code
);
4343 /* Convert foo++ == CONST into ++foo == CONST + INCR.
4344 First, see if one arg is constant; find the constant arg
4345 and the other one. */
4347 tree constop
= 0, varop
;
4348 int constopnum
= -1;
4350 if (TREE_CONSTANT (arg1
))
4351 constopnum
= 1, constop
= arg1
, varop
= arg0
;
4352 if (TREE_CONSTANT (arg0
))
4353 constopnum
= 0, constop
= arg0
, varop
= arg1
;
4355 if (constop
&& TREE_CODE (varop
) == POSTINCREMENT_EXPR
)
4357 /* This optimization is invalid for ordered comparisons
4358 if CONST+INCR overflows or if foo+incr might overflow.
4359 This optimization is invalid for floating point due to rounding.
4360 For pointer types we assume overflow doesn't happen. */
4361 if (TREE_CODE (TREE_TYPE (varop
)) == POINTER_TYPE
4362 || (! FLOAT_TYPE_P (TREE_TYPE (varop
))
4363 && (code
== EQ_EXPR
|| code
== NE_EXPR
)))
4366 = fold (build (PLUS_EXPR
, TREE_TYPE (varop
),
4367 constop
, TREE_OPERAND (varop
, 1)));
4368 TREE_SET_CODE (varop
, PREINCREMENT_EXPR
);
4370 t
= build (code
, type
, TREE_OPERAND (t
, 0),
4371 TREE_OPERAND (t
, 1));
4372 TREE_OPERAND (t
, constopnum
) = newconst
;
4376 else if (constop
&& TREE_CODE (varop
) == POSTDECREMENT_EXPR
)
4378 if (TREE_CODE (TREE_TYPE (varop
)) == POINTER_TYPE
4379 || (! FLOAT_TYPE_P (TREE_TYPE (varop
))
4380 && (code
== EQ_EXPR
|| code
== NE_EXPR
)))
4383 = fold (build (MINUS_EXPR
, TREE_TYPE (varop
),
4384 constop
, TREE_OPERAND (varop
, 1)));
4385 TREE_SET_CODE (varop
, PREDECREMENT_EXPR
);
4386 t
= build (code
, type
, TREE_OPERAND (t
, 0),
4387 TREE_OPERAND (t
, 1));
4388 TREE_OPERAND (t
, constopnum
) = newconst
;
4394 /* Change X >= CST to X > (CST - 1) if CST is positive. */
4395 if (TREE_CODE (arg1
) == INTEGER_CST
4396 && TREE_CODE (arg0
) != INTEGER_CST
4397 && tree_int_cst_sgn (arg1
) > 0)
4399 switch (TREE_CODE (t
))
4403 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
4404 t
= build (code
, type
, TREE_OPERAND (t
, 0), arg1
);
4409 arg1
= const_binop (MINUS_EXPR
, arg1
, integer_one_node
, 0);
4410 t
= build (code
, type
, TREE_OPERAND (t
, 0), arg1
);
4415 /* If this is an EQ or NE comparison with zero and ARG0 is
4416 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
4417 two operations, but the latter can be done in one less insn
4418 on machines that have only two-operand insns or on which a
4419 constant cannot be the first operand. */
4420 if (integer_zerop (arg1
) && (code
== EQ_EXPR
|| code
== NE_EXPR
)
4421 && TREE_CODE (arg0
) == BIT_AND_EXPR
)
4423 if (TREE_CODE (TREE_OPERAND (arg0
, 0)) == LSHIFT_EXPR
4424 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 0)))
4426 fold (build (code
, type
,
4427 build (BIT_AND_EXPR
, TREE_TYPE (arg0
),
4429 TREE_TYPE (TREE_OPERAND (arg0
, 0)),
4430 TREE_OPERAND (arg0
, 1),
4431 TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1)),
4432 convert (TREE_TYPE (arg0
),
4435 else if (TREE_CODE (TREE_OPERAND (arg0
, 1)) == LSHIFT_EXPR
4436 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg0
, 1), 0)))
4438 fold (build (code
, type
,
4439 build (BIT_AND_EXPR
, TREE_TYPE (arg0
),
4441 TREE_TYPE (TREE_OPERAND (arg0
, 1)),
4442 TREE_OPERAND (arg0
, 0),
4443 TREE_OPERAND (TREE_OPERAND (arg0
, 1), 1)),
4444 convert (TREE_TYPE (arg0
),
4449 /* If this is an NE or EQ comparison of zero against the result of a
4450 signed MOD operation whose second operand is a power of 2, make
4451 the MOD operation unsigned since it is simpler and equivalent. */
4452 if ((code
== NE_EXPR
|| code
== EQ_EXPR
)
4453 && integer_zerop (arg1
)
4454 && ! TREE_UNSIGNED (TREE_TYPE (arg0
))
4455 && (TREE_CODE (arg0
) == TRUNC_MOD_EXPR
4456 || TREE_CODE (arg0
) == CEIL_MOD_EXPR
4457 || TREE_CODE (arg0
) == FLOOR_MOD_EXPR
4458 || TREE_CODE (arg0
) == ROUND_MOD_EXPR
)
4459 && integer_pow2p (TREE_OPERAND (arg0
, 1)))
4461 tree newtype
= unsigned_type (TREE_TYPE (arg0
));
4462 tree newmod
= build (TREE_CODE (arg0
), newtype
,
4463 convert (newtype
, TREE_OPERAND (arg0
, 0)),
4464 convert (newtype
, TREE_OPERAND (arg0
, 1)));
4466 return build (code
, type
, newmod
, convert (newtype
, arg1
));
4469 /* If this is an NE comparison of zero with an AND of one, remove the
4470 comparison since the AND will give the correct value. */
4471 if (code
== NE_EXPR
&& integer_zerop (arg1
)
4472 && TREE_CODE (arg0
) == BIT_AND_EXPR
4473 && integer_onep (TREE_OPERAND (arg0
, 1)))
4474 return convert (type
, arg0
);
4476 /* If we have (A & C) == C where C is a power of 2, convert this into
4477 (A & C) != 0. Similarly for NE_EXPR. */
4478 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
4479 && TREE_CODE (arg0
) == BIT_AND_EXPR
4480 && integer_pow2p (TREE_OPERAND (arg0
, 1))
4481 && operand_equal_p (TREE_OPERAND (arg0
, 1), arg1
, 0))
4482 return build (code
== EQ_EXPR
? NE_EXPR
: EQ_EXPR
, type
,
4483 arg0
, integer_zero_node
);
4485 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
4486 and similarly for >= into !=. */
4487 if ((code
== LT_EXPR
|| code
== GE_EXPR
)
4488 && TREE_UNSIGNED (TREE_TYPE (arg0
))
4489 && TREE_CODE (arg1
) == LSHIFT_EXPR
4490 && integer_onep (TREE_OPERAND (arg1
, 0)))
4491 return build (code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
4492 build (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
4493 TREE_OPERAND (arg1
, 1)),
4494 convert (TREE_TYPE (arg0
), integer_zero_node
));
4496 else if ((code
== LT_EXPR
|| code
== GE_EXPR
)
4497 && TREE_UNSIGNED (TREE_TYPE (arg0
))
4498 && (TREE_CODE (arg1
) == NOP_EXPR
4499 || TREE_CODE (arg1
) == CONVERT_EXPR
)
4500 && TREE_CODE (TREE_OPERAND (arg1
, 0)) == LSHIFT_EXPR
4501 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1
, 0), 0)))
4503 build (code
== LT_EXPR
? EQ_EXPR
: NE_EXPR
, type
,
4504 convert (TREE_TYPE (arg0
),
4505 build (RSHIFT_EXPR
, TREE_TYPE (arg0
), arg0
,
4506 TREE_OPERAND (TREE_OPERAND (arg1
, 0), 1))),
4507 convert (TREE_TYPE (arg0
), integer_zero_node
));
4509 /* Simplify comparison of something with itself. (For IEEE
4510 floating-point, we can only do some of these simplifications.) */
4511 if (operand_equal_p (arg0
, arg1
, 0))
4518 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
4520 t
= build_int_2 (1, 0);
4521 TREE_TYPE (t
) = type
;
4525 TREE_SET_CODE (t
, code
);
4529 /* For NE, we can only do this simplification if integer. */
4530 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0
)))
4532 /* ... fall through ... */
4535 t
= build_int_2 (0, 0);
4536 TREE_TYPE (t
) = type
;
4541 /* An unsigned comparison against 0 can be simplified. */
4542 if (integer_zerop (arg1
)
4543 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1
))
4544 || TREE_CODE (TREE_TYPE (arg1
)) == POINTER_TYPE
)
4545 && TREE_UNSIGNED (TREE_TYPE (arg1
)))
4547 switch (TREE_CODE (t
))
4551 TREE_SET_CODE (t
, NE_EXPR
);
4555 TREE_SET_CODE (t
, EQ_EXPR
);
4558 return omit_one_operand (type
,
4559 convert (type
, integer_one_node
),
4562 return omit_one_operand (type
,
4563 convert (type
, integer_zero_node
),
4568 /* If we are comparing an expression that just has comparisons
4569 of two integer values, arithmetic expressions of those comparisons,
4570 and constants, we can simplify it. There are only three cases
4571 to check: the two values can either be equal, the first can be
4572 greater, or the second can be greater. Fold the expression for
4573 those three values. Since each value must be 0 or 1, we have
4574 eight possibilities, each of which corresponds to the constant 0
4575 or 1 or one of the six possible comparisons.
4577 This handles common cases like (a > b) == 0 but also handles
4578 expressions like ((x > y) - (y > x)) > 0, which supposedly
4579 occur in macroized code. */
4581 if (TREE_CODE (arg1
) == INTEGER_CST
&& TREE_CODE (arg0
) != INTEGER_CST
)
4583 tree cval1
= 0, cval2
= 0;
4586 if (twoval_comparison_p (arg0
, &cval1
, &cval2
, &save_p
)
4587 /* Don't handle degenerate cases here; they should already
4588 have been handled anyway. */
4589 && cval1
!= 0 && cval2
!= 0
4590 && ! (TREE_CONSTANT (cval1
) && TREE_CONSTANT (cval2
))
4591 && TREE_TYPE (cval1
) == TREE_TYPE (cval2
)
4592 && INTEGRAL_TYPE_P (TREE_TYPE (cval1
))
4593 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1
)),
4594 TYPE_MAX_VALUE (TREE_TYPE (cval2
)), 0))
4596 tree maxval
= TYPE_MAX_VALUE (TREE_TYPE (cval1
));
4597 tree minval
= TYPE_MIN_VALUE (TREE_TYPE (cval1
));
4599 /* We can't just pass T to eval_subst in case cval1 or cval2
4600 was the same as ARG1. */
4603 = fold (build (code
, type
,
4604 eval_subst (arg0
, cval1
, maxval
, cval2
, minval
),
4607 = fold (build (code
, type
,
4608 eval_subst (arg0
, cval1
, maxval
, cval2
, maxval
),
4611 = fold (build (code
, type
,
4612 eval_subst (arg0
, cval1
, minval
, cval2
, maxval
),
4615 /* All three of these results should be 0 or 1. Confirm they
4616 are. Then use those values to select the proper code
4619 if ((integer_zerop (high_result
)
4620 || integer_onep (high_result
))
4621 && (integer_zerop (equal_result
)
4622 || integer_onep (equal_result
))
4623 && (integer_zerop (low_result
)
4624 || integer_onep (low_result
)))
4626 /* Make a 3-bit mask with the high-order bit being the
4627 value for `>', the next for '=', and the low for '<'. */
4628 switch ((integer_onep (high_result
) * 4)
4629 + (integer_onep (equal_result
) * 2)
4630 + integer_onep (low_result
))
4634 return omit_one_operand (type
, integer_zero_node
, arg0
);
4655 return omit_one_operand (type
, integer_one_node
, arg0
);
4658 t
= build (code
, type
, cval1
, cval2
);
4660 return save_expr (t
);
4667 /* If this is a comparison of a field, we may be able to simplify it. */
4668 if ((TREE_CODE (arg0
) == COMPONENT_REF
4669 || TREE_CODE (arg0
) == BIT_FIELD_REF
)
4670 && (code
== EQ_EXPR
|| code
== NE_EXPR
)
4671 /* Handle the constant case even without -O
4672 to make sure the warnings are given. */
4673 && (optimize
|| TREE_CODE (arg1
) == INTEGER_CST
))
4675 t1
= optimize_bit_field_compare (code
, type
, arg0
, arg1
);
4679 /* If this is a comparison of complex values and either or both
4680 sizes are a COMPLEX_EXPR, it is best to split up the comparisons
4681 and join them with a TRUTH_ANDIF_EXPR or TRUTH_ORIF_EXPR. This
4682 may prevent needless evaluations. */
4683 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
4684 && TREE_CODE (TREE_TYPE (arg0
)) == COMPLEX_TYPE
4685 && (TREE_CODE (arg0
) == COMPLEX_EXPR
4686 || TREE_CODE (arg1
) == COMPLEX_EXPR
))
4688 tree subtype
= TREE_TYPE (TREE_TYPE (arg0
));
4689 tree real0
= fold (build1 (REALPART_EXPR
, subtype
, arg0
));
4690 tree imag0
= fold (build1 (IMAGPART_EXPR
, subtype
, arg0
));
4691 tree real1
= fold (build1 (REALPART_EXPR
, subtype
, arg1
));
4692 tree imag1
= fold (build1 (IMAGPART_EXPR
, subtype
, arg1
));
4694 return fold (build ((code
== EQ_EXPR
? TRUTH_ANDIF_EXPR
4697 fold (build (code
, type
, real0
, real1
)),
4698 fold (build (code
, type
, imag0
, imag1
))));
4701 /* From here on, the only cases we handle are when the result is
4702 known to be a constant.
4704 To compute GT, swap the arguments and do LT.
4705 To compute GE, do LT and invert the result.
4706 To compute LE, swap the arguments, do LT and invert the result.
4707 To compute NE, do EQ and invert the result.
4709 Therefore, the code below must handle only EQ and LT. */
4711 if (code
== LE_EXPR
|| code
== GT_EXPR
)
4713 tem
= arg0
, arg0
= arg1
, arg1
= tem
;
4714 code
= swap_tree_comparison (code
);
4717 /* Note that it is safe to invert for real values here because we
4718 will check below in the one case that it matters. */
4721 if (code
== NE_EXPR
|| code
== GE_EXPR
)
4724 code
= invert_tree_comparison (code
);
4727 /* Compute a result for LT or EQ if args permit;
4728 otherwise return T. */
4729 if (TREE_CODE (arg0
) == INTEGER_CST
&& TREE_CODE (arg1
) == INTEGER_CST
)
4731 if (code
== EQ_EXPR
)
4732 t1
= build_int_2 ((TREE_INT_CST_LOW (arg0
)
4733 == TREE_INT_CST_LOW (arg1
))
4734 && (TREE_INT_CST_HIGH (arg0
)
4735 == TREE_INT_CST_HIGH (arg1
)),
4738 t1
= build_int_2 ((TREE_UNSIGNED (TREE_TYPE (arg0
))
4739 ? INT_CST_LT_UNSIGNED (arg0
, arg1
)
4740 : INT_CST_LT (arg0
, arg1
)),
4744 /* Assume a nonexplicit constant cannot equal an explicit one,
4745 since such code would be undefined anyway.
4746 Exception: on sysvr4, using #pragma weak,
4747 a label can come out as 0. */
4748 else if (TREE_CODE (arg1
) == INTEGER_CST
4749 && !integer_zerop (arg1
)
4750 && TREE_CONSTANT (arg0
)
4751 && TREE_CODE (arg0
) == ADDR_EXPR
4753 t1
= build_int_2 (0, 0);
4755 /* Two real constants can be compared explicitly. */
4756 else if (TREE_CODE (arg0
) == REAL_CST
&& TREE_CODE (arg1
) == REAL_CST
)
4758 /* If either operand is a NaN, the result is false with two
4759 exceptions: First, an NE_EXPR is true on NaNs, but that case
4760 is already handled correctly since we will be inverting the
4761 result for NE_EXPR. Second, if we had inverted a LE_EXPR
4762 or a GE_EXPR into a LT_EXPR, we must return true so that it
4763 will be inverted into false. */
4765 if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg0
))
4766 || REAL_VALUE_ISNAN (TREE_REAL_CST (arg1
)))
4767 t1
= build_int_2 (invert
&& code
== LT_EXPR
, 0);
4769 else if (code
== EQ_EXPR
)
4770 t1
= build_int_2 (REAL_VALUES_EQUAL (TREE_REAL_CST (arg0
),
4771 TREE_REAL_CST (arg1
)),
4774 t1
= build_int_2 (REAL_VALUES_LESS (TREE_REAL_CST (arg0
),
4775 TREE_REAL_CST (arg1
)),
4779 if (t1
== NULL_TREE
)
4783 TREE_INT_CST_LOW (t1
) ^= 1;
4785 TREE_TYPE (t1
) = type
;
4789 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
4790 so all simple results must be passed through pedantic_non_lvalue. */
4791 if (TREE_CODE (arg0
) == INTEGER_CST
)
4792 return pedantic_non_lvalue
4793 (TREE_OPERAND (t
, (integer_zerop (arg0
) ? 2 : 1)));
4794 else if (operand_equal_p (arg1
, TREE_OPERAND (expr
, 2), 0))
4795 return pedantic_omit_one_operand (type
, arg1
, arg0
);
4797 /* If the second operand is zero, invert the comparison and swap
4798 the second and third operands. Likewise if the second operand
4799 is constant and the third is not or if the third operand is
4800 equivalent to the first operand of the comparison. */
4802 if (integer_zerop (arg1
)
4803 || (TREE_CONSTANT (arg1
) && ! TREE_CONSTANT (TREE_OPERAND (t
, 2)))
4804 || (TREE_CODE_CLASS (TREE_CODE (arg0
)) == '<'
4805 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
4806 TREE_OPERAND (t
, 2),
4807 TREE_OPERAND (arg0
, 1))))
4809 /* See if this can be inverted. If it can't, possibly because
4810 it was a floating-point inequality comparison, don't do
4812 tem
= invert_truthvalue (arg0
);
4814 if (TREE_CODE (tem
) != TRUTH_NOT_EXPR
)
4816 t
= build (code
, type
, tem
,
4817 TREE_OPERAND (t
, 2), TREE_OPERAND (t
, 1));
4819 arg1
= TREE_OPERAND (t
, 2);
4824 /* If we have A op B ? A : C, we may be able to convert this to a
4825 simpler expression, depending on the operation and the values
4826 of B and C. IEEE floating point prevents this though,
4827 because A or B might be -0.0 or a NaN. */
4829 if (TREE_CODE_CLASS (TREE_CODE (arg0
)) == '<'
4830 && (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
4831 || ! FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0
, 0)))
4833 && operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 0),
4834 arg1
, TREE_OPERAND (arg0
, 1)))
4836 tree arg2
= TREE_OPERAND (t
, 2);
4837 enum tree_code comp_code
= TREE_CODE (arg0
);
4841 /* If we have A op 0 ? A : -A, this is A, -A, abs (A), or abs (-A),
4842 depending on the comparison operation. */
4843 if ((FLOAT_TYPE_P (TREE_TYPE (TREE_OPERAND (arg0
, 1)))
4844 ? real_zerop (TREE_OPERAND (arg0
, 1))
4845 : integer_zerop (TREE_OPERAND (arg0
, 1)))
4846 && TREE_CODE (arg2
) == NEGATE_EXPR
4847 && operand_equal_p (TREE_OPERAND (arg2
, 0), arg1
, 0))
4851 return pedantic_non_lvalue
4852 (fold (build1 (NEGATE_EXPR
, type
, arg1
)));
4854 return pedantic_non_lvalue (convert (type
, arg1
));
4857 return pedantic_non_lvalue
4858 (convert (type
, fold (build1 (ABS_EXPR
,
4859 TREE_TYPE (arg1
), arg1
))));
4862 return pedantic_non_lvalue
4863 (fold (build1 (NEGATE_EXPR
, type
,
4865 fold (build1 (ABS_EXPR
,
4870 /* If this is A != 0 ? A : 0, this is simply A. For ==, it is
4873 if (integer_zerop (TREE_OPERAND (arg0
, 1)) && integer_zerop (arg2
))
4875 if (comp_code
== NE_EXPR
)
4876 return pedantic_non_lvalue (convert (type
, arg1
));
4877 else if (comp_code
== EQ_EXPR
)
4878 return pedantic_non_lvalue (convert (type
, integer_zero_node
));
4881 /* If this is A op B ? A : B, this is either A, B, min (A, B),
4882 or max (A, B), depending on the operation. */
4884 if (operand_equal_for_comparison_p (TREE_OPERAND (arg0
, 1),
4885 arg2
, TREE_OPERAND (arg0
, 0)))
4887 tree comp_op0
= TREE_OPERAND (arg0
, 0);
4888 tree comp_op1
= TREE_OPERAND (arg0
, 1);
4889 tree comp_type
= TREE_TYPE (comp_op0
);
4894 return pedantic_non_lvalue (convert (type
, arg2
));
4896 return pedantic_non_lvalue (convert (type
, arg1
));
4899 return pedantic_non_lvalue
4900 (convert (type
, (fold (build (MIN_EXPR
, comp_type
,
4901 comp_op0
, comp_op1
)))));
4904 return pedantic_non_lvalue
4905 (convert (type
, fold (build (MAX_EXPR
, comp_type
,
4906 comp_op0
, comp_op1
))));
4910 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4911 we might still be able to simplify this. For example,
4912 if C1 is one less or one more than C2, this might have started
4913 out as a MIN or MAX and been transformed by this function.
4914 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4916 if (INTEGRAL_TYPE_P (type
)
4917 && TREE_CODE (TREE_OPERAND (arg0
, 1)) == INTEGER_CST
4918 && TREE_CODE (arg2
) == INTEGER_CST
)
4922 /* We can replace A with C1 in this case. */
4923 arg1
= convert (type
, TREE_OPERAND (arg0
, 1));
4924 t
= build (code
, type
, TREE_OPERAND (t
, 0), arg1
,
4925 TREE_OPERAND (t
, 2));
4929 /* If C1 is C2 + 1, this is min(A, C2). */
4930 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
), 1)
4931 && operand_equal_p (TREE_OPERAND (arg0
, 1),
4932 const_binop (PLUS_EXPR
, arg2
,
4933 integer_one_node
, 0), 1))
4934 return pedantic_non_lvalue
4935 (fold (build (MIN_EXPR
, type
, arg1
, arg2
)));
4939 /* If C1 is C2 - 1, this is min(A, C2). */
4940 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
), 1)
4941 && operand_equal_p (TREE_OPERAND (arg0
, 1),
4942 const_binop (MINUS_EXPR
, arg2
,
4943 integer_one_node
, 0), 1))
4944 return pedantic_non_lvalue
4945 (fold (build (MIN_EXPR
, type
, arg1
, arg2
)));
4949 /* If C1 is C2 - 1, this is max(A, C2). */
4950 if (! operand_equal_p (arg2
, TYPE_MIN_VALUE (type
), 1)
4951 && operand_equal_p (TREE_OPERAND (arg0
, 1),
4952 const_binop (MINUS_EXPR
, arg2
,
4953 integer_one_node
, 0), 1))
4954 return pedantic_non_lvalue
4955 (fold (build (MAX_EXPR
, type
, arg1
, arg2
)));
4959 /* If C1 is C2 + 1, this is max(A, C2). */
4960 if (! operand_equal_p (arg2
, TYPE_MAX_VALUE (type
), 1)
4961 && operand_equal_p (TREE_OPERAND (arg0
, 1),
4962 const_binop (PLUS_EXPR
, arg2
,
4963 integer_one_node
, 0), 1))
4964 return pedantic_non_lvalue
4965 (fold (build (MAX_EXPR
, type
, arg1
, arg2
)));
4970 /* If the second operand is simpler than the third, swap them
4971 since that produces better jump optimization results. */
4972 if ((TREE_CONSTANT (arg1
) || TREE_CODE_CLASS (TREE_CODE (arg1
)) == 'd'
4973 || TREE_CODE (arg1
) == SAVE_EXPR
)
4974 && ! (TREE_CONSTANT (TREE_OPERAND (t
, 2))
4975 || TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (t
, 2))) == 'd'
4976 || TREE_CODE (TREE_OPERAND (t
, 2)) == SAVE_EXPR
))
4978 /* See if this can be inverted. If it can't, possibly because
4979 it was a floating-point inequality comparison, don't do
4981 tem
= invert_truthvalue (arg0
);
4983 if (TREE_CODE (tem
) != TRUTH_NOT_EXPR
)
4985 t
= build (code
, type
, tem
,
4986 TREE_OPERAND (t
, 2), TREE_OPERAND (t
, 1));
4988 arg1
= TREE_OPERAND (t
, 2);
4993 /* Convert A ? 1 : 0 to simply A. */
4994 if (integer_onep (TREE_OPERAND (t
, 1))
4995 && integer_zerop (TREE_OPERAND (t
, 2))
4996 /* If we try to convert TREE_OPERAND (t, 0) to our type, the
4997 call to fold will try to move the conversion inside
4998 a COND, which will recurse. In that case, the COND_EXPR
4999 is probably the best choice, so leave it alone. */
5000 && type
== TREE_TYPE (arg0
))
5001 return pedantic_non_lvalue (arg0
);
5003 /* Look for expressions of the form A & 2 ? 2 : 0. The result of this
5004 operation is simply A & 2. */
5006 if (integer_zerop (TREE_OPERAND (t
, 2))
5007 && TREE_CODE (arg0
) == NE_EXPR
5008 && integer_zerop (TREE_OPERAND (arg0
, 1))
5009 && integer_pow2p (arg1
)
5010 && TREE_CODE (TREE_OPERAND (arg0
, 0)) == BIT_AND_EXPR
5011 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0
, 0), 1),
5013 return pedantic_non_lvalue (convert (type
, TREE_OPERAND (arg0
, 0)));
5018 /* When pedantic, a compound expression can be neither an lvalue
5019 nor an integer constant expression. */
5020 if (TREE_SIDE_EFFECTS (arg0
) || pedantic
)
5022 /* Don't let (0, 0) be null pointer constant. */
5023 if (integer_zerop (arg1
))
5024 return non_lvalue (arg1
);
5029 return build_complex (arg0
, arg1
);
5033 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
5035 else if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
5036 return omit_one_operand (type
, TREE_OPERAND (arg0
, 0),
5037 TREE_OPERAND (arg0
, 1));
5038 else if (TREE_CODE (arg0
) == COMPLEX_CST
)
5039 return TREE_REALPART (arg0
);
5040 else if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
5041 return fold (build (TREE_CODE (arg0
), type
,
5042 fold (build1 (REALPART_EXPR
, type
,
5043 TREE_OPERAND (arg0
, 0))),
5044 fold (build1 (REALPART_EXPR
,
5045 type
, TREE_OPERAND (arg0
, 1)))));
5049 if (TREE_CODE (TREE_TYPE (arg0
)) != COMPLEX_TYPE
)
5050 return convert (type
, integer_zero_node
);
5051 else if (TREE_CODE (arg0
) == COMPLEX_EXPR
)
5052 return omit_one_operand (type
, TREE_OPERAND (arg0
, 1),
5053 TREE_OPERAND (arg0
, 0));
5054 else if (TREE_CODE (arg0
) == COMPLEX_CST
)
5055 return TREE_IMAGPART (arg0
);
5056 else if (TREE_CODE (arg0
) == PLUS_EXPR
|| TREE_CODE (arg0
) == MINUS_EXPR
)
5057 return fold (build (TREE_CODE (arg0
), type
,
5058 fold (build1 (IMAGPART_EXPR
, type
,
5059 TREE_OPERAND (arg0
, 0))),
5060 fold (build1 (IMAGPART_EXPR
, type
,
5061 TREE_OPERAND (arg0
, 1)))));
5064 /* Pull arithmetic ops out of the CLEANUP_POINT_EXPR where
5066 case CLEANUP_POINT_EXPR
:
5067 if (! TREE_SIDE_EFFECTS (arg0
))
5068 return convert (type
, arg0
);
5071 enum tree_code code0
= TREE_CODE (arg0
);
5072 int kind0
= TREE_CODE_CLASS (code0
);
5073 tree arg00
= TREE_OPERAND (arg0
, 0);
5076 if (kind0
== '1' || code0
== TRUTH_NOT_EXPR
)
5077 return fold (build1 (code0
, type
,
5078 fold (build1 (CLEANUP_POINT_EXPR
,
5079 TREE_TYPE (arg00
), arg00
))));
5081 if (kind0
== '<' || kind0
== '2'
5082 || code0
== TRUTH_ANDIF_EXPR
|| code0
== TRUTH_ORIF_EXPR
5083 || code0
== TRUTH_AND_EXPR
|| code0
== TRUTH_OR_EXPR
5084 || code0
== TRUTH_XOR_EXPR
)
5086 arg01
= TREE_OPERAND (arg0
, 1);
5088 if (! TREE_SIDE_EFFECTS (arg00
))
5089 return fold (build (code0
, type
, arg00
,
5090 fold (build1 (CLEANUP_POINT_EXPR
,
5091 TREE_TYPE (arg01
), arg01
))));
5093 if (! TREE_SIDE_EFFECTS (arg01
))
5094 return fold (build (code0
, type
,
5095 fold (build1 (CLEANUP_POINT_EXPR
,
5096 TREE_TYPE (arg00
), arg00
)),
5105 } /* switch (code) */