1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
32 #include "target.h"
33 #include "gimple-fold.h"
34
35 /* Return true when DECL can be referenced from the current unit.
36 We can get declarations that are not possible to reference for
37 various reasons:
38 
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to another compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
44 set.
45 2) In WHOPR mode devirtualization might lead to a reference
46 to a method that was partitioned elsewhere.
47 In this case we have a static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
49 declaring the body.
50 3) COMDAT functions referred to by external vtables that
51 we devirtualize only during the final compilation stage.
52 At this time we have already decided that we will not output
53 the function body and thus we can't reference the symbol
54 directly. */
55
56 static bool
57 can_refer_decl_in_current_unit_p (tree decl)
58 {
59 struct varpool_node *vnode;
60 struct cgraph_node *node;
61
62 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
63 return true;
64 /* The external flag is set, so we are dealing with a C++ reference
65 to a static object from another file. */
66 if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
67 && TREE_CODE (decl) == VAR_DECL)
68 {
69 /* Just be sure it is not a bug in the frontend setting the
70 flags incorrectly. Those variables should never
71 be finalized. */
72 gcc_checking_assert (!(vnode = varpool_get_node (decl))
73 || !vnode->finalized);
74 return false;
75 }
76 /* When the function is public, we can always introduce a new reference.
77 The exception is COMDAT functions, where introducing a direct
78 reference implies the need to include the function body in the current unit. */
79 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
80 return true;
81 /* We are not at the ltrans stage, so don't worry about WHOPR.
82 Also, while we are still gimplifying, all referred-to comdat functions
83 will be produced.
84 ??? As observed in PR20991, for already optimized-out comdat virtual functions
85 we may not necessarily have to give up, because the copy will be output
86 elsewhere when the corresponding vtable is output. */
87 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
88 return true;
89 /* If we already output the function body, we are safe. */
90 if (TREE_ASM_WRITTEN (decl))
91 return true;
92 if (TREE_CODE (decl) == FUNCTION_DECL)
93 {
94 node = cgraph_get_node (decl);
95 /* Check that we still have the function body and that we haven't yet
96 taken the decision to eliminate the offline copy of the function.
97 The second point is important when devirtualization happens during the
98 final compilation stage, when making a new reference no longer causes
99 the callee to be compiled. */
100 if (!node || !node->analyzed || node->global.inlined_to)
101 return false;
102 }
103 else if (TREE_CODE (decl) == VAR_DECL)
104 {
105 vnode = varpool_get_node (decl);
106 if (!vnode || !vnode->finalized)
107 return false;
108 }
109 return true;
110 }
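/* Illustrative example (not from the original sources): in a C++ unit
   that merely uses a vtable keyed to another translation unit, e.g.

     struct S { virtual void f (); };   // key method defined elsewhere

   the methods reachable through that vtable carry both TREE_STATIC and
   DECL_EXTERNAL, which is exactly case 1) above, and this predicate
   refuses to introduce direct references to them.  */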
111
112 /* CVAL is a value taken from the DECL_INITIAL of a variable. Try to
113 transform it into a form acceptable to is_gimple_min_invariant. */
114
115 tree
116 canonicalize_constructor_val (tree cval)
117 {
118 STRIP_NOPS (cval);
119 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
120 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
121 {
122 tree ptr = TREE_OPERAND (cval, 0);
123 if (is_gimple_min_invariant (ptr))
124 cval = build1_loc (EXPR_LOCATION (cval),
125 ADDR_EXPR, TREE_TYPE (ptr),
126 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
127 ptr,
128 fold_convert (ptr_type_node,
129 TREE_OPERAND (cval, 1))));
130 }
131 if (TREE_CODE (cval) == ADDR_EXPR)
132 {
133 tree base = get_base_address (TREE_OPERAND (cval, 0));
134
135 if (base
136 && (TREE_CODE (base) == VAR_DECL
137 || TREE_CODE (base) == FUNCTION_DECL)
138 && !can_refer_decl_in_current_unit_p (base))
139 return NULL_TREE;
140 if (cfun && base && TREE_CODE (base) == VAR_DECL)
141 add_referenced_var (base);
142 /* Fixup types in global initializers. */
143 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
144 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
145 }
146 return cval;
147 }
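/* As an illustrative sketch (hypothetical names) of the
   POINTER_PLUS_EXPR case above, an initializer such as

     static int a[4];
     static int *p = &a[0] + 2;

   arrives here as a POINTER_PLUS_EXPR of &a with the INTEGER_CST
   offset 8 and is rewritten to the ADDR_EXPR &MEM[(void *)&a + 8B],
   a form that is_gimple_min_invariant accepts.  */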
148
149 /* If SYM is a constant variable with known value, return the value.
150 NULL_TREE is returned otherwise. */
151
152 tree
153 get_symbol_constant_value (tree sym)
154 {
155 if (const_value_known_p (sym))
156 {
157 tree val = DECL_INITIAL (sym);
158 if (val)
159 {
160 val = canonicalize_constructor_val (val);
161 if (val && is_gimple_min_invariant (val))
162 return val;
163 else
164 return NULL_TREE;
165 }
166 /* Variables declared 'const' without an initializer
167 have zero as the initializer if they may not be
168 overridden at link or run time. */
169 if (!val
170 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
171 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
172 return build_zero_cst (TREE_TYPE (sym));
173 }
174
175 return NULL_TREE;
176 }
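/* For example (illustrative), given

     static const int answer = 42;

   get_symbol_constant_value returns the INTEGER_CST 42 for ANSWER,
   while a non-overridable 'const' scalar with no initializer at all
   yields build_zero_cst of its type, per the code above.  */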
177
178
179
180 /* Subroutine of fold_stmt. We perform several simplifications of the
181 memory reference tree EXPR and make sure to re-gimplify them properly
182 after propagation of constant addresses. IS_LHS is true if the
183 reference is supposed to be an lvalue. */
184
185 static tree
186 maybe_fold_reference (tree expr, bool is_lhs)
187 {
188 tree *t = &expr;
189 tree result;
190
191 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
192 || TREE_CODE (expr) == REALPART_EXPR
193 || TREE_CODE (expr) == IMAGPART_EXPR)
194 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
195 return fold_unary_loc (EXPR_LOCATION (expr),
196 TREE_CODE (expr),
197 TREE_TYPE (expr),
198 TREE_OPERAND (expr, 0));
199 else if (TREE_CODE (expr) == BIT_FIELD_REF
200 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
201 return fold_ternary_loc (EXPR_LOCATION (expr),
202 TREE_CODE (expr),
203 TREE_TYPE (expr),
204 TREE_OPERAND (expr, 0),
205 TREE_OPERAND (expr, 1),
206 TREE_OPERAND (expr, 2));
207
208 while (handled_component_p (*t))
209 t = &TREE_OPERAND (*t, 0);
210
211 /* Canonicalize the MEM_REF's invariant address operand. Do this first
212 to avoid feeding non-canonical MEM_REFs elsewhere. */
213 if (TREE_CODE (*t) == MEM_REF
214 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
215 {
216 bool volatile_p = TREE_THIS_VOLATILE (*t);
217 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
218 TREE_OPERAND (*t, 0),
219 TREE_OPERAND (*t, 1));
220 if (tem)
221 {
222 TREE_THIS_VOLATILE (tem) = volatile_p;
223 *t = tem;
224 tem = maybe_fold_reference (expr, is_lhs);
225 if (tem)
226 return tem;
227 return expr;
228 }
229 }
230
231 if (!is_lhs
232 && (result = fold_const_aggregate_ref (expr))
233 && is_gimple_min_invariant (result))
234 return result;
235
236 /* Fold back MEM_REFs to reference trees. */
237 if (TREE_CODE (*t) == MEM_REF
238 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
239 && integer_zerop (TREE_OPERAND (*t, 1))
240 && (TREE_THIS_VOLATILE (*t)
241 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
242 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
243 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
244 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
245 /* We have to be careful here not to drop a required conversion
246 from the rhs to the lhs if is_lhs, but we don't have the
247 rhs here to verify that. Thus require strict type
248 compatibility. */
249 && types_compatible_p (TREE_TYPE (*t),
250 TREE_TYPE (TREE_OPERAND
251 (TREE_OPERAND (*t, 0), 0))))
252 {
253 tree tem;
254 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
255 tem = maybe_fold_reference (expr, is_lhs);
256 if (tem)
257 return tem;
258 return expr;
259 }
260 else if (TREE_CODE (*t) == TARGET_MEM_REF)
261 {
262 tree tem = maybe_fold_tmr (*t);
263 if (tem)
264 {
265 *t = tem;
266 tem = maybe_fold_reference (expr, is_lhs);
267 if (tem)
268 return tem;
269 return expr;
270 }
271 }
272
273 return NULL_TREE;
274 }
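/* Illustrative examples of the folds above: MEM[(int *)&a, 0] with 'a'
   a plain int variable folds back to the direct reference 'a', and a
   load such as

     static const int t[2] = { 1, 2 };
     ... = t[1];

   is replaced by the invariant 2 through fold_const_aggregate_ref.  */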
275
276
277 /* Attempt to fold an assignment statement pointed to by SI. Returns a
278 replacement rhs for the statement or NULL_TREE if no simplification
279 could be made. It is assumed that the operands have been previously
280 folded. */
281
282 static tree
283 fold_gimple_assign (gimple_stmt_iterator *si)
284 {
285 gimple stmt = gsi_stmt (*si);
286 enum tree_code subcode = gimple_assign_rhs_code (stmt);
287 location_t loc = gimple_location (stmt);
288
289 tree result = NULL_TREE;
290
291 switch (get_gimple_rhs_class (subcode))
292 {
293 case GIMPLE_SINGLE_RHS:
294 {
295 tree rhs = gimple_assign_rhs1 (stmt);
296
297 if (REFERENCE_CLASS_P (rhs))
298 return maybe_fold_reference (rhs, false);
299
300 else if (TREE_CODE (rhs) == ADDR_EXPR)
301 {
302 tree ref = TREE_OPERAND (rhs, 0);
303 tree tem = maybe_fold_reference (ref, true);
304 if (tem
305 && TREE_CODE (tem) == MEM_REF
306 && integer_zerop (TREE_OPERAND (tem, 1)))
307 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
308 else if (tem)
309 result = fold_convert (TREE_TYPE (rhs),
310 build_fold_addr_expr_loc (loc, tem));
311 else if (TREE_CODE (ref) == MEM_REF
312 && integer_zerop (TREE_OPERAND (ref, 1)))
313 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
314 }
315
316 else if (TREE_CODE (rhs) == CONSTRUCTOR
317 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
318 && (CONSTRUCTOR_NELTS (rhs)
319 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
320 {
321 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
322 unsigned i;
323 tree val;
324
325 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
326 if (TREE_CODE (val) != INTEGER_CST
327 && TREE_CODE (val) != REAL_CST
328 && TREE_CODE (val) != FIXED_CST)
329 return NULL_TREE;
330
331 return build_vector_from_ctor (TREE_TYPE (rhs),
332 CONSTRUCTOR_ELTS (rhs));
333 }
334
335 else if (DECL_P (rhs))
336 return unshare_expr (get_symbol_constant_value (rhs));
337
338 /* If we couldn't fold the RHS, hand over to the generic
339 fold routines. */
340 if (result == NULL_TREE)
341 result = fold (rhs);
342
343 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
344 that may have been added by fold, and "useless" type
345 conversions that might now be apparent due to propagation. */
346 STRIP_USELESS_TYPE_CONVERSION (result);
347
348 if (result != rhs && valid_gimple_rhs_p (result))
349 return result;
350
351 return NULL_TREE;
352 }
353 break;
354
355 case GIMPLE_UNARY_RHS:
356 {
357 tree rhs = gimple_assign_rhs1 (stmt);
358
359 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
360 if (result)
361 {
362 /* If the operation was a conversion do _not_ mark a
363 resulting constant with TREE_OVERFLOW if the original
364 constant was not. These conversions have implementation
365 defined behavior and retaining the TREE_OVERFLOW flag
366 here would confuse later passes such as VRP. */
367 if (CONVERT_EXPR_CODE_P (subcode)
368 && TREE_CODE (result) == INTEGER_CST
369 && TREE_CODE (rhs) == INTEGER_CST)
370 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
371
372 STRIP_USELESS_TYPE_CONVERSION (result);
373 if (valid_gimple_rhs_p (result))
374 return result;
375 }
376 }
377 break;
378
379 case GIMPLE_BINARY_RHS:
380 /* For boolean-typed X, try to canonicalize the comparisons
381 X == 0, X == 1, X != 0, and X != 1. */
382 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
383 || gimple_assign_rhs_code (stmt) == NE_EXPR)
384 {
385 tree lhs = gimple_assign_lhs (stmt);
386 tree op1 = gimple_assign_rhs1 (stmt);
387 tree op2 = gimple_assign_rhs2 (stmt);
388 tree type = TREE_TYPE (op1);
389
390 /* Check whether the comparison operands are of the same boolean
391 type as the result type.
392 Check that the second operand is an integer constant with value
393 one or zero. */
394 if (TREE_CODE (op2) == INTEGER_CST
395 && (integer_zerop (op2) || integer_onep (op2))
396 && useless_type_conversion_p (TREE_TYPE (lhs), type))
397 {
398 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
399 bool is_logical_not = false;
400
401 /* X == 0 and X != 1 are a logical NOT of X;
402 X == 1 and X != 0 are X itself. */
403 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
404 || (cmp_code == NE_EXPR && integer_onep (op2)))
405 is_logical_not = true;
406
407 if (is_logical_not == false)
408 result = op1;
409 /* Only for one-bit precision typed X is the transformation
410 !X -> ~X valid. */
411 else if (TYPE_PRECISION (type) == 1)
412 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
413 type, op1);
414 /* Otherwise we use !X -> X ^ 1. */
415 else
416 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
417 type, op1, build_int_cst (type, 1));
418
419 }
420 }
421
422 if (!result)
423 result = fold_binary_loc (loc, subcode,
424 TREE_TYPE (gimple_assign_lhs (stmt)),
425 gimple_assign_rhs1 (stmt),
426 gimple_assign_rhs2 (stmt));
427
428 if (result)
429 {
430 STRIP_USELESS_TYPE_CONVERSION (result);
431 if (valid_gimple_rhs_p (result))
432 return result;
433 }
434 break;
435
436 case GIMPLE_TERNARY_RHS:
437 /* Try to fold a conditional expression. */
438 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
439 {
440 tree op0 = gimple_assign_rhs1 (stmt);
441 tree tem;
442 bool set = false;
443 location_t cond_loc = gimple_location (stmt);
444
445 if (COMPARISON_CLASS_P (op0))
446 {
447 fold_defer_overflow_warnings ();
448 tem = fold_binary_loc (cond_loc,
449 TREE_CODE (op0), TREE_TYPE (op0),
450 TREE_OPERAND (op0, 0),
451 TREE_OPERAND (op0, 1));
452 /* This is actually a conditional expression, not a GIMPLE
453 conditional statement, however, the valid_gimple_rhs_p
454 test still applies. */
455 set = (tem && is_gimple_condexpr (tem)
456 && valid_gimple_rhs_p (tem));
457 fold_undefer_overflow_warnings (set, stmt, 0);
458 }
459 else if (is_gimple_min_invariant (op0))
460 {
461 tem = op0;
462 set = true;
463 }
464 else
465 return NULL_TREE;
466
467 if (set)
468 result = fold_build3_loc (cond_loc, COND_EXPR,
469 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
470 gimple_assign_rhs2 (stmt),
471 gimple_assign_rhs3 (stmt));
472 }
473
474 if (!result)
475 result = fold_ternary_loc (loc, subcode,
476 TREE_TYPE (gimple_assign_lhs (stmt)),
477 gimple_assign_rhs1 (stmt),
478 gimple_assign_rhs2 (stmt),
479 gimple_assign_rhs3 (stmt));
480
481 if (result)
482 {
483 STRIP_USELESS_TYPE_CONVERSION (result);
484 if (valid_gimple_rhs_p (result))
485 return result;
486 }
487 break;
488
489 case GIMPLE_INVALID_RHS:
490 gcc_unreachable ();
491 }
492
493 return NULL_TREE;
494 }
495
496 /* Attempt to fold a conditional statement. Return true if any changes were
497 made. We only attempt to fold the condition expression, and do not perform
498 any transformation that would require alteration of the cfg. It is
499 assumed that the operands have been previously folded. */
500
501 static bool
502 fold_gimple_cond (gimple stmt)
503 {
504 tree result = fold_binary_loc (gimple_location (stmt),
505 gimple_cond_code (stmt),
506 boolean_type_node,
507 gimple_cond_lhs (stmt),
508 gimple_cond_rhs (stmt));
509
510 if (result)
511 {
512 STRIP_USELESS_TYPE_CONVERSION (result);
513 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
514 {
515 gimple_cond_set_condition_from_tree (stmt, result);
516 return true;
517 }
518 }
519
520 return false;
521 }
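/* For example (illustrative), a condition left behind by constant
   propagation such as 'if (7 > 3)' folds to boolean_true_node, and
   gimple_cond_set_condition_from_tree rewrites the GIMPLE_COND to the
   equivalent always-true form.  */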
522
523 /* Convert EXPR into a GIMPLE value suitable for substitution on the
524 RHS of an assignment. Insert the necessary statements before
525 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
526 is replaced. If the call is expected to produce a result, then it
527 is replaced by an assignment of the new RHS to the result variable.
528 If the result is to be ignored, then the call is replaced by a
529 GIMPLE_NOP. A proper VDEF chain is retained by making the first
530 VUSE and the last VDEF of the whole sequence be the same as the replaced
531 statement and using new SSA names for stores in between. */
532
533 void
534 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
535 {
536 tree lhs;
537 tree tmp = NULL_TREE; /* Silence warning. */
538 gimple stmt, new_stmt;
539 gimple_stmt_iterator i;
540 gimple_seq stmts = gimple_seq_alloc ();
541 struct gimplify_ctx gctx;
542 gimple last = NULL;
543 gimple laststore = NULL;
544 tree reaching_vuse;
545
546 stmt = gsi_stmt (*si_p);
547
548 gcc_assert (is_gimple_call (stmt));
549
550 lhs = gimple_call_lhs (stmt);
551 reaching_vuse = gimple_vuse (stmt);
552
553 push_gimplify_context (&gctx);
554 gctx.into_ssa = gimple_in_ssa_p (cfun);
555
556 if (lhs == NULL_TREE)
557 {
558 gimplify_and_add (expr, &stmts);
559 /* We can end up folding a memcpy of an empty class assignment,
560 which gets optimized away by the C++ gimplification. */
561 if (gimple_seq_empty_p (stmts))
562 {
563 pop_gimplify_context (NULL);
564 if (gimple_in_ssa_p (cfun))
565 {
566 unlink_stmt_vdef (stmt);
567 release_defs (stmt);
568 }
569 gsi_remove (si_p, true);
570 return;
571 }
572 }
573 else
574 tmp = get_initialized_tmp_var (expr, &stmts, NULL);
575
576 pop_gimplify_context (NULL);
577
578 if (gimple_has_location (stmt))
579 annotate_all_with_location (stmts, gimple_location (stmt));
580
581 /* The replacement can expose previously unreferenced variables. */
582 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
583 {
584 if (last)
585 {
586 gsi_insert_before (si_p, last, GSI_NEW_STMT);
587 gsi_next (si_p);
588 }
589 new_stmt = gsi_stmt (i);
590 if (gimple_in_ssa_p (cfun))
591 {
592 find_new_referenced_vars (new_stmt);
593 mark_symbols_for_renaming (new_stmt);
594 }
595 /* If the new statement has a VUSE, update it with the exact SSA
596 name we know will reach this one. */
597 if (gimple_vuse (new_stmt))
598 {
599 /* If we've also seen a previous store, create a new VDEF for
600 it, and make that the new reaching VUSE. */
601 if (laststore)
602 {
603 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
604 gimple_set_vdef (laststore, reaching_vuse);
605 update_stmt (laststore);
606 laststore = NULL;
607 }
608 gimple_set_vuse (new_stmt, reaching_vuse);
609 gimple_set_modified (new_stmt, true);
610 }
611 if (gimple_assign_single_p (new_stmt)
612 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
613 {
614 laststore = new_stmt;
615 }
616 last = new_stmt;
617 }
618
619 if (lhs == NULL_TREE)
620 {
621 /* If we replace a call without LHS that has a VDEF and our new
622 sequence ends with a store we must make that store have the same
623 vdef in order not to break the sequencing. This can happen
624 for instance when folding memcpy calls into assignments. */
625 if (gimple_vdef (stmt) && laststore)
626 {
627 gimple_set_vdef (laststore, gimple_vdef (stmt));
628 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
629 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
630 update_stmt (laststore);
631 }
632 else if (gimple_in_ssa_p (cfun))
633 {
634 unlink_stmt_vdef (stmt);
635 release_defs (stmt);
636 }
637 new_stmt = last;
638 }
639 else
640 {
641 if (last)
642 {
643 gsi_insert_before (si_p, last, GSI_NEW_STMT);
644 gsi_next (si_p);
645 }
646 if (laststore && is_gimple_reg (lhs))
647 {
648 gimple_set_vdef (laststore, gimple_vdef (stmt));
649 update_stmt (laststore);
650 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
651 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = laststore;
652 laststore = NULL;
653 }
654 else if (laststore)
655 {
656 reaching_vuse = make_ssa_name (gimple_vop (cfun), laststore);
657 gimple_set_vdef (laststore, reaching_vuse);
658 update_stmt (laststore);
659 laststore = NULL;
660 }
661 new_stmt = gimple_build_assign (lhs, tmp);
662 if (!is_gimple_reg (tmp))
663 gimple_set_vuse (new_stmt, reaching_vuse);
664 if (!is_gimple_reg (lhs))
665 {
666 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
667 if (TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
668 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = new_stmt;
669 }
670 else if (reaching_vuse == gimple_vuse (stmt))
671 unlink_stmt_vdef (stmt);
672 }
673
674 gimple_set_location (new_stmt, gimple_location (stmt));
675 gsi_replace (si_p, new_stmt, false);
676 }
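/* Illustrative example of the VDEF bookkeeping above: folding

     memcpy (&dst, &src, sizeof (dst));

   into the assignment 'dst = src' makes the new store use the call's
   original VUSE and take over the call's VDEF, so the virtual operand
   chain around the replaced statement stays intact.  */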
677
678 /* Return the string length, maximum string length or maximum value of
679 ARG in LENGTH.
680 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
681 is not NULL and, for TYPE == 0, its value is not equal to the length
682 we determine or if we are unable to determine the length or value,
683 return false. VISITED is a bitmap of visited variables.
684 TYPE is 0 if string length should be returned, 1 for maximum string
685 length and 2 for maximum value ARG can have. */
686
687 static bool
688 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
689 {
690 tree var, val;
691 gimple def_stmt;
692
693 if (TREE_CODE (arg) != SSA_NAME)
694 {
695 if (TREE_CODE (arg) == COND_EXPR)
696 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
697 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
698 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
699 else if (TREE_CODE (arg) == ADDR_EXPR
700 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
701 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
702 {
703 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
704 if (TREE_CODE (aop0) == INDIRECT_REF
705 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
706 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
707 length, visited, type);
708 }
709
710 if (type == 2)
711 {
712 val = arg;
713 if (TREE_CODE (val) != INTEGER_CST
714 || tree_int_cst_sgn (val) < 0)
715 return false;
716 }
717 else
718 val = c_strlen (arg, 1);
719 if (!val)
720 return false;
721
722 if (*length)
723 {
724 if (type > 0)
725 {
726 if (TREE_CODE (*length) != INTEGER_CST
727 || TREE_CODE (val) != INTEGER_CST)
728 return false;
729
730 if (tree_int_cst_lt (*length, val))
731 *length = val;
732 return true;
733 }
734 else if (simple_cst_equal (val, *length) != 1)
735 return false;
736 }
737
738 *length = val;
739 return true;
740 }
741
742 /* If we were already here, break the infinite cycle. */
743 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
744 return true;
745
746 var = arg;
747 def_stmt = SSA_NAME_DEF_STMT (var);
748
749 switch (gimple_code (def_stmt))
750 {
751 case GIMPLE_ASSIGN:
752 /* The RHS of the statement defining VAR must either have a
753 constant length or come from another SSA_NAME with a constant
754 length. */
755 if (gimple_assign_single_p (def_stmt)
756 || gimple_assign_unary_nop_p (def_stmt))
757 {
758 tree rhs = gimple_assign_rhs1 (def_stmt);
759 return get_maxval_strlen (rhs, length, visited, type);
760 }
761 return false;
762
763 case GIMPLE_PHI:
764 {
765 /* All the arguments of the PHI node must have the same constant
766 length. */
767 unsigned i;
768
769 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
770 {
771 tree arg = gimple_phi_arg (def_stmt, i)->def;
772
773 /* If this PHI has itself as an argument, we cannot
774 determine the string length of this argument. However,
775 if we can find a constant string length for the other
776 PHI args then we can still be sure that this is a
777 constant string length. So be optimistic and just
778 continue with the next argument. */
779 if (arg == gimple_phi_result (def_stmt))
780 continue;
781
782 if (!get_maxval_strlen (arg, length, visited, type))
783 return false;
784 }
785 }
786 return true;
787
788 default:
789 return false;
790 }
791 }
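/* For example (illustrative), with TYPE == 0 and

     p_1 = PHI <&"ab"[0], &"cd"[0]>
     ... = strlen (p_1);

   both PHI arguments have constant string length 2, so *LENGTH is set
   to 2; had the two lengths differed, the function would have
   returned false.  */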
792
793
794 /* Fold builtin call in statement STMT. Returns a simplified tree.
795 We may return a non-constant expression, including another call
796 to a different function and with different arguments, e.g.,
797 substituting memcpy for strcpy when the string length is known.
798 Note that some builtins expand into inline code that may not
799 be valid in GIMPLE. Callers must take care. */
800
801 tree
802 gimple_fold_builtin (gimple stmt)
803 {
804 tree result, val[3];
805 tree callee, a;
806 int arg_idx, type;
807 bitmap visited;
808 bool ignore;
809 int nargs;
810 location_t loc = gimple_location (stmt);
811
812 gcc_assert (is_gimple_call (stmt));
813
814 ignore = (gimple_call_lhs (stmt) == NULL);
815
816 /* First try the generic builtin folder. If that succeeds, return the
817 result directly. */
818 result = fold_call_stmt (stmt, ignore);
819 if (result)
820 {
821 if (ignore)
822 STRIP_NOPS (result);
823 return result;
824 }
825
826 /* Ignore MD builtins. */
827 callee = gimple_call_fndecl (stmt);
828 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
829 return NULL_TREE;
830
831 /* If the builtin could not be folded, and it has no argument list,
832 we're done. */
833 nargs = gimple_call_num_args (stmt);
834 if (nargs == 0)
835 return NULL_TREE;
836
837 /* Limit the work to builtins we know how to simplify. */
838 switch (DECL_FUNCTION_CODE (callee))
839 {
840 case BUILT_IN_STRLEN:
841 case BUILT_IN_FPUTS:
842 case BUILT_IN_FPUTS_UNLOCKED:
843 arg_idx = 0;
844 type = 0;
845 break;
846 case BUILT_IN_STRCPY:
847 case BUILT_IN_STRNCPY:
848 arg_idx = 1;
849 type = 0;
850 break;
851 case BUILT_IN_MEMCPY_CHK:
852 case BUILT_IN_MEMPCPY_CHK:
853 case BUILT_IN_MEMMOVE_CHK:
854 case BUILT_IN_MEMSET_CHK:
855 case BUILT_IN_STRNCPY_CHK:
856 arg_idx = 2;
857 type = 2;
858 break;
859 case BUILT_IN_STRCPY_CHK:
860 case BUILT_IN_STPCPY_CHK:
861 arg_idx = 1;
862 type = 1;
863 break;
864 case BUILT_IN_SNPRINTF_CHK:
865 case BUILT_IN_VSNPRINTF_CHK:
866 arg_idx = 1;
867 type = 2;
868 break;
869 default:
870 return NULL_TREE;
871 }
872
873 if (arg_idx >= nargs)
874 return NULL_TREE;
875
876 /* Try to use the dataflow information gathered by the CCP process. */
877 visited = BITMAP_ALLOC (NULL);
878 bitmap_clear (visited);
879
880 memset (val, 0, sizeof (val));
881 a = gimple_call_arg (stmt, arg_idx);
882 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
883 val[arg_idx] = NULL_TREE;
884
885 BITMAP_FREE (visited);
886
887 result = NULL_TREE;
888 switch (DECL_FUNCTION_CODE (callee))
889 {
890 case BUILT_IN_STRLEN:
891 if (val[0] && nargs == 1)
892 {
893 tree new_val =
894 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
895
896 /* If the result is not a valid gimple value, or not a cast
897 of a valid gimple value, then we cannot use the result. */
898 if (is_gimple_val (new_val)
899 || (CONVERT_EXPR_P (new_val)
900 && is_gimple_val (TREE_OPERAND (new_val, 0))))
901 return new_val;
902 }
903 break;
904
905 case BUILT_IN_STRCPY:
906 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
907 result = fold_builtin_strcpy (loc, callee,
908 gimple_call_arg (stmt, 0),
909 gimple_call_arg (stmt, 1),
910 val[1]);
911 break;
912
913 case BUILT_IN_STRNCPY:
914 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
915 result = fold_builtin_strncpy (loc, callee,
916 gimple_call_arg (stmt, 0),
917 gimple_call_arg (stmt, 1),
918 gimple_call_arg (stmt, 2),
919 val[1]);
920 break;
921
922 case BUILT_IN_FPUTS:
923 if (nargs == 2)
924 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
925 gimple_call_arg (stmt, 1),
926 ignore, false, val[0]);
927 break;
928
929 case BUILT_IN_FPUTS_UNLOCKED:
930 if (nargs == 2)
931 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
932 gimple_call_arg (stmt, 1),
933 ignore, true, val[0]);
934 break;
935
936 case BUILT_IN_MEMCPY_CHK:
937 case BUILT_IN_MEMPCPY_CHK:
938 case BUILT_IN_MEMMOVE_CHK:
939 case BUILT_IN_MEMSET_CHK:
940 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
941 result = fold_builtin_memory_chk (loc, callee,
942 gimple_call_arg (stmt, 0),
943 gimple_call_arg (stmt, 1),
944 gimple_call_arg (stmt, 2),
945 gimple_call_arg (stmt, 3),
946 val[2], ignore,
947 DECL_FUNCTION_CODE (callee));
948 break;
949
950 case BUILT_IN_STRCPY_CHK:
951 case BUILT_IN_STPCPY_CHK:
952 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
953 result = fold_builtin_stxcpy_chk (loc, callee,
954 gimple_call_arg (stmt, 0),
955 gimple_call_arg (stmt, 1),
956 gimple_call_arg (stmt, 2),
957 val[1], ignore,
958 DECL_FUNCTION_CODE (callee));
959 break;
960
961 case BUILT_IN_STRNCPY_CHK:
962 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
963 result = fold_builtin_strncpy_chk (loc, gimple_call_arg (stmt, 0),
964 gimple_call_arg (stmt, 1),
965 gimple_call_arg (stmt, 2),
966 gimple_call_arg (stmt, 3),
967 val[2]);
968 break;
969
970 case BUILT_IN_SNPRINTF_CHK:
971 case BUILT_IN_VSNPRINTF_CHK:
972 if (val[1] && is_gimple_val (val[1]))
973 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
974 DECL_FUNCTION_CODE (callee));
975 break;
976
977 default:
978 gcc_unreachable ();
979 }
980
981 if (result && ignore)
982 result = fold_ignored_result (result);
983 return result;
984 }
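/* Illustrative example: for

     char buf[8];
     strcpy (buf, "hi");

   the known source length lets fold_builtin_strcpy hand back a call
   to memcpy (buf, "hi", 3), i.e. a different call with different
   arguments, as the comment above gimple_fold_builtin warns.  */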
985
986 /* Generate code adjusting the first parameter of the call statement
987 pointed to by GSI by DELTA. */
988
989 void
990 gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
991 {
992 gimple call_stmt = gsi_stmt (*gsi);
993 tree parm, tmp;
994 gimple new_stmt;
995
996 delta = convert_to_ptrofftype (delta);
997 gcc_assert (gimple_call_num_args (call_stmt) >= 1);
998 parm = gimple_call_arg (call_stmt, 0);
999 gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
1000 tmp = create_tmp_var (TREE_TYPE (parm), NULL);
1001 add_referenced_var (tmp);
1002
1003 tmp = make_ssa_name (tmp, NULL);
1004 new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
1005 SSA_NAME_DEF_STMT (tmp) = new_stmt;
1006 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1007 gimple_call_set_arg (call_stmt, 0, tmp);
1008 }
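/* For example (illustrative, hypothetical SSA names), with DELTA == 4
   the call

     f (this_2, ...);

   becomes

     tmp_3 = this_2 + 4;
     f (tmp_3, ...);

   where tmp_3 is a fresh SSA name defined by the inserted
   POINTER_PLUS_EXPR statement.  */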
1009
1010 /* Return a binfo to be used for devirtualization of calls based on an object
1011 represented by a declaration (i.e. a global or automatically allocated one),
1012 or NULL if it cannot be found or is not safe. CST is expected to be an
1013 ADDR_EXPR of such an object, otherwise the function returns NULL. Currently
1014 it is safe to use such a binfo only if it has no base binfo (i.e. no ancestors). */
1015
1016 tree
1017 gimple_extract_devirt_binfo_from_cst (tree cst)
1018 {
1019 HOST_WIDE_INT offset, size, max_size;
1020 tree base, type, expected_type, binfo;
1021 bool last_artificial = false;
1022
1023 if (!flag_devirtualize
1024 || TREE_CODE (cst) != ADDR_EXPR
1025 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1026 return NULL_TREE;
1027
1028 cst = TREE_OPERAND (cst, 0);
1029 expected_type = TREE_TYPE (cst);
1030 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1031 type = TREE_TYPE (base);
1032 if (!DECL_P (base)
1033 || max_size == -1
1034 || max_size != size
1035 || TREE_CODE (type) != RECORD_TYPE)
1036 return NULL_TREE;
1037
1038 /* Find the sub-object the constant actually refers to and mark whether it is
1039 an artificial one (as opposed to a user-defined one). */
1040 while (true)
1041 {
1042 HOST_WIDE_INT pos, size;
1043 tree fld;
1044
1045 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
1046 break;
1047 if (offset < 0)
1048 return NULL_TREE;
1049
1050 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1051 {
1052 if (TREE_CODE (fld) != FIELD_DECL)
1053 continue;
1054
1055 pos = int_bit_position (fld);
1056 size = tree_low_cst (DECL_SIZE (fld), 1);
1057 if (pos <= offset && (pos + size) > offset)
1058 break;
1059 }
1060 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1061 return NULL_TREE;
1062
1063 last_artificial = DECL_ARTIFICIAL (fld);
1064 type = TREE_TYPE (fld);
1065 offset -= pos;
1066 }
1067 /* Artificial sub-objects are ancestors; we do not want to use them for
1068 devirtualization, at least not here. */
1069 if (last_artificial)
1070 return NULL_TREE;
1071 binfo = TYPE_BINFO (type);
1072 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1073 return NULL_TREE;
1074 else
1075 return binfo;
1076 }
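/* Illustrative example: for

     struct A { virtual int f (); } a;   // no base classes

   an OBJ_TYPE_REF whose object operand is &a makes this function
   return the TYPE_BINFO of A, allowing the virtual call to be turned
   into a direct call to A::f; if A had ancestors, NULL_TREE would be
   returned instead.  */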
1077
1078 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1079 The statement may be replaced by another statement, e.g., if the call
1080 simplifies to a constant value. Return true if any changes were made.
1081 It is assumed that the operands have been previously folded. */
1082
1083 bool
1084 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1085 {
1086 gimple stmt = gsi_stmt (*gsi);
1087 tree callee;
1088
1089 /* Check for builtins that CCP can handle using information not
1090 available in the generic fold routines. */
1091 callee = gimple_call_fndecl (stmt);
1092 if (!inplace && callee && DECL_BUILT_IN (callee))
1093 {
1094 tree result = gimple_fold_builtin (stmt);
1095
1096 if (result)
1097 {
1098 if (!update_call_from_tree (gsi, result))
1099 gimplify_and_update_call_from_tree (gsi, result);
1100 return true;
1101 }
1102 }
1103
1104 /* Check for virtual calls that became direct calls. */
1105 callee = gimple_call_fn (stmt);
1106 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1107 {
1108 tree binfo, fndecl, obj;
1109 HOST_WIDE_INT token;
1110
1111 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1112 {
1113 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1114 return true;
1115 }
1116
1117 obj = OBJ_TYPE_REF_OBJECT (callee);
1118 binfo = gimple_extract_devirt_binfo_from_cst (obj);
1119 if (!binfo)
1120 return false;
1121 token = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1122 fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1123 if (!fndecl)
1124 return false;
1125 gimple_call_set_fndecl (stmt, fndecl);
1126 return true;
1127 }
1128
1129 return false;
1130 }
1131
1132 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1133 distinguishes the two cases. */
1134
1135 static bool
1136 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1137 {
1138 bool changed = false;
1139 gimple stmt = gsi_stmt (*gsi);
1140 unsigned i;
1141 gimple_stmt_iterator gsinext = *gsi;
1142 gimple next_stmt;
1143
1144 gsi_next (&gsinext);
1145 next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
1146
1147 /* Fold the main computation performed by the statement. */
1148 switch (gimple_code (stmt))
1149 {
1150 case GIMPLE_ASSIGN:
1151 {
1152 unsigned old_num_ops = gimple_num_ops (stmt);
1153 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1154 tree lhs = gimple_assign_lhs (stmt);
1155 tree new_rhs;
1156 /* First canonicalize operand order. This avoids building new
1157 trees if this is the only thing fold would later do. */
1158 if ((commutative_tree_code (subcode)
1159 || commutative_ternary_tree_code (subcode))
1160 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1161 gimple_assign_rhs2 (stmt), false))
1162 {
1163 tree tem = gimple_assign_rhs1 (stmt);
1164 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1165 gimple_assign_set_rhs2 (stmt, tem);
1166 changed = true;
1167 }
1168 new_rhs = fold_gimple_assign (gsi);
1169 if (new_rhs
1170 && !useless_type_conversion_p (TREE_TYPE (lhs),
1171 TREE_TYPE (new_rhs)))
1172 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1173 if (new_rhs
1174 && (!inplace
1175 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1176 {
1177 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1178 changed = true;
1179 }
1180 break;
1181 }
1182
1183 case GIMPLE_COND:
1184 changed |= fold_gimple_cond (stmt);
1185 break;
1186
1187 case GIMPLE_CALL:
1188 /* Fold *& in call arguments. */
1189 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1190 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1191 {
1192 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1193 if (tmp)
1194 {
1195 gimple_call_set_arg (stmt, i, tmp);
1196 changed = true;
1197 }
1198 }
1199 changed |= gimple_fold_call (gsi, inplace);
1200 break;
1201
1202 case GIMPLE_ASM:
1203 /* Fold *& in asm operands. */
1204 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1205 {
1206 tree link = gimple_asm_output_op (stmt, i);
1207 tree op = TREE_VALUE (link);
1208 if (REFERENCE_CLASS_P (op)
1209 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1210 {
1211 TREE_VALUE (link) = op;
1212 changed = true;
1213 }
1214 }
1215 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1216 {
1217 tree link = gimple_asm_input_op (stmt, i);
1218 tree op = TREE_VALUE (link);
1219 if (REFERENCE_CLASS_P (op)
1220 && (op = maybe_fold_reference (op, false)) != NULL_TREE)
1221 {
1222 TREE_VALUE (link) = op;
1223 changed = true;
1224 }
1225 }
1226 break;
1227
1228 case GIMPLE_DEBUG:
1229 if (gimple_debug_bind_p (stmt))
1230 {
1231 tree val = gimple_debug_bind_get_value (stmt);
1232 if (val
1233 && REFERENCE_CLASS_P (val))
1234 {
1235 tree tem = maybe_fold_reference (val, false);
1236 if (tem)
1237 {
1238 gimple_debug_bind_set_value (stmt, tem);
1239 changed = true;
1240 }
1241 }
1242 }
1243 break;
1244
1245 default:;
1246 }
1247
1248 /* If stmt folds into nothing and it was the last stmt in a bb,
1249 don't call gsi_stmt. */
1250 if (gsi_end_p (*gsi))
1251 {
1252 gcc_assert (next_stmt == NULL);
1253 return changed;
1254 }
1255
1256 stmt = gsi_stmt (*gsi);
1257
1258 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1259 as we'd be changing the next stmt. */
1260 if (gimple_has_lhs (stmt) && stmt != next_stmt)
1261 {
1262 tree lhs = gimple_get_lhs (stmt);
1263 if (lhs && REFERENCE_CLASS_P (lhs))
1264 {
1265 tree new_lhs = maybe_fold_reference (lhs, true);
1266 if (new_lhs)
1267 {
1268 gimple_set_lhs (stmt, new_lhs);
1269 changed = true;
1270 }
1271 }
1272 }
1273
1274 return changed;
1275 }
1276
1277 /* Fold the statement pointed to by GSI. In some cases, this function may
1278 replace the whole statement with a new one. Returns true iff folding
1279 makes any changes.
1280 The statement pointed to by GSI should be in valid gimple form but may
1281 be in unfolded state as resulting from for example constant propagation
1282 which can produce *&x = 0. */
1283
1284 bool
1285 fold_stmt (gimple_stmt_iterator *gsi)
1286 {
1287 return fold_stmt_1 (gsi, false);
1288 }
1289
1290 /* Perform the minimal folding on statement *GSI. Only operations like
1291 *&x created by constant propagation are handled. The statement cannot
1292 be replaced with a new one. Return true if the statement was
1293 changed, false otherwise.
1294 The statement *GSI should be in valid gimple form but may
1295 be in unfolded state as resulting from for example constant propagation
1296 which can produce *&x = 0. */
1297
1298 bool
1299 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1300 {
1301 gimple stmt = gsi_stmt (*gsi);
1302 bool changed = fold_stmt_1 (gsi, true);
1303 gcc_assert (gsi_stmt (*gsi) == stmt);
1304 return changed;
1305 }
1306
1307 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1308 if EXPR is null or we don't know how.
1309 If non-null, the result always has boolean type. */
1310
1311 static tree
1312 canonicalize_bool (tree expr, bool invert)
1313 {
1314 if (!expr)
1315 return NULL_TREE;
1316 else if (invert)
1317 {
1318 if (integer_nonzerop (expr))
1319 return boolean_false_node;
1320 else if (integer_zerop (expr))
1321 return boolean_true_node;
1322 else if (TREE_CODE (expr) == SSA_NAME)
1323 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1324 build_int_cst (TREE_TYPE (expr), 0));
1325 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1326 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1327 boolean_type_node,
1328 TREE_OPERAND (expr, 0),
1329 TREE_OPERAND (expr, 1));
1330 else
1331 return NULL_TREE;
1332 }
1333 else
1334 {
1335 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1336 return expr;
1337 if (integer_nonzerop (expr))
1338 return boolean_true_node;
1339 else if (integer_zerop (expr))
1340 return boolean_false_node;
1341 else if (TREE_CODE (expr) == SSA_NAME)
1342 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1343 build_int_cst (TREE_TYPE (expr), 0));
1344 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1345 return fold_build2 (TREE_CODE (expr),
1346 boolean_type_node,
1347 TREE_OPERAND (expr, 0),
1348 TREE_OPERAND (expr, 1));
1349 else
1350 return NULL_TREE;
1351 }
1352 }
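/* For example (illustrative), canonicalize_bool (x_1, true) for an SSA
   name x_1 yields the boolean comparison x_1 == 0, while
   canonicalize_bool (a_2 < b_3, true) yields a_2 >= b_3 via
   invert_tree_comparison.  */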
1353
1354 /* Check to see if a boolean expression EXPR is logically equivalent to the
1355 comparison (OP1 CODE OP2). Check for various identities involving
1356 SSA_NAMEs. */
1357
1358 static bool
1359 same_bool_comparison_p (const_tree expr, enum tree_code code,
1360 const_tree op1, const_tree op2)
1361 {
1362 gimple s;
1363
1364 /* The obvious case. */
1365 if (TREE_CODE (expr) == code
1366 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1367 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1368 return true;
1369
1370 /* Check for comparing (name, name != 0) and the case where expr
1371 is an SSA_NAME with a definition matching the comparison. */
1372 if (TREE_CODE (expr) == SSA_NAME
1373 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1374 {
1375 if (operand_equal_p (expr, op1, 0))
1376 return ((code == NE_EXPR && integer_zerop (op2))
1377 || (code == EQ_EXPR && integer_nonzerop (op2)));
1378 s = SSA_NAME_DEF_STMT (expr);
1379 if (is_gimple_assign (s)
1380 && gimple_assign_rhs_code (s) == code
1381 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1382 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1383 return true;
1384 }
1385
1386 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1387 of name is a comparison, recurse. */
1388 if (TREE_CODE (op1) == SSA_NAME
1389 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1390 {
1391 s = SSA_NAME_DEF_STMT (op1);
1392 if (is_gimple_assign (s)
1393 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1394 {
1395 enum tree_code c = gimple_assign_rhs_code (s);
1396 if ((c == NE_EXPR && integer_zerop (op2))
1397 || (c == EQ_EXPR && integer_nonzerop (op2)))
1398 return same_bool_comparison_p (expr, c,
1399 gimple_assign_rhs1 (s),
1400 gimple_assign_rhs2 (s));
1401 if ((c == EQ_EXPR && integer_zerop (op2))
1402 || (c == NE_EXPR && integer_nonzerop (op2)))
1403 return same_bool_comparison_p (expr,
1404 invert_tree_comparison (c, false),
1405 gimple_assign_rhs1 (s),
1406 gimple_assign_rhs2 (s));
1407 }
1408 }
1409 return false;
1410 }
1411
1412 /* Check to see if two boolean expressions OP1 and OP2 are logically
1413 equivalent. */
1414
1415 static bool
1416 same_bool_result_p (const_tree op1, const_tree op2)
1417 {
1418 /* Simple cases first. */
1419 if (operand_equal_p (op1, op2, 0))
1420 return true;
1421
1422 /* Check the cases where at least one of the operands is a comparison.
1423 These are a bit smarter than operand_equal_p in that they apply some
1424 identities on SSA_NAMEs. */
1425 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1426 && same_bool_comparison_p (op1, TREE_CODE (op2),
1427 TREE_OPERAND (op2, 0),
1428 TREE_OPERAND (op2, 1)))
1429 return true;
1430 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1431 && same_bool_comparison_p (op2, TREE_CODE (op1),
1432 TREE_OPERAND (op1, 0),
1433 TREE_OPERAND (op1, 1)))
1434 return true;
1435
1436 /* Default case. */
1437 return false;
1438 }
1439
1440 /* Forward declarations for some mutually recursive functions. */
1441
1442 static tree
1443 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1444 enum tree_code code2, tree op2a, tree op2b);
1445 static tree
1446 and_var_with_comparison (tree var, bool invert,
1447 enum tree_code code2, tree op2a, tree op2b);
1448 static tree
1449 and_var_with_comparison_1 (gimple stmt,
1450 enum tree_code code2, tree op2a, tree op2b);
1451 static tree
1452 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1453 enum tree_code code2, tree op2a, tree op2b);
1454 static tree
1455 or_var_with_comparison (tree var, bool invert,
1456 enum tree_code code2, tree op2a, tree op2b);
1457 static tree
1458 or_var_with_comparison_1 (gimple stmt,
1459 enum tree_code code2, tree op2a, tree op2b);
1460
1461 /* Helper function for and_comparisons_1: try to simplify the AND of the
1462 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1463 If INVERT is true, invert the value of VAR before doing the AND.
1464 Return NULL_TREE if we can't simplify this to a single expression. */
1465
1466 static tree
1467 and_var_with_comparison (tree var, bool invert,
1468 enum tree_code code2, tree op2a, tree op2b)
1469 {
1470 tree t;
1471 gimple stmt = SSA_NAME_DEF_STMT (var);
1472
1473 /* We can only deal with variables whose definitions are assignments. */
1474 if (!is_gimple_assign (stmt))
1475 return NULL_TREE;
1476
1477 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1478 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1479 Then we only have to consider the simpler non-inverted cases. */
1480 if (invert)
1481 t = or_var_with_comparison_1 (stmt,
1482 invert_tree_comparison (code2, false),
1483 op2a, op2b);
1484 else
1485 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1486 return canonicalize_bool (t, invert);
1487 }
1488
1489 /* Try to simplify the AND of the ssa variable defined by the assignment
1490 STMT with the comparison specified by (OP2A CODE2 OP2B).
1491 Return NULL_TREE if we can't simplify this to a single expression. */
1492
1493 static tree
1494 and_var_with_comparison_1 (gimple stmt,
1495 enum tree_code code2, tree op2a, tree op2b)
1496 {
1497 tree var = gimple_assign_lhs (stmt);
1498 tree true_test_var = NULL_TREE;
1499 tree false_test_var = NULL_TREE;
1500 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1501
1502 /* Check for identities like (var AND (var == 0)) => false. */
1503 if (TREE_CODE (op2a) == SSA_NAME
1504 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1505 {
1506 if ((code2 == NE_EXPR && integer_zerop (op2b))
1507 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1508 {
1509 true_test_var = op2a;
1510 if (var == true_test_var)
1511 return var;
1512 }
1513 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1514 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1515 {
1516 false_test_var = op2a;
1517 if (var == false_test_var)
1518 return boolean_false_node;
1519 }
1520 }
1521
1522 /* If the definition is a comparison, recurse on it. */
1523 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1524 {
1525 tree t = and_comparisons_1 (innercode,
1526 gimple_assign_rhs1 (stmt),
1527 gimple_assign_rhs2 (stmt),
1528 code2,
1529 op2a,
1530 op2b);
1531 if (t)
1532 return t;
1533 }
1534
1535 /* If the definition is an AND or OR expression, we may be able to
1536 simplify by reassociating. */
1537 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1538 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1539 {
1540 tree inner1 = gimple_assign_rhs1 (stmt);
1541 tree inner2 = gimple_assign_rhs2 (stmt);
1542 gimple s;
1543 tree t;
1544 tree partial = NULL_TREE;
1545 bool is_and = (innercode == BIT_AND_EXPR);
1546
1547 /* Check for boolean identities that don't require recursive examination
1548 of inner1/inner2:
1549 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1550 inner1 AND (inner1 OR inner2) => inner1
1551 !inner1 AND (inner1 AND inner2) => false
1552 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1553 Likewise for similar cases involving inner2. */
1554 if (inner1 == true_test_var)
1555 return (is_and ? var : inner1);
1556 else if (inner2 == true_test_var)
1557 return (is_and ? var : inner2);
1558 else if (inner1 == false_test_var)
1559 return (is_and
1560 ? boolean_false_node
1561 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1562 else if (inner2 == false_test_var)
1563 return (is_and
1564 ? boolean_false_node
1565 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1566
1567 /* Next, redistribute/reassociate the AND across the inner tests.
1568 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1569 if (TREE_CODE (inner1) == SSA_NAME
1570 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1571 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1572 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1573 gimple_assign_rhs1 (s),
1574 gimple_assign_rhs2 (s),
1575 code2, op2a, op2b)))
1576 {
1577 /* Handle the AND case, where we are reassociating:
1578 (inner1 AND inner2) AND (op2a code2 op2b)
1579 => (t AND inner2)
1580 If the partial result t is a constant, we win. Otherwise
1581 continue on to try reassociating with the other inner test. */
1582 if (is_and)
1583 {
1584 if (integer_onep (t))
1585 return inner2;
1586 else if (integer_zerop (t))
1587 return boolean_false_node;
1588 }
1589
1590 /* Handle the OR case, where we are redistributing:
1591 (inner1 OR inner2) AND (op2a code2 op2b)
1592 => (t OR (inner2 AND (op2a code2 op2b))) */
1593 else if (integer_onep (t))
1594 return boolean_true_node;
1595
1596 /* Save partial result for later. */
1597 partial = t;
1598 }
1599
1600 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1601 if (TREE_CODE (inner2) == SSA_NAME
1602 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1603 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1604 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1605 gimple_assign_rhs1 (s),
1606 gimple_assign_rhs2 (s),
1607 code2, op2a, op2b)))
1608 {
1609 /* Handle the AND case, where we are reassociating:
1610 (inner1 AND inner2) AND (op2a code2 op2b)
1611 => (inner1 AND t) */
1612 if (is_and)
1613 {
1614 if (integer_onep (t))
1615 return inner1;
1616 else if (integer_zerop (t))
1617 return boolean_false_node;
1618 /* If both are the same, we can apply the identity
1619 (x AND x) == x. */
1620 else if (partial && same_bool_result_p (t, partial))
1621 return t;
1622 }
1623
1624 /* Handle the OR case, where we are redistributing:
1625 (inner1 OR inner2) AND (op2a code2 op2b)
1626 => (t OR (inner1 AND (op2a code2 op2b)))
1627 => (t OR partial) */
1628 else
1629 {
1630 if (integer_onep (t))
1631 return boolean_true_node;
1632 else if (partial)
1633 {
1634 /* We already got a simplification for the other
1635 operand to the redistributed OR expression. The
1636 interesting case is when at least one is false.
1637 Or, if both are the same, we can apply the identity
1638 (x OR x) == x. */
1639 if (integer_zerop (partial))
1640 return t;
1641 else if (integer_zerop (t))
1642 return partial;
1643 else if (same_bool_result_p (t, partial))
1644 return t;
1645 }
1646 }
1647 }
1648 }
1649 return NULL_TREE;
1650 }
1651
1652 /* Try to simplify the AND of two comparisons defined by
1653 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1654 If this can be done without constructing an intermediate value,
1655 return the resulting tree; otherwise NULL_TREE is returned.
1656 This function is deliberately asymmetric as it recurses on SSA_DEFs
1657 in the first comparison but not the second. */
1658
1659 static tree
1660 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1661 enum tree_code code2, tree op2a, tree op2b)
1662 {
1663 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1664 if (operand_equal_p (op1a, op2a, 0)
1665 && operand_equal_p (op1b, op2b, 0))
1666 {
1667 /* Result will be either NULL_TREE, or a combined comparison. */
1668 tree t = combine_comparisons (UNKNOWN_LOCATION,
1669 TRUTH_ANDIF_EXPR, code1, code2,
1670 boolean_type_node, op1a, op1b);
1671 if (t)
1672 return t;
1673 }
1674
1675 /* Likewise the swapped case of the above. */
1676 if (operand_equal_p (op1a, op2b, 0)
1677 && operand_equal_p (op1b, op2a, 0))
1678 {
1679 /* Result will be either NULL_TREE, or a combined comparison. */
1680 tree t = combine_comparisons (UNKNOWN_LOCATION,
1681 TRUTH_ANDIF_EXPR, code1,
1682 swap_tree_comparison (code2),
1683 boolean_type_node, op1a, op1b);
1684 if (t)
1685 return t;
1686 }
1687
1688 /* If both comparisons are of the same value against constants, we might
1689 be able to merge them. */
1690 if (operand_equal_p (op1a, op2a, 0)
1691 && TREE_CODE (op1b) == INTEGER_CST
1692 && TREE_CODE (op2b) == INTEGER_CST)
1693 {
1694 int cmp = tree_int_cst_compare (op1b, op2b);
1695
1696 /* If we have (op1a == op1b), we should either be able to
1697 return that or FALSE, depending on whether the constant op1b
1698 also satisfies the other comparison against op2b. */
1699 if (code1 == EQ_EXPR)
1700 {
1701 bool done = true;
1702 bool val;
1703 switch (code2)
1704 {
1705 case EQ_EXPR: val = (cmp == 0); break;
1706 case NE_EXPR: val = (cmp != 0); break;
1707 case LT_EXPR: val = (cmp < 0); break;
1708 case GT_EXPR: val = (cmp > 0); break;
1709 case LE_EXPR: val = (cmp <= 0); break;
1710 case GE_EXPR: val = (cmp >= 0); break;
1711 default: done = false;
1712 }
1713 if (done)
1714 {
1715 if (val)
1716 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1717 else
1718 return boolean_false_node;
1719 }
1720 }
1721 /* Likewise if the second comparison is an == comparison. */
1722 else if (code2 == EQ_EXPR)
1723 {
1724 bool done = true;
1725 bool val;
1726 switch (code1)
1727 {
1728 case EQ_EXPR: val = (cmp == 0); break;
1729 case NE_EXPR: val = (cmp != 0); break;
1730 case LT_EXPR: val = (cmp > 0); break;
1731 case GT_EXPR: val = (cmp < 0); break;
1732 case LE_EXPR: val = (cmp >= 0); break;
1733 case GE_EXPR: val = (cmp <= 0); break;
1734 default: done = false;
1735 }
1736 if (done)
1737 {
1738 if (val)
1739 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1740 else
1741 return boolean_false_node;
1742 }
1743 }
1744
1745 /* Same business with inequality tests. */
1746 else if (code1 == NE_EXPR)
1747 {
1748 bool val;
1749 switch (code2)
1750 {
1751 case EQ_EXPR: val = (cmp != 0); break;
1752 case NE_EXPR: val = (cmp == 0); break;
1753 case LT_EXPR: val = (cmp >= 0); break;
1754 case GT_EXPR: val = (cmp <= 0); break;
1755 case LE_EXPR: val = (cmp > 0); break;
1756 case GE_EXPR: val = (cmp < 0); break;
1757 default:
1758 val = false;
1759 }
1760 if (val)
1761 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1762 }
1763 else if (code2 == NE_EXPR)
1764 {
1765 bool val;
1766 switch (code1)
1767 {
1768 case EQ_EXPR: val = (cmp == 0); break;
1769 case NE_EXPR: val = (cmp != 0); break;
1770 case LT_EXPR: val = (cmp <= 0); break;
1771 case GT_EXPR: val = (cmp >= 0); break;
1772 case LE_EXPR: val = (cmp < 0); break;
1773 case GE_EXPR: val = (cmp > 0); break;
1774 default:
1775 val = false;
1776 }
1777 if (val)
1778 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1779 }
1780
1781 /* Choose the more restrictive of two < or <= comparisons. */
1782 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1783 && (code2 == LT_EXPR || code2 == LE_EXPR))
1784 {
1785 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1786 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1787 else
1788 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1789 }
1790
1791 /* Likewise choose the more restrictive of two > or >= comparisons. */
1792 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1793 && (code2 == GT_EXPR || code2 == GE_EXPR))
1794 {
1795 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1796 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1797 else
1798 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1799 }
1800
1801 /* Check for singleton ranges. */
1802 else if (cmp == 0
1803 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1804 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1805 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1806
1807 /* Check for disjoint ranges. */
1808 else if (cmp <= 0
1809 && (code1 == LT_EXPR || code1 == LE_EXPR)
1810 && (code2 == GT_EXPR || code2 == GE_EXPR))
1811 return boolean_false_node;
1812 else if (cmp >= 0
1813 && (code1 == GT_EXPR || code1 == GE_EXPR)
1814 && (code2 == LT_EXPR || code2 == LE_EXPR))
1815 return boolean_false_node;
1816 }
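/* Illustrative examples of the constant merging above:
   (x <= 5) AND (x >= 5) collapses to x == 5 via the singleton-range
   check, while (x < 3) AND (x > 7) is recognized as a disjoint range
   and yields boolean_false_node.  */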
1817
1818 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1819 NAME's definition is a truth value. See if there are any simplifications
1820 that can be done against the NAME's definition. */
1821 if (TREE_CODE (op1a) == SSA_NAME
1822 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1823 && (integer_zerop (op1b) || integer_onep (op1b)))
1824 {
1825 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1826 || (code1 == NE_EXPR && integer_onep (op1b)));
1827 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1828 switch (gimple_code (stmt))
1829 {
1830 case GIMPLE_ASSIGN:
1831 /* Try to simplify by copy-propagating the definition. */
1832 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1833
1834 case GIMPLE_PHI:
1835 /* If every argument to the PHI produces the same result when
1836 ANDed with the second comparison, we win.
1837 Do not do this unless the type is bool since we need a bool
1838 result here anyway. */
1839 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1840 {
1841 tree result = NULL_TREE;
1842 unsigned i;
1843 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1844 {
1845 tree arg = gimple_phi_arg_def (stmt, i);
1846
1847 /* If this PHI has itself as an argument, ignore it.
1848 If all the other args produce the same result,
1849 we're still OK. */
1850 if (arg == gimple_phi_result (stmt))
1851 continue;
1852 else if (TREE_CODE (arg) == INTEGER_CST)
1853 {
1854 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1855 {
1856 if (!result)
1857 result = boolean_false_node;
1858 else if (!integer_zerop (result))
1859 return NULL_TREE;
1860 }
1861 else if (!result)
1862 result = fold_build2 (code2, boolean_type_node,
1863 op2a, op2b);
1864 else if (!same_bool_comparison_p (result,
1865 code2, op2a, op2b))
1866 return NULL_TREE;
1867 }
1868 else if (TREE_CODE (arg) == SSA_NAME
1869 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1870 {
1871 tree temp;
1872 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1873 /* In simple cases we can look through PHI nodes,
1874 but we have to be careful with loops.
1875 See PR49073. */
1876 if (! dom_info_available_p (CDI_DOMINATORS)
1877 || gimple_bb (def_stmt) == gimple_bb (stmt)
1878 || dominated_by_p (CDI_DOMINATORS,
1879 gimple_bb (def_stmt),
1880 gimple_bb (stmt)))
1881 return NULL_TREE;
1882 temp = and_var_with_comparison (arg, invert, code2,
1883 op2a, op2b);
1884 if (!temp)
1885 return NULL_TREE;
1886 else if (!result)
1887 result = temp;
1888 else if (!same_bool_result_p (result, temp))
1889 return NULL_TREE;
1890 }
1891 else
1892 return NULL_TREE;
1893 }
1894 return result;
1895 }
1896
1897 default:
1898 break;
1899 }
1900 }
1901 return NULL_TREE;
1902 }
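
/* Editor's example: every switch table above consults only
   cmp = tree_int_cst_compare (op1b, op2b). Below is a minimal standalone
   sketch of that idea in plain C; the enum and helper are hypothetical
   illustrations, not GCC API:

     #include <assert.h>

     enum cmp_code { EQ, NE, LT, GT, LE, GE };

     // Does a value known to equal c1 satisfy "value CODE c2", given
     // only cmp, the sign of c1 - c2?
     static int satisfies (int cmp, enum cmp_code code)
     {
       switch (code)
         {
         case EQ: return cmp == 0;
         case NE: return cmp != 0;
         case LT: return cmp < 0;
         case GT: return cmp > 0;
         case LE: return cmp <= 0;
         case GE: return cmp >= 0;
         }
       return 0;
     }

     int main (void)
     {
       // (x == 3) && (x < 7): cmp = compare (3, 7) < 0, so 3 already
       // satisfies "< 7" and the AND folds to just (x == 3).
       assert (satisfies (-1, LT));
       // (x == 9) && (x < 7): 9 cannot satisfy "< 7", so the AND
       // folds to false.
       assert (!satisfies (1, LT));
       return 0;
     }

   maybe_fold_and_comparisons below simply tries and_comparisons_1 in
   both argument orders, since the helper only recurses on the first
   comparison's SSA definition. */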
1903
1904 /* Try to simplify the AND of two comparisons, specified by
1905 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1906 If this can be simplified to a single expression (without requiring
1907 introducing more SSA variables to hold intermediate values),
1908 return the resulting tree. Otherwise return NULL_TREE.
1909 If the result expression is non-null, it has boolean type. */
1910
1911 tree
1912 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1913 enum tree_code code2, tree op2a, tree op2b)
1914 {
1915 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1916 if (t)
1917 return t;
1918 else
1919 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1920 }
1921
1922 /* Helper function for or_comparisons_1: try to simplify the OR of the
1923 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1924 If INVERT is true, invert the value of VAR before doing the OR.
1925 Return NULL_TREE if we can't simplify this to a single expression. */
1926
1927 static tree
1928 or_var_with_comparison (tree var, bool invert,
1929 enum tree_code code2, tree op2a, tree op2b)
1930 {
1931 tree t;
1932 gimple stmt = SSA_NAME_DEF_STMT (var);
1933
1934 /* We can only deal with variables whose definitions are assignments. */
1935 if (!is_gimple_assign (stmt))
1936 return NULL_TREE;
1937
1938 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1939 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1940 Then we only have to consider the simpler non-inverted cases. */
1941 if (invert)
1942 t = and_var_with_comparison_1 (stmt,
1943 invert_tree_comparison (code2, false),
1944 op2a, op2b);
1945 else
1946 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1947 return canonicalize_bool (t, invert);
1948 }
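
/* Editor's note: the De Morgan rewrite above can be verified
   exhaustively for boolean operands with a standalone check (plain C
   sketch, not GCC API):

     #include <assert.h>

     int main (void)
     {
       for (int var = 0; var <= 1; var++)
         for (int c = 0; c <= 1; c++)   // c models (op2a code2 op2b)
           assert ((!var | c) == !(var & !c));
       return 0;
     }

   This identity is why the inverted case can reuse
   and_var_with_comparison_1 with the inverted comparison code, followed
   by canonicalize_bool to re-invert the result. */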
1949
1950 /* Try to simplify the OR of the ssa variable defined by the assignment
1951 STMT with the comparison specified by (OP2A CODE2 OP2B).
1952 Return NULL_TREE if we can't simplify this to a single expression. */
1953
1954 static tree
1955 or_var_with_comparison_1 (gimple stmt,
1956 enum tree_code code2, tree op2a, tree op2b)
1957 {
1958 tree var = gimple_assign_lhs (stmt);
1959 tree true_test_var = NULL_TREE;
1960 tree false_test_var = NULL_TREE;
1961 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1962
1963 /* Check for identities like (var OR (var != 0)) => true. */
1964 if (TREE_CODE (op2a) == SSA_NAME
1965 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1966 {
1967 if ((code2 == NE_EXPR && integer_zerop (op2b))
1968 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1969 {
1970 true_test_var = op2a;
1971 if (var == true_test_var)
1972 return var;
1973 }
1974 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1975 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1976 {
1977 false_test_var = op2a;
1978 if (var == false_test_var)
1979 return boolean_true_node;
1980 }
1981 }
1982
1983 /* If the definition is a comparison, recurse on it. */
1984 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1985 {
1986 tree t = or_comparisons_1 (innercode,
1987 gimple_assign_rhs1 (stmt),
1988 gimple_assign_rhs2 (stmt),
1989 code2,
1990 op2a,
1991 op2b);
1992 if (t)
1993 return t;
1994 }
1995
1996 /* If the definition is an AND or OR expression, we may be able to
1997 simplify by reassociating. */
1998 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1999 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2000 {
2001 tree inner1 = gimple_assign_rhs1 (stmt);
2002 tree inner2 = gimple_assign_rhs2 (stmt);
2003 gimple s;
2004 tree t;
2005 tree partial = NULL_TREE;
2006 bool is_or = (innercode == BIT_IOR_EXPR);
2007
2008 /* Check for boolean identities that don't require recursive examination
2009 of inner1/inner2:
2010 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2011 inner1 OR (inner1 AND inner2) => inner1
2012 !inner1 OR (inner1 OR inner2) => true
2013 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2014 */
2015 if (inner1 == true_test_var)
2016 return (is_or ? var : inner1);
2017 else if (inner2 == true_test_var)
2018 return (is_or ? var : inner2);
2019 else if (inner1 == false_test_var)
2020 return (is_or
2021 ? boolean_true_node
2022 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2023 else if (inner2 == false_test_var)
2024 return (is_or
2025 ? boolean_true_node
2026 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2027
2028 /* Next, redistribute/reassociate the OR across the inner tests.
2029 Compute the first partial result, (inner1 OR (op2a code op2b)) */
2030 if (TREE_CODE (inner1) == SSA_NAME
2031 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2032 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2033 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2034 gimple_assign_rhs1 (s),
2035 gimple_assign_rhs2 (s),
2036 code2, op2a, op2b)))
2037 {
2038 /* Handle the OR case, where we are reassociating:
2039 (inner1 OR inner2) OR (op2a code2 op2b)
2040 => (t OR inner2)
2041 If the partial result t is a constant, we win. Otherwise
2042 continue on to try reassociating with the other inner test. */
2043 if (is_or)
2044 {
2045 if (integer_onep (t))
2046 return boolean_true_node;
2047 else if (integer_zerop (t))
2048 return inner2;
2049 }
2050
2051 /* Handle the AND case, where we are redistributing:
2052 (inner1 AND inner2) OR (op2a code2 op2b)
2053 => (t AND (inner2 OR (op2a code op2b))) */
2054 else if (integer_zerop (t))
2055 return boolean_false_node;
2056
2057 /* Save partial result for later. */
2058 partial = t;
2059 }
2060
2061 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
2062 if (TREE_CODE (inner2) == SSA_NAME
2063 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2064 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2065 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2066 gimple_assign_rhs1 (s),
2067 gimple_assign_rhs2 (s),
2068 code2, op2a, op2b)))
2069 {
2070 /* Handle the OR case, where we are reassociating:
2071 (inner1 OR inner2) OR (op2a code2 op2b)
2072 => (inner1 OR t)
2073 => (t OR partial) */
2074 if (is_or)
2075 {
2076 if (integer_zerop (t))
2077 return inner1;
2078 else if (integer_onep (t))
2079 return boolean_true_node;
2080 /* If both are the same, we can apply the identity
2081 (x OR x) == x. */
2082 else if (partial && same_bool_result_p (t, partial))
2083 return t;
2084 }
2085
2086 /* Handle the AND case, where we are redistributing:
2087 (inner1 AND inner2) OR (op2a code2 op2b)
2088 => (t AND (inner1 OR (op2a code2 op2b)))
2089 => (t AND partial) */
2090 else
2091 {
2092 if (integer_zerop (t))
2093 return boolean_false_node;
2094 else if (partial)
2095 {
2096 /* We already got a simplification for the other
2097 operand to the redistributed AND expression. The
2098 interesting case is when at least one is true.
2099 Or, if both are the same, we can apply the identity
2100 (x AND x) == x. */
2101 if (integer_onep (partial))
2102 return t;
2103 else if (integer_onep (t))
2104 return partial;
2105 else if (same_bool_result_p (t, partial))
2106 return t;
2107 }
2108 }
2109 }
2110 }
2111 return NULL_TREE;
2112 }
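
/* Editor's note: the reassociation (inner OR) and redistribution
   (inner AND) above rest on two boolean identities that a standalone
   program can confirm exhaustively (plain C sketch, not GCC API):

     #include <assert.h>

     int main (void)
     {
       for (int i1 = 0; i1 <= 1; i1++)
         for (int i2 = 0; i2 <= 1; i2++)
           for (int c = 0; c <= 1; c++)   // c models (op2a code2 op2b)
             {
               // OR case: (i1 | i2) | c == (i1 | c) | i2.
               assert (((i1 | i2) | c) == ((i1 | c) | i2));
               // AND case: (i1 & i2) | c == (i1 | c) & (i2 | c).
               assert (((i1 & i2) | c) == ((i1 | c) & (i2 | c)));
             }
       return 0;
     }

   The partial results computed above are exactly (i1 | c) and (i2 | c);
   we win whenever one of them is constant or both simplify to the same
   expression. */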
2113
2114 /* Try to simplify the OR of two comparisons defined by
2115 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2116 If this can be done without constructing an intermediate value,
2117 return the resulting tree; otherwise NULL_TREE is returned.
2118 This function is deliberately asymmetric as it recurses on SSA_DEFs
2119 in the first comparison but not the second. */
2120
2121 static tree
2122 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2123 enum tree_code code2, tree op2a, tree op2b)
2124 {
2125 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2126 if (operand_equal_p (op1a, op2a, 0)
2127 && operand_equal_p (op1b, op2b, 0))
2128 {
2129 /* Result will be either NULL_TREE, or a combined comparison. */
2130 tree t = combine_comparisons (UNKNOWN_LOCATION,
2131 TRUTH_ORIF_EXPR, code1, code2,
2132 boolean_type_node, op1a, op1b);
2133 if (t)
2134 return t;
2135 }
2136
2137 /* Likewise the swapped case of the above. */
2138 if (operand_equal_p (op1a, op2b, 0)
2139 && operand_equal_p (op1b, op2a, 0))
2140 {
2141 /* Result will be either NULL_TREE, or a combined comparison. */
2142 tree t = combine_comparisons (UNKNOWN_LOCATION,
2143 TRUTH_ORIF_EXPR, code1,
2144 swap_tree_comparison (code2),
2145 boolean_type_node, op1a, op1b);
2146 if (t)
2147 return t;
2148 }
2149
2150 /* If both comparisons are of the same value against constants, we might
2151 be able to merge them. */
2152 if (operand_equal_p (op1a, op2a, 0)
2153 && TREE_CODE (op1b) == INTEGER_CST
2154 && TREE_CODE (op2b) == INTEGER_CST)
2155 {
2156 int cmp = tree_int_cst_compare (op1b, op2b);
2157
2158 /* If we have (op1a != op1b), we should either be able to
2159 return that or TRUE, depending on whether the constant op1b
2160 also satisfies the other comparison against op2b. */
2161 if (code1 == NE_EXPR)
2162 {
2163 bool done = true;
2164 bool val;
2165 switch (code2)
2166 {
2167 case EQ_EXPR: val = (cmp == 0); break;
2168 case NE_EXPR: val = (cmp != 0); break;
2169 case LT_EXPR: val = (cmp < 0); break;
2170 case GT_EXPR: val = (cmp > 0); break;
2171 case LE_EXPR: val = (cmp <= 0); break;
2172 case GE_EXPR: val = (cmp >= 0); break;
2173 default: done = false;
2174 }
2175 if (done)
2176 {
2177 if (val)
2178 return boolean_true_node;
2179 else
2180 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2181 }
2182 }
2183 /* Likewise if the second comparison is a != comparison. */
2184 else if (code2 == NE_EXPR)
2185 {
2186 bool done = true;
2187 bool val;
2188 switch (code1)
2189 {
2190 case EQ_EXPR: val = (cmp == 0); break;
2191 case NE_EXPR: val = (cmp != 0); break;
2192 case LT_EXPR: val = (cmp > 0); break;
2193 case GT_EXPR: val = (cmp < 0); break;
2194 case LE_EXPR: val = (cmp >= 0); break;
2195 case GE_EXPR: val = (cmp <= 0); break;
2196 default: done = false;
2197 }
2198 if (done)
2199 {
2200 if (val)
2201 return boolean_true_node;
2202 else
2203 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2204 }
2205 }
2206
2207 /* See if an equality test is redundant with the other comparison. */
2208 else if (code1 == EQ_EXPR)
2209 {
2210 bool val;
2211 switch (code2)
2212 {
2213 case EQ_EXPR: val = (cmp == 0); break;
2214 case NE_EXPR: val = (cmp != 0); break;
2215 case LT_EXPR: val = (cmp < 0); break;
2216 case GT_EXPR: val = (cmp > 0); break;
2217 case LE_EXPR: val = (cmp <= 0); break;
2218 case GE_EXPR: val = (cmp >= 0); break;
2219 default:
2220 val = false;
2221 }
2222 if (val)
2223 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2224 }
2225 else if (code2 == EQ_EXPR)
2226 {
2227 bool val;
2228 switch (code1)
2229 {
2230 case EQ_EXPR: val = (cmp == 0); break;
2231 case NE_EXPR: val = (cmp != 0); break;
2232 case LT_EXPR: val = (cmp > 0); break;
2233 case GT_EXPR: val = (cmp < 0); break;
2234 case LE_EXPR: val = (cmp >= 0); break;
2235 case GE_EXPR: val = (cmp <= 0); break;
2236 default:
2237 val = false;
2238 }
2239 if (val)
2240 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2241 }
2242
2243 /* Choose the less restrictive of two < or <= comparisons. */
2244 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2245 && (code2 == LT_EXPR || code2 == LE_EXPR))
2246 {
2247 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2248 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2249 else
2250 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2251 }
2252
2253 /* Likewise choose the less restrictive of two > or >= comparisons. */
2254 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2255 && (code2 == GT_EXPR || code2 == GE_EXPR))
2256 {
2257 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2258 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2259 else
2260 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2261 }
2262
2263 /* Check for singleton ranges. */
2264 else if (cmp == 0
2265 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2266 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2267 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2268
2269 /* Check for less/greater pairs that don't restrict the range at all. */
2270 else if (cmp >= 0
2271 && (code1 == LT_EXPR || code1 == LE_EXPR)
2272 && (code2 == GT_EXPR || code2 == GE_EXPR))
2273 return boolean_true_node;
2274 else if (cmp <= 0
2275 && (code1 == GT_EXPR || code1 == GE_EXPR)
2276 && (code2 == LT_EXPR || code2 == LE_EXPR))
2277 return boolean_true_node;
2278 }
2279
2280 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2281 NAME's definition is a truth value. See if there are any simplifications
2282 that can be done against the NAME's definition. */
2283 if (TREE_CODE (op1a) == SSA_NAME
2284 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2285 && (integer_zerop (op1b) || integer_onep (op1b)))
2286 {
2287 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2288 || (code1 == NE_EXPR && integer_onep (op1b)));
2289 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2290 switch (gimple_code (stmt))
2291 {
2292 case GIMPLE_ASSIGN:
2293 /* Try to simplify by copy-propagating the definition. */
2294 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2295
2296 case GIMPLE_PHI:
2297 /* If every argument to the PHI produces the same result when
2298 ORed with the second comparison, we win.
2299 Do not do this unless the type is bool since we need a bool
2300 result here anyway. */
2301 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2302 {
2303 tree result = NULL_TREE;
2304 unsigned i;
2305 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2306 {
2307 tree arg = gimple_phi_arg_def (stmt, i);
2308
2309 /* If this PHI has itself as an argument, ignore it.
2310 If all the other args produce the same result,
2311 we're still OK. */
2312 if (arg == gimple_phi_result (stmt))
2313 continue;
2314 else if (TREE_CODE (arg) == INTEGER_CST)
2315 {
2316 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2317 {
2318 if (!result)
2319 result = boolean_true_node;
2320 else if (!integer_onep (result))
2321 return NULL_TREE;
2322 }
2323 else if (!result)
2324 result = fold_build2 (code2, boolean_type_node,
2325 op2a, op2b);
2326 else if (!same_bool_comparison_p (result,
2327 code2, op2a, op2b))
2328 return NULL_TREE;
2329 }
2330 else if (TREE_CODE (arg) == SSA_NAME
2331 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2332 {
2333 tree temp;
2334 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2335 /* In simple cases we can look through PHI nodes,
2336 but we have to be careful with loops.
2337 See PR49073. */
2338 if (! dom_info_available_p (CDI_DOMINATORS)
2339 || gimple_bb (def_stmt) == gimple_bb (stmt)
2340 || dominated_by_p (CDI_DOMINATORS,
2341 gimple_bb (def_stmt),
2342 gimple_bb (stmt)))
2343 return NULL_TREE;
2344 temp = or_var_with_comparison (arg, invert, code2,
2345 op2a, op2b);
2346 if (!temp)
2347 return NULL_TREE;
2348 else if (!result)
2349 result = temp;
2350 else if (!same_bool_result_p (result, temp))
2351 return NULL_TREE;
2352 }
2353 else
2354 return NULL_TREE;
2355 }
2356 return result;
2357 }
2358
2359 default:
2360 break;
2361 }
2362 }
2363 return NULL_TREE;
2364 }
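
/* Editor's example: two of the constant-range cases above, checked on
   concrete integers (standalone plain C, not GCC API):

     #include <assert.h>

     int main (void)
     {
       for (int x = -10; x <= 10; x++)
         {
           // Singleton range: (x < 5) || (x > 5) == (x != 5); this is
           // the cmp == 0, LT/GT case above.
           assert (((x < 5) || (x > 5)) == (x != 5));
           // No restriction: with cmp >= 0, (x < 7) || (x >= 3) covers
           // every value and folds to boolean_true_node.
           assert ((x < 7) || (x >= 3));
         }
       return 0;
     }
*/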
2365
2366 /* Try to simplify the OR of two comparisons, specified by
2367 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2368 If this can be simplified to a single expression (without requiring
2369 introducing more SSA variables to hold intermediate values),
2370 return the resulting tree. Otherwise return NULL_TREE.
2371 If the result expression is non-null, it has boolean type. */
2372
2373 tree
2374 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2375 enum tree_code code2, tree op2a, tree op2b)
2376 {
2377 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2378 if (t)
2379 return t;
2380 else
2381 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2382 }
2383
2384
2385 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2386
2387 Returns either NULL_TREE, a simplified but non-constant expression,
2388 or a constant.
2389
2390 ??? This should go into a gimple-fold-inline.h file to be eventually
2391 privatized with the single valueize function used in the various TUs
2392 to avoid the indirect function call overhead. */
2393
2394 tree
2395 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2396 {
2397 location_t loc = gimple_location (stmt);
2398 switch (gimple_code (stmt))
2399 {
2400 case GIMPLE_ASSIGN:
2401 {
2402 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2403
2404 switch (get_gimple_rhs_class (subcode))
2405 {
2406 case GIMPLE_SINGLE_RHS:
2407 {
2408 tree rhs = gimple_assign_rhs1 (stmt);
2409 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2410
2411 if (TREE_CODE (rhs) == SSA_NAME)
2412 {
2413 /* If the RHS is an SSA_NAME, return its known constant value,
2414 if any. */
2415 return (*valueize) (rhs);
2416 }
2417 /* Handle propagating invariant addresses into address
2418 operations. */
2419 else if (TREE_CODE (rhs) == ADDR_EXPR
2420 && !is_gimple_min_invariant (rhs))
2421 {
2422 HOST_WIDE_INT offset;
2423 tree base;
2424 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2425 &offset,
2426 valueize);
2427 if (base
2428 && (CONSTANT_CLASS_P (base)
2429 || decl_address_invariant_p (base)))
2430 return build_invariant_address (TREE_TYPE (rhs),
2431 base, offset);
2432 }
2433 else if (TREE_CODE (rhs) == CONSTRUCTOR
2434 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2435 && (CONSTRUCTOR_NELTS (rhs)
2436 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2437 {
2438 unsigned i;
2439 tree val, list;
2440
2441 list = NULL_TREE;
2442 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2443 {
2444 val = (*valueize) (val);
2445 if (TREE_CODE (val) == INTEGER_CST
2446 || TREE_CODE (val) == REAL_CST
2447 || TREE_CODE (val) == FIXED_CST)
2448 list = tree_cons (NULL_TREE, val, list);
2449 else
2450 return NULL_TREE;
2451 }
2452
2453 return build_vector (TREE_TYPE (rhs), nreverse (list));
2454 }
2455
2456 if (kind == tcc_reference)
2457 {
2458 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2459 || TREE_CODE (rhs) == REALPART_EXPR
2460 || TREE_CODE (rhs) == IMAGPART_EXPR)
2461 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2462 {
2463 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2464 return fold_unary_loc (EXPR_LOCATION (rhs),
2465 TREE_CODE (rhs),
2466 TREE_TYPE (rhs), val);
2467 }
2468 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2469 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2470 {
2471 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2472 return fold_ternary_loc (EXPR_LOCATION (rhs),
2473 TREE_CODE (rhs),
2474 TREE_TYPE (rhs), val,
2475 TREE_OPERAND (rhs, 1),
2476 TREE_OPERAND (rhs, 2));
2477 }
2478 else if (TREE_CODE (rhs) == MEM_REF
2479 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2480 {
2481 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2482 if (TREE_CODE (val) == ADDR_EXPR
2483 && is_gimple_min_invariant (val))
2484 {
2485 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2486 unshare_expr (val),
2487 TREE_OPERAND (rhs, 1));
2488 if (tem)
2489 rhs = tem;
2490 }
2491 }
2492 return fold_const_aggregate_ref_1 (rhs, valueize);
2493 }
2494 else if (kind == tcc_declaration)
2495 return get_symbol_constant_value (rhs);
2496 return rhs;
2497 }
2498
2499 case GIMPLE_UNARY_RHS:
2500 {
2501 /* Handle unary operators that can appear in GIMPLE form.
2502 Note that we know the single operand must be a constant,
2503 so this should almost always return a simplified RHS. */
2504 tree lhs = gimple_assign_lhs (stmt);
2505 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2506
2507 /* Conversions are useless for CCP purposes if they are
2508 value-preserving. Thus the restrictions that
2509 useless_type_conversion_p places for restrict qualification
2510 of pointer types should not apply here.
2511 Substitution later will only substitute to allowed places. */
2512 if (CONVERT_EXPR_CODE_P (subcode)
2513 && POINTER_TYPE_P (TREE_TYPE (lhs))
2514 && POINTER_TYPE_P (TREE_TYPE (op0))
2515 && (TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2516 == TYPE_ADDR_SPACE (TREE_TYPE (op0))))
2517 return op0;
2518
2519 return
2520 fold_unary_ignore_overflow_loc (loc, subcode,
2521 gimple_expr_type (stmt), op0);
2522 }
2523
2524 case GIMPLE_BINARY_RHS:
2525 {
2526 /* Handle binary operators that can appear in GIMPLE form. */
2527 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2528 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2529
2530 /* Translate &x + CST into an invariant form suitable for
2531 further propagation. */
2532 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2533 && TREE_CODE (op0) == ADDR_EXPR
2534 && TREE_CODE (op1) == INTEGER_CST)
2535 {
2536 tree off = fold_convert (ptr_type_node, op1);
2537 return build_fold_addr_expr_loc
2538 (loc,
2539 fold_build2 (MEM_REF,
2540 TREE_TYPE (TREE_TYPE (op0)),
2541 unshare_expr (op0), off));
2542 }
2543
2544 return fold_binary_loc (loc, subcode,
2545 gimple_expr_type (stmt), op0, op1);
2546 }
2547
2548 case GIMPLE_TERNARY_RHS:
2549 {
2550 /* Handle ternary operators that can appear in GIMPLE form. */
2551 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2552 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2553 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2554
2555 return fold_ternary_loc (loc, subcode,
2556 gimple_expr_type (stmt), op0, op1, op2);
2557 }
2558
2559 default:
2560 gcc_unreachable ();
2561 }
2562 }
2563
2564 case GIMPLE_CALL:
2565 {
2566 tree fn;
2567
2568 if (gimple_call_internal_p (stmt))
2569 /* No folding yet for these functions. */
2570 return NULL_TREE;
2571
2572 fn = (*valueize) (gimple_call_fn (stmt));
2573 if (TREE_CODE (fn) == ADDR_EXPR
2574 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2575 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2576 {
2577 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2578 tree call, retval;
2579 unsigned i;
2580 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2581 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2582 call = build_call_array_loc (loc,
2583 gimple_call_return_type (stmt),
2584 fn, gimple_call_num_args (stmt), args);
2585 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2586 if (retval)
2587 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2588 STRIP_NOPS (retval);
2589 return retval;
2590 }
2591 return NULL_TREE;
2592 }
2593
2594 default:
2595 return NULL_TREE;
2596 }
2597 }
2598
2599 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2600 Returns NULL_TREE if folding to a constant is not possible, otherwise
2601 returns a constant according to is_gimple_min_invariant. */
2602
2603 tree
2604 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2605 {
2606 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2607 if (res && is_gimple_min_invariant (res))
2608 return res;
2609 return NULL_TREE;
2610 }
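
/* Editor's note: the VALUEIZE callback is what lets one folder serve
   several passes, each with its own lattice of known values. A
   standalone sketch of the pattern with hypothetical names (ints stand
   in for SSA names and trees; this is not GCC API):

     #include <stdio.h>

     typedef int (*valueize_fn) (int name);

     // A trivial lattice: name 0 is known to be 4, others unknown (-1).
     static int my_valueize (int name) { return name == 0 ? 4 : -1; }

     // Fold "name + 1" to a constant if the callback knows the name.
     static int fold_plus_one (int name, valueize_fn valueize)
     {
       int v = valueize (name);
       return v < 0 ? -1 : v + 1;
     }

     int main (void)
     {
       printf ("%d\n", fold_plus_one (0, my_valueize));   // prints 5
       return 0;
     }

   The wrapper above additionally insists that the folded result pass
   is_gimple_min_invariant before reporting it as a constant. */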
2611
2612
2613 /* The following set of functions is supposed to fold references using
2614 their constant initializers. */
2615
2616 static tree fold_ctor_reference (tree type, tree ctor,
2617 unsigned HOST_WIDE_INT offset,
2618 unsigned HOST_WIDE_INT size);
2619
2620 /* See if we can find the constructor defining the value of BASE.
2621 When we know the constructor with a constant offset (such as when
2622 base is array[40] and we do know the constructor of the array), then
2623 BIT_OFFSET is adjusted accordingly.
2624
2625 As a special case, return error_mark_node when constructor
2626 is not explicitly available, but it is known to be zero
2627 such as 'static const int a;'. */
2628 static tree
2629 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2630 tree (*valueize)(tree))
2631 {
2632 HOST_WIDE_INT bit_offset2, size, max_size;
2633 if (TREE_CODE (base) == MEM_REF)
2634 {
2635 if (!integer_zerop (TREE_OPERAND (base, 1)))
2636 {
2637 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2638 return NULL_TREE;
2639 *bit_offset += (mem_ref_offset (base).low
2640 * BITS_PER_UNIT);
2641 }
2642
2643 if (valueize
2644 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2645 base = valueize (TREE_OPERAND (base, 0));
2646 if (!base || TREE_CODE (base) != ADDR_EXPR)
2647 return NULL_TREE;
2648 base = TREE_OPERAND (base, 0);
2649 }
2650
2651 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2652 DECL_INITIAL. If BASE is a nested reference into another
2653 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2654 the inner reference. */
2655 switch (TREE_CODE (base))
2656 {
2657 case VAR_DECL:
2658 if (!const_value_known_p (base))
2659 return NULL_TREE;
2660
2661 /* Fallthru. */
2662 case CONST_DECL:
2663 if (!DECL_INITIAL (base)
2664 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2665 return error_mark_node;
2666 return DECL_INITIAL (base);
2667
2668 case ARRAY_REF:
2669 case COMPONENT_REF:
2670 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2671 if (max_size == -1 || size != max_size)
2672 return NULL_TREE;
2673 *bit_offset += bit_offset2;
2674 return get_base_constructor (base, bit_offset, valueize);
2675
2676 case STRING_CST:
2677 case CONSTRUCTOR:
2678 return base;
2679
2680 default:
2681 return NULL_TREE;
2682 }
2683 }
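
/* Editor's example: the error_mark_node special case models the C rule
   that a static object without an explicit initializer is
   zero-initialized, so reads from it may still fold to 0. A standalone
   illustration of the language semantics (not GCC API):

     #include <assert.h>

     static const int a;    // no explicit initializer: value is 0

     int main (void)
     {
       assert (a == 0);     // what callers fold such reads to
       return 0;
     }
*/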
2684
2685 /* CTOR is a STRING_CST. Fold a reference of type TYPE and size SIZE
2686 to the memory at bit OFFSET.
2687
2688 We only do the simple job of folding byte accesses. */
2689
2690 static tree
2691 fold_string_cst_ctor_reference (tree type, tree ctor,
2692 unsigned HOST_WIDE_INT offset,
2693 unsigned HOST_WIDE_INT size)
2694 {
2695 if (INTEGRAL_TYPE_P (type)
2696 && (TYPE_MODE (type)
2697 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2698 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2699 == MODE_INT)
2700 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2701 && size == BITS_PER_UNIT
2702 && !(offset % BITS_PER_UNIT))
2703 {
2704 offset /= BITS_PER_UNIT;
2705 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2706 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2707 [offset]));
2708 /* Folding
2709 const char a[20]="hello";
2710 return a[10];
2711
2712 might lead to an offset greater than the string length. In this case we
2713 know the value is either initialized to 0 or out of bounds. Return 0
2714 in both cases. */
2715 return build_zero_cst (type);
2716 }
2717 return NULL_TREE;
2718 }
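
/* Editor's note: a standalone model of the byte-access folding above
   (plain C; fold_string_byte is a hypothetical helper, not GCC API):

     #include <assert.h>

     // Fold a one-byte load at byte OFFSET from a string constant with
     // STORED bytes (TREE_STRING_LENGTH counts the trailing NUL); past
     // the stored bytes the value is 0.
     static char fold_string_byte (const char *str, unsigned stored,
                                   unsigned offset)
     {
       return offset < stored ? str[offset] : 0;
     }

     int main (void)
     {
       const char a[20] = "hello";
       assert (fold_string_byte ("hello", 6, 1) == 'e');
       assert (fold_string_byte ("hello", 6, 10) == 0);
       assert (a[10] == 0);   // matches the comment's example above
       return 0;
     }
*/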
2719
2720 /* CTOR is a CONSTRUCTOR of an array type. Fold a reference of type TYPE and size
2721 SIZE to the memory at bit OFFSET. */
2722
2723 static tree
2724 fold_array_ctor_reference (tree type, tree ctor,
2725 unsigned HOST_WIDE_INT offset,
2726 unsigned HOST_WIDE_INT size)
2727 {
2728 unsigned HOST_WIDE_INT cnt;
2729 tree cfield, cval;
2730 double_int low_bound, elt_size;
2731 double_int index, max_index;
2732 double_int access_index;
2733 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2734 HOST_WIDE_INT inner_offset;
2735
2736 /* Compute low bound and elt size. */
2737 if (domain_type && TYPE_MIN_VALUE (domain_type))
2738 {
2739 /* Static constructors for variably sized objects make no sense. */
2740 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2741 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2742 }
2743 else
2744 low_bound = double_int_zero;
2745 /* Static constructors for variably sized objects make no sense. */
2746 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2747 == INTEGER_CST);
2748 elt_size =
2749 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2750
2751
2752 /* We can handle only constantly sized accesses that are known to not
2753 be larger than the size of an array element. */
2754 if (!TYPE_SIZE_UNIT (type)
2755 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2756 || double_int_cmp (elt_size,
2757 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
2758 return NULL_TREE;
2759
2760 /* Compute the array index we look for. */
2761 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
2762 elt_size, TRUNC_DIV_EXPR);
2763 access_index = double_int_add (access_index, low_bound);
2764
2765 /* And offset within the access. */
2766 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
2767
2768 /* See if the array field is large enough to span the whole access. We do not
2769 care to fold accesses spanning multiple array indexes. */
2770 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
2771 return NULL_TREE;
2772
2773 index = double_int_sub (low_bound, double_int_one);
2774 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2775 {
2776 /* An array constructor might explicitly set the index, specify a range,
2777 or leave the index NULL, meaning that it is the next index after the
2778 previous one. */
2779 if (cfield)
2780 {
2781 if (TREE_CODE (cfield) == INTEGER_CST)
2782 max_index = index = tree_to_double_int (cfield);
2783 else
2784 {
2785 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2786 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2787 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2788 }
2789 }
2790 else
2791 max_index = index = double_int_add (index, double_int_one);
2792
2793 /* Do we have a match? */
2794 if (double_int_cmp (access_index, index, 1) >= 0
2795 && double_int_cmp (access_index, max_index, 1) <= 0)
2796 return fold_ctor_reference (type, cval, inner_offset, size);
2797 }
2798 /* When memory is not explicitly mentioned in the constructor,
2799 it is 0 (or out of range). */
2800 return build_zero_cst (type);
2801 }
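
/* Editor's note: the index arithmetic above, redone in ordinary
   integers (standalone sketch; the real code uses double_int so the
   computations cannot overflow):

     #include <assert.h>

     int main (void)
     {
       // A bit access at OFFSET into an array of 4-byte elements with
       // low bound 0, assuming BITS_PER_UNIT == 8.
       unsigned long offset = 72, elt_bytes = 4, low_bound = 0;
       unsigned long access_index = offset / 8 / elt_bytes + low_bound;
       unsigned long inner_offset = offset % (elt_bytes * 8);
       assert (access_index == 2);   // byte 9 lands in element [2]
       assert (inner_offset == 8);   // 8 bits into that element
       return 0;
     }
*/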
2802
2803 /* CTOR is a CONSTRUCTOR of an aggregate or vector.
2804 Fold a reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2805
2806 static tree
2807 fold_nonarray_ctor_reference (tree type, tree ctor,
2808 unsigned HOST_WIDE_INT offset,
2809 unsigned HOST_WIDE_INT size)
2810 {
2811 unsigned HOST_WIDE_INT cnt;
2812 tree cfield, cval;
2813
2814 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2815 cval)
2816 {
2817 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2818 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2819 tree field_size = DECL_SIZE (cfield);
2820 double_int bitoffset;
2821 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2822 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
2823 double_int bitoffset_end, access_end;
2824
2825 /* Variable sized objects in static constructors make no sense,
2826 but field_size can be NULL for flexible array members. */
2827 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2828 && TREE_CODE (byte_offset) == INTEGER_CST
2829 && (field_size != NULL_TREE
2830 ? TREE_CODE (field_size) == INTEGER_CST
2831 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2832
2833 /* Compute bit offset of the field. */
2834 bitoffset = double_int_add (tree_to_double_int (field_offset),
2835 double_int_mul (byte_offset_cst,
2836 bits_per_unit_cst));
2837 /* Compute bit offset where the field ends. */
2838 if (field_size != NULL_TREE)
2839 bitoffset_end = double_int_add (bitoffset,
2840 tree_to_double_int (field_size));
2841 else
2842 bitoffset_end = double_int_zero;
2843
2844 access_end = double_int_add (uhwi_to_double_int (offset),
2845 uhwi_to_double_int (size));
2846
2847 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2848 [BITOFFSET, BITOFFSET_END)? */
2849 if (double_int_cmp (access_end, bitoffset, 0) > 0
2850 && (field_size == NULL_TREE
2851 || double_int_cmp (uhwi_to_double_int (offset),
2852 bitoffset_end, 0) < 0))
2853 {
2854 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
2855 bitoffset);
2856 /* We do have overlap. Now see if the field is large enough to
2857 cover the access. Give up for accesses spanning multiple
2858 fields. */
2859 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
2860 return NULL_TREE;
2861 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
2862 return NULL_TREE;
2863 return fold_ctor_reference (type, cval,
2864 double_int_to_uhwi (inner_offset), size);
2865 }
2866 }
2867 /* When memory is not explicitly mentioned in the constructor, it is 0. */
2868 return build_zero_cst (type);
2869 }
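
/* Editor's note: the overlap test above is the usual half-open
   interval intersection; a standalone sketch (plain C, hypothetical
   helper, not GCC API):

     #include <assert.h>

     // Do [off, off + size) and [lo, hi) intersect?
     static int overlaps (unsigned long off, unsigned long size,
                          unsigned long lo, unsigned long hi)
     {
       return off + size > lo && off < hi;
     }

     int main (void)
     {
       assert (overlaps (0, 32, 0, 32));     // same 32-bit field
       assert (!overlaps (32, 32, 0, 32));   // adjacent, no overlap
       return 0;
     }

   Even when the intervals do overlap, the code above still gives up
   unless the access lies entirely inside a single field. */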
2870
2871 /* CTOR is a value initializing memory; fold a reference of type TYPE and
2872 size SIZE to the memory at bit OFFSET. */
2873
2874 static tree
2875 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2876 unsigned HOST_WIDE_INT size)
2877 {
2878 tree ret;
2879
2880 /* We found the field with an exact match. */
2881 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2882 && !offset)
2883 return canonicalize_constructor_val (ctor);
2884
2885 /* We are at the end of the walk; see if we can view-convert the
2886 result. */
2887 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2888 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2889 && operand_equal_p (TYPE_SIZE (type),
2890 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2891 {
2892 ret = canonicalize_constructor_val (ctor);
2893 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2894 if (ret)
2895 STRIP_NOPS (ret);
2896 return ret;
2897 }
2898 if (TREE_CODE (ctor) == STRING_CST)
2899 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2900 if (TREE_CODE (ctor) == CONSTRUCTOR)
2901 {
2902
2903 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2904 return fold_array_ctor_reference (type, ctor, offset, size);
2905 else
2906 return fold_nonarray_ctor_reference (type, ctor, offset, size);
2907 }
2908
2909 return NULL_TREE;
2910 }
2911
2912 /* Return the tree representing the element referenced by T if T is an
2913 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
2914 names using VALUEIZE. Return NULL_TREE otherwise. */
2915
2916 tree
2917 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2918 {
2919 tree ctor, idx, base;
2920 HOST_WIDE_INT offset, size, max_size;
2921 tree tem;
2922
2923 if (TREE_THIS_VOLATILE (t))
2924 return NULL_TREE;
2925
2926 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
2927 return get_symbol_constant_value (t);
2928
2929 tem = fold_read_from_constant_string (t);
2930 if (tem)
2931 return tem;
2932
2933 switch (TREE_CODE (t))
2934 {
2935 case ARRAY_REF:
2936 case ARRAY_RANGE_REF:
2937 /* Constant indexes are handled well by get_base_constructor.
2938 Only special-case variable offsets.
2939 FIXME: This code can't handle nested references with variable indexes
2940 (they will be handled only by iteration of ccp). Perhaps we can bring
2941 get_ref_base_and_extent here and make it use a valueize callback. */
2942 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
2943 && valueize
2944 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
2945 && host_integerp (idx, 0))
2946 {
2947 tree low_bound, unit_size;
2948
2949 /* If the resulting bit-offset is constant, track it. */
2950 if ((low_bound = array_ref_low_bound (t),
2951 host_integerp (low_bound, 0))
2952 && (unit_size = array_ref_element_size (t),
2953 host_integerp (unit_size, 1)))
2954 {
2955 offset = TREE_INT_CST_LOW (idx);
2956 offset -= TREE_INT_CST_LOW (low_bound);
2957 offset *= TREE_INT_CST_LOW (unit_size);
2958 offset *= BITS_PER_UNIT;
2959
2960 base = TREE_OPERAND (t, 0);
2961 ctor = get_base_constructor (base, &offset, valueize);
2962 /* Empty constructor. Always fold to 0. */
2963 if (ctor == error_mark_node)
2964 return build_zero_cst (TREE_TYPE (t));
2965 /* Out of bound array access. Value is undefined,
2966 but don't fold. */
2967 if (offset < 0)
2968 return NULL_TREE;
2969 /* We cannot determine the ctor. */
2970 if (!ctor)
2971 return NULL_TREE;
2972 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
2973 TREE_INT_CST_LOW (unit_size)
2974 * BITS_PER_UNIT);
2975 }
2976 }
2977 /* Fallthru. */
2978
2979 case COMPONENT_REF:
2980 case BIT_FIELD_REF:
2981 case TARGET_MEM_REF:
2982 case MEM_REF:
2983 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
2984 ctor = get_base_constructor (base, &offset, valueize);
2985
2986 /* Empty constructor. Always fold to 0. */
2987 if (ctor == error_mark_node)
2988 return build_zero_cst (TREE_TYPE (t));
2989 /* We do not know the precise address. */
2990 if (max_size == -1 || max_size != size)
2991 return NULL_TREE;
2992 /* We cannot determine the ctor. */
2993 if (!ctor)
2994 return NULL_TREE;
2995
2996 /* Out of bound array access. Value is undefined, but don't fold. */
2997 if (offset < 0)
2998 return NULL_TREE;
2999
3000 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
3001
3002 case REALPART_EXPR:
3003 case IMAGPART_EXPR:
3004 {
3005 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3006 if (c && TREE_CODE (c) == COMPLEX_CST)
3007 return fold_build1_loc (EXPR_LOCATION (t),
3008 TREE_CODE (t), TREE_TYPE (t), c);
3009 break;
3010 }
3011
3012 default:
3013 break;
3014 }
3015
3016 return NULL_TREE;
3017 }
3018
3019 tree
3020 fold_const_aggregate_ref (tree t)
3021 {
3022 return fold_const_aggregate_ref_1 (t, NULL);
3023 }
3024
3025 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
3026 is the integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
3027 KNOWN_BINFO carries the binfo describing the true type of
3028 OBJ_TYPE_REF_OBJECT(REF). */
3029
3030 tree
3031 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3032 {
3033 unsigned HOST_WIDE_INT offset, size;
3034 tree v, fn;
3035
3036 v = BINFO_VTABLE (known_binfo);
3037 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
3038 if (!v)
3039 return NULL_TREE;
3040
3041 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3042 {
3043 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3044 v = TREE_OPERAND (v, 0);
3045 }
3046 else
3047 offset = 0;
3048
3049 if (TREE_CODE (v) != ADDR_EXPR)
3050 return NULL_TREE;
3051 v = TREE_OPERAND (v, 0);
3052
3053 if (TREE_CODE (v) != VAR_DECL
3054 || !DECL_VIRTUAL_P (v)
3055 || !DECL_INITIAL (v)
3056 || DECL_INITIAL (v) == error_mark_node)
3057 return NULL_TREE;
3058 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3059 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
3060 offset += token * size;
3061 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
3062 offset, size);
3063 if (!fn)
3064 return NULL_TREE;
3065 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3066 || TREE_CODE (fn) == FDESC_EXPR);
3067 fn = TREE_OPERAND (fn, 0);
3068 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3069
3070 /* When the cgraph node is missing and the function is not public, we
3071 cannot devirtualize. This can happen in WHOPR when the actual method
3072 ends up in another partition, because we found the devirtualization
3073 possibility too late. */
3074 if (!can_refer_decl_in_current_unit_p (fn))
3075 return NULL_TREE;
3076
3077 return fn;
3078 }
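
/* Editor's example: the "offset += token * size" step above is plain
   vtable indexing. A standalone model with an array of function
   pointers (hypothetical; not how GCC lays out vtable initializers):

     #include <assert.h>

     static int f0 (void) { return 0; }
     static int f1 (void) { return 1; }

     typedef int (*vtbl_slot) (void);
     static const vtbl_slot vtable[] = { f0, f1 };

     int main (void)
     {
       unsigned token = 1;            // plays OBJ_TYPE_REF_TOKEN
       assert (vtable[token] () == 1);
       return 0;
     }
*/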
3079
3080 /* Return true iff VAL is a gimple expression that is known to be
3081 non-negative. Restricted to floating-point inputs. */
3082
3083 bool
3084 gimple_val_nonnegative_real_p (tree val)
3085 {
3086 gimple def_stmt;
3087
3088 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3089
3090 /* Use existing logic for non-gimple trees. */
3091 if (tree_expr_nonnegative_p (val))
3092 return true;
3093
3094 if (TREE_CODE (val) != SSA_NAME)
3095 return false;
3096
3097 /* Currently we look only at the immediately defining statement
3098 to make this determination, since recursion on defining
3099 statements of operands can lead to quadratic behavior in the
3100 worst case. This is expected to catch almost all occurrences
3101 in practice. It would be possible to implement limited-depth
3102 recursion if important cases are lost. Alternatively, passes
3103 that need this information (such as the pow/powi lowering code
3104 in the cse_sincos pass) could be revised to provide it through
3105 dataflow propagation. */
3106
3107 def_stmt = SSA_NAME_DEF_STMT (val);
3108
3109 if (is_gimple_assign (def_stmt))
3110 {
3111 tree op0, op1;
3112
3113 /* See fold-const.c:tree_expr_nonnegative_p for additional
3114 cases that could be handled with recursion. */
3115
3116 switch (gimple_assign_rhs_code (def_stmt))
3117 {
3118 case ABS_EXPR:
3119 /* Always true for floating-point operands. */
3120 return true;
3121
3122 case MULT_EXPR:
3123 /* True if the two operands are identical (since we are
3124 restricted to floating-point inputs); otherwise fall through. */
3125 op0 = gimple_assign_rhs1 (def_stmt);
3126 op1 = gimple_assign_rhs2 (def_stmt);
3127
3128 if (op0 == op1
3129 || operand_equal_p (op0, op1, 0))
3130 return true;
3131
3132 default:
3133 return false;
3134 }
3135 }
3136 else if (is_gimple_call (def_stmt))
3137 {
3138 tree fndecl = gimple_call_fndecl (def_stmt);
3139 if (fndecl
3140 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3141 {
3142 tree arg1;
3143
3144 switch (DECL_FUNCTION_CODE (fndecl))
3145 {
3146 CASE_FLT_FN (BUILT_IN_ACOS):
3147 CASE_FLT_FN (BUILT_IN_ACOSH):
3148 CASE_FLT_FN (BUILT_IN_CABS):
3149 CASE_FLT_FN (BUILT_IN_COSH):
3150 CASE_FLT_FN (BUILT_IN_ERFC):
3151 CASE_FLT_FN (BUILT_IN_EXP):
3152 CASE_FLT_FN (BUILT_IN_EXP10):
3153 CASE_FLT_FN (BUILT_IN_EXP2):
3154 CASE_FLT_FN (BUILT_IN_FABS):
3155 CASE_FLT_FN (BUILT_IN_FDIM):
3156 CASE_FLT_FN (BUILT_IN_HYPOT):
3157 CASE_FLT_FN (BUILT_IN_POW10):
3158 return true;
3159
3160 CASE_FLT_FN (BUILT_IN_SQRT):
3161 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3162 negative inputs. */
3163 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3164 return true;
3165
3166 break;
3167
3168 CASE_FLT_FN (BUILT_IN_POWI):
3169 /* True if the second argument is an even integer. */
3170 arg1 = gimple_call_arg (def_stmt, 1);
3171
3172 if (TREE_CODE (arg1) == INTEGER_CST
3173 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3174 return true;
3175
3176 break;
3177
3178 CASE_FLT_FN (BUILT_IN_POW):
3179 /* True if the second argument is an even integer-valued
3180 real. */
3181 arg1 = gimple_call_arg (def_stmt, 1);
3182
3183 if (TREE_CODE (arg1) == REAL_CST)
3184 {
3185 REAL_VALUE_TYPE c;
3186 HOST_WIDE_INT n;
3187
3188 c = TREE_REAL_CST (arg1);
3189 n = real_to_integer (&c);
3190
3191 if ((n & 1) == 0)
3192 {
3193 REAL_VALUE_TYPE cint;
3194 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3195 if (real_identical (&c, &cint))
3196 return true;
3197 }
3198 }
3199
3200 break;
3201
3202 default:
3203 return false;
3204 }
3205 }
3206 }
3207
3208 return false;
3209 }
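
/* Editor's note: the BUILT_IN_POW case above asks whether the exponent
   is an even integer-valued real, since pow (x, even integer) is
   nonnegative wherever it is defined. A standalone version of that test
   in plain double arithmetic (not the REAL_VALUE_TYPE machinery):

     #include <assert.h>

     static int even_integer_valued (double d)
     {
       long n = (long) d;                        // like real_to_integer
       return (n & 1) == 0 && (double) n == d;   // like real_identical
     }

     int main (void)
     {
       assert (even_integer_valued (4.0));
       assert (!even_integer_valued (3.0));
       assert (!even_integer_valued (4.5));
       return 0;
     }
*/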