]> gcc.gnu.org Git - gcc.git/blob - gcc/gimple-fold.c
builtins.def (BUILT_IN_STPNCPY_CHK): New definition.
[gcc.git] / gcc / gimple-fold.c
1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010, 2011 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.c.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "function.h"
28 #include "tree-dump.h"
29 #include "tree-flow.h"
30 #include "tree-pass.h"
31 #include "tree-ssa-propagate.h"
32 #include "target.h"
33 #include "gimple-fold.h"
34
35 /* Return true when DECL can be referenced from current unit.
36 We can get declarations that are not possible to reference for
37 various reasons:
38
39 1) When analyzing C++ virtual tables.
40 C++ virtual tables do have known constructors even
41 when they are keyed to other compilation unit.
42 Those tables can contain pointers to methods and vars
43 in other units. Those methods have both STATIC and EXTERNAL
44 set.
45 2) In WHOPR mode devirtualization might lead to reference
46 to method that was partitioned elsehwere.
47 In this case we have static VAR_DECL or FUNCTION_DECL
48 that has no corresponding callgraph/varpool node
49 declaring the body.
50 3) COMDAT functions referred by external vtables that
51 we devirtualize only during final copmilation stage.
52 At this time we already decided that we will not output
53 the function body and thus we can't reference the symbol
54 directly. */
55
56 static bool
57 can_refer_decl_in_current_unit_p (tree decl)
58 {
59 struct varpool_node *vnode;
60 struct cgraph_node *node;
61
62 if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
63 return true;
64 /* External flag is set, so we deal with C++ reference
65 to static object from other file. */
66 if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
67 && TREE_CODE (decl) == VAR_DECL)
68 {
69 /* Just be sure it is not big in frontend setting
70 flags incorrectly. Those variables should never
71 be finalized. */
72 gcc_checking_assert (!(vnode = varpool_get_node (decl))
73 || !vnode->finalized);
74 return false;
75 }
76 /* When function is public, we always can introduce new reference.
77 Exception are the COMDAT functions where introducing a direct
78 reference imply need to include function body in the curren tunit. */
79 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
80 return true;
81 /* We are not at ltrans stage; so don't worry about WHOPR.
82 Also when still gimplifying all referred comdat functions will be
83 produced.
84 ??? as observed in PR20991 for already optimized out comdat virtual functions
85 we may not neccesarily give up because the copy will be output elsewhere when
86 corresponding vtable is output. */
87 if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
88 return true;
89 /* If we already output the function body, we are safe. */
90 if (TREE_ASM_WRITTEN (decl))
91 return true;
92 if (TREE_CODE (decl) == FUNCTION_DECL)
93 {
94 node = cgraph_get_node (decl);
95 /* Check that we still have function body and that we didn't took
96 the decision to eliminate offline copy of the function yet.
97 The second is important when devirtualization happens during final
98 compilation stage when making a new reference no longer makes callee
99 to be compiled. */
100 if (!node || !node->analyzed || node->global.inlined_to)
101 return false;
102 }
103 else if (TREE_CODE (decl) == VAR_DECL)
104 {
105 vnode = varpool_get_node (decl);
106 if (!vnode || !vnode->finalized)
107 return false;
108 }
109 return true;
110 }
111
112 /* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
113 acceptable form for is_gimple_min_invariant. */
114
115 tree
116 canonicalize_constructor_val (tree cval)
117 {
118 STRIP_NOPS (cval);
119 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
120 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
121 {
122 tree ptr = TREE_OPERAND (cval, 0);
123 if (is_gimple_min_invariant (ptr))
124 cval = build1_loc (EXPR_LOCATION (cval),
125 ADDR_EXPR, TREE_TYPE (ptr),
126 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
127 ptr,
128 fold_convert (ptr_type_node,
129 TREE_OPERAND (cval, 1))));
130 }
131 if (TREE_CODE (cval) == ADDR_EXPR)
132 {
133 tree base = get_base_address (TREE_OPERAND (cval, 0));
134
135 if (base
136 && (TREE_CODE (base) == VAR_DECL
137 || TREE_CODE (base) == FUNCTION_DECL)
138 && !can_refer_decl_in_current_unit_p (base))
139 return NULL_TREE;
140 if (base && TREE_CODE (base) == VAR_DECL)
141 {
142 TREE_ADDRESSABLE (base) = 1;
143 if (cfun && gimple_referenced_vars (cfun))
144 add_referenced_var (base);
145 }
146 /* Fixup types in global initializers. */
147 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
148 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
149 }
150 return cval;
151 }
152
153 /* If SYM is a constant variable with known value, return the value.
154 NULL_TREE is returned otherwise. */
155
156 tree
157 get_symbol_constant_value (tree sym)
158 {
159 if (const_value_known_p (sym))
160 {
161 tree val = DECL_INITIAL (sym);
162 if (val)
163 {
164 val = canonicalize_constructor_val (val);
165 if (val && is_gimple_min_invariant (val))
166 return val;
167 else
168 return NULL_TREE;
169 }
170 /* Variables declared 'const' without an initializer
171 have zero as the initializer if they may not be
172 overridden at link or run time. */
173 if (!val
174 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
175 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
176 return build_zero_cst (TREE_TYPE (sym));
177 }
178
179 return NULL_TREE;
180 }
181
182
183
184 /* Subroutine of fold_stmt. We perform several simplifications of the
185 memory reference tree EXPR and make sure to re-gimplify them properly
186 after propagation of constant addresses. IS_LHS is true if the
187 reference is supposed to be an lvalue. */
188
189 static tree
190 maybe_fold_reference (tree expr, bool is_lhs)
191 {
192 tree *t = &expr;
193 tree result;
194
195 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
196 || TREE_CODE (expr) == REALPART_EXPR
197 || TREE_CODE (expr) == IMAGPART_EXPR)
198 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
199 return fold_unary_loc (EXPR_LOCATION (expr),
200 TREE_CODE (expr),
201 TREE_TYPE (expr),
202 TREE_OPERAND (expr, 0));
203 else if (TREE_CODE (expr) == BIT_FIELD_REF
204 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
205 return fold_ternary_loc (EXPR_LOCATION (expr),
206 TREE_CODE (expr),
207 TREE_TYPE (expr),
208 TREE_OPERAND (expr, 0),
209 TREE_OPERAND (expr, 1),
210 TREE_OPERAND (expr, 2));
211
212 while (handled_component_p (*t))
213 t = &TREE_OPERAND (*t, 0);
214
215 /* Canonicalize MEM_REFs invariant address operand. Do this first
216 to avoid feeding non-canonical MEM_REFs elsewhere. */
217 if (TREE_CODE (*t) == MEM_REF
218 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
219 {
220 bool volatile_p = TREE_THIS_VOLATILE (*t);
221 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
222 TREE_OPERAND (*t, 0),
223 TREE_OPERAND (*t, 1));
224 if (tem)
225 {
226 TREE_THIS_VOLATILE (tem) = volatile_p;
227 *t = tem;
228 tem = maybe_fold_reference (expr, is_lhs);
229 if (tem)
230 return tem;
231 return expr;
232 }
233 }
234
235 if (!is_lhs
236 && (result = fold_const_aggregate_ref (expr))
237 && is_gimple_min_invariant (result))
238 return result;
239
240 /* Fold back MEM_REFs to reference trees. */
241 if (TREE_CODE (*t) == MEM_REF
242 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
243 && integer_zerop (TREE_OPERAND (*t, 1))
244 && (TREE_THIS_VOLATILE (*t)
245 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
246 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
247 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
248 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
249 /* We have to look out here to not drop a required conversion
250 from the rhs to the lhs if is_lhs, but we don't have the
251 rhs here to verify that. Thus require strict type
252 compatibility. */
253 && types_compatible_p (TREE_TYPE (*t),
254 TREE_TYPE (TREE_OPERAND
255 (TREE_OPERAND (*t, 0), 0))))
256 {
257 tree tem;
258 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
259 tem = maybe_fold_reference (expr, is_lhs);
260 if (tem)
261 return tem;
262 return expr;
263 }
264 else if (TREE_CODE (*t) == TARGET_MEM_REF)
265 {
266 tree tem = maybe_fold_tmr (*t);
267 if (tem)
268 {
269 *t = tem;
270 tem = maybe_fold_reference (expr, is_lhs);
271 if (tem)
272 return tem;
273 return expr;
274 }
275 }
276
277 return NULL_TREE;
278 }
279
280
281 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
282 replacement rhs for the statement or NULL_TREE if no simplification
283 could be made. It is assumed that the operands have been previously
284 folded. */
285
286 static tree
287 fold_gimple_assign (gimple_stmt_iterator *si)
288 {
289 gimple stmt = gsi_stmt (*si);
290 enum tree_code subcode = gimple_assign_rhs_code (stmt);
291 location_t loc = gimple_location (stmt);
292
293 tree result = NULL_TREE;
294
295 switch (get_gimple_rhs_class (subcode))
296 {
297 case GIMPLE_SINGLE_RHS:
298 {
299 tree rhs = gimple_assign_rhs1 (stmt);
300
301 if (REFERENCE_CLASS_P (rhs))
302 return maybe_fold_reference (rhs, false);
303
304 else if (TREE_CODE (rhs) == ADDR_EXPR)
305 {
306 tree ref = TREE_OPERAND (rhs, 0);
307 tree tem = maybe_fold_reference (ref, true);
308 if (tem
309 && TREE_CODE (tem) == MEM_REF
310 && integer_zerop (TREE_OPERAND (tem, 1)))
311 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
312 else if (tem)
313 result = fold_convert (TREE_TYPE (rhs),
314 build_fold_addr_expr_loc (loc, tem));
315 else if (TREE_CODE (ref) == MEM_REF
316 && integer_zerop (TREE_OPERAND (ref, 1)))
317 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
318 }
319
320 else if (TREE_CODE (rhs) == CONSTRUCTOR
321 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
322 && (CONSTRUCTOR_NELTS (rhs)
323 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
324 {
325 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
326 unsigned i;
327 tree val;
328
329 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
330 if (TREE_CODE (val) != INTEGER_CST
331 && TREE_CODE (val) != REAL_CST
332 && TREE_CODE (val) != FIXED_CST)
333 return NULL_TREE;
334
335 return build_vector_from_ctor (TREE_TYPE (rhs),
336 CONSTRUCTOR_ELTS (rhs));
337 }
338
339 else if (DECL_P (rhs))
340 return unshare_expr (get_symbol_constant_value (rhs));
341
342 /* If we couldn't fold the RHS, hand over to the generic
343 fold routines. */
344 if (result == NULL_TREE)
345 result = fold (rhs);
346
347 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
348 that may have been added by fold, and "useless" type
349 conversions that might now be apparent due to propagation. */
350 STRIP_USELESS_TYPE_CONVERSION (result);
351
352 if (result != rhs && valid_gimple_rhs_p (result))
353 return result;
354
355 return NULL_TREE;
356 }
357 break;
358
359 case GIMPLE_UNARY_RHS:
360 {
361 tree rhs = gimple_assign_rhs1 (stmt);
362
363 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
364 if (result)
365 {
366 /* If the operation was a conversion do _not_ mark a
367 resulting constant with TREE_OVERFLOW if the original
368 constant was not. These conversions have implementation
369 defined behavior and retaining the TREE_OVERFLOW flag
370 here would confuse later passes such as VRP. */
371 if (CONVERT_EXPR_CODE_P (subcode)
372 && TREE_CODE (result) == INTEGER_CST
373 && TREE_CODE (rhs) == INTEGER_CST)
374 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
375
376 STRIP_USELESS_TYPE_CONVERSION (result);
377 if (valid_gimple_rhs_p (result))
378 return result;
379 }
380 }
381 break;
382
383 case GIMPLE_BINARY_RHS:
384 /* Try to canonicalize for boolean-typed X the comparisons
385 X == 0, X == 1, X != 0, and X != 1. */
386 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
387 || gimple_assign_rhs_code (stmt) == NE_EXPR)
388 {
389 tree lhs = gimple_assign_lhs (stmt);
390 tree op1 = gimple_assign_rhs1 (stmt);
391 tree op2 = gimple_assign_rhs2 (stmt);
392 tree type = TREE_TYPE (op1);
393
394 /* Check whether the comparison operands are of the same boolean
395 type as the result type is.
396 Check that second operand is an integer-constant with value
397 one or zero. */
398 if (TREE_CODE (op2) == INTEGER_CST
399 && (integer_zerop (op2) || integer_onep (op2))
400 && useless_type_conversion_p (TREE_TYPE (lhs), type))
401 {
402 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
403 bool is_logical_not = false;
404
405 /* X == 0 and X != 1 is a logical-not.of X
406 X == 1 and X != 0 is X */
407 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
408 || (cmp_code == NE_EXPR && integer_onep (op2)))
409 is_logical_not = true;
410
411 if (is_logical_not == false)
412 result = op1;
413 /* Only for one-bit precision typed X the transformation
414 !X -> ~X is valied. */
415 else if (TYPE_PRECISION (type) == 1)
416 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
417 type, op1);
418 /* Otherwise we use !X -> X ^ 1. */
419 else
420 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
421 type, op1, build_int_cst (type, 1));
422
423 }
424 }
425
426 if (!result)
427 result = fold_binary_loc (loc, subcode,
428 TREE_TYPE (gimple_assign_lhs (stmt)),
429 gimple_assign_rhs1 (stmt),
430 gimple_assign_rhs2 (stmt));
431
432 if (result)
433 {
434 STRIP_USELESS_TYPE_CONVERSION (result);
435 if (valid_gimple_rhs_p (result))
436 return result;
437 }
438 break;
439
440 case GIMPLE_TERNARY_RHS:
441 /* Try to fold a conditional expression. */
442 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
443 {
444 tree op0 = gimple_assign_rhs1 (stmt);
445 tree tem;
446 bool set = false;
447 location_t cond_loc = gimple_location (stmt);
448
449 if (COMPARISON_CLASS_P (op0))
450 {
451 fold_defer_overflow_warnings ();
452 tem = fold_binary_loc (cond_loc,
453 TREE_CODE (op0), TREE_TYPE (op0),
454 TREE_OPERAND (op0, 0),
455 TREE_OPERAND (op0, 1));
456 /* This is actually a conditional expression, not a GIMPLE
457 conditional statement, however, the valid_gimple_rhs_p
458 test still applies. */
459 set = (tem && is_gimple_condexpr (tem)
460 && valid_gimple_rhs_p (tem));
461 fold_undefer_overflow_warnings (set, stmt, 0);
462 }
463 else if (is_gimple_min_invariant (op0))
464 {
465 tem = op0;
466 set = true;
467 }
468 else
469 return NULL_TREE;
470
471 if (set)
472 result = fold_build3_loc (cond_loc, COND_EXPR,
473 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
474 gimple_assign_rhs2 (stmt),
475 gimple_assign_rhs3 (stmt));
476 }
477
478 if (!result)
479 result = fold_ternary_loc (loc, subcode,
480 TREE_TYPE (gimple_assign_lhs (stmt)),
481 gimple_assign_rhs1 (stmt),
482 gimple_assign_rhs2 (stmt),
483 gimple_assign_rhs3 (stmt));
484
485 if (result)
486 {
487 STRIP_USELESS_TYPE_CONVERSION (result);
488 if (valid_gimple_rhs_p (result))
489 return result;
490 }
491 break;
492
493 case GIMPLE_INVALID_RHS:
494 gcc_unreachable ();
495 }
496
497 return NULL_TREE;
498 }
499
500 /* Attempt to fold a conditional statement. Return true if any changes were
501 made. We only attempt to fold the condition expression, and do not perform
502 any transformation that would require alteration of the cfg. It is
503 assumed that the operands have been previously folded. */
504
505 static bool
506 fold_gimple_cond (gimple stmt)
507 {
508 tree result = fold_binary_loc (gimple_location (stmt),
509 gimple_cond_code (stmt),
510 boolean_type_node,
511 gimple_cond_lhs (stmt),
512 gimple_cond_rhs (stmt));
513
514 if (result)
515 {
516 STRIP_USELESS_TYPE_CONVERSION (result);
517 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
518 {
519 gimple_cond_set_condition_from_tree (stmt, result);
520 return true;
521 }
522 }
523
524 return false;
525 }
526
527 /* Convert EXPR into a GIMPLE value suitable for substitution on the
528 RHS of an assignment. Insert the necessary statements before
529 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
530 is replaced. If the call is expected to produces a result, then it
531 is replaced by an assignment of the new RHS to the result variable.
532 If the result is to be ignored, then the call is replaced by a
533 GIMPLE_NOP. A proper VDEF chain is retained by making the first
534 VUSE and the last VDEF of the whole sequence be the same as the replaced
535 statement and using new SSA names for stores in between. */
536
537 void
538 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
539 {
540 tree lhs;
541 gimple stmt, new_stmt;
542 gimple_stmt_iterator i;
543 gimple_seq stmts = gimple_seq_alloc();
544 struct gimplify_ctx gctx;
545 gimple last;
546 gimple laststore;
547 tree reaching_vuse;
548
549 stmt = gsi_stmt (*si_p);
550
551 gcc_assert (is_gimple_call (stmt));
552
553 push_gimplify_context (&gctx);
554 gctx.into_ssa = gimple_in_ssa_p (cfun);
555
556 lhs = gimple_call_lhs (stmt);
557 if (lhs == NULL_TREE)
558 {
559 gimplify_and_add (expr, &stmts);
560 /* We can end up with folding a memcpy of an empty class assignment
561 which gets optimized away by C++ gimplification. */
562 if (gimple_seq_empty_p (stmts))
563 {
564 pop_gimplify_context (NULL);
565 if (gimple_in_ssa_p (cfun))
566 {
567 unlink_stmt_vdef (stmt);
568 release_defs (stmt);
569 }
570 gsi_remove (si_p, true);
571 return;
572 }
573 }
574 else
575 {
576 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
577 new_stmt = gimple_build_assign (lhs, tmp);
578 i = gsi_last (stmts);
579 gsi_insert_after_without_update (&i, new_stmt,
580 GSI_CONTINUE_LINKING);
581 }
582
583 pop_gimplify_context (NULL);
584
585 if (gimple_has_location (stmt))
586 annotate_all_with_location (stmts, gimple_location (stmt));
587
588 /* First iterate over the replacement statements backward, assigning
589 virtual operands to their defining statements. */
590 laststore = NULL;
591 for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
592 {
593 new_stmt = gsi_stmt (i);
594 if (gimple_assign_single_p (new_stmt)
595 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
596 {
597 tree vdef;
598 if (!laststore)
599 vdef = gimple_vdef (stmt);
600 else
601 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
602 gimple_set_vdef (new_stmt, vdef);
603 if (vdef && TREE_CODE (vdef) == SSA_NAME)
604 SSA_NAME_DEF_STMT (vdef) = new_stmt;
605 laststore = new_stmt;
606 }
607 }
608
609 /* Second iterate over the statements forward, assigning virtual
610 operands to their uses. */
611 last = NULL;
612 reaching_vuse = gimple_vuse (stmt);
613 for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
614 {
615 /* Do not insert the last stmt in this loop but remember it
616 for replacing the original statement. */
617 if (last)
618 {
619 gsi_insert_before (si_p, last, GSI_NEW_STMT);
620 gsi_next (si_p);
621 }
622 new_stmt = gsi_stmt (i);
623 /* The replacement can expose previously unreferenced variables. */
624 if (gimple_in_ssa_p (cfun))
625 find_new_referenced_vars (new_stmt);
626 /* If the new statement possibly has a VUSE, update it with exact SSA
627 name we know will reach this one. */
628 if (gimple_has_mem_ops (new_stmt))
629 gimple_set_vuse (new_stmt, reaching_vuse);
630 gimple_set_modified (new_stmt, true);
631 if (gimple_vdef (new_stmt))
632 reaching_vuse = gimple_vdef (new_stmt);
633 last = new_stmt;
634 }
635
636 /* If the new sequence does not do a store release the virtual
637 definition of the original statement. */
638 if (reaching_vuse
639 && reaching_vuse == gimple_vuse (stmt))
640 {
641 tree vdef = gimple_vdef (stmt);
642 if (vdef
643 && TREE_CODE (vdef) == SSA_NAME)
644 {
645 unlink_stmt_vdef (stmt);
646 release_ssa_name (vdef);
647 }
648 }
649
650 /* Finally replace rhe original statement with the last. */
651 gsi_replace (si_p, last, false);
652 }
653
654 /* Return the string length, maximum string length or maximum value of
655 ARG in LENGTH.
656 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
657 is not NULL and, for TYPE == 0, its value is not equal to the length
658 we determine or if we are unable to determine the length or value,
659 return false. VISITED is a bitmap of visited variables.
660 TYPE is 0 if string length should be returned, 1 for maximum string
661 length and 2 for maximum value ARG can have. */
662
663 static bool
664 get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
665 {
666 tree var, val;
667 gimple def_stmt;
668
669 if (TREE_CODE (arg) != SSA_NAME)
670 {
671 if (TREE_CODE (arg) == COND_EXPR)
672 return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
673 && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
674 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
675 else if (TREE_CODE (arg) == ADDR_EXPR
676 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
677 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
678 {
679 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
680 if (TREE_CODE (aop0) == INDIRECT_REF
681 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
682 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
683 length, visited, type);
684 }
685
686 if (type == 2)
687 {
688 val = arg;
689 if (TREE_CODE (val) != INTEGER_CST
690 || tree_int_cst_sgn (val) < 0)
691 return false;
692 }
693 else
694 val = c_strlen (arg, 1);
695 if (!val)
696 return false;
697
698 if (*length)
699 {
700 if (type > 0)
701 {
702 if (TREE_CODE (*length) != INTEGER_CST
703 || TREE_CODE (val) != INTEGER_CST)
704 return false;
705
706 if (tree_int_cst_lt (*length, val))
707 *length = val;
708 return true;
709 }
710 else if (simple_cst_equal (val, *length) != 1)
711 return false;
712 }
713
714 *length = val;
715 return true;
716 }
717
718 /* If we were already here, break the infinite cycle. */
719 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
720 return true;
721
722 var = arg;
723 def_stmt = SSA_NAME_DEF_STMT (var);
724
725 switch (gimple_code (def_stmt))
726 {
727 case GIMPLE_ASSIGN:
728 /* The RHS of the statement defining VAR must either have a
729 constant length or come from another SSA_NAME with a constant
730 length. */
731 if (gimple_assign_single_p (def_stmt)
732 || gimple_assign_unary_nop_p (def_stmt))
733 {
734 tree rhs = gimple_assign_rhs1 (def_stmt);
735 return get_maxval_strlen (rhs, length, visited, type);
736 }
737 return false;
738
739 case GIMPLE_PHI:
740 {
741 /* All the arguments of the PHI node must have the same constant
742 length. */
743 unsigned i;
744
745 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
746 {
747 tree arg = gimple_phi_arg (def_stmt, i)->def;
748
749 /* If this PHI has itself as an argument, we cannot
750 determine the string length of this argument. However,
751 if we can find a constant string length for the other
752 PHI args then we can still be sure that this is a
753 constant string length. So be optimistic and just
754 continue with the next argument. */
755 if (arg == gimple_phi_result (def_stmt))
756 continue;
757
758 if (!get_maxval_strlen (arg, length, visited, type))
759 return false;
760 }
761 }
762 return true;
763
764 default:
765 return false;
766 }
767 }
768
769
770 /* Fold builtin call in statement STMT. Returns a simplified tree.
771 We may return a non-constant expression, including another call
772 to a different function and with different arguments, e.g.,
773 substituting memcpy for strcpy when the string length is known.
774 Note that some builtins expand into inline code that may not
775 be valid in GIMPLE. Callers must take care. */
776
777 tree
778 gimple_fold_builtin (gimple stmt)
779 {
780 tree result, val[3];
781 tree callee, a;
782 int arg_idx, type;
783 bitmap visited;
784 bool ignore;
785 int nargs;
786 location_t loc = gimple_location (stmt);
787
788 gcc_assert (is_gimple_call (stmt));
789
790 ignore = (gimple_call_lhs (stmt) == NULL);
791
792 /* First try the generic builtin folder. If that succeeds, return the
793 result directly. */
794 result = fold_call_stmt (stmt, ignore);
795 if (result)
796 {
797 if (ignore)
798 STRIP_NOPS (result);
799 return result;
800 }
801
802 /* Ignore MD builtins. */
803 callee = gimple_call_fndecl (stmt);
804 if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
805 return NULL_TREE;
806
807 /* Give up for always_inline inline builtins until they are
808 inlined. */
809 if (avoid_folding_inline_builtin (callee))
810 return NULL_TREE;
811
812 /* If the builtin could not be folded, and it has no argument list,
813 we're done. */
814 nargs = gimple_call_num_args (stmt);
815 if (nargs == 0)
816 return NULL_TREE;
817
818 /* Limit the work only for builtins we know how to simplify. */
819 switch (DECL_FUNCTION_CODE (callee))
820 {
821 case BUILT_IN_STRLEN:
822 case BUILT_IN_FPUTS:
823 case BUILT_IN_FPUTS_UNLOCKED:
824 arg_idx = 0;
825 type = 0;
826 break;
827 case BUILT_IN_STRCPY:
828 case BUILT_IN_STRNCPY:
829 arg_idx = 1;
830 type = 0;
831 break;
832 case BUILT_IN_MEMCPY_CHK:
833 case BUILT_IN_MEMPCPY_CHK:
834 case BUILT_IN_MEMMOVE_CHK:
835 case BUILT_IN_MEMSET_CHK:
836 case BUILT_IN_STRNCPY_CHK:
837 case BUILT_IN_STPNCPY_CHK:
838 arg_idx = 2;
839 type = 2;
840 break;
841 case BUILT_IN_STRCPY_CHK:
842 case BUILT_IN_STPCPY_CHK:
843 arg_idx = 1;
844 type = 1;
845 break;
846 case BUILT_IN_SNPRINTF_CHK:
847 case BUILT_IN_VSNPRINTF_CHK:
848 arg_idx = 1;
849 type = 2;
850 break;
851 default:
852 return NULL_TREE;
853 }
854
855 if (arg_idx >= nargs)
856 return NULL_TREE;
857
858 /* Try to use the dataflow information gathered by the CCP process. */
859 visited = BITMAP_ALLOC (NULL);
860 bitmap_clear (visited);
861
862 memset (val, 0, sizeof (val));
863 a = gimple_call_arg (stmt, arg_idx);
864 if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
865 val[arg_idx] = NULL_TREE;
866
867 BITMAP_FREE (visited);
868
869 result = NULL_TREE;
870 switch (DECL_FUNCTION_CODE (callee))
871 {
872 case BUILT_IN_STRLEN:
873 if (val[0] && nargs == 1)
874 {
875 tree new_val =
876 fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);
877
878 /* If the result is not a valid gimple value, or not a cast
879 of a valid gimple value, then we cannot use the result. */
880 if (is_gimple_val (new_val)
881 || (CONVERT_EXPR_P (new_val)
882 && is_gimple_val (TREE_OPERAND (new_val, 0))))
883 return new_val;
884 }
885 break;
886
887 case BUILT_IN_STRCPY:
888 if (val[1] && is_gimple_val (val[1]) && nargs == 2)
889 result = fold_builtin_strcpy (loc, callee,
890 gimple_call_arg (stmt, 0),
891 gimple_call_arg (stmt, 1),
892 val[1]);
893 break;
894
895 case BUILT_IN_STRNCPY:
896 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
897 result = fold_builtin_strncpy (loc, callee,
898 gimple_call_arg (stmt, 0),
899 gimple_call_arg (stmt, 1),
900 gimple_call_arg (stmt, 2),
901 val[1]);
902 break;
903
904 case BUILT_IN_FPUTS:
905 if (nargs == 2)
906 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
907 gimple_call_arg (stmt, 1),
908 ignore, false, val[0]);
909 break;
910
911 case BUILT_IN_FPUTS_UNLOCKED:
912 if (nargs == 2)
913 result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
914 gimple_call_arg (stmt, 1),
915 ignore, true, val[0]);
916 break;
917
918 case BUILT_IN_MEMCPY_CHK:
919 case BUILT_IN_MEMPCPY_CHK:
920 case BUILT_IN_MEMMOVE_CHK:
921 case BUILT_IN_MEMSET_CHK:
922 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
923 result = fold_builtin_memory_chk (loc, callee,
924 gimple_call_arg (stmt, 0),
925 gimple_call_arg (stmt, 1),
926 gimple_call_arg (stmt, 2),
927 gimple_call_arg (stmt, 3),
928 val[2], ignore,
929 DECL_FUNCTION_CODE (callee));
930 break;
931
932 case BUILT_IN_STRCPY_CHK:
933 case BUILT_IN_STPCPY_CHK:
934 if (val[1] && is_gimple_val (val[1]) && nargs == 3)
935 result = fold_builtin_stxcpy_chk (loc, callee,
936 gimple_call_arg (stmt, 0),
937 gimple_call_arg (stmt, 1),
938 gimple_call_arg (stmt, 2),
939 val[1], ignore,
940 DECL_FUNCTION_CODE (callee));
941 break;
942
943 case BUILT_IN_STRNCPY_CHK:
944 case BUILT_IN_STPNCPY_CHK:
945 if (val[2] && is_gimple_val (val[2]) && nargs == 4)
946 result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
947 gimple_call_arg (stmt, 1),
948 gimple_call_arg (stmt, 2),
949 gimple_call_arg (stmt, 3),
950 val[2], ignore,
951 DECL_FUNCTION_CODE (callee));
952 break;
953
954 case BUILT_IN_SNPRINTF_CHK:
955 case BUILT_IN_VSNPRINTF_CHK:
956 if (val[1] && is_gimple_val (val[1]))
957 result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
958 DECL_FUNCTION_CODE (callee));
959 break;
960
961 default:
962 gcc_unreachable ();
963 }
964
965 if (result && ignore)
966 result = fold_ignored_result (result);
967 return result;
968 }
969
970 /* Generate code adjusting the first parameter of a call statement determined
971 by GSI by DELTA. */
972
973 void
974 gimple_adjust_this_by_delta (gimple_stmt_iterator *gsi, tree delta)
975 {
976 gimple call_stmt = gsi_stmt (*gsi);
977 tree parm, tmp;
978 gimple new_stmt;
979
980 delta = convert_to_ptrofftype (delta);
981 gcc_assert (gimple_call_num_args (call_stmt) >= 1);
982 parm = gimple_call_arg (call_stmt, 0);
983 gcc_assert (POINTER_TYPE_P (TREE_TYPE (parm)));
984 tmp = create_tmp_var (TREE_TYPE (parm), NULL);
985 add_referenced_var (tmp);
986
987 tmp = make_ssa_name (tmp, NULL);
988 new_stmt = gimple_build_assign_with_ops (POINTER_PLUS_EXPR, tmp, parm, delta);
989 SSA_NAME_DEF_STMT (tmp) = new_stmt;
990 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
991 gimple_call_set_arg (call_stmt, 0, tmp);
992 }
993
994 /* Return a binfo to be used for devirtualization of calls based on an object
995 represented by a declaration (i.e. a global or automatically allocated one)
996 or NULL if it cannot be found or is not safe. CST is expected to be an
997 ADDR_EXPR of such object or the function will return NULL. Currently it is
998 safe to use such binfo only if it has no base binfo (i.e. no ancestors). */
999
1000 tree
1001 gimple_extract_devirt_binfo_from_cst (tree cst)
1002 {
1003 HOST_WIDE_INT offset, size, max_size;
1004 tree base, type, expected_type, binfo;
1005 bool last_artificial = false;
1006
1007 if (!flag_devirtualize
1008 || TREE_CODE (cst) != ADDR_EXPR
1009 || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
1010 return NULL_TREE;
1011
1012 cst = TREE_OPERAND (cst, 0);
1013 expected_type = TREE_TYPE (cst);
1014 base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
1015 type = TREE_TYPE (base);
1016 if (!DECL_P (base)
1017 || max_size == -1
1018 || max_size != size
1019 || TREE_CODE (type) != RECORD_TYPE)
1020 return NULL_TREE;
1021
1022 /* Find the sub-object the constant actually refers to and mark whether it is
1023 an artificial one (as opposed to a user-defined one). */
1024 while (true)
1025 {
1026 HOST_WIDE_INT pos, size;
1027 tree fld;
1028
1029 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
1030 break;
1031 if (offset < 0)
1032 return NULL_TREE;
1033
1034 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
1035 {
1036 if (TREE_CODE (fld) != FIELD_DECL)
1037 continue;
1038
1039 pos = int_bit_position (fld);
1040 size = tree_low_cst (DECL_SIZE (fld), 1);
1041 if (pos <= offset && (pos + size) > offset)
1042 break;
1043 }
1044 if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
1045 return NULL_TREE;
1046
1047 last_artificial = DECL_ARTIFICIAL (fld);
1048 type = TREE_TYPE (fld);
1049 offset -= pos;
1050 }
1051 /* Artifical sub-objects are ancestors, we do not want to use them for
1052 devirtualization, at least not here. */
1053 if (last_artificial)
1054 return NULL_TREE;
1055 binfo = TYPE_BINFO (type);
1056 if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
1057 return NULL_TREE;
1058 else
1059 return binfo;
1060 }
1061
1062 /* Attempt to fold a call statement referenced by the statement iterator GSI.
1063 The statement may be replaced by another statement, e.g., if the call
1064 simplifies to a constant value. Return true if any changes were made.
1065 It is assumed that the operands have been previously folded. */
1066
1067 static bool
1068 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
1069 {
1070 gimple stmt = gsi_stmt (*gsi);
1071 tree callee;
1072 bool changed = false;
1073 unsigned i;
1074
1075 /* Fold *& in call arguments. */
1076 for (i = 0; i < gimple_call_num_args (stmt); ++i)
1077 if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
1078 {
1079 tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
1080 if (tmp)
1081 {
1082 gimple_call_set_arg (stmt, i, tmp);
1083 changed = true;
1084 }
1085 }
1086
1087 /* Check for virtual calls that became direct calls. */
1088 callee = gimple_call_fn (stmt);
1089 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
1090 {
1091 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
1092 {
1093 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
1094 changed = true;
1095 }
1096 else
1097 {
1098 tree obj = OBJ_TYPE_REF_OBJECT (callee);
1099 tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
1100 if (binfo)
1101 {
1102 HOST_WIDE_INT token
1103 = TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
1104 tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
1105 if (fndecl)
1106 {
1107 gimple_call_set_fndecl (stmt, fndecl);
1108 changed = true;
1109 }
1110 }
1111 }
1112 }
1113
1114 if (inplace)
1115 return changed;
1116
1117 /* Check for builtins that CCP can handle using information not
1118 available in the generic fold routines. */
1119 callee = gimple_call_fndecl (stmt);
1120 if (callee && DECL_BUILT_IN (callee))
1121 {
1122 tree result = gimple_fold_builtin (stmt);
1123 if (result)
1124 {
1125 if (!update_call_from_tree (gsi, result))
1126 gimplify_and_update_call_from_tree (gsi, result);
1127 changed = true;
1128 }
1129 }
1130
1131 return changed;
1132 }
1133
1134 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
1135 distinguishes both cases. */
1136
1137 static bool
1138 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
1139 {
1140 bool changed = false;
1141 gimple stmt = gsi_stmt (*gsi);
1142 unsigned i;
1143 gimple_stmt_iterator gsinext = *gsi;
1144 gimple next_stmt;
1145
1146 gsi_next (&gsinext);
1147 next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);
1148
1149 /* Fold the main computation performed by the statement. */
1150 switch (gimple_code (stmt))
1151 {
1152 case GIMPLE_ASSIGN:
1153 {
1154 unsigned old_num_ops = gimple_num_ops (stmt);
1155 enum tree_code subcode = gimple_assign_rhs_code (stmt);
1156 tree lhs = gimple_assign_lhs (stmt);
1157 tree new_rhs;
1158 /* First canonicalize operand order. This avoids building new
1159 trees if this is the only thing fold would later do. */
1160 if ((commutative_tree_code (subcode)
1161 || commutative_ternary_tree_code (subcode))
1162 && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
1163 gimple_assign_rhs2 (stmt), false))
1164 {
1165 tree tem = gimple_assign_rhs1 (stmt);
1166 gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
1167 gimple_assign_set_rhs2 (stmt, tem);
1168 changed = true;
1169 }
1170 new_rhs = fold_gimple_assign (gsi);
1171 if (new_rhs
1172 && !useless_type_conversion_p (TREE_TYPE (lhs),
1173 TREE_TYPE (new_rhs)))
1174 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
1175 if (new_rhs
1176 && (!inplace
1177 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
1178 {
1179 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
1180 changed = true;
1181 }
1182 break;
1183 }
1184
1185 case GIMPLE_COND:
1186 changed |= fold_gimple_cond (stmt);
1187 break;
1188
1189 case GIMPLE_CALL:
1190 changed |= gimple_fold_call (gsi, inplace);
1191 break;
1192
1193 case GIMPLE_ASM:
1194 /* Fold *& in asm operands. */
1195 {
1196 size_t noutputs;
1197 const char **oconstraints;
1198 const char *constraint;
1199 bool allows_mem, allows_reg;
1200
1201 noutputs = gimple_asm_noutputs (stmt);
1202 oconstraints = XALLOCAVEC (const char *, noutputs);
1203
1204 for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
1205 {
1206 tree link = gimple_asm_output_op (stmt, i);
1207 tree op = TREE_VALUE (link);
1208 oconstraints[i]
1209 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1210 if (REFERENCE_CLASS_P (op)
1211 && (op = maybe_fold_reference (op, true)) != NULL_TREE)
1212 {
1213 TREE_VALUE (link) = op;
1214 changed = true;
1215 }
1216 }
1217 for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
1218 {
1219 tree link = gimple_asm_input_op (stmt, i);
1220 tree op = TREE_VALUE (link);
1221 constraint
1222 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
1223 parse_input_constraint (&constraint, 0, 0, noutputs, 0,
1224 oconstraints, &allows_mem, &allows_reg);
1225 if (REFERENCE_CLASS_P (op)
1226 && (op = maybe_fold_reference (op, !allows_reg && allows_mem))
1227 != NULL_TREE)
1228 {
1229 TREE_VALUE (link) = op;
1230 changed = true;
1231 }
1232 }
1233 }
1234 break;
1235
1236 case GIMPLE_DEBUG:
1237 if (gimple_debug_bind_p (stmt))
1238 {
1239 tree val = gimple_debug_bind_get_value (stmt);
1240 if (val
1241 && REFERENCE_CLASS_P (val))
1242 {
1243 tree tem = maybe_fold_reference (val, false);
1244 if (tem)
1245 {
1246 gimple_debug_bind_set_value (stmt, tem);
1247 changed = true;
1248 }
1249 }
1250 }
1251 break;
1252
1253 default:;
1254 }
1255
1256 /* If stmt folds into nothing and it was the last stmt in a bb,
1257 don't call gsi_stmt. */
1258 if (gsi_end_p (*gsi))
1259 {
1260 gcc_assert (next_stmt == NULL);
1261 return changed;
1262 }
1263
1264 stmt = gsi_stmt (*gsi);
1265
1266 /* Fold *& on the lhs. Don't do this if stmt folded into nothing,
1267 as we'd changing the next stmt. */
1268 if (gimple_has_lhs (stmt) && stmt != next_stmt)
1269 {
1270 tree lhs = gimple_get_lhs (stmt);
1271 if (lhs && REFERENCE_CLASS_P (lhs))
1272 {
1273 tree new_lhs = maybe_fold_reference (lhs, true);
1274 if (new_lhs)
1275 {
1276 gimple_set_lhs (stmt, new_lhs);
1277 changed = true;
1278 }
1279 }
1280 }
1281
1282 return changed;
1283 }
1284
1285 /* Fold the statement pointed to by GSI. In some cases, this function may
1286 replace the whole statement with a new one. Returns true iff folding
1287 makes any changes.
1288 The statement pointed to by GSI should be in valid gimple form but may
1289 be in unfolded state as resulting from for example constant propagation
1290 which can produce *&x = 0. */
1291
1292 bool
1293 fold_stmt (gimple_stmt_iterator *gsi)
1294 {
1295 return fold_stmt_1 (gsi, false);
1296 }
1297
1298 /* Perform the minimal folding on statement *GSI. Only operations like
1299 *&x created by constant propagation are handled. The statement cannot
1300 be replaced with a new one. Return true if the statement was
1301 changed, false otherwise.
1302 The statement *GSI should be in valid gimple form but may
1303 be in unfolded state as resulting from for example constant propagation
1304 which can produce *&x = 0. */
1305
1306 bool
1307 fold_stmt_inplace (gimple_stmt_iterator *gsi)
1308 {
1309 gimple stmt = gsi_stmt (*gsi);
1310 bool changed = fold_stmt_1 (gsi, true);
1311 gcc_assert (gsi_stmt (*gsi) == stmt);
1312 return changed;
1313 }
1314
1315 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1316 if EXPR is null or we don't know how.
1317 If non-null, the result always has boolean type. */
1318
1319 static tree
1320 canonicalize_bool (tree expr, bool invert)
1321 {
1322 if (!expr)
1323 return NULL_TREE;
1324 else if (invert)
1325 {
1326 if (integer_nonzerop (expr))
1327 return boolean_false_node;
1328 else if (integer_zerop (expr))
1329 return boolean_true_node;
1330 else if (TREE_CODE (expr) == SSA_NAME)
1331 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1332 build_int_cst (TREE_TYPE (expr), 0));
1333 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1334 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1335 boolean_type_node,
1336 TREE_OPERAND (expr, 0),
1337 TREE_OPERAND (expr, 1));
1338 else
1339 return NULL_TREE;
1340 }
1341 else
1342 {
1343 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1344 return expr;
1345 if (integer_nonzerop (expr))
1346 return boolean_true_node;
1347 else if (integer_zerop (expr))
1348 return boolean_false_node;
1349 else if (TREE_CODE (expr) == SSA_NAME)
1350 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1351 build_int_cst (TREE_TYPE (expr), 0));
1352 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1353 return fold_build2 (TREE_CODE (expr),
1354 boolean_type_node,
1355 TREE_OPERAND (expr, 0),
1356 TREE_OPERAND (expr, 1));
1357 else
1358 return NULL_TREE;
1359 }
1360 }
1361
1362 /* Check to see if a boolean expression EXPR is logically equivalent to the
1363 comparison (OP1 CODE OP2). Check for various identities involving
1364 SSA_NAMEs. */
1365
1366 static bool
1367 same_bool_comparison_p (const_tree expr, enum tree_code code,
1368 const_tree op1, const_tree op2)
1369 {
1370 gimple s;
1371
1372 /* The obvious case. */
1373 if (TREE_CODE (expr) == code
1374 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1375 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1376 return true;
1377
1378 /* Check for comparing (name, name != 0) and the case where expr
1379 is an SSA_NAME with a definition matching the comparison. */
1380 if (TREE_CODE (expr) == SSA_NAME
1381 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1382 {
1383 if (operand_equal_p (expr, op1, 0))
1384 return ((code == NE_EXPR && integer_zerop (op2))
1385 || (code == EQ_EXPR && integer_nonzerop (op2)));
1386 s = SSA_NAME_DEF_STMT (expr);
1387 if (is_gimple_assign (s)
1388 && gimple_assign_rhs_code (s) == code
1389 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1390 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1391 return true;
1392 }
1393
1394 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1395 of name is a comparison, recurse. */
1396 if (TREE_CODE (op1) == SSA_NAME
1397 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1398 {
1399 s = SSA_NAME_DEF_STMT (op1);
1400 if (is_gimple_assign (s)
1401 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1402 {
1403 enum tree_code c = gimple_assign_rhs_code (s);
1404 if ((c == NE_EXPR && integer_zerop (op2))
1405 || (c == EQ_EXPR && integer_nonzerop (op2)))
1406 return same_bool_comparison_p (expr, c,
1407 gimple_assign_rhs1 (s),
1408 gimple_assign_rhs2 (s));
1409 if ((c == EQ_EXPR && integer_zerop (op2))
1410 || (c == NE_EXPR && integer_nonzerop (op2)))
1411 return same_bool_comparison_p (expr,
1412 invert_tree_comparison (c, false),
1413 gimple_assign_rhs1 (s),
1414 gimple_assign_rhs2 (s));
1415 }
1416 }
1417 return false;
1418 }
1419
1420 /* Check to see if two boolean expressions OP1 and OP2 are logically
1421 equivalent. */
1422
1423 static bool
1424 same_bool_result_p (const_tree op1, const_tree op2)
1425 {
1426 /* Simple cases first. */
1427 if (operand_equal_p (op1, op2, 0))
1428 return true;
1429
1430 /* Check the cases where at least one of the operands is a comparison.
1431 These are a bit smarter than operand_equal_p in that they apply some
1432 identifies on SSA_NAMEs. */
1433 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1434 && same_bool_comparison_p (op1, TREE_CODE (op2),
1435 TREE_OPERAND (op2, 0),
1436 TREE_OPERAND (op2, 1)))
1437 return true;
1438 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1439 && same_bool_comparison_p (op2, TREE_CODE (op1),
1440 TREE_OPERAND (op1, 0),
1441 TREE_OPERAND (op1, 1)))
1442 return true;
1443
1444 /* Default case. */
1445 return false;
1446 }
1447
1448 /* Forward declarations for some mutually recursive functions. */
1449
1450 static tree
1451 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1452 enum tree_code code2, tree op2a, tree op2b);
1453 static tree
1454 and_var_with_comparison (tree var, bool invert,
1455 enum tree_code code2, tree op2a, tree op2b);
1456 static tree
1457 and_var_with_comparison_1 (gimple stmt,
1458 enum tree_code code2, tree op2a, tree op2b);
1459 static tree
1460 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1461 enum tree_code code2, tree op2a, tree op2b);
1462 static tree
1463 or_var_with_comparison (tree var, bool invert,
1464 enum tree_code code2, tree op2a, tree op2b);
1465 static tree
1466 or_var_with_comparison_1 (gimple stmt,
1467 enum tree_code code2, tree op2a, tree op2b);
1468
1469 /* Helper function for and_comparisons_1: try to simplify the AND of the
1470 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1471 If INVERT is true, invert the value of the VAR before doing the AND.
1472 Return NULL_EXPR if we can't simplify this to a single expression. */
1473
1474 static tree
1475 and_var_with_comparison (tree var, bool invert,
1476 enum tree_code code2, tree op2a, tree op2b)
1477 {
1478 tree t;
1479 gimple stmt = SSA_NAME_DEF_STMT (var);
1480
1481 /* We can only deal with variables whose definitions are assignments. */
1482 if (!is_gimple_assign (stmt))
1483 return NULL_TREE;
1484
1485 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1486 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1487 Then we only have to consider the simpler non-inverted cases. */
1488 if (invert)
1489 t = or_var_with_comparison_1 (stmt,
1490 invert_tree_comparison (code2, false),
1491 op2a, op2b);
1492 else
1493 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1494 return canonicalize_bool (t, invert);
1495 }
1496
1497 /* Try to simplify the AND of the ssa variable defined by the assignment
1498 STMT with the comparison specified by (OP2A CODE2 OP2B).
1499 Return NULL_EXPR if we can't simplify this to a single expression. */
1500
1501 static tree
1502 and_var_with_comparison_1 (gimple stmt,
1503 enum tree_code code2, tree op2a, tree op2b)
1504 {
1505 tree var = gimple_assign_lhs (stmt);
1506 tree true_test_var = NULL_TREE;
1507 tree false_test_var = NULL_TREE;
1508 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1509
1510 /* Check for identities like (var AND (var == 0)) => false. */
1511 if (TREE_CODE (op2a) == SSA_NAME
1512 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1513 {
1514 if ((code2 == NE_EXPR && integer_zerop (op2b))
1515 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1516 {
1517 true_test_var = op2a;
1518 if (var == true_test_var)
1519 return var;
1520 }
1521 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1522 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1523 {
1524 false_test_var = op2a;
1525 if (var == false_test_var)
1526 return boolean_false_node;
1527 }
1528 }
1529
1530 /* If the definition is a comparison, recurse on it. */
1531 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1532 {
1533 tree t = and_comparisons_1 (innercode,
1534 gimple_assign_rhs1 (stmt),
1535 gimple_assign_rhs2 (stmt),
1536 code2,
1537 op2a,
1538 op2b);
1539 if (t)
1540 return t;
1541 }
1542
1543 /* If the definition is an AND or OR expression, we may be able to
1544 simplify by reassociating. */
1545 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
1546 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
1547 {
1548 tree inner1 = gimple_assign_rhs1 (stmt);
1549 tree inner2 = gimple_assign_rhs2 (stmt);
1550 gimple s;
1551 tree t;
1552 tree partial = NULL_TREE;
1553 bool is_and = (innercode == BIT_AND_EXPR);
1554
1555 /* Check for boolean identities that don't require recursive examination
1556 of inner1/inner2:
1557 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
1558 inner1 AND (inner1 OR inner2) => inner1
1559 !inner1 AND (inner1 AND inner2) => false
1560 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
1561 Likewise for similar cases involving inner2. */
1562 if (inner1 == true_test_var)
1563 return (is_and ? var : inner1);
1564 else if (inner2 == true_test_var)
1565 return (is_and ? var : inner2);
1566 else if (inner1 == false_test_var)
1567 return (is_and
1568 ? boolean_false_node
1569 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
1570 else if (inner2 == false_test_var)
1571 return (is_and
1572 ? boolean_false_node
1573 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
1574
1575 /* Next, redistribute/reassociate the AND across the inner tests.
1576 Compute the first partial result, (inner1 AND (op2a code op2b)) */
1577 if (TREE_CODE (inner1) == SSA_NAME
1578 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
1579 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1580 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1581 gimple_assign_rhs1 (s),
1582 gimple_assign_rhs2 (s),
1583 code2, op2a, op2b)))
1584 {
1585 /* Handle the AND case, where we are reassociating:
1586 (inner1 AND inner2) AND (op2a code2 op2b)
1587 => (t AND inner2)
1588 If the partial result t is a constant, we win. Otherwise
1589 continue on to try reassociating with the other inner test. */
1590 if (is_and)
1591 {
1592 if (integer_onep (t))
1593 return inner2;
1594 else if (integer_zerop (t))
1595 return boolean_false_node;
1596 }
1597
1598 /* Handle the OR case, where we are redistributing:
1599 (inner1 OR inner2) AND (op2a code2 op2b)
1600 => (t OR (inner2 AND (op2a code2 op2b))) */
1601 else if (integer_onep (t))
1602 return boolean_true_node;
1603
1604 /* Save partial result for later. */
1605 partial = t;
1606 }
1607
1608 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
1609 if (TREE_CODE (inner2) == SSA_NAME
1610 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
1611 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
1612 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
1613 gimple_assign_rhs1 (s),
1614 gimple_assign_rhs2 (s),
1615 code2, op2a, op2b)))
1616 {
1617 /* Handle the AND case, where we are reassociating:
1618 (inner1 AND inner2) AND (op2a code2 op2b)
1619 => (inner1 AND t) */
1620 if (is_and)
1621 {
1622 if (integer_onep (t))
1623 return inner1;
1624 else if (integer_zerop (t))
1625 return boolean_false_node;
1626 /* If both are the same, we can apply the identity
1627 (x AND x) == x. */
1628 else if (partial && same_bool_result_p (t, partial))
1629 return t;
1630 }
1631
1632 /* Handle the OR case. where we are redistributing:
1633 (inner1 OR inner2) AND (op2a code2 op2b)
1634 => (t OR (inner1 AND (op2a code2 op2b)))
1635 => (t OR partial) */
1636 else
1637 {
1638 if (integer_onep (t))
1639 return boolean_true_node;
1640 else if (partial)
1641 {
1642 /* We already got a simplification for the other
1643 operand to the redistributed OR expression. The
1644 interesting case is when at least one is false.
1645 Or, if both are the same, we can apply the identity
1646 (x OR x) == x. */
1647 if (integer_zerop (partial))
1648 return t;
1649 else if (integer_zerop (t))
1650 return partial;
1651 else if (same_bool_result_p (t, partial))
1652 return t;
1653 }
1654 }
1655 }
1656 }
1657 return NULL_TREE;
1658 }
1659
1660 /* Try to simplify the AND of two comparisons defined by
1661 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1662 If this can be done without constructing an intermediate value,
1663 return the resulting tree; otherwise NULL_TREE is returned.
1664 This function is deliberately asymmetric as it recurses on SSA_DEFs
1665 in the first comparison but not the second. */
1666
1667 static tree
1668 and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
1669 enum tree_code code2, tree op2a, tree op2b)
1670 {
1671 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
1672 if (operand_equal_p (op1a, op2a, 0)
1673 && operand_equal_p (op1b, op2b, 0))
1674 {
1675 /* Result will be either NULL_TREE, or a combined comparison. */
1676 tree t = combine_comparisons (UNKNOWN_LOCATION,
1677 TRUTH_ANDIF_EXPR, code1, code2,
1678 boolean_type_node, op1a, op1b);
1679 if (t)
1680 return t;
1681 }
1682
1683 /* Likewise the swapped case of the above. */
1684 if (operand_equal_p (op1a, op2b, 0)
1685 && operand_equal_p (op1b, op2a, 0))
1686 {
1687 /* Result will be either NULL_TREE, or a combined comparison. */
1688 tree t = combine_comparisons (UNKNOWN_LOCATION,
1689 TRUTH_ANDIF_EXPR, code1,
1690 swap_tree_comparison (code2),
1691 boolean_type_node, op1a, op1b);
1692 if (t)
1693 return t;
1694 }
1695
1696 /* If both comparisons are of the same value against constants, we might
1697 be able to merge them. */
1698 if (operand_equal_p (op1a, op2a, 0)
1699 && TREE_CODE (op1b) == INTEGER_CST
1700 && TREE_CODE (op2b) == INTEGER_CST)
1701 {
1702 int cmp = tree_int_cst_compare (op1b, op2b);
1703
1704 /* If we have (op1a == op1b), we should either be able to
1705 return that or FALSE, depending on whether the constant op1b
1706 also satisfies the other comparison against op2b. */
1707 if (code1 == EQ_EXPR)
1708 {
1709 bool done = true;
1710 bool val;
1711 switch (code2)
1712 {
1713 case EQ_EXPR: val = (cmp == 0); break;
1714 case NE_EXPR: val = (cmp != 0); break;
1715 case LT_EXPR: val = (cmp < 0); break;
1716 case GT_EXPR: val = (cmp > 0); break;
1717 case LE_EXPR: val = (cmp <= 0); break;
1718 case GE_EXPR: val = (cmp >= 0); break;
1719 default: done = false;
1720 }
1721 if (done)
1722 {
1723 if (val)
1724 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1725 else
1726 return boolean_false_node;
1727 }
1728 }
1729 /* Likewise if the second comparison is an == comparison. */
1730 else if (code2 == EQ_EXPR)
1731 {
1732 bool done = true;
1733 bool val;
1734 switch (code1)
1735 {
1736 case EQ_EXPR: val = (cmp == 0); break;
1737 case NE_EXPR: val = (cmp != 0); break;
1738 case LT_EXPR: val = (cmp > 0); break;
1739 case GT_EXPR: val = (cmp < 0); break;
1740 case LE_EXPR: val = (cmp >= 0); break;
1741 case GE_EXPR: val = (cmp <= 0); break;
1742 default: done = false;
1743 }
1744 if (done)
1745 {
1746 if (val)
1747 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1748 else
1749 return boolean_false_node;
1750 }
1751 }
1752
1753 /* Same business with inequality tests. */
1754 else if (code1 == NE_EXPR)
1755 {
1756 bool val;
1757 switch (code2)
1758 {
1759 case EQ_EXPR: val = (cmp != 0); break;
1760 case NE_EXPR: val = (cmp == 0); break;
1761 case LT_EXPR: val = (cmp >= 0); break;
1762 case GT_EXPR: val = (cmp <= 0); break;
1763 case LE_EXPR: val = (cmp > 0); break;
1764 case GE_EXPR: val = (cmp < 0); break;
1765 default:
1766 val = false;
1767 }
1768 if (val)
1769 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1770 }
1771 else if (code2 == NE_EXPR)
1772 {
1773 bool val;
1774 switch (code1)
1775 {
1776 case EQ_EXPR: val = (cmp == 0); break;
1777 case NE_EXPR: val = (cmp != 0); break;
1778 case LT_EXPR: val = (cmp <= 0); break;
1779 case GT_EXPR: val = (cmp >= 0); break;
1780 case LE_EXPR: val = (cmp < 0); break;
1781 case GE_EXPR: val = (cmp > 0); break;
1782 default:
1783 val = false;
1784 }
1785 if (val)
1786 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1787 }
1788
1789 /* Chose the more restrictive of two < or <= comparisons. */
1790 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
1791 && (code2 == LT_EXPR || code2 == LE_EXPR))
1792 {
1793 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
1794 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1795 else
1796 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1797 }
1798
1799 /* Likewise chose the more restrictive of two > or >= comparisons. */
1800 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
1801 && (code2 == GT_EXPR || code2 == GE_EXPR))
1802 {
1803 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
1804 return fold_build2 (code1, boolean_type_node, op1a, op1b);
1805 else
1806 return fold_build2 (code2, boolean_type_node, op2a, op2b);
1807 }
1808
1809 /* Check for singleton ranges. */
1810 else if (cmp == 0
1811 && ((code1 == LE_EXPR && code2 == GE_EXPR)
1812 || (code1 == GE_EXPR && code2 == LE_EXPR)))
1813 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
1814
1815 /* Check for disjoint ranges. */
1816 else if (cmp <= 0
1817 && (code1 == LT_EXPR || code1 == LE_EXPR)
1818 && (code2 == GT_EXPR || code2 == GE_EXPR))
1819 return boolean_false_node;
1820 else if (cmp >= 0
1821 && (code1 == GT_EXPR || code1 == GE_EXPR)
1822 && (code2 == LT_EXPR || code2 == LE_EXPR))
1823 return boolean_false_node;
1824 }
1825
1826 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
1827 NAME's definition is a truth value. See if there are any simplifications
1828 that can be done against the NAME's definition. */
1829 if (TREE_CODE (op1a) == SSA_NAME
1830 && (code1 == NE_EXPR || code1 == EQ_EXPR)
1831 && (integer_zerop (op1b) || integer_onep (op1b)))
1832 {
1833 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
1834 || (code1 == NE_EXPR && integer_onep (op1b)));
1835 gimple stmt = SSA_NAME_DEF_STMT (op1a);
1836 switch (gimple_code (stmt))
1837 {
1838 case GIMPLE_ASSIGN:
1839 /* Try to simplify by copy-propagating the definition. */
1840 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
1841
1842 case GIMPLE_PHI:
1843 /* If every argument to the PHI produces the same result when
1844 ANDed with the second comparison, we win.
1845 Do not do this unless the type is bool since we need a bool
1846 result here anyway. */
1847 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
1848 {
1849 tree result = NULL_TREE;
1850 unsigned i;
1851 for (i = 0; i < gimple_phi_num_args (stmt); i++)
1852 {
1853 tree arg = gimple_phi_arg_def (stmt, i);
1854
1855 /* If this PHI has itself as an argument, ignore it.
1856 If all the other args produce the same result,
1857 we're still OK. */
1858 if (arg == gimple_phi_result (stmt))
1859 continue;
1860 else if (TREE_CODE (arg) == INTEGER_CST)
1861 {
1862 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
1863 {
1864 if (!result)
1865 result = boolean_false_node;
1866 else if (!integer_zerop (result))
1867 return NULL_TREE;
1868 }
1869 else if (!result)
1870 result = fold_build2 (code2, boolean_type_node,
1871 op2a, op2b);
1872 else if (!same_bool_comparison_p (result,
1873 code2, op2a, op2b))
1874 return NULL_TREE;
1875 }
1876 else if (TREE_CODE (arg) == SSA_NAME
1877 && !SSA_NAME_IS_DEFAULT_DEF (arg))
1878 {
1879 tree temp;
1880 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
1881 /* In simple cases we can look through PHI nodes,
1882 but we have to be careful with loops.
1883 See PR49073. */
1884 if (! dom_info_available_p (CDI_DOMINATORS)
1885 || gimple_bb (def_stmt) == gimple_bb (stmt)
1886 || dominated_by_p (CDI_DOMINATORS,
1887 gimple_bb (def_stmt),
1888 gimple_bb (stmt)))
1889 return NULL_TREE;
1890 temp = and_var_with_comparison (arg, invert, code2,
1891 op2a, op2b);
1892 if (!temp)
1893 return NULL_TREE;
1894 else if (!result)
1895 result = temp;
1896 else if (!same_bool_result_p (result, temp))
1897 return NULL_TREE;
1898 }
1899 else
1900 return NULL_TREE;
1901 }
1902 return result;
1903 }
1904
1905 default:
1906 break;
1907 }
1908 }
1909 return NULL_TREE;
1910 }
1911
1912 /* Try to simplify the AND of two comparisons, specified by
1913 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
1914 If this can be simplified to a single expression (without requiring
1915 introducing more SSA variables to hold intermediate values),
1916 return the resulting tree. Otherwise return NULL_TREE.
1917 If the result expression is non-null, it has boolean type. */
1918
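/* Editorial example (illustrative, not part of the original source):
   for two constant bounds on the same SSA name, say X > 5 AND X > 3,

     maybe_fold_and_comparisons (GT_EXPR, x, build_int_cst (type, 5),
                                 GT_EXPR, x, build_int_cst (type, 3))

   returns the tree for x > 5, the more restrictive test, while a
   disjoint pair such as (x < 1) AND (x > 4) folds to
   boolean_false_node.  */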
1919 tree
1920 maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1921 enum tree_code code2, tree op2a, tree op2b)
1922 {
1923 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1924 if (t)
1925 return t;
1926 else
1927 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1928 }
1929
1930 /* Helper function for or_comparisons_1: try to simplify the OR of the
1931 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1932 If INVERT is true, invert the value of VAR before doing the OR.
1933 Return NULL_TREE if we can't simplify this to a single expression.  */
1934
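/* Editorial note (illustrative, not part of the original source): with
   INVERT set, a query such as or_var_with_comparison (p, true,
   LT_EXPR, x, c), i.e. !p | (x < c), is answered below by computing
   p & (x >= c) through and_var_with_comparison_1 with the comparison
   code inverted, and the result is then re-inverted by
   canonicalize_bool.  Here p, x and c stand for arbitrary operand
   trees.  */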
1935 static tree
1936 or_var_with_comparison (tree var, bool invert,
1937 enum tree_code code2, tree op2a, tree op2b)
1938 {
1939 tree t;
1940 gimple stmt = SSA_NAME_DEF_STMT (var);
1941
1942 /* We can only deal with variables whose definitions are assignments. */
1943 if (!is_gimple_assign (stmt))
1944 return NULL_TREE;
1945
1946 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1947 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1948 Then we only have to consider the simpler non-inverted cases. */
1949 if (invert)
1950 t = and_var_with_comparison_1 (stmt,
1951 invert_tree_comparison (code2, false),
1952 op2a, op2b);
1953 else
1954 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1955 return canonicalize_bool (t, invert);
1956 }
1957
1958 /* Try to simplify the OR of the ssa variable defined by the assignment
1959 STMT with the comparison specified by (OP2A CODE2 OP2B).
1960 Return NULL_TREE if we can't simplify this to a single expression.  */
1961
1962 static tree
1963 or_var_with_comparison_1 (gimple stmt,
1964 enum tree_code code2, tree op2a, tree op2b)
1965 {
1966 tree var = gimple_assign_lhs (stmt);
1967 tree true_test_var = NULL_TREE;
1968 tree false_test_var = NULL_TREE;
1969 enum tree_code innercode = gimple_assign_rhs_code (stmt);
1970
1971 /* Check for identities like (var OR (var == 0)) => true. */
1972 if (TREE_CODE (op2a) == SSA_NAME
1973 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
1974 {
1975 if ((code2 == NE_EXPR && integer_zerop (op2b))
1976 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
1977 {
1978 true_test_var = op2a;
1979 if (var == true_test_var)
1980 return var;
1981 }
1982 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
1983 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
1984 {
1985 false_test_var = op2a;
1986 if (var == false_test_var)
1987 return boolean_true_node;
1988 }
1989 }
1990
1991 /* If the definition is a comparison, recurse on it. */
1992 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
1993 {
1994 tree t = or_comparisons_1 (innercode,
1995 gimple_assign_rhs1 (stmt),
1996 gimple_assign_rhs2 (stmt),
1997 code2,
1998 op2a,
1999 op2b);
2000 if (t)
2001 return t;
2002 }
2003
2004 /* If the definition is an AND or OR expression, we may be able to
2005 simplify by reassociating. */
2006 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2007 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
2008 {
2009 tree inner1 = gimple_assign_rhs1 (stmt);
2010 tree inner2 = gimple_assign_rhs2 (stmt);
2011 gimple s;
2012 tree t;
2013 tree partial = NULL_TREE;
2014 bool is_or = (innercode == BIT_IOR_EXPR);
2015
2016 /* Check for boolean identities that don't require recursive examination
2017 of inner1/inner2:
2018 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
2019 inner1 OR (inner1 AND inner2) => inner1
2020 !inner1 OR (inner1 OR inner2) => true
2021 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
2022 */
2023 if (inner1 == true_test_var)
2024 return (is_or ? var : inner1);
2025 else if (inner2 == true_test_var)
2026 return (is_or ? var : inner2);
2027 else if (inner1 == false_test_var)
2028 return (is_or
2029 ? boolean_true_node
2030 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
2031 else if (inner2 == false_test_var)
2032 return (is_or
2033 ? boolean_true_node
2034 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
2035
2036 /* Next, redistribute/reassociate the OR across the inner tests.
2037 Compute the first partial result, (inner1 OR (op2a code2 op2b)).  */
2038 if (TREE_CODE (inner1) == SSA_NAME
2039 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2040 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2041 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2042 gimple_assign_rhs1 (s),
2043 gimple_assign_rhs2 (s),
2044 code2, op2a, op2b)))
2045 {
2046 /* Handle the OR case, where we are reassociating:
2047 (inner1 OR inner2) OR (op2a code2 op2b)
2048 => (t OR inner2)
2049 If the partial result t is a constant, we win. Otherwise
2050 continue on to try reassociating with the other inner test. */
2051 if (is_or)
2052 {
2053 if (integer_onep (t))
2054 return boolean_true_node;
2055 else if (integer_zerop (t))
2056 return inner2;
2057 }
2058
2059 /* Handle the AND case, where we are redistributing:
2060 (inner1 AND inner2) OR (op2a code2 op2b)
2061 => (t AND (inner2 OR (op2a code2 op2b))) */
2062 else if (integer_zerop (t))
2063 return boolean_false_node;
2064
2065 /* Save partial result for later. */
2066 partial = t;
2067 }
2068
2069 /* Compute the second partial result, (inner2 OR (op2a code2 op2b)).  */
2070 if (TREE_CODE (inner2) == SSA_NAME
2071 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
2072 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2073 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
2074 gimple_assign_rhs1 (s),
2075 gimple_assign_rhs2 (s),
2076 code2, op2a, op2b)))
2077 {
2078 /* Handle the OR case, where we are reassociating:
2079 (inner1 OR inner2) OR (op2a code2 op2b)
2080 => (inner1 OR t)
2081 => (t OR partial) */
2082 if (is_or)
2083 {
2084 if (integer_zerop (t))
2085 return inner1;
2086 else if (integer_onep (t))
2087 return boolean_true_node;
2088 /* If both are the same, we can apply the identity
2089 (x OR x) == x. */
2090 else if (partial && same_bool_result_p (t, partial))
2091 return t;
2092 }
2093
2094 /* Handle the AND case, where we are redistributing:
2095 (inner1 AND inner2) OR (op2a code2 op2b)
2096 => (t AND (inner1 OR (op2a code2 op2b)))
2097 => (t AND partial) */
2098 else
2099 {
2100 if (integer_zerop (t))
2101 return boolean_false_node;
2102 else if (partial)
2103 {
2104 /* We already got a simplification for the other
2105 operand to the redistributed AND expression. The
2106 interesting case is when at least one is true.
2107 Or, if both are the same, we can apply the identity
2108 (x AND x) == x. */
2109 if (integer_onep (partial))
2110 return t;
2111 else if (integer_onep (t))
2112 return partial;
2113 else if (same_bool_result_p (t, partial))
2114 return t;
2115 }
2116 }
2117 }
2118 }
2119 return NULL_TREE;
2120 }
2121
2122 /* Try to simplify the OR of two comparisons defined by
2123 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2124 If this can be done without constructing an intermediate value,
2125 return the resulting tree; otherwise NULL_TREE is returned.
2126 This function is deliberately asymmetric as it recurses on SSA_DEFs
2127 in the first comparison but not the second. */
2128
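/* Editorial example (illustrative, not part of the original source):
   for two constant tests on the same operand, say (x != 3) OR (x < 7),
   the constant-merging code below sees a negative cmp from
   tree_int_cst_compare on the constants 3 and 7 and returns
   boolean_true_node, since every value of x satisfies at least one of
   the two tests.  */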
2129 static tree
2130 or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2131 enum tree_code code2, tree op2a, tree op2b)
2132 {
2133 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
2134 if (operand_equal_p (op1a, op2a, 0)
2135 && operand_equal_p (op1b, op2b, 0))
2136 {
2137 /* Result will be either NULL_TREE, or a combined comparison. */
2138 tree t = combine_comparisons (UNKNOWN_LOCATION,
2139 TRUTH_ORIF_EXPR, code1, code2,
2140 boolean_type_node, op1a, op1b);
2141 if (t)
2142 return t;
2143 }
2144
2145 /* Likewise the swapped case of the above. */
2146 if (operand_equal_p (op1a, op2b, 0)
2147 && operand_equal_p (op1b, op2a, 0))
2148 {
2149 /* Result will be either NULL_TREE, or a combined comparison. */
2150 tree t = combine_comparisons (UNKNOWN_LOCATION,
2151 TRUTH_ORIF_EXPR, code1,
2152 swap_tree_comparison (code2),
2153 boolean_type_node, op1a, op1b);
2154 if (t)
2155 return t;
2156 }
2157
2158 /* If both comparisons are of the same value against constants, we might
2159 be able to merge them. */
2160 if (operand_equal_p (op1a, op2a, 0)
2161 && TREE_CODE (op1b) == INTEGER_CST
2162 && TREE_CODE (op2b) == INTEGER_CST)
2163 {
2164 int cmp = tree_int_cst_compare (op1b, op2b);
2165
2166 /* If we have (op1a != op1b), we should either be able to
2167 return that or TRUE, depending on whether the constant op1b
2168 also satisfies the other comparison against op2b. */
2169 if (code1 == NE_EXPR)
2170 {
2171 bool done = true;
2172 bool val;
2173 switch (code2)
2174 {
2175 case EQ_EXPR: val = (cmp == 0); break;
2176 case NE_EXPR: val = (cmp != 0); break;
2177 case LT_EXPR: val = (cmp < 0); break;
2178 case GT_EXPR: val = (cmp > 0); break;
2179 case LE_EXPR: val = (cmp <= 0); break;
2180 case GE_EXPR: val = (cmp >= 0); break;
2181 default: done = false;
2182 }
2183 if (done)
2184 {
2185 if (val)
2186 return boolean_true_node;
2187 else
2188 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2189 }
2190 }
2191 /* Likewise if the second comparison is a != comparison. */
2192 else if (code2 == NE_EXPR)
2193 {
2194 bool done = true;
2195 bool val;
2196 switch (code1)
2197 {
2198 case EQ_EXPR: val = (cmp == 0); break;
2199 case NE_EXPR: val = (cmp != 0); break;
2200 case LT_EXPR: val = (cmp > 0); break;
2201 case GT_EXPR: val = (cmp < 0); break;
2202 case LE_EXPR: val = (cmp >= 0); break;
2203 case GE_EXPR: val = (cmp <= 0); break;
2204 default: done = false;
2205 }
2206 if (done)
2207 {
2208 if (val)
2209 return boolean_true_node;
2210 else
2211 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2212 }
2213 }
2214
2215 /* See if an equality test is redundant with the other comparison. */
2216 else if (code1 == EQ_EXPR)
2217 {
2218 bool val;
2219 switch (code2)
2220 {
2221 case EQ_EXPR: val = (cmp == 0); break;
2222 case NE_EXPR: val = (cmp != 0); break;
2223 case LT_EXPR: val = (cmp < 0); break;
2224 case GT_EXPR: val = (cmp > 0); break;
2225 case LE_EXPR: val = (cmp <= 0); break;
2226 case GE_EXPR: val = (cmp >= 0); break;
2227 default:
2228 val = false;
2229 }
2230 if (val)
2231 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2232 }
2233 else if (code2 == EQ_EXPR)
2234 {
2235 bool val;
2236 switch (code1)
2237 {
2238 case EQ_EXPR: val = (cmp == 0); break;
2239 case NE_EXPR: val = (cmp != 0); break;
2240 case LT_EXPR: val = (cmp > 0); break;
2241 case GT_EXPR: val = (cmp < 0); break;
2242 case LE_EXPR: val = (cmp >= 0); break;
2243 case GE_EXPR: val = (cmp <= 0); break;
2244 default:
2245 val = false;
2246 }
2247 if (val)
2248 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2249 }
2250
2251 /* Choose the less restrictive of two < or <= comparisons. */
2252 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
2253 && (code2 == LT_EXPR || code2 == LE_EXPR))
2254 {
2255 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
2256 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2257 else
2258 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2259 }
2260
2261 /* Likewise choose the less restrictive of two > or >= comparisons. */
2262 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
2263 && (code2 == GT_EXPR || code2 == GE_EXPR))
2264 {
2265 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
2266 return fold_build2 (code2, boolean_type_node, op2a, op2b);
2267 else
2268 return fold_build2 (code1, boolean_type_node, op1a, op1b);
2269 }
2270
2271 /* Check for singleton ranges. */
2272 else if (cmp == 0
2273 && ((code1 == LT_EXPR && code2 == GT_EXPR)
2274 || (code1 == GT_EXPR && code2 == LT_EXPR)))
2275 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
2276
2277 /* Check for less/greater pairs that don't restrict the range at all. */
2278 else if (cmp >= 0
2279 && (code1 == LT_EXPR || code1 == LE_EXPR)
2280 && (code2 == GT_EXPR || code2 == GE_EXPR))
2281 return boolean_true_node;
2282 else if (cmp <= 0
2283 && (code1 == GT_EXPR || code1 == GE_EXPR)
2284 && (code2 == LT_EXPR || code2 == LE_EXPR))
2285 return boolean_true_node;
2286 }
2287
2288 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
2289 NAME's definition is a truth value. See if there are any simplifications
2290 that can be done against the NAME's definition. */
2291 if (TREE_CODE (op1a) == SSA_NAME
2292 && (code1 == NE_EXPR || code1 == EQ_EXPR)
2293 && (integer_zerop (op1b) || integer_onep (op1b)))
2294 {
2295 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
2296 || (code1 == NE_EXPR && integer_onep (op1b)));
2297 gimple stmt = SSA_NAME_DEF_STMT (op1a);
2298 switch (gimple_code (stmt))
2299 {
2300 case GIMPLE_ASSIGN:
2301 /* Try to simplify by copy-propagating the definition. */
2302 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
2303
2304 case GIMPLE_PHI:
2305 /* If every argument to the PHI produces the same result when
2306 ORed with the second comparison, we win.
2307 Do not do this unless the type is bool since we need a bool
2308 result here anyway. */
2309 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
2310 {
2311 tree result = NULL_TREE;
2312 unsigned i;
2313 for (i = 0; i < gimple_phi_num_args (stmt); i++)
2314 {
2315 tree arg = gimple_phi_arg_def (stmt, i);
2316
2317 /* If this PHI has itself as an argument, ignore it.
2318 If all the other args produce the same result,
2319 we're still OK. */
2320 if (arg == gimple_phi_result (stmt))
2321 continue;
2322 else if (TREE_CODE (arg) == INTEGER_CST)
2323 {
2324 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
2325 {
2326 if (!result)
2327 result = boolean_true_node;
2328 else if (!integer_onep (result))
2329 return NULL_TREE;
2330 }
2331 else if (!result)
2332 result = fold_build2 (code2, boolean_type_node,
2333 op2a, op2b);
2334 else if (!same_bool_comparison_p (result,
2335 code2, op2a, op2b))
2336 return NULL_TREE;
2337 }
2338 else if (TREE_CODE (arg) == SSA_NAME
2339 && !SSA_NAME_IS_DEFAULT_DEF (arg))
2340 {
2341 tree temp;
2342 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
2343 /* In simple cases we can look through PHI nodes,
2344 but we have to be careful with loops.
2345 See PR49073. */
2346 if (! dom_info_available_p (CDI_DOMINATORS)
2347 || gimple_bb (def_stmt) == gimple_bb (stmt)
2348 || dominated_by_p (CDI_DOMINATORS,
2349 gimple_bb (def_stmt),
2350 gimple_bb (stmt)))
2351 return NULL_TREE;
2352 temp = or_var_with_comparison (arg, invert, code2,
2353 op2a, op2b);
2354 if (!temp)
2355 return NULL_TREE;
2356 else if (!result)
2357 result = temp;
2358 else if (!same_bool_result_p (result, temp))
2359 return NULL_TREE;
2360 }
2361 else
2362 return NULL_TREE;
2363 }
2364 return result;
2365 }
2366
2367 default:
2368 break;
2369 }
2370 }
2371 return NULL_TREE;
2372 }
2373
2374 /* Try to simplify the OR of two comparisons, specified by
2375 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
2376 If this can be simplified to a single expression (without requiring
2377 introducing more SSA variables to hold intermediate values),
2378 return the resulting tree. Otherwise return NULL_TREE.
2379 If the result expression is non-null, it has boolean type. */
2380
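/* Editorial example (illustrative, not part of the original source):
   the OR of two upper bounds keeps the less restrictive one, so
   (x < 2) OR (x < 7) folds to x < 7, and a pair covering the whole
   range, such as (x <= 3) OR (x >= 3), folds to boolean_true_node.  */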
2381 tree
2382 maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2383 enum tree_code code2, tree op2a, tree op2b)
2384 {
2385 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2386 if (t)
2387 return t;
2388 else
2389 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2390 }
2391
2392
2393 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2394
2395 Either NULL_TREE, a simplified but non-constant expression, or a
2396 constant is returned.
2397
2398 ??? This should go into a gimple-fold-inline.h file to be eventually
2399 privatized with the single valueize function used in the various TUs
2400 to avoid the indirect function call overhead. */
2401
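/* Editorial usage sketch (the helper below is hypothetical, not part
   of the original source): a caller can fold a statement with a
   trivial valueization hook that leaves every SSA name unchanged.

     static tree
     identity_valueize (tree name)
     {
       return name;
     }

     ...
     tree val = gimple_fold_stmt_to_constant_1 (stmt, identity_valueize);
     if (val && is_gimple_min_invariant (val))
       ... VAL is a constant we may substitute ...  */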
2402 tree
2403 gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
2404 {
2405 location_t loc = gimple_location (stmt);
2406 switch (gimple_code (stmt))
2407 {
2408 case GIMPLE_ASSIGN:
2409 {
2410 enum tree_code subcode = gimple_assign_rhs_code (stmt);
2411
2412 switch (get_gimple_rhs_class (subcode))
2413 {
2414 case GIMPLE_SINGLE_RHS:
2415 {
2416 tree rhs = gimple_assign_rhs1 (stmt);
2417 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
2418
2419 if (TREE_CODE (rhs) == SSA_NAME)
2420 {
2421 /* If the RHS is an SSA_NAME, return its known constant value,
2422 if any. */
2423 return (*valueize) (rhs);
2424 }
2425 /* Handle propagating invariant addresses into address
2426 operations. */
2427 else if (TREE_CODE (rhs) == ADDR_EXPR
2428 && !is_gimple_min_invariant (rhs))
2429 {
2430 HOST_WIDE_INT offset;
2431 tree base;
2432 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
2433 &offset,
2434 valueize);
2435 if (base
2436 && (CONSTANT_CLASS_P (base)
2437 || decl_address_invariant_p (base)))
2438 return build_invariant_address (TREE_TYPE (rhs),
2439 base, offset);
2440 }
2441 else if (TREE_CODE (rhs) == CONSTRUCTOR
2442 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
2443 && (CONSTRUCTOR_NELTS (rhs)
2444 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
2445 {
2446 unsigned i;
2447 tree val, list;
2448
2449 list = NULL_TREE;
2450 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
2451 {
2452 val = (*valueize) (val);
2453 if (TREE_CODE (val) == INTEGER_CST
2454 || TREE_CODE (val) == REAL_CST
2455 || TREE_CODE (val) == FIXED_CST)
2456 list = tree_cons (NULL_TREE, val, list);
2457 else
2458 return NULL_TREE;
2459 }
2460
2461 return build_vector (TREE_TYPE (rhs), nreverse (list));
2462 }
2463
2464 if (kind == tcc_reference)
2465 {
2466 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
2467 || TREE_CODE (rhs) == REALPART_EXPR
2468 || TREE_CODE (rhs) == IMAGPART_EXPR)
2469 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2470 {
2471 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2472 return fold_unary_loc (EXPR_LOCATION (rhs),
2473 TREE_CODE (rhs),
2474 TREE_TYPE (rhs), val);
2475 }
2476 else if (TREE_CODE (rhs) == BIT_FIELD_REF
2477 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2478 {
2479 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2480 return fold_ternary_loc (EXPR_LOCATION (rhs),
2481 TREE_CODE (rhs),
2482 TREE_TYPE (rhs), val,
2483 TREE_OPERAND (rhs, 1),
2484 TREE_OPERAND (rhs, 2));
2485 }
2486 else if (TREE_CODE (rhs) == MEM_REF
2487 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
2488 {
2489 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
2490 if (TREE_CODE (val) == ADDR_EXPR
2491 && is_gimple_min_invariant (val))
2492 {
2493 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
2494 unshare_expr (val),
2495 TREE_OPERAND (rhs, 1));
2496 if (tem)
2497 rhs = tem;
2498 }
2499 }
2500 return fold_const_aggregate_ref_1 (rhs, valueize);
2501 }
2502 else if (kind == tcc_declaration)
2503 return get_symbol_constant_value (rhs);
2504 return rhs;
2505 }
2506
2507 case GIMPLE_UNARY_RHS:
2508 {
2509 /* Handle unary operators that can appear in GIMPLE form.
2510 Note that we know the single operand must be a constant,
2511 so this should almost always return a simplified RHS. */
2512 tree lhs = gimple_assign_lhs (stmt);
2513 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2514
2515 /* Conversions are useless for CCP purposes if they are
2516 value-preserving. Thus the restrictions that
2517 useless_type_conversion_p places for restrict qualification
2518 of pointer types should not apply here.
2519 Substitution later will only substitute to allowed places. */
2520 if (CONVERT_EXPR_CODE_P (subcode)
2521 && POINTER_TYPE_P (TREE_TYPE (lhs))
2522 && POINTER_TYPE_P (TREE_TYPE (op0))
2523 && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
2524 == TYPE_ADDR_SPACE (TREE_TYPE (op0))
2525 && TYPE_MODE (TREE_TYPE (lhs))
2526 == TYPE_MODE (TREE_TYPE (op0)))
2527 return op0;
2528
2529 return
2530 fold_unary_ignore_overflow_loc (loc, subcode,
2531 gimple_expr_type (stmt), op0);
2532 }
2533
2534 case GIMPLE_BINARY_RHS:
2535 {
2536 /* Handle binary operators that can appear in GIMPLE form. */
2537 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2538 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2539
2540 /* Translate &x + CST into an invariant form suitable for
2541 further propagation, e.g. rewriting &a + 4 as &MEM_REF[&a, 4]. */
2542 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2543 && TREE_CODE (op0) == ADDR_EXPR
2544 && TREE_CODE (op1) == INTEGER_CST)
2545 {
2546 tree off = fold_convert (ptr_type_node, op1);
2547 return build_fold_addr_expr_loc
2548 (loc,
2549 fold_build2 (MEM_REF,
2550 TREE_TYPE (TREE_TYPE (op0)),
2551 unshare_expr (op0), off));
2552 }
2553
2554 return fold_binary_loc (loc, subcode,
2555 gimple_expr_type (stmt), op0, op1);
2556 }
2557
2558 case GIMPLE_TERNARY_RHS:
2559 {
2560 /* Handle ternary operators that can appear in GIMPLE form. */
2561 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
2562 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
2563 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
2564
2565 /* Fold embedded expressions in ternary codes. */
2566 if ((subcode == COND_EXPR
2567 || subcode == VEC_COND_EXPR)
2568 && COMPARISON_CLASS_P (op0))
2569 {
2570 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
2571 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
2572 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
2573 TREE_TYPE (op0), op00, op01);
2574 if (tem)
2575 op0 = tem;
2576 }
2577
2578 return fold_ternary_loc (loc, subcode,
2579 gimple_expr_type (stmt), op0, op1, op2);
2580 }
2581
2582 default:
2583 gcc_unreachable ();
2584 }
2585 }
2586
2587 case GIMPLE_CALL:
2588 {
2589 tree fn;
2590
2591 if (gimple_call_internal_p (stmt))
2592 /* No folding yet for these functions. */
2593 return NULL_TREE;
2594
2595 fn = (*valueize) (gimple_call_fn (stmt));
2596 if (TREE_CODE (fn) == ADDR_EXPR
2597 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
2598 && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
2599 {
2600 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
2601 tree call, retval;
2602 unsigned i;
2603 for (i = 0; i < gimple_call_num_args (stmt); ++i)
2604 args[i] = (*valueize) (gimple_call_arg (stmt, i));
2605 call = build_call_array_loc (loc,
2606 gimple_call_return_type (stmt),
2607 fn, gimple_call_num_args (stmt), args);
2608 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
2609 if (retval)
2610 /* fold_call_expr wraps the result inside a NOP_EXPR. */
2611 STRIP_NOPS (retval);
2612 return retval;
2613 }
2614 return NULL_TREE;
2615 }
2616
2617 default:
2618 return NULL_TREE;
2619 }
2620 }
2621
2622 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2623 Returns NULL_TREE if folding to a constant is not possible, otherwise
2624 returns a constant according to is_gimple_min_invariant. */
2625
2626 tree
2627 gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2628 {
2629 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2630 if (res && is_gimple_min_invariant (res))
2631 return res;
2632 return NULL_TREE;
2633 }
2634
2635
2636 /* The following set of functions folds references using
2637 their constant initializers. */
2638
2639 static tree fold_ctor_reference (tree type, tree ctor,
2640 unsigned HOST_WIDE_INT offset,
2641 unsigned HOST_WIDE_INT size);
2642
2643 /* See if we can find a constructor defining the value of BASE.
2644 When we know the constructor at a constant offset (such as when
2645 BASE is array[40] and we know the constructor of array),
2646 BIT_OFFSET is adjusted accordingly.
2647
2648 As a special case, return error_mark_node when constructor
2649 is not explicitly available, but it is known to be zero
2650 such as 'static const int a;'. */
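/* Editorial example (illustrative, not part of the original source):
   for BASE of the form MEM_REF[&a, 8] where a is a constant-initialized
   VAR_DECL, the code below adds 8 * BITS_PER_UNIT to *BIT_OFFSET and
   returns DECL_INITIAL (a); for 'static const int a;' with no explicit
   initializer it returns error_mark_node to signal a known zero.  */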
2651 static tree
2652 get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
2653 tree (*valueize)(tree))
2654 {
2655 HOST_WIDE_INT bit_offset2, size, max_size;
2656 if (TREE_CODE (base) == MEM_REF)
2657 {
2658 if (!integer_zerop (TREE_OPERAND (base, 1)))
2659 {
2660 if (!host_integerp (TREE_OPERAND (base, 1), 0))
2661 return NULL_TREE;
2662 *bit_offset += (mem_ref_offset (base).low
2663 * BITS_PER_UNIT);
2664 }
2665
2666 if (valueize
2667 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
2668 base = valueize (TREE_OPERAND (base, 0));
2669 if (!base || TREE_CODE (base) != ADDR_EXPR)
2670 return NULL_TREE;
2671 base = TREE_OPERAND (base, 0);
2672 }
2673
2674 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
2675 DECL_INITIAL. If BASE is a nested reference into another
2676 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
2677 the inner reference. */
2678 switch (TREE_CODE (base))
2679 {
2680 case VAR_DECL:
2681 if (!const_value_known_p (base))
2682 return NULL_TREE;
2683
2684 /* Fallthru. */
2685 case CONST_DECL:
2686 if (!DECL_INITIAL (base)
2687 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
2688 return error_mark_node;
2689 return DECL_INITIAL (base);
2690
2691 case ARRAY_REF:
2692 case COMPONENT_REF:
2693 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
2694 if (max_size == -1 || size != max_size)
2695 return NULL_TREE;
2696 *bit_offset += bit_offset2;
2697 return get_base_constructor (base, bit_offset, valueize);
2698
2699 case STRING_CST:
2700 case CONSTRUCTOR:
2701 return base;
2702
2703 default:
2704 return NULL_TREE;
2705 }
2706 }
2707
2708 /* CTOR is a STRING_CST.  Fold a reference of type TYPE and size SIZE
2709 to the memory at bit OFFSET.
2710
2711 We only do the simple job of folding single-byte accesses. */
2712
2713 static tree
2714 fold_string_cst_ctor_reference (tree type, tree ctor,
2715 unsigned HOST_WIDE_INT offset,
2716 unsigned HOST_WIDE_INT size)
2717 {
2718 if (INTEGRAL_TYPE_P (type)
2719 && (TYPE_MODE (type)
2720 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2721 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2722 == MODE_INT)
2723 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2724 && size == BITS_PER_UNIT
2725 && !(offset % BITS_PER_UNIT))
2726 {
2727 offset /= BITS_PER_UNIT;
2728 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2729 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2730 [offset]));
2731 /* Folding
2732 const char a[20]="hello";
2733 return a[10];
2734
2735 might lead to an offset greater than the string length.  In this
2736 case we know the value is either initialized to 0 or out of bounds.
2737 Return 0 in both cases. */
2738 return build_zero_cst (type);
2739 }
2740 return NULL_TREE;
2741 }
2742
2743 /* CTOR is a CONSTRUCTOR of an array type.  Fold a reference of type TYPE
2744 and size SIZE to the memory at bit OFFSET. */
2745
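/* Editorial example (illustrative, not part of the original source):
   given

     static const int a[4] = { 10, 20, 30, 40 };

   with 32-bit int, a read of type int, SIZE 32, at bit OFFSET 64 maps
   to access_index 2 and folds to the constant 30; indices the
   constructor does not mention explicitly fold to zero.  */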
2746 static tree
2747 fold_array_ctor_reference (tree type, tree ctor,
2748 unsigned HOST_WIDE_INT offset,
2749 unsigned HOST_WIDE_INT size)
2750 {
2751 unsigned HOST_WIDE_INT cnt;
2752 tree cfield, cval;
2753 double_int low_bound, elt_size;
2754 double_int index, max_index;
2755 double_int access_index;
2756 tree domain_type = NULL_TREE;
2757 HOST_WIDE_INT inner_offset;
2758
2759 /* Compute low bound and elt size. */
2760 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
2761 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
2762 if (domain_type && TYPE_MIN_VALUE (domain_type))
2763 {
2764 /* Static constructors for variably sized objects make no sense. */
2765 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
2766 low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
2767 }
2768 else
2769 low_bound = double_int_zero;
2770 /* Static constructors for variably sized objects make no sense. */
2771 gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
2772 == INTEGER_CST);
2773 elt_size =
2774 tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
2775
2776
2777 /* We can only handle constantly sized accesses that are known not to
2778 be larger than the size of an array element. */
2779 if (!TYPE_SIZE_UNIT (type)
2780 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
2781 || double_int_cmp (elt_size,
2782 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
2783 return NULL_TREE;
2784
2785 /* Compute the array index we look for. */
2786 access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
2787 elt_size, TRUNC_DIV_EXPR);
2788 access_index = double_int_add (access_index, low_bound);
2789
2790 /* And offset within the access. */
2791 inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);
2792
2793 /* See if the array field is large enough to span the whole access.  We
2794 do not care to fold accesses spanning multiple array indices. */
2795 if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
2796 return NULL_TREE;
2797
2798 index = double_int_sub (low_bound, double_int_one);
2799 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
2800 {
2801 /* An array constructor may explicitly set the index, specify a range,
2802 or leave the index NULL, meaning that it is the next index after
2803 the previous one. */
2804 if (cfield)
2805 {
2806 if (TREE_CODE (cfield) == INTEGER_CST)
2807 max_index = index = tree_to_double_int (cfield);
2808 else
2809 {
2810 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
2811 index = tree_to_double_int (TREE_OPERAND (cfield, 0));
2812 max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
2813 }
2814 }
2815 else
2816 max_index = index = double_int_add (index, double_int_one);
2817
2818 /* Do we have a match? */
2819 if (double_int_cmp (access_index, index, 1) >= 0
2820 && double_int_cmp (access_index, max_index, 1) <= 0)
2821 return fold_ctor_reference (type, cval, inner_offset, size);
2822 }
2823 /* When memory is not explicitly mentioned in the constructor,
2824 it is 0 (or out of range). */
2825 return build_zero_cst (type);
2826 }
2827
2828 /* CTOR is a CONSTRUCTOR of an aggregate or vector.
2829 Fold a reference of type TYPE and size SIZE to the memory at bit OFFSET. */
2830
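/* Editorial example (illustrative, not part of the original source):
   given

     static const struct { int x; int y; } p = { 1, 2 };

   with 32-bit int, a read of type int, SIZE 32, at bit OFFSET 32 falls
   entirely inside field y and folds to the constant 2, while an access
   straddling both fields returns NULL_TREE.  */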
2831 static tree
2832 fold_nonarray_ctor_reference (tree type, tree ctor,
2833 unsigned HOST_WIDE_INT offset,
2834 unsigned HOST_WIDE_INT size)
2835 {
2836 unsigned HOST_WIDE_INT cnt;
2837 tree cfield, cval;
2838
2839 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
2840 cval)
2841 {
2842 tree byte_offset = DECL_FIELD_OFFSET (cfield);
2843 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
2844 tree field_size = DECL_SIZE (cfield);
2845 double_int bitoffset;
2846 double_int byte_offset_cst = tree_to_double_int (byte_offset);
2847 double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
2848 double_int bitoffset_end, access_end;
2849
2850 /* Variable sized objects in static constructors make no sense,
2851 but field_size can be NULL for flexible array members. */
2852 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
2853 && TREE_CODE (byte_offset) == INTEGER_CST
2854 && (field_size != NULL_TREE
2855 ? TREE_CODE (field_size) == INTEGER_CST
2856 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
2857
2858 /* Compute bit offset of the field. */
2859 bitoffset = double_int_add (tree_to_double_int (field_offset),
2860 double_int_mul (byte_offset_cst,
2861 bits_per_unit_cst));
2862 /* Compute bit offset where the field ends. */
2863 if (field_size != NULL_TREE)
2864 bitoffset_end = double_int_add (bitoffset,
2865 tree_to_double_int (field_size));
2866 else
2867 bitoffset_end = double_int_zero;
2868
2869 access_end = double_int_add (uhwi_to_double_int (offset),
2870 uhwi_to_double_int (size));
2871
2872 /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
2873 [BITOFFSET, BITOFFSET_END)? */
2874 if (double_int_cmp (access_end, bitoffset, 0) > 0
2875 && (field_size == NULL_TREE
2876 || double_int_cmp (uhwi_to_double_int (offset),
2877 bitoffset_end, 0) < 0))
2878 {
2879 double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
2880 bitoffset);
2881 /* We do have overlap.  Now see if the field is large enough to
2882 cover the access.  Give up for accesses spanning multiple
2883 fields. */
2884 if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
2885 return NULL_TREE;
2886 if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
2887 return NULL_TREE;
2888 return fold_ctor_reference (type, cval,
2889 double_int_to_uhwi (inner_offset), size);
2890 }
2891 }
2892 /* When memory is not explicitly mentioned in the constructor, it is 0. */
2893 return build_zero_cst (type);
2894 }
2895
2896 /* CTOR is a value initializing memory.  Fold a reference of type TYPE
2897 and size SIZE to the memory at bit OFFSET. */
2898
2899 static tree
2900 fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2901 unsigned HOST_WIDE_INT size)
2902 {
2903 tree ret;
2904
2905 /* We found the field with an exact match. */
2906 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2907 && !offset)
2908 return canonicalize_constructor_val (ctor);
2909
2910 /* We are at the end of the walk; see if we can view-convert the
2911 result. */
2912 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2913 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2914 && operand_equal_p (TYPE_SIZE (type),
2915 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2916 {
2917 ret = canonicalize_constructor_val (ctor);
2918 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2919 if (ret)
2920 STRIP_NOPS (ret);
2921 return ret;
2922 }
2923 if (TREE_CODE (ctor) == STRING_CST)
2924 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2925 if (TREE_CODE (ctor) == CONSTRUCTOR)
2926 {
2927
2928 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2929 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
2930 return fold_array_ctor_reference (type, ctor, offset, size);
2931 else
2932 return fold_nonarray_ctor_reference (type, ctor, offset, size);
2933 }
2934
2935 return NULL_TREE;
2936 }
2937
2938 /* Return the tree representing the element referenced by T if T is an
2939 ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
2940 names using VALUEIZE.  Return NULL_TREE otherwise. */
2941
2942 tree
2943 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
2944 {
2945 tree ctor, idx, base;
2946 HOST_WIDE_INT offset, size, max_size;
2947 tree tem;
2948
2949 if (TREE_THIS_VOLATILE (t))
2950 return NULL_TREE;
2951
2952 if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
2953 return get_symbol_constant_value (t);
2954
2955 tem = fold_read_from_constant_string (t);
2956 if (tem)
2957 return tem;
2958
2959 switch (TREE_CODE (t))
2960 {
2961 case ARRAY_REF:
2962 case ARRAY_RANGE_REF:
2963 /* Constant indices are handled well by get_base_constructor.
2964 We only special-case variable offsets here.
2965 FIXME: This code can't handle nested references with variable indices
2966 (they will be handled only by iteration of CCP).  Perhaps we can bring
2967 get_ref_base_and_extent here and make it use a valueize callback. */
2968 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
2969 && valueize
2970 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
2971 && host_integerp (idx, 0))
2972 {
2973 tree low_bound, unit_size;
2974
2975 /* If the resulting bit-offset is constant, track it. */
2976 if ((low_bound = array_ref_low_bound (t),
2977 host_integerp (low_bound, 0))
2978 && (unit_size = array_ref_element_size (t),
2979 host_integerp (unit_size, 1)))
2980 {
2981 offset = TREE_INT_CST_LOW (idx);
2982 offset -= TREE_INT_CST_LOW (low_bound);
2983 offset *= TREE_INT_CST_LOW (unit_size);
2984 offset *= BITS_PER_UNIT;
2985
2986 base = TREE_OPERAND (t, 0);
2987 ctor = get_base_constructor (base, &offset, valueize);
2988 /* Empty constructor. Always fold to 0. */
2989 if (ctor == error_mark_node)
2990 return build_zero_cst (TREE_TYPE (t));
2991 /* Out-of-bounds array access.  The value is undefined,
2992 but don't fold. */
2993 if (offset < 0)
2994 return NULL_TREE;
2995 /* We cannot determine the ctor. */
2996 if (!ctor)
2997 return NULL_TREE;
2998 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
2999 TREE_INT_CST_LOW (unit_size)
3000 * BITS_PER_UNIT);
3001 }
3002 }
3003 /* Fallthru. */
3004
3005 case COMPONENT_REF:
3006 case BIT_FIELD_REF:
3007 case TARGET_MEM_REF:
3008 case MEM_REF:
3009 base = get_ref_base_and_extent (t, &offset, &size, &max_size);
3010 ctor = get_base_constructor (base, &offset, valueize);
3011
3012 /* Empty constructor. Always fold to 0. */
3013 if (ctor == error_mark_node)
3014 return build_zero_cst (TREE_TYPE (t));
3015 /* We do not know the precise address. */
3016 if (max_size == -1 || max_size != size)
3017 return NULL_TREE;
3018 /* We cannot determine the ctor. */
3019 if (!ctor)
3020 return NULL_TREE;
3021
3022 /* Out-of-bounds array access.  The value is undefined, but don't fold. */
3023 if (offset < 0)
3024 return NULL_TREE;
3025
3026 return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);
3027
3028 case REALPART_EXPR:
3029 case IMAGPART_EXPR:
3030 {
3031 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
3032 if (c && TREE_CODE (c) == COMPLEX_CST)
3033 return fold_build1_loc (EXPR_LOCATION (t),
3034 TREE_CODE (t), TREE_TYPE (t), c);
3035 break;
3036 }
3037
3038 default:
3039 break;
3040 }
3041
3042 return NULL_TREE;
3043 }
3044
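/* Like fold_const_aggregate_ref_1, but without valueizing SSA names;
   a NULL VALUEIZE callback is passed. */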
3045 tree
3046 fold_const_aggregate_ref (tree t)
3047 {
3048 return fold_const_aggregate_ref_1 (t, NULL);
3049 }
3050
3051 /* Return the declaration of the function that an OBJ_TYPE_REF references.
3052 TOKEN is the integer form of OBJ_TYPE_REF_TOKEN of the reference
3053 expression.  KNOWN_BINFO carries the binfo describing the true type of
3054 OBJ_TYPE_REF_OBJECT(REF). */
3055
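/* Editorial example (illustrative, not part of the original source):
   with 64-bit function pointers in the vtable, TOKEN 2 selects the
   entry at bit offset 2 * 64 into DECL_INITIAL of the vtable VAR_DECL
   (plus any byte offset from a POINTER_PLUS_EXPR in BINFO_VTABLE);
   the ADDR_EXPR found there is stripped to yield the FUNCTION_DECL.  */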
3056 tree
3057 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
3058 {
3059 unsigned HOST_WIDE_INT offset, size;
3060 tree v, fn;
3061
3062 v = BINFO_VTABLE (known_binfo);
3063 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
3064 if (!v)
3065 return NULL_TREE;
3066
3067 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
3068 {
3069 offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
3070 v = TREE_OPERAND (v, 0);
3071 }
3072 else
3073 offset = 0;
3074
3075 if (TREE_CODE (v) != ADDR_EXPR)
3076 return NULL_TREE;
3077 v = TREE_OPERAND (v, 0);
3078
3079 if (TREE_CODE (v) != VAR_DECL
3080 || !DECL_VIRTUAL_P (v)
3081 || !DECL_INITIAL (v)
3082 || DECL_INITIAL (v) == error_mark_node)
3083 return NULL_TREE;
3084 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
3085 size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
3086 offset += token * size;
3087 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
3088 offset, size);
3089 if (!fn)
3090 return NULL_TREE;
3091 gcc_assert (TREE_CODE (fn) == ADDR_EXPR
3092 || TREE_CODE (fn) == FDESC_EXPR);
3093 fn = TREE_OPERAND (fn, 0);
3094 gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);
3095
3096 /* When the cgraph node is missing and the function is not public, we
3097 cannot devirtualize.  This can happen in WHOPR when the actual method
3098 ends up in another partition, because we found the devirtualization
3099 possibility too late. */
3100 if (!can_refer_decl_in_current_unit_p (fn))
3101 return NULL_TREE;
3102
3103 return fn;
3104 }
3105
3106 /* Return true iff VAL is a gimple expression that is known to be
3107 non-negative. Restricted to floating-point inputs. */
3108
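/* Editorial examples (illustrative, not part of the original source):
   the checks below recognize y = fabs (x), y = x * x (identical
   operands) and y = pow (x, 2.0) as non-negative, but since they do
   not recurse on operand definitions, y = fabs (a) * fabs (b) is not
   recognized.  */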
3109 bool
3110 gimple_val_nonnegative_real_p (tree val)
3111 {
3112 gimple def_stmt;
3113
3114 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3115
3116 /* Use existing logic for non-gimple trees. */
3117 if (tree_expr_nonnegative_p (val))
3118 return true;
3119
3120 if (TREE_CODE (val) != SSA_NAME)
3121 return false;
3122
3123 /* Currently we look only at the immediately defining statement
3124 to make this determination, since recursion on defining
3125 statements of operands can lead to quadratic behavior in the
3126 worst case. This is expected to catch almost all occurrences
3127 in practice. It would be possible to implement limited-depth
3128 recursion if important cases are lost. Alternatively, passes
3129 that need this information (such as the pow/powi lowering code
3130 in the cse_sincos pass) could be revised to provide it through
3131 dataflow propagation. */
3132
3133 def_stmt = SSA_NAME_DEF_STMT (val);
3134
3135 if (is_gimple_assign (def_stmt))
3136 {
3137 tree op0, op1;
3138
3139 /* See fold-const.c:tree_expr_nonnegative_p for additional
3140 cases that could be handled with recursion. */
3141
3142 switch (gimple_assign_rhs_code (def_stmt))
3143 {
3144 case ABS_EXPR:
3145 /* Always true for floating-point operands. */
3146 return true;
3147
3148 case MULT_EXPR:
3149 /* True if the two operands are identical (since we are
3150 restricted to floating-point inputs). */
3151 op0 = gimple_assign_rhs1 (def_stmt);
3152 op1 = gimple_assign_rhs2 (def_stmt);
3153
3154 if (op0 == op1
3155 || operand_equal_p (op0, op1, 0))
3156 return true;
3157
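/* Fallthru. */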
3158 default:
3159 return false;
3160 }
3161 }
3162 else if (is_gimple_call (def_stmt))
3163 {
3164 tree fndecl = gimple_call_fndecl (def_stmt);
3165 if (fndecl
3166 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3167 {
3168 tree arg1;
3169
3170 switch (DECL_FUNCTION_CODE (fndecl))
3171 {
3172 CASE_FLT_FN (BUILT_IN_ACOS):
3173 CASE_FLT_FN (BUILT_IN_ACOSH):
3174 CASE_FLT_FN (BUILT_IN_CABS):
3175 CASE_FLT_FN (BUILT_IN_COSH):
3176 CASE_FLT_FN (BUILT_IN_ERFC):
3177 CASE_FLT_FN (BUILT_IN_EXP):
3178 CASE_FLT_FN (BUILT_IN_EXP10):
3179 CASE_FLT_FN (BUILT_IN_EXP2):
3180 CASE_FLT_FN (BUILT_IN_FABS):
3181 CASE_FLT_FN (BUILT_IN_FDIM):
3182 CASE_FLT_FN (BUILT_IN_HYPOT):
3183 CASE_FLT_FN (BUILT_IN_POW10):
3184 return true;
3185
3186 CASE_FLT_FN (BUILT_IN_SQRT):
3187 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3188 negative inputs. */
3189 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3190 return true;
3191
3192 break;
3193
3194 CASE_FLT_FN (BUILT_IN_POWI):
3195 /* True if the second argument is an even integer. */
3196 arg1 = gimple_call_arg (def_stmt, 1);
3197
3198 if (TREE_CODE (arg1) == INTEGER_CST
3199 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3200 return true;
3201
3202 break;
3203
3204 CASE_FLT_FN (BUILT_IN_POW):
3205 /* True if the second argument is an even integer-valued
3206 real. */
3207 arg1 = gimple_call_arg (def_stmt, 1);
3208
3209 if (TREE_CODE (arg1) == REAL_CST)
3210 {
3211 REAL_VALUE_TYPE c;
3212 HOST_WIDE_INT n;
3213
3214 c = TREE_REAL_CST (arg1);
3215 n = real_to_integer (&c);
3216
3217 if ((n & 1) == 0)
3218 {
3219 REAL_VALUE_TYPE cint;
3220 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3221 if (real_identical (&c, &cint))
3222 return true;
3223 }
3224 }
3225
3226 break;
3227
3228 default:
3229 return false;
3230 }
3231 }
3232 }
3233
3234 return false;
3235 }