]> gcc.gnu.org Git - gcc.git/blame - gcc/gimple-fold.c
coretypes.h (gimple_seq, [...]): Typedef as gimple.
[gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
949e47e5 2 Copyright (C) 2010, 2011, 2012 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
26#include "flags.h"
cbdd87d4 27#include "function.h"
cbdd87d4
RG
28#include "tree-dump.h"
29#include "tree-flow.h"
30#include "tree-pass.h"
31#include "tree-ssa-propagate.h"
cbdd87d4 32#include "target.h"
cfef45c8 33#include "gimple-fold.h"
cbdd87d4 34
b3b9f3d0
JH
/* Return true when DECL can be referenced from current unit.
   We can get declarations that are not possible to reference for
   various reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl)
{
  struct varpool_node *vnode;
  struct cgraph_node *node;

  /* Local (non-static, non-external) declarations are always referencable.  */
  if (!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
    return true;
  /* External flag is set, so we deal with C++ reference
     to static object from other file.  */
  if (DECL_EXTERNAL (decl) && TREE_STATIC (decl)
      && TREE_CODE (decl) == VAR_DECL)
    {
      /* Just be sure it is not big in frontend setting
	 flags incorrectly.  Those variables should never
	 be finalized.  */
      gcc_checking_assert (!(vnode = varpool_get_node (decl))
			   || vnode->alias
			   || !vnode->finalized);
      return false;
    }
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference implies the need to include the function body in the
     current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We are not at ltrans stage; so don't worry about WHOPR.
     Also when still gimplifying all referred comdat functions will be
     produced.
     ??? As observed in PR20991 for already optimized out comdat virtual
     functions we may not necessarily give up because the copy will be
     output elsewhere when the corresponding vtable is output.  */
  if (!flag_ltrans && (!DECL_COMDAT (decl) || !cgraph_function_flags_ready))
    return true;
  /* If we already output the function body, we are safe.  */
  if (TREE_ASM_WRITTEN (decl))
    return true;
  if (TREE_CODE (decl) == FUNCTION_DECL)
    {
      node = cgraph_get_node (decl);
      /* Check that we still have function body and that we didn't take
	 the decision to eliminate offline copy of the function yet.
	 The second is important when devirtualization happens during final
	 compilation stage when making a new reference no longer makes callee
	 to be compiled.  */
      if (!node || !node->analyzed || node->global.inlined_to)
	return false;
    }
  else if (TREE_CODE (decl) == VAR_DECL)
    {
      /* Variables need a finalized varpool node declaring the body.  */
      vnode = varpool_get_node (decl);
      if (!vnode || !vnode->finalized)
	return false;
    }
  return true;
}
112
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   Returns the canonicalized value, or NULL_TREE when the value cannot be
   safely referenced from the current unit.  */

tree
canonicalize_constructor_val (tree cval)
{
  STRIP_NOPS (cval);
  /* Rewrite invariant POINTER_PLUS_EXPRs as &MEM_REF[ptr + offset] so that
     the result is a plain ADDR_EXPR invariant.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Refuse addresses of decls we must not reference from this unit
	 (see can_refer_decl_in_current_unit_p).  */
      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base))
	return NULL_TREE;
      if (TREE_CODE (base) == VAR_DECL)
	{
	  /* Taking the address makes the variable addressable.  */
	  TREE_ADDRESSABLE (base) = 1;
	  if (cfun && gimple_referenced_vars (cfun))
	    add_referenced_var (base);
	}
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_get_create_node (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
    }
  return cval;
}
cbdd87d4
RG
161
/* If SYM is a constant variable with known value, return the value.
   NULL_TREE is returned otherwise.  */

tree
get_symbol_constant_value (tree sym)
{
  if (const_value_known_p (sym))
    {
      tree val = DECL_INITIAL (sym);
      if (val)
	{
	  /* Only a canonicalized, gimple-invariant initializer is usable.  */
	  val = canonicalize_constructor_val (val);
	  if (val && is_gimple_min_invariant (val))
	    return val;
	  else
	    return NULL_TREE;
	}
      /* Variables declared 'const' without an initializer
	 have zero as the initializer if they may not be
	 overridden at link or run time.  */
      if (!val
	  && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
	      || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
	return build_zero_cst (TREE_TYPE (sym));
    }

  return NULL_TREE;
}
190
191
cbdd87d4
RG
192
/* Subroutine of fold_stmt.  We perform several simplifications of the
   memory reference tree EXPR and make sure to re-gimplify them properly
   after propagation of constant addresses.  IS_LHS is true if the
   reference is supposed to be an lvalue.
   Returns the simplified reference or NULL_TREE if nothing changed.  */

static tree
maybe_fold_reference (tree expr, bool is_lhs)
{
  tree *t = &expr;
  tree result;

  /* Fold unary/ternary wrappers around constants directly.  */
  if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
       || TREE_CODE (expr) == REALPART_EXPR
       || TREE_CODE (expr) == IMAGPART_EXPR)
      && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_unary_loc (EXPR_LOCATION (expr),
			   TREE_CODE (expr),
			   TREE_TYPE (expr),
			   TREE_OPERAND (expr, 0));
  else if (TREE_CODE (expr) == BIT_FIELD_REF
	   && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
    return fold_ternary_loc (EXPR_LOCATION (expr),
			     TREE_CODE (expr),
			     TREE_TYPE (expr),
			     TREE_OPERAND (expr, 0),
			     TREE_OPERAND (expr, 1),
			     TREE_OPERAND (expr, 2));

  /* Walk down to the base of any component-reference chain.  */
  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM_REFs invariant address operand.  Do this first
     to avoid feeding non-canonical MEM_REFs elsewhere.  */
  if (TREE_CODE (*t) == MEM_REF
      && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
    {
      bool volatile_p = TREE_THIS_VOLATILE (*t);
      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
			      TREE_OPERAND (*t, 0),
			      TREE_OPERAND (*t, 1));
      if (tem)
	{
	  /* fold_binary does not preserve volatility; restore it.  */
	  TREE_THIS_VOLATILE (tem) = volatile_p;
	  *t = tem;
	  /* Retry folding with the canonicalized base in place.  */
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  /* An rvalue reference to a constant aggregate may fold to a constant.  */
  if (!is_lhs
      && (result = fold_const_aggregate_ref (expr))
      && is_gimple_min_invariant (result))
    return result;

  /* Fold back MEM_REFs to reference trees.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && (TREE_THIS_VOLATILE (*t)
	  == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
	  == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
      /* We have to look out here to not drop a required conversion
	 from the rhs to the lhs if is_lhs, but we don't have the
	 rhs here to verify that.  Thus require strict type
	 compatibility.  */
      && types_compatible_p (TREE_TYPE (*t),
			     TREE_TYPE (TREE_OPERAND
					  (TREE_OPERAND (*t, 0), 0))))
    {
      tree tem;
      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tem = maybe_fold_reference (expr, is_lhs);
      if (tem)
	return tem;
      return expr;
    }
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  tem = maybe_fold_reference (expr, is_lhs);
	  if (tem)
	    return tem;
	  return expr;
	}
    }

  return NULL_TREE;
}
288
289
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    /* Try to fold the reference the address is taken of; a
	       resulting zero-offset MEM_REF collapses to its pointer.  */
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    /* All elements must be constants for the fold to apply.  */
	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return unshare_expr (get_symbol_constant_value (rhs));

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
	if (result)
	  {
	    /* If the operation was a conversion do _not_ mark a
	       resulting constant with TREE_OVERFLOW if the original
	       constant was not.  These conversions have implementation
	       defined behavior and retaining the TREE_OVERFLOW flag
	       here would confuse later passes such as VRP.  */
	    if (CONVERT_EXPR_CODE_P (subcode)
		&& TREE_CODE (result) == INTEGER_CST
		&& TREE_CODE (rhs) == INTEGER_CST)
	      TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);

	    STRIP_USELESS_TYPE_CONVERSION (result);
	    if (valid_gimple_rhs_p (result))
	      return result;
	  }
      }
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
		 X == 1 and X != 0 is X.  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));

	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      /* Defer overflow warnings until we know the fold is kept.  */
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
508
/* Attempt to fold a conditional statement.  Return true if any changes were
   made.  We only attempt to fold the condition expression, and do not perform
   any transformation that would require alteration of the cfg.  It is
   assumed that the operands have been previously folded.  */

static bool
fold_gimple_cond (gimple stmt)
{
  tree result = fold_binary_loc (gimple_location (stmt),
				 gimple_cond_code (stmt),
				 boolean_type_node,
				 gimple_cond_lhs (stmt),
				 gimple_cond_rhs (stmt));

  if (result)
    {
      STRIP_USELESS_TYPE_CONVERSION (result);
      /* Only install the folded form when it is still a valid GIMPLE
	 condition.  */
      if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
	{
	  gimple_cond_set_condition_from_tree (stmt, result);
	  return true;
	}
    }

  return false;
}
535
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produce a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;
  struct gimplify_ctx gctx;
  gimple laststore;
  tree reaching_vuse;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (&gctx);
  gctx.into_ssa = gimple_in_ssa_p (cfun);

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_remove (si_p, true);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a temporary and assign it to the call's LHS.  */
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  */
  laststore = NULL;
  for (i = gsi_last (stmts); !gsi_end_p (i); gsi_prev (&i))
    {
      new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF if it is a store or a call with
	 unspecified side effects on memory.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  /* The last store in the sequence (first seen walking backward)
	     reuses the VDEF of the replaced statement.  */
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  reaching_vuse = gimple_vuse (stmt);
  for (i = gsi_start (stmts); !gsi_end_p (i); gsi_next (&i))
    {
      new_stmt = gsi_stmt (i);
      /* The replacement can expose previously unreferenced variables.  */
      if (gimple_in_ssa_p (cfun))
	find_new_referenced_vars (new_stmt);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
655
/* Return the string length, maximum string length or maximum value of
   ARG in LENGTH.
   If ARG is an SSA name variable, follow its use-def chains.  If LENGTH
   is not NULL and, for TYPE == 0, its value is not equal to the length
   we determine or if we are unable to determine the length or value,
   return false.  VISITED is a bitmap of visited variables.
   TYPE is 0 if string length should be returned, 1 for maximum string
   length and 2 for maximum value ARG can have.  */

static bool
get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
{
  tree var, val;
  gimple def_stmt;

  if (TREE_CODE (arg) != SSA_NAME)
    {
      /* A COND_EXPR is handled by requiring both arms to agree.  */
      if (TREE_CODE (arg) == COND_EXPR)
	return get_maxval_strlen (COND_EXPR_THEN (arg), length, visited, type)
	       && get_maxval_strlen (COND_EXPR_ELSE (arg), length, visited, type);
      /* We can end up with &(*iftmp_1)[0] here as well, so handle it.  */
      else if (TREE_CODE (arg) == ADDR_EXPR
	       && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
	       && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
	{
	  tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
	  if (TREE_CODE (aop0) == INDIRECT_REF
	      && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
	    return get_maxval_strlen (TREE_OPERAND (aop0, 0),
				      length, visited, type);
	}

      if (type == 2)
	{
	  /* Maximum-value mode: ARG itself must be a non-negative
	     integer constant.  */
	  val = arg;
	  if (TREE_CODE (val) != INTEGER_CST
	      || tree_int_cst_sgn (val) < 0)
	    return false;
	}
      else
	val = c_strlen (arg, 1);
      if (!val)
	return false;

      if (*length)
	{
	  if (type > 0)
	    {
	      /* Maximum modes: keep the larger of the two constants.  */
	      if (TREE_CODE (*length) != INTEGER_CST
		  || TREE_CODE (val) != INTEGER_CST)
		return false;

	      if (tree_int_cst_lt (*length, val))
		*length = val;
	      return true;
	    }
	  /* Exact-length mode: the values must agree.  */
	  else if (simple_cst_equal (val, *length) != 1)
	    return false;
	}

      *length = val;
      return true;
    }

  /* If we were already here, break the infinite cycle.  */
  if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
    return true;

  var = arg;
  def_stmt = SSA_NAME_DEF_STMT (var);

  switch (gimple_code (def_stmt))
    {
      case GIMPLE_ASSIGN:
	/* The RHS of the statement defining VAR must either have a
	   constant length or come from another SSA_NAME with a constant
	   length.  */
	if (gimple_assign_single_p (def_stmt)
	    || gimple_assign_unary_nop_p (def_stmt))
	  {
	    tree rhs = gimple_assign_rhs1 (def_stmt);
	    return get_maxval_strlen (rhs, length, visited, type);
	  }
	return false;

      case GIMPLE_PHI:
	{
	  /* All the arguments of the PHI node must have the same constant
	     length.  */
	  unsigned i;

	  for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
	    {
	      tree arg = gimple_phi_arg (def_stmt, i)->def;

	      /* If this PHI has itself as an argument, we cannot
		 determine the string length of this argument.  However,
		 if we can find a constant string length for the other
		 PHI args then we can still be sure that this is a
		 constant string length.  So be optimistic and just
		 continue with the next argument.  */
	      if (arg == gimple_phi_result (def_stmt))
		continue;

	      if (!get_maxval_strlen (arg, length, visited, type))
		return false;
	    }
	}
	return true;

      default:
	return false;
    }
}
770
771
/* Fold builtin call in statement STMT.  Returns a simplified tree.
   We may return a non-constant expression, including another call
   to a different function and with different arguments, e.g.,
   substituting memcpy for strcpy when the string length is known.
   Note that some builtins expand into inline code that may not
   be valid in GIMPLE.  Callers must take care.  */

tree
gimple_fold_builtin (gimple stmt)
{
  tree result, val[3];
  tree callee, a;
  int arg_idx, type;
  bitmap visited;
  bool ignore;
  int nargs;
  location_t loc = gimple_location (stmt);

  gcc_assert (is_gimple_call (stmt));

  /* The call's result is ignored when it has no LHS.  */
  ignore = (gimple_call_lhs (stmt) == NULL);

  /* First try the generic builtin folder.  If that succeeds, return the
     result directly.  */
  result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      return result;
    }

  /* Ignore MD builtins.  */
  callee = gimple_call_fndecl (stmt);
  if (DECL_BUILT_IN_CLASS (callee) == BUILT_IN_MD)
    return NULL_TREE;

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return NULL_TREE;

  /* If the builtin could not be folded, and it has no argument list,
     we're done.  */
  nargs = gimple_call_num_args (stmt);
  if (nargs == 0)
    return NULL_TREE;

  /* Limit the work only for builtins we know how to simplify.
     ARG_IDX selects which argument carries the length/value to analyze;
     TYPE is the get_maxval_strlen mode (0 = exact string length,
     1 = maximum string length, 2 = maximum value).  */
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
    case BUILT_IN_FPUTS:
    case BUILT_IN_FPUTS_UNLOCKED:
      arg_idx = 0;
      type = 0;
      break;
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRNCPY:
      arg_idx = 1;
      type = 0;
      break;
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      arg_idx = 2;
      type = 2;
      break;
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      arg_idx = 1;
      type = 1;
      break;
    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      arg_idx = 1;
      type = 2;
      break;
    default:
      return NULL_TREE;
    }

  if (arg_idx >= nargs)
    return NULL_TREE;

  /* Try to use the dataflow information gathered by the CCP process.  */
  visited = BITMAP_ALLOC (NULL);
  bitmap_clear (visited);

  memset (val, 0, sizeof (val));
  a = gimple_call_arg (stmt, arg_idx);
  if (!get_maxval_strlen (a, &val[arg_idx], visited, type))
    val[arg_idx] = NULL_TREE;

  BITMAP_FREE (visited);

  result = NULL_TREE;
  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_STRLEN:
      if (val[0] && nargs == 1)
	{
	  tree new_val =
	      fold_convert (TREE_TYPE (gimple_call_lhs (stmt)), val[0]);

	  /* If the result is not a valid gimple value, or not a cast
	     of a valid gimple value, then we cannot use the result.  */
	  if (is_gimple_val (new_val)
	      || (CONVERT_EXPR_P (new_val)
		  && is_gimple_val (TREE_OPERAND (new_val, 0))))
	    return new_val;
	}
      break;

    case BUILT_IN_STRCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 2)
	result = fold_builtin_strcpy (loc, callee,
				      gimple_call_arg (stmt, 0),
				      gimple_call_arg (stmt, 1),
				      val[1]);
      break;

    case BUILT_IN_STRNCPY:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_strncpy (loc, callee,
				       gimple_call_arg (stmt, 0),
				       gimple_call_arg (stmt, 1),
				       gimple_call_arg (stmt, 2),
				       val[1]);
      break;

    case BUILT_IN_FPUTS:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, false, val[0]);
      break;

    case BUILT_IN_FPUTS_UNLOCKED:
      if (nargs == 2)
	result = fold_builtin_fputs (loc, gimple_call_arg (stmt, 0),
				     gimple_call_arg (stmt, 1),
				     ignore, true, val[0]);
      break;

    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMPCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_memory_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  gimple_call_arg (stmt, 3),
					  val[2], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STPCPY_CHK:
      if (val[1] && is_gimple_val (val[1]) && nargs == 3)
	result = fold_builtin_stxcpy_chk (loc, callee,
					  gimple_call_arg (stmt, 0),
					  gimple_call_arg (stmt, 1),
					  gimple_call_arg (stmt, 2),
					  val[1], ignore,
					  DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_STPNCPY_CHK:
      if (val[2] && is_gimple_val (val[2]) && nargs == 4)
	result = fold_builtin_stxncpy_chk (loc, gimple_call_arg (stmt, 0),
					   gimple_call_arg (stmt, 1),
					   gimple_call_arg (stmt, 2),
					   gimple_call_arg (stmt, 3),
					   val[2], ignore,
					   DECL_FUNCTION_CODE (callee));
      break;

    case BUILT_IN_SNPRINTF_CHK:
    case BUILT_IN_VSNPRINTF_CHK:
      if (val[1] && is_gimple_val (val[1]))
	result = gimple_fold_builtin_snprintf_chk (stmt, val[1],
						   DECL_FUNCTION_CODE (callee));
      break;

    default:
      gcc_unreachable ();
    }

  if (result && ignore)
    result = fold_ignored_result (result);
  return result;
}
971
1ae6fe9b 972
49c471e3
MJ
/* Return a binfo to be used for devirtualization of calls based on an object
   represented by a declaration (i.e. a global or automatically allocated one)
   or NULL if it cannot be found or is not safe.  CST is expected to be an
   ADDR_EXPR of such object or the function will return NULL.  Currently it is
   safe to use such binfo only if it has no base binfo (i.e. no ancestors).  */

tree
gimple_extract_devirt_binfo_from_cst (tree cst)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base, type, expected_type, binfo;
  bool last_artificial = false;

  if (!flag_devirtualize
      || TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (cst))) != RECORD_TYPE)
    return NULL_TREE;

  cst = TREE_OPERAND (cst, 0);
  expected_type = TREE_TYPE (cst);
  base = get_ref_base_and_extent (cst, &offset, &size, &max_size);
  type = TREE_TYPE (base);
  /* Only a fixed-size access to a record declared by a DECL is usable.  */
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (type) != RECORD_TYPE)
    return NULL_TREE;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (expected_type))
	break;
      if (offset < 0)
	return NULL_TREE;

      /* Locate the field of TYPE that covers OFFSET.  */
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_low_cst (DECL_SIZE (fld), 1);
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      if (!fld || TREE_CODE (TREE_TYPE (fld)) != RECORD_TYPE)
	return NULL_TREE;

      last_artificial = DECL_ARTIFICIAL (fld);
      type = TREE_TYPE (fld);
      offset -= pos;
    }
  /* Artificial sub-objects are ancestors, we do not want to use them for
     devirtualization, at least not here.  */
  if (last_artificial)
    return NULL_TREE;
  binfo = TYPE_BINFO (type);
  if (!binfo || BINFO_N_BASE_BINFOS (binfo) > 0)
    return NULL_TREE;
  else
    return binfo;
}
1040
cbdd87d4
RG
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.
   When INPLACE is true the statement itself is never replaced; only the
   argument folding and devirtualization steps are performed.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      /* If the OBJ_TYPE_REF expression already resolves to a function
	 address, strip the OBJ_TYPE_REF wrapper.  */
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else
	{
	  /* Otherwise try to devirtualize via the static type of the
	     object the virtual call is performed on.  */
	  tree obj = OBJ_TYPE_REF_OBJECT (callee);
	  tree binfo = gimple_extract_devirt_binfo_from_cst (obj);
	  if (binfo)
	    {
	      HOST_WIDE_INT token
		= TREE_INT_CST_LOW (OBJ_TYPE_REF_TOKEN (callee));
	      tree fndecl = gimple_get_virt_method_for_binfo (token, binfo);
	      if (fndecl)
		{
		  gimple_call_set_fndecl (stmt, fndecl);
		  changed = true;
		}
	    }
	}
    }

  /* The remaining folding may replace the statement, which is not
     allowed in inplace mode.  */
  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  callee = gimple_call_fndecl (stmt);
  if (callee && DECL_BUILT_IN (callee))
    {
      tree result = gimple_fold_builtin (stmt);
      if (result)
	{
	  /* update_call_from_tree handles GENERIC results that fit a
	     call replacement directly; otherwise re-gimplify.  */
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
1112
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  Returns true if any change was made to the
   statement at *GSI.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;
  gimple_stmt_iterator gsinext = *gsi;
  gimple next_stmt;

  /* Remember the statement following STMT so we can detect below whether
     STMT was folded away entirely (in which case *GSI now points at
     NEXT_STMT).  */
  gsi_next (&gsinext);
  next_stmt = gsi_end_p (gsinext) ? NULL : gsi_stmt (gsinext);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In inplace mode only accept the new RHS if it needs fewer
	   operand slots than the statement currently has.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    /* Outputs are lvalues, so fold to an addressable reference.  */
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    /* An input that must live in memory (and cannot be a register)
	       is treated like an lvalue when folding.  */
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference below the ADDR_EXPR and rebuild the
		 address with the original type.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  /* If stmt folds into nothing and it was the last stmt in a bb,
     don't call gsi_stmt.  */
  if (gsi_end_p (*gsi))
    {
      gcc_assert (next_stmt == NULL);
      return changed;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  Don't do this if stmt folded into nothing
     (i.e. *GSI now points at NEXT_STMT), as we'd be changing the next
     stmt instead.  */
  if (gimple_has_lhs (stmt) && stmt != next_stmt)
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
1275
1276/* Fold the statement pointed to by GSI. In some cases, this function may
1277 replace the whole statement with a new one. Returns true iff folding
1278 makes any changes.
1279 The statement pointed to by GSI should be in valid gimple form but may
1280 be in unfolded state as resulting from for example constant propagation
1281 which can produce *&x = 0. */
1282
1283bool
1284fold_stmt (gimple_stmt_iterator *gsi)
1285{
1286 return fold_stmt_1 (gsi, false);
1287}
1288
59401b92 1289/* Perform the minimal folding on statement *GSI. Only operations like
cbdd87d4
RG
1290 *&x created by constant propagation are handled. The statement cannot
1291 be replaced with a new one. Return true if the statement was
1292 changed, false otherwise.
59401b92 1293 The statement *GSI should be in valid gimple form but may
cbdd87d4
RG
1294 be in unfolded state as resulting from for example constant propagation
1295 which can produce *&x = 0. */
1296
1297bool
59401b92 1298fold_stmt_inplace (gimple_stmt_iterator *gsi)
cbdd87d4 1299{
59401b92
RG
1300 gimple stmt = gsi_stmt (*gsi);
1301 bool changed = fold_stmt_1 (gsi, true);
1302 gcc_assert (gsi_stmt (*gsi) == stmt);
cbdd87d4
RG
1303 return changed;
1304}
1305
e89065a1
SL
1306/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
1307 if EXPR is null or we don't know how.
1308 If non-null, the result always has boolean type. */
1309
1310static tree
1311canonicalize_bool (tree expr, bool invert)
1312{
1313 if (!expr)
1314 return NULL_TREE;
1315 else if (invert)
1316 {
1317 if (integer_nonzerop (expr))
1318 return boolean_false_node;
1319 else if (integer_zerop (expr))
1320 return boolean_true_node;
1321 else if (TREE_CODE (expr) == SSA_NAME)
1322 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
1323 build_int_cst (TREE_TYPE (expr), 0));
1324 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1325 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
1326 boolean_type_node,
1327 TREE_OPERAND (expr, 0),
1328 TREE_OPERAND (expr, 1));
1329 else
1330 return NULL_TREE;
1331 }
1332 else
1333 {
1334 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1335 return expr;
1336 if (integer_nonzerop (expr))
1337 return boolean_true_node;
1338 else if (integer_zerop (expr))
1339 return boolean_false_node;
1340 else if (TREE_CODE (expr) == SSA_NAME)
1341 return fold_build2 (NE_EXPR, boolean_type_node, expr,
1342 build_int_cst (TREE_TYPE (expr), 0));
1343 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
1344 return fold_build2 (TREE_CODE (expr),
1345 boolean_type_node,
1346 TREE_OPERAND (expr, 0),
1347 TREE_OPERAND (expr, 1));
1348 else
1349 return NULL_TREE;
1350 }
1351}
1352
1353/* Check to see if a boolean expression EXPR is logically equivalent to the
1354 comparison (OP1 CODE OP2). Check for various identities involving
1355 SSA_NAMEs. */
1356
1357static bool
1358same_bool_comparison_p (const_tree expr, enum tree_code code,
1359 const_tree op1, const_tree op2)
1360{
1361 gimple s;
1362
1363 /* The obvious case. */
1364 if (TREE_CODE (expr) == code
1365 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
1366 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
1367 return true;
1368
1369 /* Check for comparing (name, name != 0) and the case where expr
1370 is an SSA_NAME with a definition matching the comparison. */
1371 if (TREE_CODE (expr) == SSA_NAME
1372 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
1373 {
1374 if (operand_equal_p (expr, op1, 0))
1375 return ((code == NE_EXPR && integer_zerop (op2))
1376 || (code == EQ_EXPR && integer_nonzerop (op2)));
1377 s = SSA_NAME_DEF_STMT (expr);
1378 if (is_gimple_assign (s)
1379 && gimple_assign_rhs_code (s) == code
1380 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
1381 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
1382 return true;
1383 }
1384
1385 /* If op1 is of the form (name != 0) or (name == 0), and the definition
1386 of name is a comparison, recurse. */
1387 if (TREE_CODE (op1) == SSA_NAME
1388 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
1389 {
1390 s = SSA_NAME_DEF_STMT (op1);
1391 if (is_gimple_assign (s)
1392 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
1393 {
1394 enum tree_code c = gimple_assign_rhs_code (s);
1395 if ((c == NE_EXPR && integer_zerop (op2))
1396 || (c == EQ_EXPR && integer_nonzerop (op2)))
1397 return same_bool_comparison_p (expr, c,
1398 gimple_assign_rhs1 (s),
1399 gimple_assign_rhs2 (s));
1400 if ((c == EQ_EXPR && integer_zerop (op2))
1401 || (c == NE_EXPR && integer_nonzerop (op2)))
1402 return same_bool_comparison_p (expr,
1403 invert_tree_comparison (c, false),
1404 gimple_assign_rhs1 (s),
1405 gimple_assign_rhs2 (s));
1406 }
1407 }
1408 return false;
1409}
1410
1411/* Check to see if two boolean expressions OP1 and OP2 are logically
1412 equivalent. */
1413
1414static bool
1415same_bool_result_p (const_tree op1, const_tree op2)
1416{
1417 /* Simple cases first. */
1418 if (operand_equal_p (op1, op2, 0))
1419 return true;
1420
1421 /* Check the cases where at least one of the operands is a comparison.
1422 These are a bit smarter than operand_equal_p in that they apply some
1423 identifies on SSA_NAMEs. */
1424 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
1425 && same_bool_comparison_p (op1, TREE_CODE (op2),
1426 TREE_OPERAND (op2, 0),
1427 TREE_OPERAND (op2, 1)))
1428 return true;
1429 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
1430 && same_bool_comparison_p (op2, TREE_CODE (op1),
1431 TREE_OPERAND (op1, 0),
1432 TREE_OPERAND (op1, 1)))
1433 return true;
1434
1435 /* Default case. */
1436 return false;
1437}
1438
/* Forward declarations for some mutually recursive functions.
   The AND and OR simplifiers below call into each other (DeMorgan-style
   rewrites turn an inverted AND into an OR and vice versa), hence the
   cycle that requires these declarations.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b);
static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b);
static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison (tree var, bool invert,
			enum tree_code code2, tree op2a, tree op2b);
static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b);
1459
1460/* Helper function for and_comparisons_1: try to simplify the AND of the
1461 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1462 If INVERT is true, invert the value of the VAR before doing the AND.
1463 Return NULL_EXPR if we can't simplify this to a single expression. */
1464
1465static tree
1466and_var_with_comparison (tree var, bool invert,
1467 enum tree_code code2, tree op2a, tree op2b)
1468{
1469 tree t;
1470 gimple stmt = SSA_NAME_DEF_STMT (var);
1471
1472 /* We can only deal with variables whose definitions are assignments. */
1473 if (!is_gimple_assign (stmt))
1474 return NULL_TREE;
1475
1476 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1477 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
1478 Then we only have to consider the simpler non-inverted cases. */
1479 if (invert)
1480 t = or_var_with_comparison_1 (stmt,
1481 invert_tree_comparison (code2, false),
1482 op2a, op2b);
1483 else
1484 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
1485 return canonicalize_bool (t, invert);
1486}
1487
/* Try to simplify the AND of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison_1 (gimple stmt,
			   enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var AND (var == 0)) => false.  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) or (op2a == 1): the comparison is true iff op2a is.  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* (op2a == 0) or (op2a != 1): the comparison is true iff op2a is
	 false.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_false_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = and_comparisons_1 (innercode,
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt),
				  code2,
				  op2a,
				  op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_and = (innercode == BIT_AND_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
	 inner1 AND (inner1 OR inner2) => inner1
	 !inner1 AND (inner1 AND inner2) => false
	 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
	 Likewise for similar cases involving inner2.  */
      if (inner1 == true_test_var)
	return (is_and ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_and ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_and
		? boolean_false_node
		: and_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the AND across the inner tests.
	 Compute the first partial result, (inner1 AND (op2a code op2b)).
	 Note the assignments to S and T inside the condition.  */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (t AND inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner2;
	      else if (integer_zerop (t))
		return boolean_false_node;
	    }

	  /* Handle the OR case, where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner2 AND (op2a code2 op2b)))  */
	  else if (integer_onep (t))
	    return boolean_true_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
					      gimple_assign_rhs1 (s),
					      gimple_assign_rhs2 (s),
					      code2, op2a, op2b)))
	{
	  /* Handle the AND case, where we are reassociating:
	     (inner1 AND inner2) AND (op2a code2 op2b)
	     => (inner1 AND t)  */
	  if (is_and)
	    {
	      if (integer_onep (t))
		return inner1;
	      else if (integer_zerop (t))
		return boolean_false_node;
	      /* If both are the same, we can apply the identity
		 (x AND x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the OR case. where we are redistributing:
	     (inner1 OR inner2) AND (op2a code2 op2b)
	     => (t OR (inner1 AND (op2a code2 op2b)))
	     => (t OR partial)  */
	  else
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed OR expression.  The
		     interesting case is when at least one is false.
		     Or, if both are the same, we can apply the identity
		     (x OR x) == x.  */
		  if (integer_zerop (partial))
		    return t;
		  else if (integer_zerop (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  return NULL_TREE;
}
1650
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		   enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) AND (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1, code2,
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ANDIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a == op1b), we should either be able to
	 return that or FALSE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	      else
		return boolean_false_node;
	    }
	}
      /* Likewise if the second comparison is an == comparison.  */
      else if (code2 == EQ_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	      else
		return boolean_false_node;
	    }
	}

      /* Same business with inequality tests.  */
      else if (code1 == NE_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp != 0); break;
	    case NE_EXPR: val = (cmp == 0); break;
	    case LT_EXPR: val = (cmp >= 0); break;
	    case GT_EXPR: val = (cmp <= 0); break;
	    case LE_EXPR: val = (cmp > 0); break;
	    case GE_EXPR: val = (cmp < 0); break;
	    default:
	      val = false;
	    }
	  /* If the second comparison implies the first (the NE holds
	     whenever the second holds), the AND is just the second.  */
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == NE_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp <= 0); break;
	    case GT_EXPR: val = (cmp >= 0); break;
	    case LE_EXPR: val = (cmp < 0); break;
	    case GE_EXPR: val = (cmp > 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Choose the more restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Likewise choose the more restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	  else
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}

      /* Check for singleton ranges.  */
      else if (cmp == 0
	       && ((code1 == LE_EXPR && code2 == GE_EXPR)
		   || (code1 == GE_EXPR && code2 == LE_EXPR)))
	return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);

      /* Check for disjoint ranges.  */
      else if (cmp <= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_false_node;
      else if (cmp >= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_false_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* (NAME == 0) and (NAME != 1) both mean the NAME is inverted.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return and_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ANDed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
			{
			  if (!result)
			    result = boolean_false_node;
			  else if (!integer_zerop (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = and_var_with_comparison (arg, invert, code2,
						      op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
1902
1903/* Try to simplify the AND of two comparisons, specified by
1904 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
1905 If this can be simplified to a single expression (without requiring
1906 introducing more SSA variables to hold intermediate values),
1907 return the resulting tree. Otherwise return NULL_TREE.
1908 If the result expression is non-null, it has boolean type. */
1909
1910tree
1911maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
1912 enum tree_code code2, tree op2a, tree op2b)
1913{
1914 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
1915 if (t)
1916 return t;
1917 else
1918 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
1919}
1920
1921/* Helper function for or_comparisons_1: try to simplify the OR of the
1922 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
1923 If INVERT is true, invert the value of VAR before doing the OR.
1924 Return NULL_EXPR if we can't simplify this to a single expression. */
1925
1926static tree
1927or_var_with_comparison (tree var, bool invert,
1928 enum tree_code code2, tree op2a, tree op2b)
1929{
1930 tree t;
1931 gimple stmt = SSA_NAME_DEF_STMT (var);
1932
1933 /* We can only deal with variables whose definitions are assignments. */
1934 if (!is_gimple_assign (stmt))
1935 return NULL_TREE;
1936
1937 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
1938 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
1939 Then we only have to consider the simpler non-inverted cases. */
1940 if (invert)
1941 t = and_var_with_comparison_1 (stmt,
1942 invert_tree_comparison (code2, false),
1943 op2a, op2b);
1944 else
1945 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
1946 return canonicalize_bool (t, invert);
1947}
1948
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   STMT must be a GIMPLE_ASSIGN whose lhs is the variable being ORed.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
or_var_with_comparison_1 (gimple stmt,
			  enum tree_code code2, tree op2a, tree op2b)
{
  tree var = gimple_assign_lhs (stmt);
  tree true_test_var = NULL_TREE;
  tree false_test_var = NULL_TREE;
  enum tree_code innercode = gimple_assign_rhs_code (stmt);

  /* Check for identities like (var OR (var != 0)) => true .  */
  if (TREE_CODE (op2a) == SSA_NAME
      && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
    {
      /* (op2a != 0) or (op2a == 1) tests op2a for truth.  */
      if ((code2 == NE_EXPR && integer_zerop (op2b))
	  || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
	{
	  true_test_var = op2a;
	  if (var == true_test_var)
	    return var;
	}
      /* (op2a == 0) or (op2a != 1) tests op2a for falsehood.  */
      else if ((code2 == EQ_EXPR && integer_zerop (op2b))
	       || (code2 == NE_EXPR && integer_nonzerop (op2b)))
	{
	  false_test_var = op2a;
	  if (var == false_test_var)
	    return boolean_true_node;
	}
    }

  /* If the definition is a comparison, recurse on it.  */
  if (TREE_CODE_CLASS (innercode) == tcc_comparison)
    {
      tree t = or_comparisons_1 (innercode,
				 gimple_assign_rhs1 (stmt),
				 gimple_assign_rhs2 (stmt),
				 code2,
				 op2a,
				 op2b);
      if (t)
	return t;
    }

  /* If the definition is an AND or OR expression, we may be able to
     simplify by reassociating.  */
  if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
      && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
    {
      tree inner1 = gimple_assign_rhs1 (stmt);
      tree inner2 = gimple_assign_rhs2 (stmt);
      gimple s;
      tree t;
      tree partial = NULL_TREE;
      bool is_or = (innercode == BIT_IOR_EXPR);

      /* Check for boolean identities that don't require recursive examination
	 of inner1/inner2:
	 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
	 inner1 OR (inner1 AND inner2) => inner1
	 !inner1 OR (inner1 OR inner2) => true
	 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
      */
      if (inner1 == true_test_var)
	return (is_or ? var : inner1);
      else if (inner2 == true_test_var)
	return (is_or ? var : inner2);
      else if (inner1 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner2, false, code2, op2a, op2b));
      else if (inner2 == false_test_var)
	return (is_or
		? boolean_true_node
		: or_var_with_comparison (inner1, false, code2, op2a, op2b));

      /* Next, redistribute/reassociate the OR across the inner tests.
	 Compute the first partial result, (inner1 OR (op2a code op2b)) */
      if (TREE_CODE (inner1) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (t OR inner2)
	     If the partial result t is a constant, we win.  Otherwise
	     continue on to try reassociating with the other inner test.  */
	  if (is_or)
	    {
	      if (integer_onep (t))
		return boolean_true_node;
	      else if (integer_zerop (t))
		return inner2;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner2 OR (op2a code op2b))) */
	  else if (integer_zerop (t))
	    return boolean_false_node;

	  /* Save partial result for later.  */
	  partial = t;
	}

      /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
      if (TREE_CODE (inner2) == SSA_NAME
	  && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
	  && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
					     gimple_assign_rhs1 (s),
					     gimple_assign_rhs2 (s),
					     code2, op2a, op2b)))
	{
	  /* Handle the OR case, where we are reassociating:
	     (inner1 OR inner2) OR (op2a code2 op2b)
	     => (inner1 OR t)
	     => (t OR partial) */
	  if (is_or)
	    {
	      if (integer_zerop (t))
		return inner1;
	      else if (integer_onep (t))
		return boolean_true_node;
	      /* If both are the same, we can apply the identity
		 (x OR x) == x.  */
	      else if (partial && same_bool_result_p (t, partial))
		return t;
	    }

	  /* Handle the AND case, where we are redistributing:
	     (inner1 AND inner2) OR (op2a code2 op2b)
	     => (t AND (inner1 OR (op2a code2 op2b)))
	     => (t AND partial) */
	  else
	    {
	      if (integer_zerop (t))
		return boolean_false_node;
	      else if (partial)
		{
		  /* We already got a simplification for the other
		     operand to the redistributed AND expression.  The
		     interesting case is when at least one is true.
		     Or, if both are the same, we can apply the identity
		     (x AND x) == x.  */
		  if (integer_onep (partial))
		    return t;
		  else if (integer_onep (t))
		    return partial;
		  else if (same_bool_result_p (t, partial))
		    return t;
		}
	    }
	}
    }
  /* No simplification was possible.  */
  return NULL_TREE;
}
2112
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */

static tree
or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
		  enum tree_code code2, tree op2a, tree op2b)
{
  /* First check for ((x CODE1 y) OR (x CODE2 y)).  */
  if (operand_equal_p (op1a, op2a, 0)
      && operand_equal_p (op1b, op2b, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1, code2,
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* Likewise the swapped case of the above.  */
  if (operand_equal_p (op1a, op2b, 0)
      && operand_equal_p (op1b, op2a, 0))
    {
      /* Result will be either NULL_TREE, or a combined comparison.  */
      tree t = combine_comparisons (UNKNOWN_LOCATION,
				    TRUTH_ORIF_EXPR, code1,
				    swap_tree_comparison (code2),
				    boolean_type_node, op1a, op1b);
      if (t)
	return t;
    }

  /* If both comparisons are of the same value against constants, we might
     be able to merge them.  */
  if (operand_equal_p (op1a, op2a, 0)
      && TREE_CODE (op1b) == INTEGER_CST
      && TREE_CODE (op2b) == INTEGER_CST)
    {
      /* CMP is <0, 0, or >0 as op1b is less than, equal to, or greater
	 than op2b.  */
      int cmp = tree_int_cst_compare (op1b, op2b);

      /* If we have (op1a != op1b), we should either be able to
	 return that or TRUE, depending on whether the constant op1b
	 also satisfies the other comparison against op2b.  */
      if (code1 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code1, boolean_type_node, op1a, op1b);
	    }
	}
      /* Likewise if the second comparison is a != comparison.  */
      else if (code2 == NE_EXPR)
	{
	  bool done = true;
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default: done = false;
	    }
	  if (done)
	    {
	      if (val)
		return boolean_true_node;
	      else
		return fold_build2 (code2, boolean_type_node, op2a, op2b);
	    }
	}

      /* See if an equality test is redundant with the other comparison.  */
      else if (code1 == EQ_EXPR)
	{
	  bool val;
	  switch (code2)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp < 0); break;
	    case GT_EXPR: val = (cmp > 0); break;
	    case LE_EXPR: val = (cmp <= 0); break;
	    case GE_EXPR: val = (cmp >= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	}
      else if (code2 == EQ_EXPR)
	{
	  bool val;
	  switch (code1)
	    {
	    case EQ_EXPR: val = (cmp == 0); break;
	    case NE_EXPR: val = (cmp != 0); break;
	    case LT_EXPR: val = (cmp > 0); break;
	    case GT_EXPR: val = (cmp < 0); break;
	    case LE_EXPR: val = (cmp >= 0); break;
	    case GE_EXPR: val = (cmp <= 0); break;
	    default:
	      val = false;
	    }
	  if (val)
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Chose the less restrictive of two < or <= comparisons.  */
      else if ((code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	{
	  if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Likewise chose the less restrictive of two > or >= comparisons.  */
      else if ((code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	{
	  if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
	    return fold_build2 (code2, boolean_type_node, op2a, op2b);
	  else
	    return fold_build2 (code1, boolean_type_node, op1a, op1b);
	}

      /* Check for singleton ranges.
	 (x < c) OR (x > c) with equal constants is (x != c).  */
      else if (cmp == 0
	       && ((code1 == LT_EXPR && code2 == GT_EXPR)
		   || (code1 == GT_EXPR && code2 == LT_EXPR)))
	return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);

      /* Check for less/greater pairs that don't restrict the range at all.  */
      else if (cmp >= 0
	       && (code1 == LT_EXPR || code1 == LE_EXPR)
	       && (code2 == GT_EXPR || code2 == GE_EXPR))
	return boolean_true_node;
      else if (cmp <= 0
	       && (code1 == GT_EXPR || code1 == GE_EXPR)
	       && (code2 == LT_EXPR || code2 == LE_EXPR))
	return boolean_true_node;
    }

  /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
     NAME's definition is a truth value.  See if there are any simplifications
     that can be done against the NAME's definition.  */
  if (TREE_CODE (op1a) == SSA_NAME
      && (code1 == NE_EXPR || code1 == EQ_EXPR)
      && (integer_zerop (op1b) || integer_onep (op1b)))
    {
      /* (NAME == 0) and (NAME != 1) test NAME inverted.  */
      bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
		     || (code1 == NE_EXPR && integer_onep (op1b)));
      gimple stmt = SSA_NAME_DEF_STMT (op1a);
      switch (gimple_code (stmt))
	{
	case GIMPLE_ASSIGN:
	  /* Try to simplify by copy-propagating the definition.  */
	  return or_var_with_comparison (op1a, invert, code2, op2a, op2b);

	case GIMPLE_PHI:
	  /* If every argument to the PHI produces the same result when
	     ORed with the second comparison, we win.
	     Do not do this unless the type is bool since we need a bool
	     result here anyway.  */
	  if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
	    {
	      tree result = NULL_TREE;
	      unsigned i;
	      for (i = 0; i < gimple_phi_num_args (stmt); i++)
		{
		  tree arg = gimple_phi_arg_def (stmt, i);

		  /* If this PHI has itself as an argument, ignore it.
		     If all the other args produce the same result,
		     we're still OK.  */
		  if (arg == gimple_phi_result (stmt))
		    continue;
		  else if (TREE_CODE (arg) == INTEGER_CST)
		    {
		      if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
			{
			  if (!result)
			    result = boolean_true_node;
			  else if (!integer_onep (result))
			    return NULL_TREE;
			}
		      else if (!result)
			result = fold_build2 (code2, boolean_type_node,
					      op2a, op2b);
		      else if (!same_bool_comparison_p (result,
							code2, op2a, op2b))
			return NULL_TREE;
		    }
		  else if (TREE_CODE (arg) == SSA_NAME
			   && !SSA_NAME_IS_DEFAULT_DEF (arg))
		    {
		      tree temp;
		      gimple def_stmt = SSA_NAME_DEF_STMT (arg);
		      /* In simple cases we can look through PHI nodes,
			 but we have to be careful with loops.
			 See PR49073.  */
		      if (! dom_info_available_p (CDI_DOMINATORS)
			  || gimple_bb (def_stmt) == gimple_bb (stmt)
			  || dominated_by_p (CDI_DOMINATORS,
					     gimple_bb (def_stmt),
					     gimple_bb (stmt)))
			return NULL_TREE;
		      temp = or_var_with_comparison (arg, invert, code2,
						     op2a, op2b);
		      if (!temp)
			return NULL_TREE;
		      else if (!result)
			result = temp;
		      else if (!same_bool_result_p (result, temp))
			return NULL_TREE;
		    }
		  else
		    return NULL_TREE;
		}
	      return result;
	    }

	default:
	  break;
	}
    }
  return NULL_TREE;
}
2364
2365/* Try to simplify the OR of two comparisons, specified by
2366 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
2367 If this can be simplified to a single expression (without requiring
2368 introducing more SSA variables to hold intermediate values),
2369 return the resulting tree. Otherwise return NULL_TREE.
2370 If the result expression is non-null, it has boolean type. */
2371
2372tree
2373maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
2374 enum tree_code code2, tree op2a, tree op2b)
2375{
2376 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
2377 if (t)
2378 return t;
2379 else
2380 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
2381}
cfef45c8
RG
2382
2383
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ??? This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
{
  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code subcode = gimple_assign_rhs_code (stmt);

	switch (get_gimple_rhs_class (subcode))
	  {
	  case GIMPLE_SINGLE_RHS:
	    {
	      tree rhs = gimple_assign_rhs1 (stmt);
	      enum tree_code_class kind = TREE_CODE_CLASS (subcode);

	      if (TREE_CODE (rhs) == SSA_NAME)
		{
		  /* If the RHS is an SSA_NAME, return its known constant value,
		     if any.  */
		  return (*valueize) (rhs);
		}
	      /* Handle propagating invariant addresses into address
		 operations.  */
	      else if (TREE_CODE (rhs) == ADDR_EXPR
		       && !is_gimple_min_invariant (rhs))
		{
		  HOST_WIDE_INT offset = 0;
		  tree base;
		  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
		  if (base
		      && (CONSTANT_CLASS_P (base)
			  || decl_address_invariant_p (base)))
		    return build_invariant_address (TREE_TYPE (rhs),
						    base, offset);
		}
	      /* A vector CONSTRUCTOR with one value per element folds to
		 a VECTOR_CST if every valueized element is a constant.  */
	      else if (TREE_CODE (rhs) == CONSTRUCTOR
		       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		       && (CONSTRUCTOR_NELTS (rhs)
			   == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
		{
		  unsigned i;
		  tree val, *vec;

		  vec = XALLOCAVEC (tree,
				    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
		  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
		    {
		      val = (*valueize) (val);
		      if (TREE_CODE (val) == INTEGER_CST
			  || TREE_CODE (val) == REAL_CST
			  || TREE_CODE (val) == FIXED_CST)
			vec[i] = val;
		      else
			return NULL_TREE;
		    }

		  return build_vector (TREE_TYPE (rhs), vec);
		}

	      if (kind == tcc_reference)
		{
		  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
		       || TREE_CODE (rhs) == REALPART_EXPR
		       || TREE_CODE (rhs) == IMAGPART_EXPR)
		      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_unary_loc (EXPR_LOCATION (rhs),
					     TREE_CODE (rhs),
					     TREE_TYPE (rhs), val);
		    }
		  else if (TREE_CODE (rhs) == BIT_FIELD_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      return fold_ternary_loc (EXPR_LOCATION (rhs),
					       TREE_CODE (rhs),
					       TREE_TYPE (rhs), val,
					       TREE_OPERAND (rhs, 1),
					       TREE_OPERAND (rhs, 2));
		    }
		  else if (TREE_CODE (rhs) == MEM_REF
			   && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
		    {
		      /* Replace the SSA base of a MEM_REF by its invariant
			 address before trying constant folding below.  */
		      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
		      if (TREE_CODE (val) == ADDR_EXPR
			  && is_gimple_min_invariant (val))
			{
			  tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
						  unshare_expr (val),
						  TREE_OPERAND (rhs, 1));
			  if (tem)
			    rhs = tem;
			}
		    }
		  return fold_const_aggregate_ref_1 (rhs, valueize);
		}
	      else if (kind == tcc_declaration)
		return get_symbol_constant_value (rhs);
	      return rhs;
	    }

	  case GIMPLE_UNARY_RHS:
	    {
	      /* Handle unary operators that can appear in GIMPLE form.
		 Note that we know the single operand must be a constant,
		 so this should almost always return a simplified RHS.  */
	      tree lhs = gimple_assign_lhs (stmt);
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));

	      /* Conversions are useless for CCP purposes if they are
		 value-preserving.  Thus the restrictions that
		 useless_type_conversion_p places for restrict qualification
		 of pointer types should not apply here.
		 Substitution later will only substitute to allowed places.  */
	      if (CONVERT_EXPR_CODE_P (subcode)
		  && POINTER_TYPE_P (TREE_TYPE (lhs))
		  && POINTER_TYPE_P (TREE_TYPE (op0))
		  && TYPE_ADDR_SPACE (TREE_TYPE (lhs))
		     == TYPE_ADDR_SPACE (TREE_TYPE (op0))
		  && TYPE_MODE (TREE_TYPE (lhs))
		     == TYPE_MODE (TREE_TYPE (op0)))
		return op0;

	      return
		fold_unary_ignore_overflow_loc (loc, subcode,
						gimple_expr_type (stmt), op0);
	    }

	  case GIMPLE_BINARY_RHS:
	    {
	      /* Handle binary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

	      /* Translate &x + CST into an invariant form suitable for
		 further propagation.  */
	      if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
		  && TREE_CODE (op0) == ADDR_EXPR
		  && TREE_CODE (op1) == INTEGER_CST)
		{
		  tree off = fold_convert (ptr_type_node, op1);
		  return build_fold_addr_expr_loc
			   (loc,
			    fold_build2 (MEM_REF,
					 TREE_TYPE (TREE_TYPE (op0)),
					 unshare_expr (op0), off));
		}

	      return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
	    }

	  case GIMPLE_TERNARY_RHS:
	    {
	      /* Handle ternary operators that can appear in GIMPLE form.  */
	      tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
	      tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
	      tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

	      /* Fold embedded expressions in ternary codes.  */
	      if ((subcode == COND_EXPR
		   || subcode == VEC_COND_EXPR)
		  && COMPARISON_CLASS_P (op0))
		{
		  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
		  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
		  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
					      TREE_TYPE (op0), op00, op01);
		  if (tem)
		    op0 = tem;
		}

	      return fold_ternary_loc (loc, subcode,
				       gimple_expr_type (stmt), op0, op1, op2);
	    }

	  default:
	    gcc_unreachable ();
	  }
      }

    case GIMPLE_CALL:
      {
	tree fn;

	if (gimple_call_internal_p (stmt))
	  /* No folding yet for these functions.  */
	  return NULL_TREE;

	fn = (*valueize) (gimple_call_fn (stmt));
	/* Only direct calls to built-in functions are folded here.  */
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree call, retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    call = build_call_array_loc (loc,
					 gimple_call_return_type (stmt),
					 fn, gimple_call_num_args (stmt), args);
	    retval = fold_call_expr (EXPR_LOCATION (call), call, false);
	    if (retval)
	      /* fold_call_expr wraps the result inside a NOP_EXPR.  */
	      STRIP_NOPS (retval);
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
2613
2614/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
2615 Returns NULL_TREE if folding to a constant is not possible, otherwise
2616 returns a constant according to is_gimple_min_invariant. */
2617
2618tree
2619gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
2620{
2621 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
2622 if (res && is_gimple_min_invariant (res))
2623 return res;
2624 return NULL_TREE;
2625}
2626
2627
2628/* The following set of functions are supposed to fold references using
2629 their constant initializers. */
2630
2631static tree fold_ctor_reference (tree type, tree ctor,
2632 unsigned HOST_WIDE_INT offset,
2633 unsigned HOST_WIDE_INT size);
2634
/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   VALUEIZE, when non-NULL, is used to look through SSA names in
   MEM_REF bases.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant offset into *BIT_OFFSET and strip
	 the indirection, valueizing an SSA base if possible.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!host_integerp (TREE_OPERAND (base, 1), 0))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).low
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
      if (!const_value_known_p (base))
	return NULL_TREE;

      /* Fallthru.  */
    case CONST_DECL:
      /* A static or external decl without an initializer is known to
	 be zero-initialized.  */
      if (!DECL_INITIAL (base)
	  && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
	return error_mark_node;
      return DECL_INITIAL (base);

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Only constant-position references of known size can be looked
	 through.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
2699
2700/* CTOR is STRING_CST. Fold reference of type TYPE and size SIZE
2701 to the memory at bit OFFSET.
2702
2703 We do only simple job of folding byte accesses. */
2704
2705static tree
2706fold_string_cst_ctor_reference (tree type, tree ctor,
2707 unsigned HOST_WIDE_INT offset,
2708 unsigned HOST_WIDE_INT size)
2709{
2710 if (INTEGRAL_TYPE_P (type)
2711 && (TYPE_MODE (type)
2712 == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2713 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
2714 == MODE_INT)
2715 && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
2716 && size == BITS_PER_UNIT
2717 && !(offset % BITS_PER_UNIT))
2718 {
2719 offset /= BITS_PER_UNIT;
2720 if (offset < (unsigned HOST_WIDE_INT) TREE_STRING_LENGTH (ctor))
2721 return build_int_cst_type (type, (TREE_STRING_POINTER (ctor)
2722 [offset]));
2723 /* Folding
2724 const char a[20]="hello";
2725 return a[10];
2726
2727 might lead to offset greater than string length. In this case we
2728 know value is either initialized to 0 or out of bounds. Return 0
2729 in both cases. */
2730 return build_zero_cst (type);
2731 }
2732 return NULL_TREE;
2733}
2734
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  Returns the folded value, a zero
   constant when the constructor does not mention the accessed element,
   or NULL_TREE when the access cannot be folded.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  double_int low_bound, elt_size;
  double_int index, max_index;
  double_int access_index;
  tree domain_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      low_bound = tree_to_double_int (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = double_int_zero;
  /* Static constructors for variably sized objects makes no sense.  */
  gcc_assert (TREE_CODE(TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size =
    tree_to_double_int (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));


  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || double_int_cmp (elt_size,
			 tree_to_double_int (TYPE_SIZE_UNIT (type)), 0) < 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = double_int_udiv (uhwi_to_double_int (offset / BITS_PER_UNIT),
				  elt_size, TRUNC_DIV_EXPR);
  access_index = double_int_add (access_index, low_bound);

  /* And offset within the access.  */
  inner_offset = offset % (double_int_to_uhwi (elt_size) * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > double_int_to_uhwi (elt_size) * BITS_PER_UNIT)
    return NULL_TREE;

  /* Walk the constructor elements looking for the one covering
     ACCESS_INDEX; INDEX tracks the implicit position for elements
     without an explicit index.  */
  index = double_int_sub (low_bound, double_int_one);
  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitely set index, or specify range
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = tree_to_double_int (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = tree_to_double_int (TREE_OPERAND (cfield, 0));
	      max_index = tree_to_double_int (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	max_index = index = double_int_add (index, double_int_one);

      /* Do we have match?  */
      if (double_int_cmp (access_index, index, 1) >= 0
	  && double_int_cmp (access_index, max_index, 1) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size);
    }
  /* When memory is not explicitely mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
2819
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.
   Returns the folded value, a zero constant when the constructor does
   not cover the accessed bits, or NULL_TREE when the access spans
   multiple fields or only partially overlaps one.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      double_int bitoffset;
      double_int byte_offset_cst = tree_to_double_int (byte_offset);
      double_int bits_per_unit_cst = uhwi_to_double_int (BITS_PER_UNIT);
      double_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = double_int_add (tree_to_double_int (field_offset),
				  double_int_mul (byte_offset_cst,
						  bits_per_unit_cst));
      /* Compute bit offset where the field ends.  A flexible array
	 member (NULL field_size) extends to the end of the object.  */
      if (field_size != NULL_TREE)
	bitoffset_end = double_int_add (bitoffset,
					tree_to_double_int (field_size));
      else
	bitoffset_end = double_int_zero;

      access_end = double_int_add (uhwi_to_double_int (offset),
				   uhwi_to_double_int (size));

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (double_int_cmp (access_end, bitoffset, 0) > 0
	  && (field_size == NULL_TREE
	      || double_int_cmp (uhwi_to_double_int (offset),
				 bitoffset_end, 0) < 0))
	{
	  double_int inner_offset = double_int_sub (uhwi_to_double_int (offset),
						    bitoffset);
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (double_int_cmp (access_end, bitoffset_end, 0) > 0)
	    return NULL_TREE;
	  if (double_int_cmp (uhwi_to_double_int (offset), bitoffset, 0) < 0)
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      double_int_to_uhwi (inner_offset), size);
	}
    }
  /* When memory is not explicitely mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
2887
2888/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
2889 to the memory at bit OFFSET. */
2890
2891static tree
2892fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
2893 unsigned HOST_WIDE_INT size)
2894{
2895 tree ret;
2896
2897 /* We found the field with exact match. */
2898 if (useless_type_conversion_p (type, TREE_TYPE (ctor))
2899 && !offset)
2900 return canonicalize_constructor_val (ctor);
2901
2902 /* We are at the end of walk, see if we can view convert the
2903 result. */
2904 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
2905 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
2906 && operand_equal_p (TYPE_SIZE (type),
2907 TYPE_SIZE (TREE_TYPE (ctor)), 0))
2908 {
2909 ret = canonicalize_constructor_val (ctor);
2910 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
2911 if (ret)
2912 STRIP_NOPS (ret);
2913 return ret;
2914 }
2915 if (TREE_CODE (ctor) == STRING_CST)
2916 return fold_string_cst_ctor_reference (type, ctor, offset, size);
2917 if (TREE_CODE (ctor) == CONSTRUCTOR)
2918 {
2919
eb8f1123
RG
2920 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
2921 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
cfef45c8
RG
2922 return fold_array_ctor_reference (type, ctor, offset, size);
2923 else
2924 return fold_nonarray_ctor_reference (type, ctor, offset, size);
2925 }
2926
2927 return NULL_TREE;
2928}
2929
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */
2933
tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  /* Never fold away a volatile access; the load has to stay.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  /* A bare declaration folds directly to its known constant value,
     if it has one.  */
  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  /* Reads from string literals are handled by the generic folder.  */
  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && host_integerp (idx, 0))
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       host_integerp (low_bound, 0))
	      && (unit_size = array_ref_element_size (t),
		  host_integerp (unit_size, 1)))
	    {
	      /* Bit offset of the indexed element from the base:
		 (idx - low_bound) * unit_size * BITS_PER_UNIT.  */
	      offset = TREE_INT_CST_LOW (idx);
	      offset -= TREE_INT_CST_LOW (low_bound);
	      offset *= TREE_INT_CST_LOW (unit_size);
	      offset *= BITS_PER_UNIT;

	      base = TREE_OPERAND (t, 0);
	      ctor = get_base_constructor (base, &offset, valueize);
	      /* Empty constructor.  Always fold to 0.  */
	      if (ctor == error_mark_node)
		return build_zero_cst (TREE_TYPE (t));
	      /* Out of bound array access.  Value is undefined,
		 but don't fold.  */
	      if (offset < 0)
		return NULL_TREE;
	      /* We can not determine ctor.  */
	      if (!ctor)
		return NULL_TREE;
	      return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					  TREE_INT_CST_LOW (unit_size)
					  * BITS_PER_UNIT);
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      /* Let get_ref_base_and_extent compute the constant bit range;
	 it only helps us when the access is exactly determined.  */
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Recurse on the operand; if it folds to a complex constant,
	   extracting the requested part folds to a constant too.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
3036
/* Fold a constant aggregate reference T without SSA name valueization;
   see fold_const_aggregate_ref_1.  Return NULL_TREE if nothing folds.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
06bc3ec7 3042
81fa35bd
MJ
/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
   is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
   KNOWN_BINFO carries the binfo describing the true type of
   OBJ_TYPE_REF_OBJECT(REF).  Return NULL_TREE when the target cannot
   be determined or cannot be referenced from this unit.  */

tree
gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo)
{
  unsigned HOST_WIDE_INT offset, size;
  tree v, fn;

  v = BINFO_VTABLE (known_binfo);
  /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone.  */
  if (!v)
    return NULL_TREE;

  /* The vtable expression may be &vtable + byte-offset; split that into
     a bit OFFSET and the underlying address.  */
  if (TREE_CODE (v) == POINTER_PLUS_EXPR)
    {
      offset = tree_low_cst (TREE_OPERAND (v, 1), 1) * BITS_PER_UNIT;
      v = TREE_OPERAND (v, 0);
    }
  else
    offset = 0;

  if (TREE_CODE (v) != ADDR_EXPR)
    return NULL_TREE;
  v = TREE_OPERAND (v, 0);

  /* We need an actual vtable VAR_DECL with a usable initializer to
     look the slot up in.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v)
      || !DECL_INITIAL (v)
      || DECL_INITIAL (v) == error_mark_node)
    return NULL_TREE;
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  /* Each slot is one vtable element; TOKEN selects the slot, so the
     final bit offset is the base offset plus TOKEN slots.  */
  size = tree_low_cst (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))), 1);
  offset += token * size;
  fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), DECL_INITIAL (v),
			    offset, size);
  /* A zero entry means the slot is unset (e.g. pure virtual).  */
  if (!fn || integer_zerop (fn))
    return NULL_TREE;
  gcc_assert (TREE_CODE (fn) == ADDR_EXPR
	      || TREE_CODE (fn) == FDESC_EXPR);
  fn = TREE_OPERAND (fn, 0);
  gcc_assert (TREE_CODE (fn) == FUNCTION_DECL);

  /* When cgraph node is missing and function is not public, we cannot
     devirtualize.  This can happen in WHOPR when the actual method
     ends up in other partition, because we found devirtualization
     possibility too late.  */
  if (!can_refer_decl_in_current_unit_p (fn))
    return NULL_TREE;

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_get_create_node (fn);

  return fn;
}
3102
06bc3ec7
BS
3103/* Return true iff VAL is a gimple expression that is known to be
3104 non-negative. Restricted to floating-point inputs. */
3105
3106bool
3107gimple_val_nonnegative_real_p (tree val)
3108{
3109 gimple def_stmt;
3110
3111 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
3112
3113 /* Use existing logic for non-gimple trees. */
3114 if (tree_expr_nonnegative_p (val))
3115 return true;
3116
3117 if (TREE_CODE (val) != SSA_NAME)
3118 return false;
3119
3120 /* Currently we look only at the immediately defining statement
3121 to make this determination, since recursion on defining
3122 statements of operands can lead to quadratic behavior in the
3123 worst case. This is expected to catch almost all occurrences
3124 in practice. It would be possible to implement limited-depth
3125 recursion if important cases are lost. Alternatively, passes
3126 that need this information (such as the pow/powi lowering code
3127 in the cse_sincos pass) could be revised to provide it through
3128 dataflow propagation. */
3129
3130 def_stmt = SSA_NAME_DEF_STMT (val);
3131
3132 if (is_gimple_assign (def_stmt))
3133 {
3134 tree op0, op1;
3135
3136 /* See fold-const.c:tree_expr_nonnegative_p for additional
3137 cases that could be handled with recursion. */
3138
3139 switch (gimple_assign_rhs_code (def_stmt))
3140 {
3141 case ABS_EXPR:
3142 /* Always true for floating-point operands. */
3143 return true;
3144
3145 case MULT_EXPR:
3146 /* True if the two operands are identical (since we are
3147 restricted to floating-point inputs). */
3148 op0 = gimple_assign_rhs1 (def_stmt);
3149 op1 = gimple_assign_rhs2 (def_stmt);
3150
3151 if (op0 == op1
3152 || operand_equal_p (op0, op1, 0))
3153 return true;
3154
3155 default:
3156 return false;
3157 }
3158 }
3159 else if (is_gimple_call (def_stmt))
3160 {
3161 tree fndecl = gimple_call_fndecl (def_stmt);
3162 if (fndecl
3163 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
3164 {
3165 tree arg1;
3166
3167 switch (DECL_FUNCTION_CODE (fndecl))
3168 {
3169 CASE_FLT_FN (BUILT_IN_ACOS):
3170 CASE_FLT_FN (BUILT_IN_ACOSH):
3171 CASE_FLT_FN (BUILT_IN_CABS):
3172 CASE_FLT_FN (BUILT_IN_COSH):
3173 CASE_FLT_FN (BUILT_IN_ERFC):
3174 CASE_FLT_FN (BUILT_IN_EXP):
3175 CASE_FLT_FN (BUILT_IN_EXP10):
3176 CASE_FLT_FN (BUILT_IN_EXP2):
3177 CASE_FLT_FN (BUILT_IN_FABS):
3178 CASE_FLT_FN (BUILT_IN_FDIM):
3179 CASE_FLT_FN (BUILT_IN_HYPOT):
3180 CASE_FLT_FN (BUILT_IN_POW10):
3181 return true;
3182
3183 CASE_FLT_FN (BUILT_IN_SQRT):
3184 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
3185 nonnegative inputs. */
3186 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
3187 return true;
3188
3189 break;
3190
3191 CASE_FLT_FN (BUILT_IN_POWI):
3192 /* True if the second argument is an even integer. */
3193 arg1 = gimple_call_arg (def_stmt, 1);
3194
3195 if (TREE_CODE (arg1) == INTEGER_CST
3196 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
3197 return true;
3198
3199 break;
3200
3201 CASE_FLT_FN (BUILT_IN_POW):
3202 /* True if the second argument is an even integer-valued
3203 real. */
3204 arg1 = gimple_call_arg (def_stmt, 1);
3205
3206 if (TREE_CODE (arg1) == REAL_CST)
3207 {
3208 REAL_VALUE_TYPE c;
3209 HOST_WIDE_INT n;
3210
3211 c = TREE_REAL_CST (arg1);
3212 n = real_to_integer (&c);
3213
3214 if ((n & 1) == 0)
3215 {
3216 REAL_VALUE_TYPE cint;
3217 real_from_integer (&cint, VOIDmode, n, n < 0 ? -1 : 0, 0);
3218 if (real_identical (&c, &cint))
3219 return true;
3220 }
3221 }
3222
3223 break;
3224
3225 default:
3226 return false;
3227 }
3228 }
3229 }
3230
3231 return false;
3232}
This page took 1.245898 seconds and 5 git commands to generate.