]> gcc.gnu.org Git - gcc.git/blame - gcc/gimple-fold.c
invoke.texi ([Wnarrowing]): Update for non-constants in C++11.
[gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
23a5b65a 2 Copyright (C) 2010-2014 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
d8a2d370
DN
26#include "stringpool.h"
27#include "expr.h"
28#include "stmt.h"
29#include "stor-layout.h"
cbdd87d4 30#include "flags.h"
cbdd87d4 31#include "function.h"
7ee2468b 32#include "dumpfile.h"
442b4905 33#include "bitmap.h"
2fb9a547
AM
34#include "basic-block.h"
35#include "tree-ssa-alias.h"
36#include "internal-fn.h"
37#include "gimple-fold.h"
38#include "gimple-expr.h"
39#include "is-a.h"
18f429e2 40#include "gimple.h"
45b0be94 41#include "gimplify.h"
5be5c238 42#include "gimple-iterator.h"
442b4905
AM
43#include "gimple-ssa.h"
44#include "tree-ssanames.h"
45#include "tree-into-ssa.h"
46#include "tree-dfa.h"
7a300452 47#include "tree-ssa.h"
cbdd87d4 48#include "tree-ssa-propagate.h"
cbdd87d4 49#include "target.h"
450ad0cd
JH
50#include "ipa-utils.h"
51#include "gimple-pretty-print.h"
4484a35a 52#include "tree-ssa-address.h"
862d0b35 53#include "langhooks.h"
19e51b40 54#include "gimplify-me.h"
2b5f0895 55#include "dbgcnt.h"
9b2b7279 56#include "builtins.h"
fef5a0d9 57#include "output.h"
cbdd87d4 58
b3b9f3d0 59/* Return true when DECL can be referenced from current unit.
c44c2088
JH
60 FROM_DECL (if non-null) specify constructor of variable DECL was taken from.
61 We can get declarations that are not possible to reference for various
62 reasons:
1389294c 63
1389294c
JH
64 1) When analyzing C++ virtual tables.
65 C++ virtual tables do have known constructors even
66 when they are keyed to other compilation unit.
67 Those tables can contain pointers to methods and vars
68 in other units. Those methods have both STATIC and EXTERNAL
69 set.
70 2) In WHOPR mode devirtualization might lead to reference
71 to method that was partitioned elsehwere.
72 In this case we have static VAR_DECL or FUNCTION_DECL
73 that has no corresponding callgraph/varpool node
b3b9f3d0
JH
74 declaring the body.
75 3) COMDAT functions referred by external vtables that
3e89949e 76 we devirtualize only during final compilation stage.
b3b9f3d0
JH
77 At this time we already decided that we will not output
78 the function body and thus we can't reference the symbol
79 directly. */
80
1389294c 81static bool
c44c2088 82can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
1389294c 83{
2c8326a5 84 varpool_node *vnode;
1389294c 85 struct cgraph_node *node;
5e20cdc9 86 symtab_node *snode;
c44c2088 87
1632a686
JH
88 if (DECL_ABSTRACT (decl))
89 return false;
90
91 /* We are concerned only about static/external vars and functions. */
92 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
93 || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
94 return true;
95
96 /* Static objects can be referred only if they was not optimized out yet. */
97 if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
98 {
3aaf0529
JH
99 /* Before we start optimizing unreachable code we can be sure all
100 static objects are defined. */
101 if (cgraph_function_flags_ready)
102 return true;
d52f5295 103 snode = symtab_node::get (decl);
3aaf0529 104 if (!snode || !snode->definition)
1632a686 105 return false;
7de90a6c 106 node = dyn_cast <cgraph_node *> (snode);
1632a686
JH
107 return !node || !node->global.inlined_to;
108 }
109
6da8be89 110 /* We will later output the initializer, so we can refer to it.
c44c2088 111 So we are concerned only when DECL comes from initializer of
3aaf0529 112 external var or var that has been optimized out. */
c44c2088
JH
113 if (!from_decl
114 || TREE_CODE (from_decl) != VAR_DECL
3aaf0529 115 || (!DECL_EXTERNAL (from_decl)
9041d2e6 116 && (vnode = varpool_node::get (from_decl)) != NULL
3aaf0529 117 && vnode->definition)
6da8be89 118 || (flag_ltrans
9041d2e6 119 && (vnode = varpool_node::get (from_decl)) != NULL
6adda80b 120 && vnode->in_other_partition))
c44c2088 121 return true;
c44c2088
JH
122 /* We are folding reference from external vtable. The vtable may reffer
123 to a symbol keyed to other compilation unit. The other compilation
124 unit may be in separate DSO and the symbol may be hidden. */
125 if (DECL_VISIBILITY_SPECIFIED (decl)
126 && DECL_EXTERNAL (decl)
a33a931b 127 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
d52f5295 128 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
c44c2088 129 return false;
b3b9f3d0
JH
130 /* When function is public, we always can introduce new reference.
131 Exception are the COMDAT functions where introducing a direct
132 reference imply need to include function body in the curren tunit. */
133 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
134 return true;
3aaf0529
JH
135 /* We have COMDAT. We are going to check if we still have definition
136 or if the definition is going to be output in other partition.
137 Bypass this when gimplifying; all needed functions will be produced.
c44c2088
JH
138
139 As observed in PR20991 for already optimized out comdat virtual functions
073a8998 140 it may be tempting to not necessarily give up because the copy will be
c44c2088
JH
141 output elsewhere when corresponding vtable is output.
142 This is however not possible - ABI specify that COMDATs are output in
143 units where they are used and when the other unit was compiled with LTO
144 it is possible that vtable was kept public while the function itself
145 was privatized. */
3aaf0529 146 if (!cgraph_function_flags_ready)
b3b9f3d0 147 return true;
c44c2088 148
d52f5295 149 snode = symtab_node::get (decl);
3aaf0529
JH
150 if (!snode
151 || ((!snode->definition || DECL_EXTERNAL (decl))
152 && (!snode->in_other_partition
153 || (!snode->forced_by_abi && !snode->force_output))))
154 return false;
155 node = dyn_cast <cgraph_node *> (snode);
156 return !node || !node->global.inlined_to;
1389294c
JH
157}
158
0038d4e0 159/* CVAL is value taken from DECL_INITIAL of variable. Try to transform it into
c44c2088
JH
160 acceptable form for is_gimple_min_invariant.
161 FROM_DECL (if non-NULL) specify variable whose constructor contains CVAL. */
17f39a39
JH
162
163tree
c44c2088 164canonicalize_constructor_val (tree cval, tree from_decl)
17f39a39 165{
50619002
EB
166 tree orig_cval = cval;
167 STRIP_NOPS (cval);
315f5f1b
RG
168 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
169 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
17f39a39 170 {
315f5f1b
RG
171 tree ptr = TREE_OPERAND (cval, 0);
172 if (is_gimple_min_invariant (ptr))
173 cval = build1_loc (EXPR_LOCATION (cval),
174 ADDR_EXPR, TREE_TYPE (ptr),
175 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
176 ptr,
177 fold_convert (ptr_type_node,
178 TREE_OPERAND (cval, 1))));
17f39a39
JH
179 }
180 if (TREE_CODE (cval) == ADDR_EXPR)
181 {
5a27a197
RG
182 tree base = NULL_TREE;
183 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
ca5f4331
MM
184 {
185 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
186 if (base)
187 TREE_OPERAND (cval, 0) = base;
188 }
5a27a197
RG
189 else
190 base = get_base_address (TREE_OPERAND (cval, 0));
7501ca28
RG
191 if (!base)
192 return NULL_TREE;
b3b9f3d0 193
7501ca28
RG
194 if ((TREE_CODE (base) == VAR_DECL
195 || TREE_CODE (base) == FUNCTION_DECL)
c44c2088 196 && !can_refer_decl_in_current_unit_p (base, from_decl))
1389294c 197 return NULL_TREE;
7501ca28 198 if (TREE_CODE (base) == VAR_DECL)
46eb666a 199 TREE_ADDRESSABLE (base) = 1;
7501ca28
RG
200 else if (TREE_CODE (base) == FUNCTION_DECL)
201 {
202 /* Make sure we create a cgraph node for functions we'll reference.
203 They can be non-existent if the reference comes from an entry
204 of an external vtable for example. */
d52f5295 205 cgraph_node::get_create (base);
7501ca28 206 }
0038d4e0 207 /* Fixup types in global initializers. */
73aef89e
RG
208 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
209 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
50619002
EB
210
211 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
212 cval = fold_convert (TREE_TYPE (orig_cval), cval);
213 return cval;
17f39a39 214 }
846abd0d
RB
215 if (TREE_OVERFLOW_P (cval))
216 return drop_tree_overflow (cval);
50619002 217 return orig_cval;
17f39a39 218}
cbdd87d4
RG
219
220/* If SYM is a constant variable with known value, return the value.
221 NULL_TREE is returned otherwise. */
222
223tree
224get_symbol_constant_value (tree sym)
225{
6a6dac52
JH
226 tree val = ctor_for_folding (sym);
227 if (val != error_mark_node)
cbdd87d4 228 {
cbdd87d4
RG
229 if (val)
230 {
9d60be38 231 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 232 if (val && is_gimple_min_invariant (val))
17f39a39 233 return val;
1389294c
JH
234 else
235 return NULL_TREE;
cbdd87d4
RG
236 }
237 /* Variables declared 'const' without an initializer
238 have zero as the initializer if they may not be
239 overridden at link or run time. */
240 if (!val
cbdd87d4
RG
241 && (INTEGRAL_TYPE_P (TREE_TYPE (sym))
242 || SCALAR_FLOAT_TYPE_P (TREE_TYPE (sym))))
e8160c9a 243 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
244 }
245
246 return NULL_TREE;
247}
248
249
cbdd87d4
RG
250
251/* Subroutine of fold_stmt. We perform several simplifications of the
252 memory reference tree EXPR and make sure to re-gimplify them properly
253 after propagation of constant addresses. IS_LHS is true if the
254 reference is supposed to be an lvalue. */
255
256static tree
257maybe_fold_reference (tree expr, bool is_lhs)
258{
259 tree *t = &expr;
17f39a39 260 tree result;
cbdd87d4 261
f0eddb90
RG
262 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
263 || TREE_CODE (expr) == REALPART_EXPR
264 || TREE_CODE (expr) == IMAGPART_EXPR)
265 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
266 return fold_unary_loc (EXPR_LOCATION (expr),
267 TREE_CODE (expr),
268 TREE_TYPE (expr),
269 TREE_OPERAND (expr, 0));
270 else if (TREE_CODE (expr) == BIT_FIELD_REF
271 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
272 return fold_ternary_loc (EXPR_LOCATION (expr),
273 TREE_CODE (expr),
274 TREE_TYPE (expr),
275 TREE_OPERAND (expr, 0),
276 TREE_OPERAND (expr, 1),
277 TREE_OPERAND (expr, 2));
278
279 while (handled_component_p (*t))
280 t = &TREE_OPERAND (*t, 0);
cbdd87d4 281
f0eddb90
RG
282 /* Canonicalize MEM_REFs invariant address operand. Do this first
283 to avoid feeding non-canonical MEM_REFs elsewhere. */
284 if (TREE_CODE (*t) == MEM_REF
285 && !is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)))
cbdd87d4 286 {
f0eddb90
RG
287 bool volatile_p = TREE_THIS_VOLATILE (*t);
288 tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
289 TREE_OPERAND (*t, 0),
290 TREE_OPERAND (*t, 1));
291 if (tem)
292 {
293 TREE_THIS_VOLATILE (tem) = volatile_p;
294 *t = tem;
295 tem = maybe_fold_reference (expr, is_lhs);
296 if (tem)
297 return tem;
298 return expr;
299 }
cbdd87d4
RG
300 }
301
f0eddb90
RG
302 if (!is_lhs
303 && (result = fold_const_aggregate_ref (expr))
304 && is_gimple_min_invariant (result))
305 return result;
cbdd87d4 306
70f34814
RG
307 /* Fold back MEM_REFs to reference trees. */
308 if (TREE_CODE (*t) == MEM_REF
309 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
310 && integer_zerop (TREE_OPERAND (*t, 1))
311 && (TREE_THIS_VOLATILE (*t)
312 == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
313 && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
314 && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
315 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
316 /* We have to look out here to not drop a required conversion
317 from the rhs to the lhs if is_lhs, but we don't have the
318 rhs here to verify that. Thus require strict type
319 compatibility. */
320 && types_compatible_p (TREE_TYPE (*t),
321 TREE_TYPE (TREE_OPERAND
f0eddb90 322 (TREE_OPERAND (*t, 0), 0))))
cbdd87d4 323 {
70f34814
RG
324 tree tem;
325 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
326 tem = maybe_fold_reference (expr, is_lhs);
327 if (tem)
328 return tem;
329 return expr;
330 }
4d948885
RG
331 else if (TREE_CODE (*t) == TARGET_MEM_REF)
332 {
333 tree tem = maybe_fold_tmr (*t);
334 if (tem)
335 {
336 *t = tem;
337 tem = maybe_fold_reference (expr, is_lhs);
338 if (tem)
339 return tem;
340 return expr;
341 }
342 }
cbdd87d4
RG
343
344 return NULL_TREE;
345}
346
347
348/* Attempt to fold an assignment statement pointed-to by SI. Returns a
349 replacement rhs for the statement or NULL_TREE if no simplification
350 could be made. It is assumed that the operands have been previously
351 folded. */
352
353static tree
354fold_gimple_assign (gimple_stmt_iterator *si)
355{
356 gimple stmt = gsi_stmt (*si);
357 enum tree_code subcode = gimple_assign_rhs_code (stmt);
358 location_t loc = gimple_location (stmt);
359
360 tree result = NULL_TREE;
361
362 switch (get_gimple_rhs_class (subcode))
363 {
364 case GIMPLE_SINGLE_RHS:
365 {
366 tree rhs = gimple_assign_rhs1 (stmt);
367
4e71066d 368 if (REFERENCE_CLASS_P (rhs))
cbdd87d4
RG
369 return maybe_fold_reference (rhs, false);
370
bdf37f7a
JH
371 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
372 {
373 tree val = OBJ_TYPE_REF_EXPR (rhs);
374 if (is_gimple_min_invariant (val))
375 return val;
f8a39967 376 else if (flag_devirtualize && virtual_method_call_p (rhs))
bdf37f7a
JH
377 {
378 bool final;
379 vec <cgraph_node *>targets
f8a39967 380 = possible_polymorphic_call_targets (rhs, stmt, &final);
2b5f0895 381 if (final && targets.length () <= 1 && dbg_cnt (devirt))
bdf37f7a
JH
382 {
383 tree fndecl;
2b5f0895 384
bdf37f7a
JH
385 if (targets.length () == 1)
386 fndecl = targets[0]->decl;
387 else
388 fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2b5f0895
XDL
389 if (dump_enabled_p ())
390 {
807b7d62 391 location_t loc = gimple_location_safe (stmt);
2b5f0895
XDL
392 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
393 "resolving virtual function address "
394 "reference to function %s\n",
395 targets.length () == 1
396 ? targets[0]->name ()
397 : "__builtin_unreachable");
398 }
f8a39967
JH
399 val = fold_convert (TREE_TYPE (val),
400 build_fold_addr_expr_loc (loc, fndecl));
bdf37f7a
JH
401 STRIP_USELESS_TYPE_CONVERSION (val);
402 return val;
403 }
404 }
405
406 }
cbdd87d4
RG
407 else if (TREE_CODE (rhs) == ADDR_EXPR)
408 {
70f34814
RG
409 tree ref = TREE_OPERAND (rhs, 0);
410 tree tem = maybe_fold_reference (ref, true);
411 if (tem
412 && TREE_CODE (tem) == MEM_REF
413 && integer_zerop (TREE_OPERAND (tem, 1)))
414 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
415 else if (tem)
cbdd87d4
RG
416 result = fold_convert (TREE_TYPE (rhs),
417 build_fold_addr_expr_loc (loc, tem));
70f34814
RG
418 else if (TREE_CODE (ref) == MEM_REF
419 && integer_zerop (TREE_OPERAND (ref, 1)))
420 result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
cbdd87d4
RG
421 }
422
423 else if (TREE_CODE (rhs) == CONSTRUCTOR
424 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
425 && (CONSTRUCTOR_NELTS (rhs)
426 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
427 {
428 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
429 unsigned i;
430 tree val;
431
432 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
433 if (TREE_CODE (val) != INTEGER_CST
434 && TREE_CODE (val) != REAL_CST
435 && TREE_CODE (val) != FIXED_CST)
436 return NULL_TREE;
437
438 return build_vector_from_ctor (TREE_TYPE (rhs),
439 CONSTRUCTOR_ELTS (rhs));
440 }
441
442 else if (DECL_P (rhs))
9d60be38 443 return get_symbol_constant_value (rhs);
cbdd87d4
RG
444
445 /* If we couldn't fold the RHS, hand over to the generic
446 fold routines. */
447 if (result == NULL_TREE)
448 result = fold (rhs);
449
450 /* Strip away useless type conversions. Both the NON_LVALUE_EXPR
451 that may have been added by fold, and "useless" type
452 conversions that might now be apparent due to propagation. */
453 STRIP_USELESS_TYPE_CONVERSION (result);
454
455 if (result != rhs && valid_gimple_rhs_p (result))
456 return result;
457
458 return NULL_TREE;
459 }
460 break;
461
462 case GIMPLE_UNARY_RHS:
463 {
464 tree rhs = gimple_assign_rhs1 (stmt);
465
466 result = fold_unary_loc (loc, subcode, gimple_expr_type (stmt), rhs);
467 if (result)
468 {
469 /* If the operation was a conversion do _not_ mark a
470 resulting constant with TREE_OVERFLOW if the original
471 constant was not. These conversions have implementation
472 defined behavior and retaining the TREE_OVERFLOW flag
473 here would confuse later passes such as VRP. */
474 if (CONVERT_EXPR_CODE_P (subcode)
475 && TREE_CODE (result) == INTEGER_CST
476 && TREE_CODE (rhs) == INTEGER_CST)
477 TREE_OVERFLOW (result) = TREE_OVERFLOW (rhs);
478
479 STRIP_USELESS_TYPE_CONVERSION (result);
480 if (valid_gimple_rhs_p (result))
481 return result;
482 }
cbdd87d4
RG
483 }
484 break;
485
486 case GIMPLE_BINARY_RHS:
9b80d091
KT
487 /* Try to canonicalize for boolean-typed X the comparisons
488 X == 0, X == 1, X != 0, and X != 1. */
315f5f1b
RG
489 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
490 || gimple_assign_rhs_code (stmt) == NE_EXPR)
9b80d091
KT
491 {
492 tree lhs = gimple_assign_lhs (stmt);
493 tree op1 = gimple_assign_rhs1 (stmt);
494 tree op2 = gimple_assign_rhs2 (stmt);
495 tree type = TREE_TYPE (op1);
496
497 /* Check whether the comparison operands are of the same boolean
498 type as the result type is.
499 Check that second operand is an integer-constant with value
500 one or zero. */
501 if (TREE_CODE (op2) == INTEGER_CST
502 && (integer_zerop (op2) || integer_onep (op2))
503 && useless_type_conversion_p (TREE_TYPE (lhs), type))
504 {
505 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
506 bool is_logical_not = false;
507
508 /* X == 0 and X != 1 is a logical-not.of X
509 X == 1 and X != 0 is X */
510 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
511 || (cmp_code == NE_EXPR && integer_onep (op2)))
512 is_logical_not = true;
513
514 if (is_logical_not == false)
515 result = op1;
516 /* Only for one-bit precision typed X the transformation
517 !X -> ~X is valied. */
518 else if (TYPE_PRECISION (type) == 1)
519 result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
520 type, op1);
521 /* Otherwise we use !X -> X ^ 1. */
522 else
523 result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
524 type, op1, build_int_cst (type, 1));
525
526 }
527 }
cbdd87d4
RG
528
529 if (!result)
530 result = fold_binary_loc (loc, subcode,
5fbcc0ed
RG
531 TREE_TYPE (gimple_assign_lhs (stmt)),
532 gimple_assign_rhs1 (stmt),
533 gimple_assign_rhs2 (stmt));
cbdd87d4
RG
534
535 if (result)
536 {
537 STRIP_USELESS_TYPE_CONVERSION (result);
538 if (valid_gimple_rhs_p (result))
539 return result;
cbdd87d4
RG
540 }
541 break;
542
0354c0c7 543 case GIMPLE_TERNARY_RHS:
4e71066d
RG
544 /* Try to fold a conditional expression. */
545 if (gimple_assign_rhs_code (stmt) == COND_EXPR)
546 {
547 tree op0 = gimple_assign_rhs1 (stmt);
548 tree tem;
549 bool set = false;
550 location_t cond_loc = gimple_location (stmt);
551
552 if (COMPARISON_CLASS_P (op0))
553 {
554 fold_defer_overflow_warnings ();
555 tem = fold_binary_loc (cond_loc,
556 TREE_CODE (op0), TREE_TYPE (op0),
557 TREE_OPERAND (op0, 0),
558 TREE_OPERAND (op0, 1));
559 /* This is actually a conditional expression, not a GIMPLE
560 conditional statement, however, the valid_gimple_rhs_p
561 test still applies. */
562 set = (tem && is_gimple_condexpr (tem)
563 && valid_gimple_rhs_p (tem));
564 fold_undefer_overflow_warnings (set, stmt, 0);
565 }
566 else if (is_gimple_min_invariant (op0))
567 {
568 tem = op0;
569 set = true;
570 }
571 else
572 return NULL_TREE;
573
574 if (set)
575 result = fold_build3_loc (cond_loc, COND_EXPR,
576 TREE_TYPE (gimple_assign_lhs (stmt)), tem,
577 gimple_assign_rhs2 (stmt),
578 gimple_assign_rhs3 (stmt));
579 }
580
581 if (!result)
582 result = fold_ternary_loc (loc, subcode,
583 TREE_TYPE (gimple_assign_lhs (stmt)),
584 gimple_assign_rhs1 (stmt),
585 gimple_assign_rhs2 (stmt),
586 gimple_assign_rhs3 (stmt));
0354c0c7
BS
587
588 if (result)
589 {
590 STRIP_USELESS_TYPE_CONVERSION (result);
591 if (valid_gimple_rhs_p (result))
592 return result;
0354c0c7
BS
593 }
594 break;
595
cbdd87d4
RG
596 case GIMPLE_INVALID_RHS:
597 gcc_unreachable ();
598 }
599
600 return NULL_TREE;
601}
602
603/* Attempt to fold a conditional statement. Return true if any changes were
604 made. We only attempt to fold the condition expression, and do not perform
605 any transformation that would require alteration of the cfg. It is
606 assumed that the operands have been previously folded. */
607
608static bool
609fold_gimple_cond (gimple stmt)
610{
611 tree result = fold_binary_loc (gimple_location (stmt),
612 gimple_cond_code (stmt),
613 boolean_type_node,
614 gimple_cond_lhs (stmt),
615 gimple_cond_rhs (stmt));
616
617 if (result)
618 {
619 STRIP_USELESS_TYPE_CONVERSION (result);
620 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
621 {
622 gimple_cond_set_condition_from_tree (stmt, result);
623 return true;
624 }
625 }
626
627 return false;
628}
629
fef5a0d9
RB
630
631/* Replace a statement at *SI_P with a sequence of statements in STMTS,
632 adjusting the replacement stmts location and virtual operands.
633 If the statement has a lhs the last stmt in the sequence is expected
634 to assign to that lhs. */
635
636static void
637gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
638{
639 gimple stmt = gsi_stmt (*si_p);
640
641 if (gimple_has_location (stmt))
642 annotate_all_with_location (stmts, gimple_location (stmt));
643
644 /* First iterate over the replacement statements backward, assigning
645 virtual operands to their defining statements. */
646 gimple laststore = NULL;
647 for (gimple_stmt_iterator i = gsi_last (stmts);
648 !gsi_end_p (i); gsi_prev (&i))
649 {
650 gimple new_stmt = gsi_stmt (i);
651 if ((gimple_assign_single_p (new_stmt)
652 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
653 || (is_gimple_call (new_stmt)
654 && (gimple_call_flags (new_stmt)
655 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
656 {
657 tree vdef;
658 if (!laststore)
659 vdef = gimple_vdef (stmt);
660 else
661 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
662 gimple_set_vdef (new_stmt, vdef);
663 if (vdef && TREE_CODE (vdef) == SSA_NAME)
664 SSA_NAME_DEF_STMT (vdef) = new_stmt;
665 laststore = new_stmt;
666 }
667 }
668
669 /* Second iterate over the statements forward, assigning virtual
670 operands to their uses. */
671 tree reaching_vuse = gimple_vuse (stmt);
672 for (gimple_stmt_iterator i = gsi_start (stmts);
673 !gsi_end_p (i); gsi_next (&i))
674 {
675 gimple new_stmt = gsi_stmt (i);
676 /* If the new statement possibly has a VUSE, update it with exact SSA
677 name we know will reach this one. */
678 if (gimple_has_mem_ops (new_stmt))
679 gimple_set_vuse (new_stmt, reaching_vuse);
680 gimple_set_modified (new_stmt, true);
681 if (gimple_vdef (new_stmt))
682 reaching_vuse = gimple_vdef (new_stmt);
683 }
684
685 /* If the new sequence does not do a store release the virtual
686 definition of the original statement. */
687 if (reaching_vuse
688 && reaching_vuse == gimple_vuse (stmt))
689 {
690 tree vdef = gimple_vdef (stmt);
691 if (vdef
692 && TREE_CODE (vdef) == SSA_NAME)
693 {
694 unlink_stmt_vdef (stmt);
695 release_ssa_name (vdef);
696 }
697 }
698
699 /* Finally replace the original statement with the sequence. */
700 gsi_replace_with_seq (si_p, stmts, false);
701}
702
cbdd87d4
RG
703/* Convert EXPR into a GIMPLE value suitable for substitution on the
704 RHS of an assignment. Insert the necessary statements before
705 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL
706 is replaced. If the call is expected to produces a result, then it
707 is replaced by an assignment of the new RHS to the result variable.
708 If the result is to be ignored, then the call is replaced by a
fe2ef088
MM
709 GIMPLE_NOP. A proper VDEF chain is retained by making the first
710 VUSE and the last VDEF of the whole sequence be the same as the replaced
711 statement and using new SSA names for stores in between. */
cbdd87d4
RG
712
713void
714gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
715{
716 tree lhs;
cbdd87d4
RG
717 gimple stmt, new_stmt;
718 gimple_stmt_iterator i;
355a7673 719 gimple_seq stmts = NULL;
cbdd87d4
RG
720
721 stmt = gsi_stmt (*si_p);
722
723 gcc_assert (is_gimple_call (stmt));
724
45852dcc 725 push_gimplify_context (gimple_in_ssa_p (cfun));
cbdd87d4 726
e256dfce 727 lhs = gimple_call_lhs (stmt);
cbdd87d4 728 if (lhs == NULL_TREE)
6e572326
RG
729 {
730 gimplify_and_add (expr, &stmts);
731 /* We can end up with folding a memcpy of an empty class assignment
732 which gets optimized away by C++ gimplification. */
733 if (gimple_seq_empty_p (stmts))
734 {
9fdc58de 735 pop_gimplify_context (NULL);
6e572326
RG
736 if (gimple_in_ssa_p (cfun))
737 {
738 unlink_stmt_vdef (stmt);
739 release_defs (stmt);
740 }
77d19c72 741 gsi_replace (si_p, gimple_build_nop (), true);
6e572326
RG
742 return;
743 }
744 }
cbdd87d4 745 else
e256dfce
RG
746 {
747 tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
748 new_stmt = gimple_build_assign (lhs, tmp);
749 i = gsi_last (stmts);
750 gsi_insert_after_without_update (&i, new_stmt,
751 GSI_CONTINUE_LINKING);
752 }
cbdd87d4
RG
753
754 pop_gimplify_context (NULL);
755
fef5a0d9
RB
756 gsi_replace_with_seq_vops (si_p, stmts);
757}
cbdd87d4 758
fef5a0d9
RB
759
760/* Replace the call at *GSI with the gimple value VAL. */
761
762static void
763replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
764{
765 gimple stmt = gsi_stmt (*gsi);
766 tree lhs = gimple_call_lhs (stmt);
767 gimple repl;
768 if (lhs)
e256dfce 769 {
fef5a0d9
RB
770 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
771 val = fold_convert (TREE_TYPE (lhs), val);
772 repl = gimple_build_assign (lhs, val);
773 }
774 else
775 repl = gimple_build_nop ();
776 tree vdef = gimple_vdef (stmt);
777 if (vdef && TREE_CODE (vdef) == SSA_NAME)
778 {
779 unlink_stmt_vdef (stmt);
780 release_ssa_name (vdef);
781 }
782 gsi_replace (gsi, repl, true);
783}
784
785/* Replace the call at *GSI with the new call REPL and fold that
786 again. */
787
788static void
789replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
790{
791 gimple stmt = gsi_stmt (*gsi);
792 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
793 gimple_set_location (repl, gimple_location (stmt));
794 if (gimple_vdef (stmt)
795 && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
796 {
797 gimple_set_vdef (repl, gimple_vdef (stmt));
798 gimple_set_vuse (repl, gimple_vuse (stmt));
799 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
800 }
801 gsi_replace (gsi, repl, true);
802 fold_stmt (gsi);
803}
804
805/* Return true if VAR is a VAR_DECL or a component thereof. */
806
807static bool
808var_decl_component_p (tree var)
809{
810 tree inner = var;
811 while (handled_component_p (inner))
812 inner = TREE_OPERAND (inner, 0);
813 return SSA_VAR_P (inner);
814}
815
816/* Fold function call to builtin mem{{,p}cpy,move}. Return
817 NULL_TREE if no simplification can be made.
818 If ENDP is 0, return DEST (like memcpy).
819 If ENDP is 1, return DEST+LEN (like mempcpy).
820 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
821 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
822 (memmove). */
823
824static bool
825gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
826 tree dest, tree src, int endp)
827{
828 gimple stmt = gsi_stmt (*gsi);
829 tree lhs = gimple_call_lhs (stmt);
830 tree len = gimple_call_arg (stmt, 2);
831 tree destvar, srcvar;
832 location_t loc = gimple_location (stmt);
833
834 /* If the LEN parameter is zero, return DEST. */
835 if (integer_zerop (len))
836 {
837 gimple repl;
838 if (gimple_call_lhs (stmt))
839 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
840 else
841 repl = gimple_build_nop ();
842 tree vdef = gimple_vdef (stmt);
843 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 844 {
fef5a0d9
RB
845 unlink_stmt_vdef (stmt);
846 release_ssa_name (vdef);
847 }
848 gsi_replace (gsi, repl, true);
849 return true;
850 }
851
852 /* If SRC and DEST are the same (and not volatile), return
853 DEST{,+LEN,+LEN-1}. */
854 if (operand_equal_p (src, dest, 0))
855 {
856 unlink_stmt_vdef (stmt);
857 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
858 release_ssa_name (gimple_vdef (stmt));
859 if (!lhs)
860 {
861 gsi_replace (gsi, gimple_build_nop (), true);
862 return true;
863 }
864 goto done;
865 }
866 else
867 {
868 tree srctype, desttype;
869 unsigned int src_align, dest_align;
870 tree off0;
871
872 /* Build accesses at offset zero with a ref-all character type. */
873 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
874 ptr_mode, true), 0);
875
876 /* If we can perform the copy efficiently with first doing all loads
877 and then all stores inline it that way. Currently efficiently
878 means that we can load all the memory into a single integer
879 register which is what MOVE_MAX gives us. */
880 src_align = get_pointer_alignment (src);
881 dest_align = get_pointer_alignment (dest);
882 if (tree_fits_uhwi_p (len)
883 && compare_tree_int (len, MOVE_MAX) <= 0
884 /* ??? Don't transform copies from strings with known length this
885 confuses the tree-ssa-strlen.c. This doesn't handle
886 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
887 reason. */
888 && !c_strlen (src, 2))
889 {
890 unsigned ilen = tree_to_uhwi (len);
891 if (exact_log2 (ilen) != -1)
892 {
893 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
894 if (type
895 && TYPE_MODE (type) != BLKmode
896 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
897 == ilen * 8)
898 /* If the destination pointer is not aligned we must be able
899 to emit an unaligned store. */
900 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
901 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
902 {
903 tree srctype = type;
904 tree desttype = type;
905 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
906 srctype = build_aligned_type (type, src_align);
907 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
908 tree tem = fold_const_aggregate_ref (srcmem);
909 if (tem)
910 srcmem = tem;
911 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
912 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
913 src_align))
914 srcmem = NULL_TREE;
915 if (srcmem)
916 {
917 gimple new_stmt;
918 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
919 {
920 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
921 if (gimple_in_ssa_p (cfun))
922 srcmem = make_ssa_name (TREE_TYPE (srcmem),
923 new_stmt);
924 else
925 srcmem = create_tmp_reg (TREE_TYPE (srcmem),
926 NULL);
927 gimple_assign_set_lhs (new_stmt, srcmem);
928 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
929 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
930 }
931 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
932 desttype = build_aligned_type (type, dest_align);
933 new_stmt
934 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
935 dest, off0),
936 srcmem);
937 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
938 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
939 if (gimple_vdef (new_stmt)
940 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
941 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
942 if (!lhs)
943 {
944 gsi_replace (gsi, new_stmt, true);
945 return true;
946 }
947 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
948 goto done;
949 }
950 }
951 }
952 }
953
954 if (endp == 3)
955 {
956 /* Both DEST and SRC must be pointer types.
957 ??? This is what old code did. Is the testing for pointer types
958 really mandatory?
959
960 If either SRC is readonly or length is 1, we can use memcpy. */
961 if (!dest_align || !src_align)
962 return false;
963 if (readonly_data_expr (src)
964 || (tree_fits_uhwi_p (len)
965 && (MIN (src_align, dest_align) / BITS_PER_UNIT
966 >= tree_to_uhwi (len))))
967 {
968 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
969 if (!fn)
970 return false;
971 gimple_call_set_fndecl (stmt, fn);
972 gimple_call_set_arg (stmt, 0, dest);
973 gimple_call_set_arg (stmt, 1, src);
974 fold_stmt (gsi);
975 return true;
976 }
977
978 /* If *src and *dest can't overlap, optimize into memcpy as well. */
979 if (TREE_CODE (src) == ADDR_EXPR
980 && TREE_CODE (dest) == ADDR_EXPR)
981 {
982 tree src_base, dest_base, fn;
983 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
984 HOST_WIDE_INT size = -1;
985 HOST_WIDE_INT maxsize = -1;
986
987 srcvar = TREE_OPERAND (src, 0);
988 src_base = get_ref_base_and_extent (srcvar, &src_offset,
989 &size, &maxsize);
990 destvar = TREE_OPERAND (dest, 0);
991 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
992 &size, &maxsize);
993 if (tree_fits_uhwi_p (len))
994 maxsize = tree_to_uhwi (len);
995 else
996 maxsize = -1;
997 src_offset /= BITS_PER_UNIT;
998 dest_offset /= BITS_PER_UNIT;
999 if (SSA_VAR_P (src_base)
1000 && SSA_VAR_P (dest_base))
1001 {
1002 if (operand_equal_p (src_base, dest_base, 0)
1003 && ranges_overlap_p (src_offset, maxsize,
1004 dest_offset, maxsize))
1005 return false;
1006 }
1007 else if (TREE_CODE (src_base) == MEM_REF
1008 && TREE_CODE (dest_base) == MEM_REF)
1009 {
1010 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
1011 TREE_OPERAND (dest_base, 0), 0))
1012 return false;
1013 offset_int off = mem_ref_offset (src_base) + src_offset;
1014 if (!wi::fits_shwi_p (off))
1015 return false;
1016 src_offset = off.to_shwi ();
1017
1018 off = mem_ref_offset (dest_base) + dest_offset;
1019 if (!wi::fits_shwi_p (off))
1020 return false;
1021 dest_offset = off.to_shwi ();
1022 if (ranges_overlap_p (src_offset, maxsize,
1023 dest_offset, maxsize))
1024 return false;
1025 }
1026 else
1027 return false;
1028
1029 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1030 if (!fn)
1031 return false;
1032 gimple_call_set_fndecl (stmt, fn);
1033 gimple_call_set_arg (stmt, 0, dest);
1034 gimple_call_set_arg (stmt, 1, src);
1035 fold_stmt (gsi);
1036 return true;
1037 }
1038
1039 /* If the destination and source do not alias optimize into
1040 memcpy as well. */
1041 if ((is_gimple_min_invariant (dest)
1042 || TREE_CODE (dest) == SSA_NAME)
1043 && (is_gimple_min_invariant (src)
1044 || TREE_CODE (src) == SSA_NAME))
1045 {
1046 ao_ref destr, srcr;
1047 ao_ref_init_from_ptr_and_size (&destr, dest, len);
1048 ao_ref_init_from_ptr_and_size (&srcr, src, len);
1049 if (!refs_may_alias_p_1 (&destr, &srcr, false))
1050 {
1051 tree fn;
1052 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1053 if (!fn)
1054 return false;
1055 gimple_call_set_fndecl (stmt, fn);
1056 gimple_call_set_arg (stmt, 0, dest);
1057 gimple_call_set_arg (stmt, 1, src);
1058 fold_stmt (gsi);
1059 return true;
1060 }
1061 }
1062
1063 return false;
1064 }
1065
1066 if (!tree_fits_shwi_p (len))
1067 return false;
1068 /* FIXME:
1069 This logic lose for arguments like (type *)malloc (sizeof (type)),
1070 since we strip the casts of up to VOID return value from malloc.
1071 Perhaps we ought to inherit type from non-VOID argument here? */
1072 STRIP_NOPS (src);
1073 STRIP_NOPS (dest);
1074 if (!POINTER_TYPE_P (TREE_TYPE (src))
1075 || !POINTER_TYPE_P (TREE_TYPE (dest)))
1076 return false;
1077 /* In the following try to find a type that is most natural to be
1078 used for the memcpy source and destination and that allows
1079 the most optimization when memcpy is turned into a plain assignment
1080 using that type. In theory we could always use a char[len] type
1081 but that only gains us that the destination and source possibly
1082 no longer will have their address taken. */
1083 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
1084 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
1085 {
1086 tree tem = TREE_OPERAND (src, 0);
1087 STRIP_NOPS (tem);
1088 if (tem != TREE_OPERAND (src, 0))
1089 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
1090 }
1091 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
1092 {
1093 tree tem = TREE_OPERAND (dest, 0);
1094 STRIP_NOPS (tem);
1095 if (tem != TREE_OPERAND (dest, 0))
1096 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
1097 }
1098 srctype = TREE_TYPE (TREE_TYPE (src));
1099 if (TREE_CODE (srctype) == ARRAY_TYPE
1100 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1101 {
1102 srctype = TREE_TYPE (srctype);
1103 STRIP_NOPS (src);
1104 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
1105 }
1106 desttype = TREE_TYPE (TREE_TYPE (dest));
1107 if (TREE_CODE (desttype) == ARRAY_TYPE
1108 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1109 {
1110 desttype = TREE_TYPE (desttype);
1111 STRIP_NOPS (dest);
1112 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
1113 }
1114 if (TREE_ADDRESSABLE (srctype)
1115 || TREE_ADDRESSABLE (desttype))
1116 return false;
1117
1118 /* Make sure we are not copying using a floating-point mode or
1119 a type whose size possibly does not match its precision. */
1120 if (FLOAT_MODE_P (TYPE_MODE (desttype))
1121 || TREE_CODE (desttype) == BOOLEAN_TYPE
1122 || TREE_CODE (desttype) == ENUMERAL_TYPE)
1123 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1124 if (FLOAT_MODE_P (TYPE_MODE (srctype))
1125 || TREE_CODE (srctype) == BOOLEAN_TYPE
1126 || TREE_CODE (srctype) == ENUMERAL_TYPE)
1127 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1128 if (!srctype)
1129 srctype = desttype;
1130 if (!desttype)
1131 desttype = srctype;
1132 if (!srctype)
1133 return false;
1134
1135 src_align = get_pointer_alignment (src);
1136 dest_align = get_pointer_alignment (dest);
1137 if (dest_align < TYPE_ALIGN (desttype)
1138 || src_align < TYPE_ALIGN (srctype))
1139 return false;
1140
1141 destvar = dest;
1142 STRIP_NOPS (destvar);
1143 if (TREE_CODE (destvar) == ADDR_EXPR
1144 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1145 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1146 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1147 else
1148 destvar = NULL_TREE;
1149
1150 srcvar = src;
1151 STRIP_NOPS (srcvar);
1152 if (TREE_CODE (srcvar) == ADDR_EXPR
1153 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1154 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1155 {
1156 if (!destvar
1157 || src_align >= TYPE_ALIGN (desttype))
1158 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1159 srcvar, off0);
1160 else if (!STRICT_ALIGNMENT)
1161 {
1162 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1163 src_align);
1164 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1165 }
e256dfce 1166 else
fef5a0d9
RB
1167 srcvar = NULL_TREE;
1168 }
1169 else
1170 srcvar = NULL_TREE;
1171
1172 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1173 return false;
1174
1175 if (srcvar == NULL_TREE)
1176 {
1177 STRIP_NOPS (src);
1178 if (src_align >= TYPE_ALIGN (desttype))
1179 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1180 else
1181 {
1182 if (STRICT_ALIGNMENT)
1183 return false;
1184 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1185 src_align);
1186 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1187 }
1188 }
1189 else if (destvar == NULL_TREE)
1190 {
1191 STRIP_NOPS (dest);
1192 if (dest_align >= TYPE_ALIGN (srctype))
1193 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1194 else
1195 {
1196 if (STRICT_ALIGNMENT)
1197 return false;
1198 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1199 dest_align);
1200 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1201 }
1202 }
1203
1204 gimple new_stmt;
1205 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1206 {
1207 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1208 if (gimple_in_ssa_p (cfun))
1209 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1210 else
1211 srcvar = create_tmp_reg (TREE_TYPE (srcvar), NULL);
1212 gimple_assign_set_lhs (new_stmt, srcvar);
1213 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1214 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1215 }
1216 new_stmt = gimple_build_assign (destvar, srcvar);
1217 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1218 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1219 if (gimple_vdef (new_stmt)
1220 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1221 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1222 if (!lhs)
1223 {
1224 gsi_replace (gsi, new_stmt, true);
1225 return true;
1226 }
1227 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1228 }
1229
1230done:
1231 if (endp == 0 || endp == 3)
1232 len = NULL_TREE;
1233 else if (endp == 2)
1234 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
1235 ssize_int (1));
1236 if (endp == 2 || endp == 1)
1237 dest = fold_build_pointer_plus_loc (loc, dest, len);
1238
1239 dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
1240 GSI_SAME_STMT);
1241 gimple repl = gimple_build_assign (lhs, dest);
1242 gsi_replace (gsi, repl, true);
1243 return true;
1244}
1245
1246/* Fold function call to builtin memset or bzero at *GSI setting the
1247 memory of size LEN to VAL. Return whether a simplification was made. */
1248
1249static bool
1250gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1251{
1252 gimple stmt = gsi_stmt (*gsi);
1253 tree etype;
1254 unsigned HOST_WIDE_INT length, cval;
1255
1256 /* If the LEN parameter is zero, return DEST. */
1257 if (integer_zerop (len))
1258 {
1259 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1260 return true;
1261 }
1262
1263 if (! tree_fits_uhwi_p (len))
1264 return false;
1265
1266 if (TREE_CODE (c) != INTEGER_CST)
1267 return false;
1268
1269 tree dest = gimple_call_arg (stmt, 0);
1270 tree var = dest;
1271 if (TREE_CODE (var) != ADDR_EXPR)
1272 return false;
1273
1274 var = TREE_OPERAND (var, 0);
1275 if (TREE_THIS_VOLATILE (var))
1276 return false;
1277
1278 etype = TREE_TYPE (var);
1279 if (TREE_CODE (etype) == ARRAY_TYPE)
1280 etype = TREE_TYPE (etype);
1281
1282 if (!INTEGRAL_TYPE_P (etype)
1283 && !POINTER_TYPE_P (etype))
1284 return NULL_TREE;
1285
1286 if (! var_decl_component_p (var))
1287 return NULL_TREE;
1288
1289 length = tree_to_uhwi (len);
1290 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1291 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1292 return NULL_TREE;
1293
1294 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1295 return NULL_TREE;
1296
1297 if (integer_zerop (c))
1298 cval = 0;
1299 else
1300 {
1301 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1302 return NULL_TREE;
1303
1304 cval = TREE_INT_CST_LOW (c);
1305 cval &= 0xff;
1306 cval |= cval << 8;
1307 cval |= cval << 16;
1308 cval |= (cval << 31) << 1;
1309 }
1310
1311 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1312 gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1313 gimple_set_vuse (store, gimple_vuse (stmt));
1314 tree vdef = gimple_vdef (stmt);
1315 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1316 {
1317 gimple_set_vdef (store, gimple_vdef (stmt));
1318 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1319 }
1320 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1321 if (gimple_call_lhs (stmt))
1322 {
1323 gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1324 gsi_replace (gsi, asgn, true);
1325 }
1326 else
1327 {
1328 gimple_stmt_iterator gsi2 = *gsi;
1329 gsi_prev (gsi);
1330 gsi_remove (&gsi2, true);
1331 }
1332
1333 return true;
1334}
1335
1336
1337/* Return the string length, maximum string length or maximum value of
1338 ARG in LENGTH.
1339 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1340 is not NULL and, for TYPE == 0, its value is not equal to the length
1341 we determine or if we are unable to determine the length or value,
1342 return false. VISITED is a bitmap of visited variables.
1343 TYPE is 0 if string length should be returned, 1 for maximum string
1344 length and 2 for maximum value ARG can have. */
1345
1346static bool
1347get_maxval_strlen (tree arg, tree *length, bitmap visited, int type)
1348{
1349 tree var, val;
1350 gimple def_stmt;
1351
1352 if (TREE_CODE (arg) != SSA_NAME)
1353 {
1354 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1355 if (TREE_CODE (arg) == ADDR_EXPR
1356 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1357 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1358 {
1359 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1360 if (TREE_CODE (aop0) == INDIRECT_REF
1361 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1362 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1363 length, visited, type);
1364 }
1365
1366 if (type == 2)
1367 {
1368 val = arg;
1369 if (TREE_CODE (val) != INTEGER_CST
1370 || tree_int_cst_sgn (val) < 0)
1371 return false;
1372 }
1373 else
1374 val = c_strlen (arg, 1);
1375 if (!val)
1376 return false;
1377
1378 if (*length)
1379 {
1380 if (type > 0)
1381 {
1382 if (TREE_CODE (*length) != INTEGER_CST
1383 || TREE_CODE (val) != INTEGER_CST)
1384 return false;
1385
1386 if (tree_int_cst_lt (*length, val))
1387 *length = val;
1388 return true;
1389 }
1390 else if (simple_cst_equal (val, *length) != 1)
1391 return false;
1392 }
1393
1394 *length = val;
1395 return true;
1396 }
1397
1398 /* If ARG is registered for SSA update we cannot look at its defining
1399 statement. */
1400 if (name_registered_for_update_p (arg))
1401 return false;
1402
1403 /* If we were already here, break the infinite cycle. */
1404 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
1405 return true;
1406
1407 var = arg;
1408 def_stmt = SSA_NAME_DEF_STMT (var);
1409
1410 switch (gimple_code (def_stmt))
1411 {
1412 case GIMPLE_ASSIGN:
1413 /* The RHS of the statement defining VAR must either have a
1414 constant length or come from another SSA_NAME with a constant
1415 length. */
1416 if (gimple_assign_single_p (def_stmt)
1417 || gimple_assign_unary_nop_p (def_stmt))
1418 {
1419 tree rhs = gimple_assign_rhs1 (def_stmt);
1420 return get_maxval_strlen (rhs, length, visited, type);
1421 }
1422 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1423 {
1424 tree op2 = gimple_assign_rhs2 (def_stmt);
1425 tree op3 = gimple_assign_rhs3 (def_stmt);
1426 return get_maxval_strlen (op2, length, visited, type)
1427 && get_maxval_strlen (op3, length, visited, type);
1428 }
1429 return false;
1430
1431 case GIMPLE_PHI:
1432 {
1433 /* All the arguments of the PHI node must have the same constant
1434 length. */
1435 unsigned i;
1436
1437 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1438 {
1439 tree arg = gimple_phi_arg (def_stmt, i)->def;
1440
1441 /* If this PHI has itself as an argument, we cannot
1442 determine the string length of this argument. However,
1443 if we can find a constant string length for the other
1444 PHI args then we can still be sure that this is a
1445 constant string length. So be optimistic and just
1446 continue with the next argument. */
1447 if (arg == gimple_phi_result (def_stmt))
1448 continue;
1449
1450 if (!get_maxval_strlen (arg, length, visited, type))
1451 return false;
1452 }
1453 }
1454 return true;
1455
1456 default:
1457 return false;
1458 }
1459}
1460
1461
1462/* Fold function call to builtin strcpy with arguments DEST and SRC.
1463 If LEN is not NULL, it represents the length of the string to be
1464 copied. Return NULL_TREE if no simplification can be made. */
1465
1466static bool
1467gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
1468 location_t loc, tree dest, tree src, tree len)
1469{
1470 tree fn;
1471
1472 /* If SRC and DEST are the same (and not volatile), return DEST. */
1473 if (operand_equal_p (src, dest, 0))
1474 {
1475 replace_call_with_value (gsi, dest);
1476 return true;
1477 }
1478
1479 if (optimize_function_for_size_p (cfun))
1480 return false;
1481
1482 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1483 if (!fn)
1484 return false;
1485
1486 if (!len)
1487 {
1488 len = c_strlen (src, 1);
1489 if (! len || TREE_SIDE_EFFECTS (len))
1490 return NULL_TREE;
1491 }
1492
1493 len = fold_convert_loc (loc, size_type_node, len);
1494 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1495 len = force_gimple_operand_gsi (gsi, len, true,
1496 NULL_TREE, true, GSI_SAME_STMT);
1497 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1498 replace_call_with_call_and_fold (gsi, repl);
1499 return true;
1500}
1501
1502/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1503 If SLEN is not NULL, it represents the length of the source string.
1504 Return NULL_TREE if no simplification can be made. */
1505
1506static bool
1507gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi, location_t loc,
1508 tree dest, tree src, tree len, tree slen)
1509{
1510 tree fn;
1511
1512 /* If the LEN parameter is zero, return DEST. */
1513 if (integer_zerop (len))
1514 {
1515 replace_call_with_value (gsi, dest);
1516 return true;
1517 }
1518
1519 /* We can't compare slen with len as constants below if len is not a
1520 constant. */
1521 if (len == 0 || TREE_CODE (len) != INTEGER_CST)
1522 return false;
1523
1524 if (!slen)
1525 slen = c_strlen (src, 1);
1526
1527 /* Now, we must be passed a constant src ptr parameter. */
1528 if (slen == 0 || TREE_CODE (slen) != INTEGER_CST)
1529 return false;
1530
1531 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1532
1533 /* We do not support simplification of this case, though we do
1534 support it when expanding trees into RTL. */
1535 /* FIXME: generate a call to __builtin_memset. */
1536 if (tree_int_cst_lt (slen, len))
1537 return false;
1538
1539 /* OK transform into builtin memcpy. */
1540 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1541 if (!fn)
1542 return false;
1543
1544 len = fold_convert_loc (loc, size_type_node, len);
1545 len = force_gimple_operand_gsi (gsi, len, true,
1546 NULL_TREE, true, GSI_SAME_STMT);
1547 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1548 replace_call_with_call_and_fold (gsi, repl);
1549 return true;
1550}
1551
1552/* Simplify a call to the strcat builtin. DST and SRC are the arguments
1553 to the call.
1554
1555 Return NULL_TREE if no simplification was possible, otherwise return the
1556 simplified form of the call as a tree.
1557
1558 The simplified form may be a constant or other expression which
1559 computes the same value, but in a more efficient manner (including
1560 calls to other builtin functions).
1561
1562 The call may contain arguments which need to be evaluated, but
1563 which are not useful to determine the result of the call. In
1564 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1565 COMPOUND_EXPR will be an argument which must be evaluated.
1566 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1567 COMPOUND_EXPR in the chain will contain the tree for the simplified
1568 form of the builtin function call. */
1569
1570static bool
1571gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi,
1572 location_t loc ATTRIBUTE_UNUSED, tree dst, tree src,
1573 tree len)
1574{
1575 gimple stmt = gsi_stmt (*gsi);
1576
1577 const char *p = c_getstr (src);
1578
1579 /* If the string length is zero, return the dst parameter. */
1580 if (p && *p == '\0')
1581 {
1582 replace_call_with_value (gsi, dst);
1583 return true;
1584 }
1585
1586 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1587 return false;
1588
1589 /* See if we can store by pieces into (dst + strlen(dst)). */
1590 tree newdst;
1591 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1592 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1593
1594 if (!strlen_fn || !memcpy_fn)
1595 return false;
1596
1597 /* If the length of the source string isn't computable don't
1598 split strcat into strlen and memcpy. */
1599 if (! len)
1600 len = c_strlen (src, 1);
1601 if (! len || TREE_SIDE_EFFECTS (len))
1602 return false;
1603
1604 /* Create strlen (dst). */
1605 gimple_seq stmts = NULL, stmts2;
1606 gimple repl = gimple_build_call (strlen_fn, 1, dst);
1607 gimple_set_location (repl, loc);
1608 if (gimple_in_ssa_p (cfun))
1609 newdst = make_ssa_name (size_type_node, NULL);
1610 else
1611 newdst = create_tmp_reg (size_type_node, NULL);
1612 gimple_call_set_lhs (repl, newdst);
1613 gimple_seq_add_stmt_without_update (&stmts, repl);
1614
1615 /* Create (dst p+ strlen (dst)). */
1616 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1617 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1618 gimple_seq_add_seq_without_update (&stmts, stmts2);
1619
1620 len = fold_convert_loc (loc, size_type_node, len);
1621 len = size_binop_loc (loc, PLUS_EXPR, len,
1622 build_int_cst (size_type_node, 1));
1623 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1624 gimple_seq_add_seq_without_update (&stmts, stmts2);
1625
1626 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1627 gimple_seq_add_stmt_without_update (&stmts, repl);
1628 if (gimple_call_lhs (stmt))
1629 {
1630 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1631 gimple_seq_add_stmt_without_update (&stmts, repl);
1632 gsi_replace_with_seq_vops (gsi, stmts);
1633 /* gsi now points at the assignment to the lhs, get a
1634 stmt iterator to the memcpy call.
1635 ??? We can't use gsi_for_stmt as that doesn't work when the
1636 CFG isn't built yet. */
1637 gimple_stmt_iterator gsi2 = *gsi;
1638 gsi_prev (&gsi2);
1639 fold_stmt (&gsi2);
1640 }
1641 else
1642 {
1643 gsi_replace_with_seq_vops (gsi, stmts);
1644 fold_stmt (gsi);
1645 }
1646 return true;
1647}
1648
1649/* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1650 to the call. IGNORE is true if the value returned
1651 by the builtin will be ignored. UNLOCKED is true is true if this
1652 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1653 the known length of the string. Return NULL_TREE if no simplification
1654 was possible. */
1655
1656static bool
1657gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
1658 location_t loc ATTRIBUTE_UNUSED,
1659 tree arg0, tree arg1,
1660 bool ignore, bool unlocked, tree len)
1661{
1662 /* If we're using an unlocked function, assume the other unlocked
1663 functions exist explicitly. */
1664 tree const fn_fputc = (unlocked
1665 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1666 : builtin_decl_implicit (BUILT_IN_FPUTC));
1667 tree const fn_fwrite = (unlocked
1668 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1669 : builtin_decl_implicit (BUILT_IN_FWRITE));
1670
1671 /* If the return value is used, don't do the transformation. */
1672 if (!ignore)
1673 return false;
1674
1675 if (! len)
1676 len = c_strlen (arg0, 0);
1677
1678 /* Get the length of the string passed to fputs. If the length
1679 can't be determined, punt. */
1680 if (!len
1681 || TREE_CODE (len) != INTEGER_CST)
1682 return false;
1683
1684 switch (compare_tree_int (len, 1))
1685 {
1686 case -1: /* length is 0, delete the call entirely . */
1687 replace_call_with_value (gsi, integer_zero_node);
1688 return true;
1689
1690 case 0: /* length is 1, call fputc. */
1691 {
1692 const char *p = c_getstr (arg0);
1693 if (p != NULL)
1694 {
1695 if (!fn_fputc)
1696 return false;
1697
1698 gimple repl = gimple_build_call (fn_fputc, 2,
1699 build_int_cst
1700 (integer_type_node, p[0]), arg1);
1701 replace_call_with_call_and_fold (gsi, repl);
1702 return true;
1703 }
1704 }
1705 /* FALLTHROUGH */
1706 case 1: /* length is greater than 1, call fwrite. */
1707 {
1708 /* If optimizing for size keep fputs. */
1709 if (optimize_function_for_size_p (cfun))
1710 return false;
1711 /* New argument list transforming fputs(string, stream) to
1712 fwrite(string, 1, len, stream). */
1713 if (!fn_fwrite)
1714 return false;
1715
1716 gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
1717 size_one_node, len, arg1);
1718 replace_call_with_call_and_fold (gsi, repl);
1719 return true;
1720 }
1721 default:
1722 gcc_unreachable ();
1723 }
1724 return false;
1725}
1726
1727/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1728 DEST, SRC, LEN, and SIZE are the arguments to the call.
1729 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1730 code of the builtin. If MAXLEN is not NULL, it is maximum length
1731 passed as third argument. */
1732
1733static bool
1734gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
1735 location_t loc,
1736 tree dest, tree src, tree len, tree size,
1737 tree maxlen, bool ignore,
1738 enum built_in_function fcode)
1739{
1740 tree fn;
1741
1742 /* If SRC and DEST are the same (and not volatile), return DEST
1743 (resp. DEST+LEN for __mempcpy_chk). */
1744 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1745 {
1746 if (fcode != BUILT_IN_MEMPCPY_CHK)
1747 {
1748 replace_call_with_value (gsi, dest);
1749 return true;
1750 }
1751 else
1752 {
1753 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
1754 temp = force_gimple_operand_gsi (gsi, temp,
1755 false, NULL_TREE, true,
1756 GSI_SAME_STMT);
1757 replace_call_with_value (gsi, temp);
1758 return true;
1759 }
1760 }
1761
1762 if (! tree_fits_uhwi_p (size))
1763 return false;
1764
1765 if (! integer_all_onesp (size))
1766 {
1767 if (! tree_fits_uhwi_p (len))
1768 {
1769 /* If LEN is not constant, try MAXLEN too.
1770 For MAXLEN only allow optimizing into non-_ocs function
1771 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1772 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1773 {
1774 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1775 {
1776 /* (void) __mempcpy_chk () can be optimized into
1777 (void) __memcpy_chk (). */
1778 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1779 if (!fn)
1780 return false;
1781
1782 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1783 replace_call_with_call_and_fold (gsi, repl);
1784 return true;
1785 }
1786 return false;
1787 }
1788 }
1789 else
1790 maxlen = len;
1791
1792 if (tree_int_cst_lt (size, maxlen))
1793 return false;
1794 }
1795
1796 fn = NULL_TREE;
1797 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1798 mem{cpy,pcpy,move,set} is available. */
1799 switch (fcode)
1800 {
1801 case BUILT_IN_MEMCPY_CHK:
1802 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1803 break;
1804 case BUILT_IN_MEMPCPY_CHK:
1805 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1806 break;
1807 case BUILT_IN_MEMMOVE_CHK:
1808 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1809 break;
1810 case BUILT_IN_MEMSET_CHK:
1811 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1812 break;
1813 default:
1814 break;
1815 }
1816
1817 if (!fn)
1818 return false;
1819
1820 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1821 replace_call_with_call_and_fold (gsi, repl);
1822 return true;
1823}
1824
1825/* Fold a call to the __st[rp]cpy_chk builtin.
1826 DEST, SRC, and SIZE are the arguments to the call.
1827 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1828 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1829 strings passed as second argument. */
1830
1831static bool
1832gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
1833 location_t loc, tree dest,
1834 tree src, tree size,
1835 tree maxlen, bool ignore,
1836 enum built_in_function fcode)
1837{
1838 tree len, fn;
1839
1840 /* If SRC and DEST are the same (and not volatile), return DEST. */
1841 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1842 {
1843 replace_call_with_value (gsi, dest);
1844 return true;
1845 }
1846
1847 if (! tree_fits_uhwi_p (size))
1848 return false;
1849
1850 if (! integer_all_onesp (size))
1851 {
1852 len = c_strlen (src, 1);
1853 if (! len || ! tree_fits_uhwi_p (len))
1854 {
1855 /* If LEN is not constant, try MAXLEN too.
1856 For MAXLEN only allow optimizing into non-_ocs function
1857 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1858 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1859 {
1860 if (fcode == BUILT_IN_STPCPY_CHK)
1861 {
1862 if (! ignore)
1863 return false;
1864
1865 /* If return value of __stpcpy_chk is ignored,
1866 optimize into __strcpy_chk. */
1867 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1868 if (!fn)
1869 return false;
1870
1871 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1872 replace_call_with_call_and_fold (gsi, repl);
1873 return true;
1874 }
1875
1876 if (! len || TREE_SIDE_EFFECTS (len))
1877 return false;
1878
1879 /* If c_strlen returned something, but not a constant,
1880 transform __strcpy_chk into __memcpy_chk. */
1881 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1882 if (!fn)
1883 return false;
1884
1885 len = fold_convert_loc (loc, size_type_node, len);
1886 len = size_binop_loc (loc, PLUS_EXPR, len,
1887 build_int_cst (size_type_node, 1));
1888 len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
1889 true, GSI_SAME_STMT);
1890 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1891 replace_call_with_call_and_fold (gsi, repl);
1892 return true;
1893 }
e256dfce 1894 }
fef5a0d9
RB
1895 else
1896 maxlen = len;
1897
1898 if (! tree_int_cst_lt (maxlen, size))
1899 return false;
e256dfce
RG
1900 }
1901
fef5a0d9
RB
1902 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1903 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1904 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1905 if (!fn)
1906 return false;
1907
1908 gimple repl = gimple_build_call (fn, 2, dest, src);
1909 replace_call_with_call_and_fold (gsi, repl);
1910 return true;
1911}
1912
/* Fold a call to the __st{r,p}ncpy_chk builtin.  DEST, SRC, LEN, and SIZE
   are the arguments to the call.  If MAXLEN is not NULL, it is maximum
   length passed as third argument.  IGNORE is true if return value can be
   ignored.  FCODE is the BUILT_IN_* code of the builtin.  Returns true
   and replaces the call statement at *GSI when folding succeeded.  */

static bool
gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
                                 tree dest, tree src,
                                 tree len, tree size, tree maxlen, bool ignore,
                                 enum built_in_function fcode)
{
  tree fn;

  if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
    {
      /* If return value of __stpncpy_chk is ignored,
         optimize into __strncpy_chk.  */
      fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
      if (fn)
        {
          gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
          replace_call_with_call_and_fold (gsi, repl);
          return true;
        }
    }

  /* SIZE is the object size from __builtin_object_size; we can only
     reason about the check when it is a known constant.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* An all-ones SIZE means "unknown object size": the check is a no-op
     and the call can be converted unconditionally.  Otherwise prove
     LEN (or MAXLEN) cannot exceed SIZE before dropping the check.  */
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            return false;
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return false;
    }

  /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
                              ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
  if (!fn)
    return false;

  gimple repl = gimple_build_call (fn, 3, dest, src, len);
  replace_call_with_call_and_fold (gsi, repl);
  return true;
}
1969
/* Fold a call to __{,v}snprintf_chk at *GSI into __{,v}snprintf when the
   object-size check is provably redundant.  If MAXLEN is not NULL, it is
   the maximum known value of the length (second) argument.  FCODE is
   either BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK.  Return true
   iff the statement was changed.  */

static bool
gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
                                  tree maxlen, enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;

  /* Verify the required arguments in the original call.  */
  if (gimple_call_num_args (stmt) < 5)
    return false;

  dest = gimple_call_arg (stmt, 0);
  len = gimple_call_arg (stmt, 1);
  flag = gimple_call_arg (stmt, 2);
  size = gimple_call_arg (stmt, 3);
  fmt = gimple_call_arg (stmt, 4);

  /* SIZE must be a known constant to reason about the check.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  /* All-ones SIZE means "unknown object size"; otherwise require proof
     that LEN (or MAXLEN) does not exceed SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! tree_fits_uhwi_p (len))
        {
          /* If LEN is not constant, try MAXLEN too.
             For MAXLEN only allow optimizing into non-_ocs function
             if SIZE is >= MAXLEN, never convert to __ocs_fail ().  */
          if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
            return false;
        }
      else
        maxlen = len;

      if (tree_int_cst_lt (size, maxlen))
        return false;
    }

  if (!init_target_chars ())
    return false;

  /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      fmt_str = c_getstr (fmt);
      if (fmt_str == NULL)
        return false;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return false;
    }

  /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
     available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
                              ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 5 argument by 3 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, len);
  gimple_call_set_arg (stmt, 2, fmt);
  for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
cbdd87d4 2049
/* Fold a call to __{,v}sprintf_chk at *GSI into __{,v}sprintf when the
   output length can be bounded from the format string and shown to fit
   in the destination object.  FCODE is either BUILT_IN_SPRINTF_CHK or
   BUILT_IN_VSPRINTF_CHK.  Return true iff the statement was changed.  */

static bool
gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
                                 enum built_in_function fcode)
{
  gimple stmt = gsi_stmt (*gsi);
  tree dest, size, len, fn, fmt, flag;
  const char *fmt_str;
  unsigned nargs = gimple_call_num_args (stmt);

  /* Verify the required arguments in the original call.  */
  if (nargs < 4)
    return false;
  dest = gimple_call_arg (stmt, 0);
  flag = gimple_call_arg (stmt, 1);
  size = gimple_call_arg (stmt, 2);
  fmt = gimple_call_arg (stmt, 3);

  /* SIZE must be a known constant to reason about the check.  */
  if (! tree_fits_uhwi_p (size))
    return false;

  len = NULL_TREE;

  if (!init_target_chars ())
    return false;

  /* Check whether the format is a literal string constant.  */
  fmt_str = c_getstr (fmt);
  if (fmt_str != NULL)
    {
      /* If the format doesn't contain % args or %%, we know the size.  */
      if (strchr (fmt_str, target_percent) == 0)
        {
          if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
            len = build_int_cstu (size_type_node, strlen (fmt_str));
        }
      /* If the format is "%s" and first ... argument is a string literal,
         we know the size too.  */
      else if (fcode == BUILT_IN_SPRINTF_CHK
               && strcmp (fmt_str, target_percent_s) == 0)
        {
          tree arg;

          if (nargs == 5)
            {
              arg = gimple_call_arg (stmt, 4);
              if (POINTER_TYPE_P (TREE_TYPE (arg)))
                {
                  len = c_strlen (arg, 1);
                  if (! len || ! tree_fits_uhwi_p (len))
                    len = NULL_TREE;
                }
            }
        }
    }

  /* Unless SIZE is "unknown" (all ones), we must have proved the output
     length LEN is strictly smaller than SIZE.  */
  if (! integer_all_onesp (size))
    {
      if (! len || ! tree_int_cst_lt (len, size))
        return false;
    }

  /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
     or if format doesn't contain % chars or is "%s".  */
  if (! integer_zerop (flag))
    {
      if (fmt_str == NULL)
        return false;
      if (strchr (fmt_str, target_percent) != NULL
          && strcmp (fmt_str, target_percent_s))
        return false;
    }

  /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available.  */
  fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
                              ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
  if (!fn)
    return false;

  /* Replace the called function and the first 4 argument by 2 retaining
     trailing varargs.  */
  gimple_call_set_fndecl (stmt, fn);
  gimple_call_set_fntype (stmt, TREE_TYPE (fn));
  gimple_call_set_arg (stmt, 0, dest);
  gimple_call_set_arg (stmt, 1, fmt);
  for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
    gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
  gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
  fold_stmt (gsi);
  return true;
}
2145
2146
2147/* Fold a call to __builtin_strlen with known length LEN. */
2148
2149static bool
2150gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi, tree len)
2151{
2152 if (!len)
2153 {
2154 gimple stmt = gsi_stmt (*gsi);
2155 len = c_strlen (gimple_call_arg (stmt, 0), 0);
cbdd87d4 2156 }
fef5a0d9
RB
2157 if (!len)
2158 return false;
2159 replace_call_with_value (gsi, len);
2160 return true;
cbdd87d4
RG
2161}
2162
2163
fef5a0d9 2164/* Fold builtins at *GSI with knowledge about a length argument. */
cbdd87d4 2165
fef5a0d9
RB
2166static bool
2167gimple_fold_builtin_with_strlen (gimple_stmt_iterator *gsi)
cbdd87d4 2168{
fef5a0d9
RB
2169 gimple stmt = gsi_stmt (*gsi);
2170 tree val[3];
2171 tree a;
cbdd87d4
RG
2172 int arg_idx, type;
2173 bitmap visited;
2174 bool ignore;
cbdd87d4
RG
2175 location_t loc = gimple_location (stmt);
2176
cbdd87d4
RG
2177 ignore = (gimple_call_lhs (stmt) == NULL);
2178
cbdd87d4 2179 /* Limit the work only for builtins we know how to simplify. */
fef5a0d9 2180 tree callee = gimple_call_fndecl (stmt);
cbdd87d4
RG
2181 switch (DECL_FUNCTION_CODE (callee))
2182 {
2183 case BUILT_IN_STRLEN:
2184 case BUILT_IN_FPUTS:
2185 case BUILT_IN_FPUTS_UNLOCKED:
2186 arg_idx = 0;
2187 type = 0;
2188 break;
2189 case BUILT_IN_STRCPY:
2190 case BUILT_IN_STRNCPY:
9a7eefec 2191 case BUILT_IN_STRCAT:
cbdd87d4
RG
2192 arg_idx = 1;
2193 type = 0;
2194 break;
2195 case BUILT_IN_MEMCPY_CHK:
2196 case BUILT_IN_MEMPCPY_CHK:
2197 case BUILT_IN_MEMMOVE_CHK:
2198 case BUILT_IN_MEMSET_CHK:
2199 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 2200 case BUILT_IN_STPNCPY_CHK:
cbdd87d4
RG
2201 arg_idx = 2;
2202 type = 2;
2203 break;
2204 case BUILT_IN_STRCPY_CHK:
2205 case BUILT_IN_STPCPY_CHK:
2206 arg_idx = 1;
2207 type = 1;
2208 break;
2209 case BUILT_IN_SNPRINTF_CHK:
2210 case BUILT_IN_VSNPRINTF_CHK:
2211 arg_idx = 1;
2212 type = 2;
2213 break;
2214 default:
fef5a0d9 2215 return false;
cbdd87d4
RG
2216 }
2217
fef5a0d9 2218 int nargs = gimple_call_num_args (stmt);
cbdd87d4 2219 if (arg_idx >= nargs)
fef5a0d9 2220 return false;
cbdd87d4
RG
2221
2222 /* Try to use the dataflow information gathered by the CCP process. */
2223 visited = BITMAP_ALLOC (NULL);
2224 bitmap_clear (visited);
2225
2226 memset (val, 0, sizeof (val));
2227 a = gimple_call_arg (stmt, arg_idx);
fef5a0d9
RB
2228 if (!get_maxval_strlen (a, &val[arg_idx], visited, type)
2229 || !is_gimple_val (val[arg_idx]))
cbdd87d4
RG
2230 val[arg_idx] = NULL_TREE;
2231
2232 BITMAP_FREE (visited);
2233
cbdd87d4
RG
2234 switch (DECL_FUNCTION_CODE (callee))
2235 {
2236 case BUILT_IN_STRLEN:
fef5a0d9 2237 return gimple_fold_builtin_strlen (gsi, val[0]);
cbdd87d4
RG
2238
2239 case BUILT_IN_STRCPY:
fef5a0d9
RB
2240 return gimple_fold_builtin_strcpy (gsi, loc,
2241 gimple_call_arg (stmt, 0),
2242 gimple_call_arg (stmt, 1),
2243 val[1]);
cbdd87d4
RG
2244
2245 case BUILT_IN_STRNCPY:
fef5a0d9
RB
2246 return gimple_fold_builtin_strncpy (gsi, loc,
2247 gimple_call_arg (stmt, 0),
2248 gimple_call_arg (stmt, 1),
2249 gimple_call_arg (stmt, 2),
2250 val[1]);
cbdd87d4 2251
9a7eefec 2252 case BUILT_IN_STRCAT:
fef5a0d9
RB
2253 return gimple_fold_builtin_strcat (gsi, loc, gimple_call_arg (stmt, 0),
2254 gimple_call_arg (stmt, 1),
2255 val[1]);
9a7eefec 2256
cbdd87d4 2257 case BUILT_IN_FPUTS:
fef5a0d9
RB
2258 return gimple_fold_builtin_fputs (gsi, loc, gimple_call_arg (stmt, 0),
2259 gimple_call_arg (stmt, 1),
2260 ignore, false, val[0]);
cbdd87d4
RG
2261
2262 case BUILT_IN_FPUTS_UNLOCKED:
fef5a0d9
RB
2263 return gimple_fold_builtin_fputs (gsi, loc, gimple_call_arg (stmt, 0),
2264 gimple_call_arg (stmt, 1),
2265 ignore, true, val[0]);
cbdd87d4
RG
2266
2267 case BUILT_IN_MEMCPY_CHK:
2268 case BUILT_IN_MEMPCPY_CHK:
2269 case BUILT_IN_MEMMOVE_CHK:
2270 case BUILT_IN_MEMSET_CHK:
fef5a0d9
RB
2271 return gimple_fold_builtin_memory_chk (gsi, loc,
2272 gimple_call_arg (stmt, 0),
2273 gimple_call_arg (stmt, 1),
2274 gimple_call_arg (stmt, 2),
2275 gimple_call_arg (stmt, 3),
2276 val[2], ignore,
2277 DECL_FUNCTION_CODE (callee));
cbdd87d4
RG
2278
2279 case BUILT_IN_STRCPY_CHK:
2280 case BUILT_IN_STPCPY_CHK:
fef5a0d9
RB
2281 return gimple_fold_builtin_stxcpy_chk (gsi, loc,
2282 gimple_call_arg (stmt, 0),
2283 gimple_call_arg (stmt, 1),
2284 gimple_call_arg (stmt, 2),
2285 val[1], ignore,
2286 DECL_FUNCTION_CODE (callee));
cbdd87d4
RG
2287
2288 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 2289 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
2290 return gimple_fold_builtin_stxncpy_chk (gsi,
2291 gimple_call_arg (stmt, 0),
2292 gimple_call_arg (stmt, 1),
2293 gimple_call_arg (stmt, 2),
2294 gimple_call_arg (stmt, 3),
2295 val[2], ignore,
2296 DECL_FUNCTION_CODE (callee));
cbdd87d4
RG
2297
2298 case BUILT_IN_SNPRINTF_CHK:
2299 case BUILT_IN_VSNPRINTF_CHK:
fef5a0d9
RB
2300 return gimple_fold_builtin_snprintf_chk (gsi, val[1],
2301 DECL_FUNCTION_CODE (callee));
cbdd87d4
RG
2302
2303 default:
2304 gcc_unreachable ();
2305 }
2306
fef5a0d9 2307 return false;
cbdd87d4
RG
2308}
2309
1ae6fe9b 2310
/* Fold the non-target builtin at *GSI and return whether any simplification
   was made.  Tries the strlen-aware folders first, then a few memory
   builtins, then the generic tree-level folder.  */

static bool
gimple_fold_builtin (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee = gimple_call_fndecl (stmt);

  /* Give up for always_inline inline builtins until they are
     inlined.  */
  if (avoid_folding_inline_builtin (callee))
    return false;

  if (gimple_fold_builtin_with_strlen (gsi))
    return true;

  switch (DECL_FUNCTION_CODE (callee))
    {
    case BUILT_IN_BZERO:
      /* bzero (s, n) is memset (s, 0, n).  */
      return gimple_fold_builtin_memset (gsi, integer_zero_node,
					 gimple_call_arg (stmt, 1));
    case BUILT_IN_MEMSET:
      return gimple_fold_builtin_memset (gsi,
					 gimple_call_arg (stmt, 1),
					 gimple_call_arg (stmt, 2));
    case BUILT_IN_BCOPY:
      /* bcopy (src, dest, n) has swapped operand order vs. memmove.  */
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
					    gimple_call_arg (stmt, 0), 3);
    case BUILT_IN_MEMCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 0);
    case BUILT_IN_MEMPCPY:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 1);
    case BUILT_IN_MEMMOVE:
      return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
					    gimple_call_arg (stmt, 1), 3);
    case BUILT_IN_SPRINTF_CHK:
    case BUILT_IN_VSPRINTF_CHK:
      return gimple_fold_builtin_sprintf_chk (gsi, DECL_FUNCTION_CODE (callee));
    default:;
    }

  /* Try the generic builtin folder.  */
  bool ignore = (gimple_call_lhs (stmt) == NULL);
  tree result = fold_call_stmt (stmt, ignore);
  if (result)
    {
      if (ignore)
	STRIP_NOPS (result);
      else
	result = fold_convert (gimple_call_return_type (stmt), result);
      if (!update_call_from_tree (gsi, result))
	gimplify_and_update_call_from_tree (gsi, result);
      return true;
    }

  return false;
}
2371
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.
   When INPLACE is true, only transformations that keep the statement
   itself in place are applied.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The OBJ_TYPE_REF already wraps a known function address;
	     strip the wrapper.  Warn in the dump file when the target
	     contradicts the type-inheritance analysis.  */
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, cgraph_node::get (gimple_call_addr_fndecl
						 (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  /* Speculatively devirtualize when the analysis finds at most
	     one possible target (zero targets => unreachable).  */
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  gimple_call_set_fndecl (stmt, targets[0]->decl);
		  changed = true;
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA name defined by assigning it an
			     uninitialized default def.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		}
	      else
		{
		  /* No possible target: replace the call by a trap.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs), NULL);
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    gsi_replace (gsi, new_stmt, true);
		  return true;
		}
	    }
	}
    }

  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* Fold internal functions: __builtin_expect lowering and the
	 UBSAN overflow-check arithmetic identities.  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR
		     ? build_zero_cst (TREE_TYPE (arg0))
		     : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR
		     ? build_zero_cst (TREE_TYPE (arg0))
		     : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = build_zero_cst (TREE_TYPE (arg0));
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR)
	    {
	      if (integer_onep (arg1))
		result = arg0;
	      else if (integer_onep (arg0))
		result = arg1;
	    }
	}
      if (result)
	{
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
2552
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases: when INPLACE, the statement may be modified
   but never grown or replaced.  Return true iff anything changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace)
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* When folding in place, only accept a new RHS that needs no
	   more operand slots than the current statement has.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (stmt);
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  /* Fold *& and &* in debug bind values; failure to fold must
	     not drop the binding.  */
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  /* GIMPLE_CALL folding may have replaced the statement; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
2701
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  return fold_stmt_1 (gsi, false);
}
2714
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true);
  /* In-place folding must never have replaced the statement.  */
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
2731
e89065a1
SL
2732/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
2733 if EXPR is null or we don't know how.
2734 If non-null, the result always has boolean type. */
2735
2736static tree
2737canonicalize_bool (tree expr, bool invert)
2738{
2739 if (!expr)
2740 return NULL_TREE;
2741 else if (invert)
2742 {
2743 if (integer_nonzerop (expr))
2744 return boolean_false_node;
2745 else if (integer_zerop (expr))
2746 return boolean_true_node;
2747 else if (TREE_CODE (expr) == SSA_NAME)
2748 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
2749 build_int_cst (TREE_TYPE (expr), 0));
2750 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
2751 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
2752 boolean_type_node,
2753 TREE_OPERAND (expr, 0),
2754 TREE_OPERAND (expr, 1));
2755 else
2756 return NULL_TREE;
2757 }
2758 else
2759 {
2760 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
2761 return expr;
2762 if (integer_nonzerop (expr))
2763 return boolean_true_node;
2764 else if (integer_zerop (expr))
2765 return boolean_false_node;
2766 else if (TREE_CODE (expr) == SSA_NAME)
2767 return fold_build2 (NE_EXPR, boolean_type_node, expr,
2768 build_int_cst (TREE_TYPE (expr), 0));
2769 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
2770 return fold_build2 (TREE_CODE (expr),
2771 boolean_type_node,
2772 TREE_OPERAND (expr, 0),
2773 TREE_OPERAND (expr, 1));
2774 else
2775 return NULL_TREE;
2776 }
2777}
2778
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) or (name == 1) is just name's definition.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  /* (name == 0) or (name != 1) is the inverted definition.  */
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
2836
2837/* Check to see if two boolean expressions OP1 and OP2 are logically
2838 equivalent. */
2839
2840static bool
2841same_bool_result_p (const_tree op1, const_tree op2)
2842{
2843 /* Simple cases first. */
2844 if (operand_equal_p (op1, op2, 0))
2845 return true;
2846
2847 /* Check the cases where at least one of the operands is a comparison.
2848 These are a bit smarter than operand_equal_p in that they apply some
2849 identifies on SSA_NAMEs. */
2850 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
2851 && same_bool_comparison_p (op1, TREE_CODE (op2),
2852 TREE_OPERAND (op2, 0),
2853 TREE_OPERAND (op2, 1)))
2854 return true;
2855 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
2856 && same_bool_comparison_p (op2, TREE_CODE (op1),
2857 TREE_OPERAND (op1, 0),
2858 TREE_OPERAND (op1, 1)))
2859 return true;
2860
2861 /* Default case. */
2862 return false;
2863}
2864
2865/* Forward declarations for some mutually recursive functions. */
2866
2867static tree
2868and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2869 enum tree_code code2, tree op2a, tree op2b);
2870static tree
2871and_var_with_comparison (tree var, bool invert,
2872 enum tree_code code2, tree op2a, tree op2b);
2873static tree
2874and_var_with_comparison_1 (gimple stmt,
2875 enum tree_code code2, tree op2a, tree op2b);
2876static tree
2877or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
2878 enum tree_code code2, tree op2a, tree op2b);
2879static tree
2880or_var_with_comparison (tree var, bool invert,
2881 enum tree_code code2, tree op2a, tree op2b);
2882static tree
2883or_var_with_comparison_1 (gimple stmt,
2884 enum tree_code code2, tree op2a, tree op2b);
2885
/* Helper function for and_comparisons_1: try to simplify the AND of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of the VAR before doing the AND.
   Return NULL_EXPR if we can't simplify this to a single expression.  */

static tree
and_var_with_comparison (tree var, bool invert,
			 enum tree_code code2, tree op2a, tree op2b)
{
  tree t;
  gimple stmt = SSA_NAME_DEF_STMT (var);

  /* We can only deal with variables whose definitions are assignments.  */
  if (!is_gimple_assign (stmt))
    return NULL_TREE;

  /* If we have an inverted comparison, apply DeMorgan's law and rewrite
     !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
     Then we only have to consider the simpler non-inverted cases.  */
  if (invert)
    t = or_var_with_comparison_1 (stmt,
				  invert_tree_comparison (code2, false),
				  op2a, op2b);
  else
    t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
  /* canonicalize_bool undoes the DeMorgan inversion on the result.  */
  return canonicalize_bool (t, invert);
}
2913
2914/* Try to simplify the AND of the ssa variable defined by the assignment
2915 STMT with the comparison specified by (OP2A CODE2 OP2B).
2916 Return NULL_EXPR if we can't simplify this to a single expression. */
2917
2918static tree
2919and_var_with_comparison_1 (gimple stmt,
2920 enum tree_code code2, tree op2a, tree op2b)
2921{
2922 tree var = gimple_assign_lhs (stmt);
2923 tree true_test_var = NULL_TREE;
2924 tree false_test_var = NULL_TREE;
2925 enum tree_code innercode = gimple_assign_rhs_code (stmt);
2926
2927 /* Check for identities like (var AND (var == 0)) => false. */
2928 if (TREE_CODE (op2a) == SSA_NAME
2929 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
2930 {
2931 if ((code2 == NE_EXPR && integer_zerop (op2b))
2932 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
2933 {
2934 true_test_var = op2a;
2935 if (var == true_test_var)
2936 return var;
2937 }
2938 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
2939 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
2940 {
2941 false_test_var = op2a;
2942 if (var == false_test_var)
2943 return boolean_false_node;
2944 }
2945 }
2946
2947 /* If the definition is a comparison, recurse on it. */
2948 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
2949 {
2950 tree t = and_comparisons_1 (innercode,
2951 gimple_assign_rhs1 (stmt),
2952 gimple_assign_rhs2 (stmt),
2953 code2,
2954 op2a,
2955 op2b);
2956 if (t)
2957 return t;
2958 }
2959
2960 /* If the definition is an AND or OR expression, we may be able to
2961 simplify by reassociating. */
eb9820c0
KT
2962 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
2963 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
2964 {
2965 tree inner1 = gimple_assign_rhs1 (stmt);
2966 tree inner2 = gimple_assign_rhs2 (stmt);
2967 gimple s;
2968 tree t;
2969 tree partial = NULL_TREE;
eb9820c0 2970 bool is_and = (innercode == BIT_AND_EXPR);
e89065a1
SL
2971
2972 /* Check for boolean identities that don't require recursive examination
2973 of inner1/inner2:
2974 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
2975 inner1 AND (inner1 OR inner2) => inner1
2976 !inner1 AND (inner1 AND inner2) => false
2977 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
2978 Likewise for similar cases involving inner2. */
2979 if (inner1 == true_test_var)
2980 return (is_and ? var : inner1);
2981 else if (inner2 == true_test_var)
2982 return (is_and ? var : inner2);
2983 else if (inner1 == false_test_var)
2984 return (is_and
2985 ? boolean_false_node
2986 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
2987 else if (inner2 == false_test_var)
2988 return (is_and
2989 ? boolean_false_node
2990 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
2991
2992 /* Next, redistribute/reassociate the AND across the inner tests.
2993 Compute the first partial result, (inner1 AND (op2a code op2b)) */
2994 if (TREE_CODE (inner1) == SSA_NAME
2995 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
2996 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
2997 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
2998 gimple_assign_rhs1 (s),
2999 gimple_assign_rhs2 (s),
3000 code2, op2a, op2b)))
3001 {
3002 /* Handle the AND case, where we are reassociating:
3003 (inner1 AND inner2) AND (op2a code2 op2b)
3004 => (t AND inner2)
3005 If the partial result t is a constant, we win. Otherwise
3006 continue on to try reassociating with the other inner test. */
3007 if (is_and)
3008 {
3009 if (integer_onep (t))
3010 return inner2;
3011 else if (integer_zerop (t))
3012 return boolean_false_node;
3013 }
3014
3015 /* Handle the OR case, where we are redistributing:
3016 (inner1 OR inner2) AND (op2a code2 op2b)
3017 => (t OR (inner2 AND (op2a code2 op2b))) */
8236c8eb
JJ
3018 else if (integer_onep (t))
3019 return boolean_true_node;
3020
3021 /* Save partial result for later. */
3022 partial = t;
e89065a1
SL
3023 }
3024
3025 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
3026 if (TREE_CODE (inner2) == SSA_NAME
3027 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
3028 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
3029 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
3030 gimple_assign_rhs1 (s),
3031 gimple_assign_rhs2 (s),
3032 code2, op2a, op2b)))
3033 {
3034 /* Handle the AND case, where we are reassociating:
3035 (inner1 AND inner2) AND (op2a code2 op2b)
3036 => (inner1 AND t) */
3037 if (is_and)
3038 {
3039 if (integer_onep (t))
3040 return inner1;
3041 else if (integer_zerop (t))
3042 return boolean_false_node;
8236c8eb
JJ
3043 /* If both are the same, we can apply the identity
3044 (x AND x) == x. */
3045 else if (partial && same_bool_result_p (t, partial))
3046 return t;
e89065a1
SL
3047 }
3048
3049 /* Handle the OR case. where we are redistributing:
3050 (inner1 OR inner2) AND (op2a code2 op2b)
3051 => (t OR (inner1 AND (op2a code2 op2b)))
3052 => (t OR partial) */
3053 else
3054 {
3055 if (integer_onep (t))
3056 return boolean_true_node;
3057 else if (partial)
3058 {
3059 /* We already got a simplification for the other
3060 operand to the redistributed OR expression. The
3061 interesting case is when at least one is false.
3062 Or, if both are the same, we can apply the identity
3063 (x OR x) == x. */
3064 if (integer_zerop (partial))
3065 return t;
3066 else if (integer_zerop (t))
3067 return partial;
3068 else if (same_bool_result_p (t, partial))
3069 return t;
3070 }
3071 }
3072 }
3073 }
3074 return NULL_TREE;
3075}
3076
/* Try to simplify the AND of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */
3083
3084static tree
3085and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3086 enum tree_code code2, tree op2a, tree op2b)
3087{
ae22ac3c 3088 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 3089
e89065a1
SL
3090 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
3091 if (operand_equal_p (op1a, op2a, 0)
3092 && operand_equal_p (op1b, op2b, 0))
3093 {
eb9820c0 3094 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
3095 tree t = combine_comparisons (UNKNOWN_LOCATION,
3096 TRUTH_ANDIF_EXPR, code1, code2,
31ed6226 3097 truth_type, op1a, op1b);
e89065a1
SL
3098 if (t)
3099 return t;
3100 }
3101
3102 /* Likewise the swapped case of the above. */
3103 if (operand_equal_p (op1a, op2b, 0)
3104 && operand_equal_p (op1b, op2a, 0))
3105 {
eb9820c0 3106 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
3107 tree t = combine_comparisons (UNKNOWN_LOCATION,
3108 TRUTH_ANDIF_EXPR, code1,
3109 swap_tree_comparison (code2),
31ed6226 3110 truth_type, op1a, op1b);
e89065a1
SL
3111 if (t)
3112 return t;
3113 }
3114
3115 /* If both comparisons are of the same value against constants, we might
3116 be able to merge them. */
3117 if (operand_equal_p (op1a, op2a, 0)
3118 && TREE_CODE (op1b) == INTEGER_CST
3119 && TREE_CODE (op2b) == INTEGER_CST)
3120 {
3121 int cmp = tree_int_cst_compare (op1b, op2b);
3122
3123 /* If we have (op1a == op1b), we should either be able to
3124 return that or FALSE, depending on whether the constant op1b
3125 also satisfies the other comparison against op2b. */
3126 if (code1 == EQ_EXPR)
3127 {
3128 bool done = true;
3129 bool val;
3130 switch (code2)
3131 {
3132 case EQ_EXPR: val = (cmp == 0); break;
3133 case NE_EXPR: val = (cmp != 0); break;
3134 case LT_EXPR: val = (cmp < 0); break;
3135 case GT_EXPR: val = (cmp > 0); break;
3136 case LE_EXPR: val = (cmp <= 0); break;
3137 case GE_EXPR: val = (cmp >= 0); break;
3138 default: done = false;
3139 }
3140 if (done)
3141 {
3142 if (val)
3143 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3144 else
3145 return boolean_false_node;
3146 }
3147 }
3148 /* Likewise if the second comparison is an == comparison. */
3149 else if (code2 == EQ_EXPR)
3150 {
3151 bool done = true;
3152 bool val;
3153 switch (code1)
3154 {
3155 case EQ_EXPR: val = (cmp == 0); break;
3156 case NE_EXPR: val = (cmp != 0); break;
3157 case LT_EXPR: val = (cmp > 0); break;
3158 case GT_EXPR: val = (cmp < 0); break;
3159 case LE_EXPR: val = (cmp >= 0); break;
3160 case GE_EXPR: val = (cmp <= 0); break;
3161 default: done = false;
3162 }
3163 if (done)
3164 {
3165 if (val)
3166 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3167 else
3168 return boolean_false_node;
3169 }
3170 }
3171
3172 /* Same business with inequality tests. */
3173 else if (code1 == NE_EXPR)
3174 {
3175 bool val;
3176 switch (code2)
3177 {
3178 case EQ_EXPR: val = (cmp != 0); break;
3179 case NE_EXPR: val = (cmp == 0); break;
3180 case LT_EXPR: val = (cmp >= 0); break;
3181 case GT_EXPR: val = (cmp <= 0); break;
3182 case LE_EXPR: val = (cmp > 0); break;
3183 case GE_EXPR: val = (cmp < 0); break;
3184 default:
3185 val = false;
3186 }
3187 if (val)
3188 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3189 }
3190 else if (code2 == NE_EXPR)
3191 {
3192 bool val;
3193 switch (code1)
3194 {
3195 case EQ_EXPR: val = (cmp == 0); break;
3196 case NE_EXPR: val = (cmp != 0); break;
3197 case LT_EXPR: val = (cmp <= 0); break;
3198 case GT_EXPR: val = (cmp >= 0); break;
3199 case LE_EXPR: val = (cmp < 0); break;
3200 case GE_EXPR: val = (cmp > 0); break;
3201 default:
3202 val = false;
3203 }
3204 if (val)
3205 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3206 }
3207
3208 /* Chose the more restrictive of two < or <= comparisons. */
3209 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
3210 && (code2 == LT_EXPR || code2 == LE_EXPR))
3211 {
3212 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
3213 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3214 else
3215 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3216 }
3217
3218 /* Likewise chose the more restrictive of two > or >= comparisons. */
3219 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
3220 && (code2 == GT_EXPR || code2 == GE_EXPR))
3221 {
3222 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
3223 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3224 else
3225 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3226 }
3227
3228 /* Check for singleton ranges. */
3229 else if (cmp == 0
3230 && ((code1 == LE_EXPR && code2 == GE_EXPR)
3231 || (code1 == GE_EXPR && code2 == LE_EXPR)))
3232 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
3233
3234 /* Check for disjoint ranges. */
3235 else if (cmp <= 0
3236 && (code1 == LT_EXPR || code1 == LE_EXPR)
3237 && (code2 == GT_EXPR || code2 == GE_EXPR))
3238 return boolean_false_node;
3239 else if (cmp >= 0
3240 && (code1 == GT_EXPR || code1 == GE_EXPR)
3241 && (code2 == LT_EXPR || code2 == LE_EXPR))
3242 return boolean_false_node;
3243 }
3244
3245 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
3246 NAME's definition is a truth value. See if there are any simplifications
3247 that can be done against the NAME's definition. */
3248 if (TREE_CODE (op1a) == SSA_NAME
3249 && (code1 == NE_EXPR || code1 == EQ_EXPR)
3250 && (integer_zerop (op1b) || integer_onep (op1b)))
3251 {
3252 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
3253 || (code1 == NE_EXPR && integer_onep (op1b)));
3254 gimple stmt = SSA_NAME_DEF_STMT (op1a);
3255 switch (gimple_code (stmt))
3256 {
3257 case GIMPLE_ASSIGN:
3258 /* Try to simplify by copy-propagating the definition. */
3259 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
3260
3261 case GIMPLE_PHI:
3262 /* If every argument to the PHI produces the same result when
3263 ANDed with the second comparison, we win.
3264 Do not do this unless the type is bool since we need a bool
3265 result here anyway. */
3266 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
3267 {
3268 tree result = NULL_TREE;
3269 unsigned i;
3270 for (i = 0; i < gimple_phi_num_args (stmt); i++)
3271 {
3272 tree arg = gimple_phi_arg_def (stmt, i);
3273
3274 /* If this PHI has itself as an argument, ignore it.
3275 If all the other args produce the same result,
3276 we're still OK. */
3277 if (arg == gimple_phi_result (stmt))
3278 continue;
3279 else if (TREE_CODE (arg) == INTEGER_CST)
3280 {
3281 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
3282 {
3283 if (!result)
3284 result = boolean_false_node;
3285 else if (!integer_zerop (result))
3286 return NULL_TREE;
3287 }
3288 else if (!result)
3289 result = fold_build2 (code2, boolean_type_node,
3290 op2a, op2b);
3291 else if (!same_bool_comparison_p (result,
3292 code2, op2a, op2b))
3293 return NULL_TREE;
3294 }
0e8b84ec
JJ
3295 else if (TREE_CODE (arg) == SSA_NAME
3296 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 3297 {
6c66f733
JJ
3298 tree temp;
3299 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
3300 /* In simple cases we can look through PHI nodes,
3301 but we have to be careful with loops.
3302 See PR49073. */
3303 if (! dom_info_available_p (CDI_DOMINATORS)
3304 || gimple_bb (def_stmt) == gimple_bb (stmt)
3305 || dominated_by_p (CDI_DOMINATORS,
3306 gimple_bb (def_stmt),
3307 gimple_bb (stmt)))
3308 return NULL_TREE;
3309 temp = and_var_with_comparison (arg, invert, code2,
3310 op2a, op2b);
e89065a1
SL
3311 if (!temp)
3312 return NULL_TREE;
3313 else if (!result)
3314 result = temp;
3315 else if (!same_bool_result_p (result, temp))
3316 return NULL_TREE;
3317 }
3318 else
3319 return NULL_TREE;
3320 }
3321 return result;
3322 }
3323
3324 default:
3325 break;
3326 }
3327 }
3328 return NULL_TREE;
3329}
3330
/* Try to simplify the AND of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */
3337
3338tree
3339maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
3340 enum tree_code code2, tree op2a, tree op2b)
3341{
3342 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
3343 if (t)
3344 return t;
3345 else
3346 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
3347}
3348
/* Helper function for or_comparisons_1:  try to simplify the OR of the
   ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
   If INVERT is true, invert the value of VAR before doing the OR.
   Return NULL_EXPR if we can't simplify this to a single expression.  */
3353
3354static tree
3355or_var_with_comparison (tree var, bool invert,
3356 enum tree_code code2, tree op2a, tree op2b)
3357{
3358 tree t;
3359 gimple stmt = SSA_NAME_DEF_STMT (var);
3360
3361 /* We can only deal with variables whose definitions are assignments. */
3362 if (!is_gimple_assign (stmt))
3363 return NULL_TREE;
3364
3365 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3366 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
3367 Then we only have to consider the simpler non-inverted cases. */
3368 if (invert)
3369 t = and_var_with_comparison_1 (stmt,
3370 invert_tree_comparison (code2, false),
3371 op2a, op2b);
3372 else
3373 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
3374 return canonicalize_bool (t, invert);
3375}
3376
/* Try to simplify the OR of the ssa variable defined by the assignment
   STMT with the comparison specified by (OP2A CODE2 OP2B).
   Return NULL_EXPR if we can't simplify this to a single expression.  */
3380
3381static tree
3382or_var_with_comparison_1 (gimple stmt,
3383 enum tree_code code2, tree op2a, tree op2b)
3384{
3385 tree var = gimple_assign_lhs (stmt);
3386 tree true_test_var = NULL_TREE;
3387 tree false_test_var = NULL_TREE;
3388 enum tree_code innercode = gimple_assign_rhs_code (stmt);
3389
3390 /* Check for identities like (var OR (var != 0)) => true . */
3391 if (TREE_CODE (op2a) == SSA_NAME
3392 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
3393 {
3394 if ((code2 == NE_EXPR && integer_zerop (op2b))
3395 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
3396 {
3397 true_test_var = op2a;
3398 if (var == true_test_var)
3399 return var;
3400 }
3401 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
3402 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
3403 {
3404 false_test_var = op2a;
3405 if (var == false_test_var)
3406 return boolean_true_node;
3407 }
3408 }
3409
3410 /* If the definition is a comparison, recurse on it. */
3411 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
3412 {
3413 tree t = or_comparisons_1 (innercode,
3414 gimple_assign_rhs1 (stmt),
3415 gimple_assign_rhs2 (stmt),
3416 code2,
3417 op2a,
3418 op2b);
3419 if (t)
3420 return t;
3421 }
3422
3423 /* If the definition is an AND or OR expression, we may be able to
3424 simplify by reassociating. */
eb9820c0
KT
3425 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
3426 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
3427 {
3428 tree inner1 = gimple_assign_rhs1 (stmt);
3429 tree inner2 = gimple_assign_rhs2 (stmt);
3430 gimple s;
3431 tree t;
3432 tree partial = NULL_TREE;
eb9820c0 3433 bool is_or = (innercode == BIT_IOR_EXPR);
e89065a1
SL
3434
3435 /* Check for boolean identities that don't require recursive examination
3436 of inner1/inner2:
3437 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
3438 inner1 OR (inner1 AND inner2) => inner1
3439 !inner1 OR (inner1 OR inner2) => true
3440 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
3441 */
3442 if (inner1 == true_test_var)
3443 return (is_or ? var : inner1);
3444 else if (inner2 == true_test_var)
3445 return (is_or ? var : inner2);
3446 else if (inner1 == false_test_var)
3447 return (is_or
3448 ? boolean_true_node
3449 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
3450 else if (inner2 == false_test_var)
3451 return (is_or
3452 ? boolean_true_node
3453 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
3454
3455 /* Next, redistribute/reassociate the OR across the inner tests.
3456 Compute the first partial result, (inner1 OR (op2a code op2b)) */
3457 if (TREE_CODE (inner1) == SSA_NAME
3458 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
3459 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
3460 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
3461 gimple_assign_rhs1 (s),
3462 gimple_assign_rhs2 (s),
3463 code2, op2a, op2b)))
3464 {
3465 /* Handle the OR case, where we are reassociating:
3466 (inner1 OR inner2) OR (op2a code2 op2b)
3467 => (t OR inner2)
3468 If the partial result t is a constant, we win. Otherwise
3469 continue on to try reassociating with the other inner test. */
8236c8eb 3470 if (is_or)
e89065a1
SL
3471 {
3472 if (integer_onep (t))
3473 return boolean_true_node;
3474 else if (integer_zerop (t))
3475 return inner2;
3476 }
3477
3478 /* Handle the AND case, where we are redistributing:
3479 (inner1 AND inner2) OR (op2a code2 op2b)
3480 => (t AND (inner2 OR (op2a code op2b))) */
8236c8eb
JJ
3481 else if (integer_zerop (t))
3482 return boolean_false_node;
3483
3484 /* Save partial result for later. */
3485 partial = t;
e89065a1
SL
3486 }
3487
3488 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
3489 if (TREE_CODE (inner2) == SSA_NAME
3490 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
3491 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
3492 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
3493 gimple_assign_rhs1 (s),
3494 gimple_assign_rhs2 (s),
3495 code2, op2a, op2b)))
3496 {
3497 /* Handle the OR case, where we are reassociating:
3498 (inner1 OR inner2) OR (op2a code2 op2b)
8236c8eb
JJ
3499 => (inner1 OR t)
3500 => (t OR partial) */
3501 if (is_or)
e89065a1
SL
3502 {
3503 if (integer_zerop (t))
3504 return inner1;
3505 else if (integer_onep (t))
3506 return boolean_true_node;
8236c8eb
JJ
3507 /* If both are the same, we can apply the identity
3508 (x OR x) == x. */
3509 else if (partial && same_bool_result_p (t, partial))
3510 return t;
e89065a1
SL
3511 }
3512
3513 /* Handle the AND case, where we are redistributing:
3514 (inner1 AND inner2) OR (op2a code2 op2b)
3515 => (t AND (inner1 OR (op2a code2 op2b)))
3516 => (t AND partial) */
3517 else
3518 {
3519 if (integer_zerop (t))
3520 return boolean_false_node;
3521 else if (partial)
3522 {
3523 /* We already got a simplification for the other
3524 operand to the redistributed AND expression. The
3525 interesting case is when at least one is true.
3526 Or, if both are the same, we can apply the identity
8236c8eb 3527 (x AND x) == x. */
e89065a1
SL
3528 if (integer_onep (partial))
3529 return t;
3530 else if (integer_onep (t))
3531 return partial;
3532 else if (same_bool_result_p (t, partial))
8236c8eb 3533 return t;
e89065a1
SL
3534 }
3535 }
3536 }
3537 }
3538 return NULL_TREE;
3539}
3540
/* Try to simplify the OR of two comparisons defined by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be done without constructing an intermediate value,
   return the resulting tree; otherwise NULL_TREE is returned.
   This function is deliberately asymmetric as it recurses on SSA_DEFs
   in the first comparison but not the second.  */
3547
3548static tree
3549or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3550 enum tree_code code2, tree op2a, tree op2b)
3551{
ae22ac3c 3552 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 3553
e89065a1
SL
3554 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
3555 if (operand_equal_p (op1a, op2a, 0)
3556 && operand_equal_p (op1b, op2b, 0))
3557 {
eb9820c0 3558 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
3559 tree t = combine_comparisons (UNKNOWN_LOCATION,
3560 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 3561 truth_type, op1a, op1b);
e89065a1
SL
3562 if (t)
3563 return t;
3564 }
3565
3566 /* Likewise the swapped case of the above. */
3567 if (operand_equal_p (op1a, op2b, 0)
3568 && operand_equal_p (op1b, op2a, 0))
3569 {
eb9820c0 3570 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
3571 tree t = combine_comparisons (UNKNOWN_LOCATION,
3572 TRUTH_ORIF_EXPR, code1,
3573 swap_tree_comparison (code2),
31ed6226 3574 truth_type, op1a, op1b);
e89065a1
SL
3575 if (t)
3576 return t;
3577 }
3578
3579 /* If both comparisons are of the same value against constants, we might
3580 be able to merge them. */
3581 if (operand_equal_p (op1a, op2a, 0)
3582 && TREE_CODE (op1b) == INTEGER_CST
3583 && TREE_CODE (op2b) == INTEGER_CST)
3584 {
3585 int cmp = tree_int_cst_compare (op1b, op2b);
3586
3587 /* If we have (op1a != op1b), we should either be able to
3588 return that or TRUE, depending on whether the constant op1b
3589 also satisfies the other comparison against op2b. */
3590 if (code1 == NE_EXPR)
3591 {
3592 bool done = true;
3593 bool val;
3594 switch (code2)
3595 {
3596 case EQ_EXPR: val = (cmp == 0); break;
3597 case NE_EXPR: val = (cmp != 0); break;
3598 case LT_EXPR: val = (cmp < 0); break;
3599 case GT_EXPR: val = (cmp > 0); break;
3600 case LE_EXPR: val = (cmp <= 0); break;
3601 case GE_EXPR: val = (cmp >= 0); break;
3602 default: done = false;
3603 }
3604 if (done)
3605 {
3606 if (val)
3607 return boolean_true_node;
3608 else
3609 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3610 }
3611 }
3612 /* Likewise if the second comparison is a != comparison. */
3613 else if (code2 == NE_EXPR)
3614 {
3615 bool done = true;
3616 bool val;
3617 switch (code1)
3618 {
3619 case EQ_EXPR: val = (cmp == 0); break;
3620 case NE_EXPR: val = (cmp != 0); break;
3621 case LT_EXPR: val = (cmp > 0); break;
3622 case GT_EXPR: val = (cmp < 0); break;
3623 case LE_EXPR: val = (cmp >= 0); break;
3624 case GE_EXPR: val = (cmp <= 0); break;
3625 default: done = false;
3626 }
3627 if (done)
3628 {
3629 if (val)
3630 return boolean_true_node;
3631 else
3632 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3633 }
3634 }
3635
3636 /* See if an equality test is redundant with the other comparison. */
3637 else if (code1 == EQ_EXPR)
3638 {
3639 bool val;
3640 switch (code2)
3641 {
3642 case EQ_EXPR: val = (cmp == 0); break;
3643 case NE_EXPR: val = (cmp != 0); break;
3644 case LT_EXPR: val = (cmp < 0); break;
3645 case GT_EXPR: val = (cmp > 0); break;
3646 case LE_EXPR: val = (cmp <= 0); break;
3647 case GE_EXPR: val = (cmp >= 0); break;
3648 default:
3649 val = false;
3650 }
3651 if (val)
3652 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3653 }
3654 else if (code2 == EQ_EXPR)
3655 {
3656 bool val;
3657 switch (code1)
3658 {
3659 case EQ_EXPR: val = (cmp == 0); break;
3660 case NE_EXPR: val = (cmp != 0); break;
3661 case LT_EXPR: val = (cmp > 0); break;
3662 case GT_EXPR: val = (cmp < 0); break;
3663 case LE_EXPR: val = (cmp >= 0); break;
3664 case GE_EXPR: val = (cmp <= 0); break;
3665 default:
3666 val = false;
3667 }
3668 if (val)
3669 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3670 }
3671
3672 /* Chose the less restrictive of two < or <= comparisons. */
3673 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
3674 && (code2 == LT_EXPR || code2 == LE_EXPR))
3675 {
3676 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
3677 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3678 else
3679 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3680 }
3681
3682 /* Likewise chose the less restrictive of two > or >= comparisons. */
3683 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
3684 && (code2 == GT_EXPR || code2 == GE_EXPR))
3685 {
3686 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
3687 return fold_build2 (code2, boolean_type_node, op2a, op2b);
3688 else
3689 return fold_build2 (code1, boolean_type_node, op1a, op1b);
3690 }
3691
3692 /* Check for singleton ranges. */
3693 else if (cmp == 0
3694 && ((code1 == LT_EXPR && code2 == GT_EXPR)
3695 || (code1 == GT_EXPR && code2 == LT_EXPR)))
3696 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
3697
3698 /* Check for less/greater pairs that don't restrict the range at all. */
3699 else if (cmp >= 0
3700 && (code1 == LT_EXPR || code1 == LE_EXPR)
3701 && (code2 == GT_EXPR || code2 == GE_EXPR))
3702 return boolean_true_node;
3703 else if (cmp <= 0
3704 && (code1 == GT_EXPR || code1 == GE_EXPR)
3705 && (code2 == LT_EXPR || code2 == LE_EXPR))
3706 return boolean_true_node;
3707 }
3708
3709 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
3710 NAME's definition is a truth value. See if there are any simplifications
3711 that can be done against the NAME's definition. */
3712 if (TREE_CODE (op1a) == SSA_NAME
3713 && (code1 == NE_EXPR || code1 == EQ_EXPR)
3714 && (integer_zerop (op1b) || integer_onep (op1b)))
3715 {
3716 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
3717 || (code1 == NE_EXPR && integer_onep (op1b)));
3718 gimple stmt = SSA_NAME_DEF_STMT (op1a);
3719 switch (gimple_code (stmt))
3720 {
3721 case GIMPLE_ASSIGN:
3722 /* Try to simplify by copy-propagating the definition. */
3723 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
3724
3725 case GIMPLE_PHI:
3726 /* If every argument to the PHI produces the same result when
3727 ORed with the second comparison, we win.
3728 Do not do this unless the type is bool since we need a bool
3729 result here anyway. */
3730 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
3731 {
3732 tree result = NULL_TREE;
3733 unsigned i;
3734 for (i = 0; i < gimple_phi_num_args (stmt); i++)
3735 {
3736 tree arg = gimple_phi_arg_def (stmt, i);
3737
3738 /* If this PHI has itself as an argument, ignore it.
3739 If all the other args produce the same result,
3740 we're still OK. */
3741 if (arg == gimple_phi_result (stmt))
3742 continue;
3743 else if (TREE_CODE (arg) == INTEGER_CST)
3744 {
3745 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
3746 {
3747 if (!result)
3748 result = boolean_true_node;
3749 else if (!integer_onep (result))
3750 return NULL_TREE;
3751 }
3752 else if (!result)
3753 result = fold_build2 (code2, boolean_type_node,
3754 op2a, op2b);
3755 else if (!same_bool_comparison_p (result,
3756 code2, op2a, op2b))
3757 return NULL_TREE;
3758 }
0e8b84ec
JJ
3759 else if (TREE_CODE (arg) == SSA_NAME
3760 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 3761 {
6c66f733
JJ
3762 tree temp;
3763 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
3764 /* In simple cases we can look through PHI nodes,
3765 but we have to be careful with loops.
3766 See PR49073. */
3767 if (! dom_info_available_p (CDI_DOMINATORS)
3768 || gimple_bb (def_stmt) == gimple_bb (stmt)
3769 || dominated_by_p (CDI_DOMINATORS,
3770 gimple_bb (def_stmt),
3771 gimple_bb (stmt)))
3772 return NULL_TREE;
3773 temp = or_var_with_comparison (arg, invert, code2,
3774 op2a, op2b);
e89065a1
SL
3775 if (!temp)
3776 return NULL_TREE;
3777 else if (!result)
3778 result = temp;
3779 else if (!same_bool_result_p (result, temp))
3780 return NULL_TREE;
3781 }
3782 else
3783 return NULL_TREE;
3784 }
3785 return result;
3786 }
3787
3788 default:
3789 break;
3790 }
3791 }
3792 return NULL_TREE;
3793}
3794
/* Try to simplify the OR of two comparisons, specified by
   (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
   If this can be simplified to a single expression (without requiring
   introducing more SSA variables to hold intermediate values),
   return the resulting tree.  Otherwise return NULL_TREE.
   If the result expression is non-null, it has boolean type.  */
3801
3802tree
3803maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
3804 enum tree_code code2, tree op2a, tree op2b)
3805{
3806 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
3807 if (t)
3808 return t;
3809 else
3810 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
3811}


/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   ??? This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

3823tree
3824gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree))
3825{
3826 location_t loc = gimple_location (stmt);
3827 switch (gimple_code (stmt))
3828 {
3829 case GIMPLE_ASSIGN:
3830 {
3831 enum tree_code subcode = gimple_assign_rhs_code (stmt);
3832
3833 switch (get_gimple_rhs_class (subcode))
3834 {
3835 case GIMPLE_SINGLE_RHS:
3836 {
3837 tree rhs = gimple_assign_rhs1 (stmt);
3838 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
3839
3840 if (TREE_CODE (rhs) == SSA_NAME)
3841 {
3842 /* If the RHS is an SSA_NAME, return its known constant value,
3843 if any. */
3844 return (*valueize) (rhs);
3845 }
3846 /* Handle propagating invariant addresses into address
3847 operations. */
3848 else if (TREE_CODE (rhs) == ADDR_EXPR
3849 && !is_gimple_min_invariant (rhs))
3850 {
d25c4172 3851 HOST_WIDE_INT offset = 0;
cfef45c8
RG
3852 tree base;
3853 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
3854 &offset,
3855 valueize);
3856 if (base
3857 && (CONSTANT_CLASS_P (base)
3858 || decl_address_invariant_p (base)))
3859 return build_invariant_address (TREE_TYPE (rhs),
3860 base, offset);
3861 }
3862 else if (TREE_CODE (rhs) == CONSTRUCTOR
3863 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
3864 && (CONSTRUCTOR_NELTS (rhs)
3865 == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
3866 {
3867 unsigned i;
d2a12ae7 3868 tree val, *vec;
cfef45c8 3869
d2a12ae7
RG
3870 vec = XALLOCAVEC (tree,
3871 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
cfef45c8
RG
3872 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
3873 {
3874 val = (*valueize) (val);
3875 if (TREE_CODE (val) == INTEGER_CST
3876 || TREE_CODE (val) == REAL_CST
3877 || TREE_CODE (val) == FIXED_CST)
d2a12ae7 3878 vec[i] = val;
cfef45c8
RG
3879 else
3880 return NULL_TREE;
3881 }
3882
d2a12ae7 3883 return build_vector (TREE_TYPE (rhs), vec);
cfef45c8 3884 }
bdf37f7a
JH
3885 if (subcode == OBJ_TYPE_REF)
3886 {
3887 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
3888 /* If callee is constant, we can fold away the wrapper. */
3889 if (is_gimple_min_invariant (val))
3890 return val;
3891 }
cfef45c8
RG
3892
3893 if (kind == tcc_reference)
3894 {
3895 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
3896 || TREE_CODE (rhs) == REALPART_EXPR
3897 || TREE_CODE (rhs) == IMAGPART_EXPR)
3898 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
3899 {
3900 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
3901 return fold_unary_loc (EXPR_LOCATION (rhs),
3902 TREE_CODE (rhs),
3903 TREE_TYPE (rhs), val);
3904 }
3905 else if (TREE_CODE (rhs) == BIT_FIELD_REF
3906 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
3907 {
3908 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
3909 return fold_ternary_loc (EXPR_LOCATION (rhs),
3910 TREE_CODE (rhs),
3911 TREE_TYPE (rhs), val,
3912 TREE_OPERAND (rhs, 1),
3913 TREE_OPERAND (rhs, 2));
3914 }
3915 else if (TREE_CODE (rhs) == MEM_REF
3916 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
3917 {
3918 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
3919 if (TREE_CODE (val) == ADDR_EXPR
3920 && is_gimple_min_invariant (val))
3921 {
3922 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
3923 unshare_expr (val),
3924 TREE_OPERAND (rhs, 1));
3925 if (tem)
3926 rhs = tem;
3927 }
3928 }
3929 return fold_const_aggregate_ref_1 (rhs, valueize);
3930 }
3931 else if (kind == tcc_declaration)
3932 return get_symbol_constant_value (rhs);
3933 return rhs;
3934 }
3935
3936 case GIMPLE_UNARY_RHS:
3937 {
3938 /* Handle unary operators that can appear in GIMPLE form.
3939 Note that we know the single operand must be a constant,
3940 so this should almost always return a simplified RHS. */
cfef45c8
RG
3941 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
3942
cfef45c8
RG
3943 return
3944 fold_unary_ignore_overflow_loc (loc, subcode,
3945 gimple_expr_type (stmt), op0);
3946 }
3947
3948 case GIMPLE_BINARY_RHS:
3949 {
3950 /* Handle binary operators that can appear in GIMPLE form. */
3951 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
3952 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
3953
3954 /* Translate &x + CST into an invariant form suitable for
3955 further propagation. */
3956 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
3957 && TREE_CODE (op0) == ADDR_EXPR
3958 && TREE_CODE (op1) == INTEGER_CST)
3959 {
3960 tree off = fold_convert (ptr_type_node, op1);
4d59a001
RG
3961 return build_fold_addr_expr_loc
3962 (loc,
3963 fold_build2 (MEM_REF,
cfef45c8
RG
3964 TREE_TYPE (TREE_TYPE (op0)),
3965 unshare_expr (op0), off));
3966 }
3967
3968 return fold_binary_loc (loc, subcode,
3969 gimple_expr_type (stmt), op0, op1);
3970 }
3971
3972 case GIMPLE_TERNARY_RHS:
3973 {
3974 /* Handle ternary operators that can appear in GIMPLE form. */
3975 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
3976 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
3977 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
3978
d3878abf
RG
3979 /* Fold embedded expressions in ternary codes. */
3980 if ((subcode == COND_EXPR
3981 || subcode == VEC_COND_EXPR)
3982 && COMPARISON_CLASS_P (op0))
3983 {
3984 tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
3985 tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
3986 tree tem = fold_binary_loc (loc, TREE_CODE (op0),
3987 TREE_TYPE (op0), op00, op01);
3988 if (tem)
3989 op0 = tem;
3990 }
3991
cfef45c8
RG
3992 return fold_ternary_loc (loc, subcode,
3993 gimple_expr_type (stmt), op0, op1, op2);
3994 }
3995
3996 default:
3997 gcc_unreachable ();
3998 }
3999 }
4000
4001 case GIMPLE_CALL:
4002 {
25583c4f
RS
4003 tree fn;
4004
4005 if (gimple_call_internal_p (stmt))
31e071ae
MP
4006 {
4007 enum tree_code subcode = ERROR_MARK;
4008 switch (gimple_call_internal_fn (stmt))
4009 {
4010 case IFN_UBSAN_CHECK_ADD:
4011 subcode = PLUS_EXPR;
4012 break;
4013 case IFN_UBSAN_CHECK_SUB:
4014 subcode = MINUS_EXPR;
4015 break;
4016 case IFN_UBSAN_CHECK_MUL:
4017 subcode = MULT_EXPR;
4018 break;
4019 default:
4020 return NULL_TREE;
4021 }
368b454d
JJ
4022 tree arg0 = gimple_call_arg (stmt, 0);
4023 tree arg1 = gimple_call_arg (stmt, 1);
4024 tree op0 = (*valueize) (arg0);
4025 tree op1 = (*valueize) (arg1);
31e071ae
MP
4026
4027 if (TREE_CODE (op0) != INTEGER_CST
4028 || TREE_CODE (op1) != INTEGER_CST)
368b454d
JJ
4029 {
4030 switch (subcode)
4031 {
4032 case MULT_EXPR:
4033 /* x * 0 = 0 * x = 0 without overflow. */
4034 if (integer_zerop (op0) || integer_zerop (op1))
4035 return build_zero_cst (TREE_TYPE (arg0));
4036 break;
4037 case MINUS_EXPR:
4038 /* y - y = 0 without overflow. */
4039 if (operand_equal_p (op0, op1, 0))
4040 return build_zero_cst (TREE_TYPE (arg0));
4041 break;
4042 default:
4043 break;
4044 }
4045 }
4046 tree res
4047 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
31e071ae
MP
4048 if (res
4049 && TREE_CODE (res) == INTEGER_CST
4050 && !TREE_OVERFLOW (res))
4051 return res;
4052 return NULL_TREE;
4053 }
25583c4f
RS
4054
4055 fn = (*valueize) (gimple_call_fn (stmt));
cfef45c8
RG
4056 if (TREE_CODE (fn) == ADDR_EXPR
4057 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5c944c6c
RB
4058 && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
4059 && gimple_builtin_call_types_compatible_p (stmt,
4060 TREE_OPERAND (fn, 0)))
cfef45c8
RG
4061 {
4062 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
4063 tree call, retval;
4064 unsigned i;
4065 for (i = 0; i < gimple_call_num_args (stmt); ++i)
4066 args[i] = (*valueize) (gimple_call_arg (stmt, i));
4067 call = build_call_array_loc (loc,
4068 gimple_call_return_type (stmt),
4069 fn, gimple_call_num_args (stmt), args);
4070 retval = fold_call_expr (EXPR_LOCATION (call), call, false);
4071 if (retval)
5c944c6c
RB
4072 {
4073 /* fold_call_expr wraps the result inside a NOP_EXPR. */
4074 STRIP_NOPS (retval);
4075 retval = fold_convert (gimple_call_return_type (stmt), retval);
4076 }
cfef45c8
RG
4077 return retval;
4078 }
4079 return NULL_TREE;
4080 }
4081
4082 default:
4083 return NULL_TREE;
4084 }
4085}
4086
4087/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
4088 Returns NULL_TREE if folding to a constant is not possible, otherwise
4089 returns a constant according to is_gimple_min_invariant. */
4090
4091tree
4092gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
4093{
4094 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
4095 if (res && is_gimple_min_invariant (res))
4096 return res;
4097 return NULL_TREE;
4098}
4099
4100
4101/* The following set of functions are supposed to fold references using
4102 their constant initializers. */
4103
4104static tree fold_ctor_reference (tree type, tree ctor,
4105 unsigned HOST_WIDE_INT offset,
c44c2088 4106 unsigned HOST_WIDE_INT size, tree);
cfef45c8
RG
4107
/* See if we can find the constructor defining the value of BASE.
   When we know the constructor with a constant offset (such as
   BASE being array[40] and we do know the constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when the constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.

   VALUEIZE, if non-NULL, maps an SSA name to a known constant value
   and is used to look through a MEM_REF whose address is an SSA name.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant byte offset into *BIT_OFFSET;
	 give up if it does not fit a signed HOST_WIDE_INT.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Accumulate the nested reference's constant offset and recurse
	 on its base.  Variable-sized accesses cannot be resolved.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
4175
cfef45c8
RG
/* CTOR is a CONSTRUCTOR of an array type.  Fold a reference of type TYPE
   and size SIZE (in bits) to the memory at bit OFFSET.  FROM_DECL, if
   non-NULL, is the decl the constructor originates from and is passed
   through to fold_ctor_reference.  Returns NULL_TREE when the access
   cannot be resolved.  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects make no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects make no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;
  /* Truncate the index to the precision and signedness of the array's
     index type so comparisons against constructor indexes are exact.  */
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  /* Start one below the low bound so the "next index" rule in the loop
     yields LOW_BOUND for the first element with a NULL index.  */
  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitly set index, or specify range
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitly mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
4275
/* CTOR is a CONSTRUCTOR of an aggregate or vector.  Fold a reference of
   type TYPE and size SIZE (in bits) to the memory at bit OFFSET.
   FROM_DECL is passed through to fold_ctor_reference.  Returns NULL_TREE
   when the access spans multiple fields or only partially overlaps one.  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  A flexible array
	 member (NULL field_size) notionally extends to the end.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitly mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
4339
/* CTOR is a value initializing memory.  Fold a reference of type TYPE and
   size SIZE (in bits) to the memory at bit OFFSET.  FROM_DECL, if non-NULL,
   is the decl CTOR initializes and is forwarded to
   canonicalize_constructor_val and the array/nonarray helpers.
   Returns NULL_TREE when no constant can be extracted.  */

static tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && operand_equal_p (TYPE_SIZE (type),
			  TYPE_SIZE (TREE_TYPE (ctor)), 0))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_NOPS (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
			      offset / BITS_PER_UNIT) > 0)
	return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }
  /* Dispatch CONSTRUCTORs by shape: arrays/vectors are indexed by
     element, everything else is searched by field offsets.  */
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {

      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
4394
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates, valueizing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  /* Never fold away a volatile access.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  /* Reads of characters out of string literals fold directly.  */
  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
4511
/* Like fold_const_aggregate_ref_1, but without valueization of SSA names:
   fold a reference T into constant aggregates, or return NULL_TREE.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
06bc3ec7 4517
/* Lookup virtual method with index TOKEN in a virtual table V
   at OFFSET.
   Set CAN_REFER if non-NULL to false if method
   is not referable or if the virtual table is ill-formed (such as rewritten
   by non-C++ produced symbol).  Otherwise just return NULL in that case.  */

tree
gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
				   tree v,
				   unsigned HOST_WIDE_INT offset,
				   bool *can_refer)
{
  tree vtable = v, init, fn;
  unsigned HOST_WIDE_INT size;
  unsigned HOST_WIDE_INT elt_size, access_index;
  tree domain_type;

  if (can_refer)
    *can_refer = true;

  /* First of all double check we have virtual table.  */
  if (TREE_CODE (v) != VAR_DECL
      || !DECL_VIRTUAL_P (v))
    {
      /* Only happens with mismatched LTO symbols; a non-LTO build
	 should always see a proper vtable decl here.  */
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }

  init = ctor_for_folding (v);

  /* The virtual tables should always be born with constructors
     and we always should assume that they are available for
     folding.  At the moment we do not stream them in all cases,
     but it should never happen that ctor seem unreachable.  */
  gcc_assert (init);
  if (init == error_mark_node)
    {
      gcc_assert (in_lto_p);
      /* Pass down that we lost track of the target.  */
      if (can_refer)
	*can_refer = false;
      return NULL_TREE;
    }
  gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
  size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
  /* Convert OFFSET and TOKEN into a bit offset into the vtable array.  */
  offset *= BITS_PER_UNIT;
  offset += token * size;

  /* Lookup the value in the constructor that is assumed to be array.
     This is equivalent to
     fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
			       offset, size, NULL);
     but in a constant time.  We expect that frontend produced a simple
     array without indexed initializers.  */

  gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
  domain_type = TYPE_DOMAIN (TREE_TYPE (init));
  gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
  elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));

  access_index = offset / BITS_PER_UNIT / elt_size;
  gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);

  /* This code makes an assumption that there are no
     indexed fields produced by C++ FE, so we can directly index the array. */
  if (access_index < CONSTRUCTOR_NELTS (init))
    {
      fn = CONSTRUCTOR_ELT (init, access_index)->value;
      gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
      STRIP_NOPS (fn);
    }
  else
    fn = NULL;

  /* For type inconsistent program we may end up looking up virtual method
     in virtual table that does not contain TOKEN entries.  We may overrun
     the virtual table and pick up a constant or RTTI info pointer.
     In any case the call is undefined.  */
  if (!fn
      || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
      || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
    fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  else
    {
      fn = TREE_OPERAND (fn, 0);

      /* When cgraph node is missing and function is not public, we cannot
	 devirtualize.  This can happen in WHOPR when the actual method
	 ends up in other partition, because we found devirtualization
	 possibility too late.  */
      if (!can_refer_decl_in_current_unit_p (fn, vtable))
	{
	  if (can_refer)
	    {
	      *can_refer = false;
	      return fn;
	    }
	  return NULL_TREE;
	}
    }

  /* Make sure we create a cgraph node for functions we'll reference.
     They can be non-existent if the reference comes from an entry
     of an external vtable for example.  */
  cgraph_node::get_create (fn);

  return fn;
}
4629
85942f45
JH
4630/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
4631 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
4632 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
4633 OBJ_TYPE_REF_OBJECT(REF).
4634 Set CAN_REFER if non-NULL to false if method
4635 is not referable or if the virtual table is ill-formed (such as rewriten
4636 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
85942f45
JH
4637
4638tree
ec77d61f
JH
4639gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
4640 bool *can_refer)
85942f45
JH
4641{
4642 unsigned HOST_WIDE_INT offset;
4643 tree v;
4644
4645 v = BINFO_VTABLE (known_binfo);
4646 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
4647 if (!v)
4648 return NULL_TREE;
4649
4650 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
4651 {
4652 if (can_refer)
4653 *can_refer = false;
4654 return NULL_TREE;
4655 }
4656 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
4657}
4658
06bc3ec7
BS
4659/* Return true iff VAL is a gimple expression that is known to be
4660 non-negative. Restricted to floating-point inputs. */
4661
4662bool
4663gimple_val_nonnegative_real_p (tree val)
4664{
4665 gimple def_stmt;
4666
4667 gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));
4668
4669 /* Use existing logic for non-gimple trees. */
4670 if (tree_expr_nonnegative_p (val))
4671 return true;
4672
4673 if (TREE_CODE (val) != SSA_NAME)
4674 return false;
4675
4676 /* Currently we look only at the immediately defining statement
4677 to make this determination, since recursion on defining
4678 statements of operands can lead to quadratic behavior in the
4679 worst case. This is expected to catch almost all occurrences
4680 in practice. It would be possible to implement limited-depth
4681 recursion if important cases are lost. Alternatively, passes
4682 that need this information (such as the pow/powi lowering code
4683 in the cse_sincos pass) could be revised to provide it through
4684 dataflow propagation. */
4685
4686 def_stmt = SSA_NAME_DEF_STMT (val);
4687
4688 if (is_gimple_assign (def_stmt))
4689 {
4690 tree op0, op1;
4691
4692 /* See fold-const.c:tree_expr_nonnegative_p for additional
4693 cases that could be handled with recursion. */
4694
4695 switch (gimple_assign_rhs_code (def_stmt))
4696 {
4697 case ABS_EXPR:
4698 /* Always true for floating-point operands. */
4699 return true;
4700
4701 case MULT_EXPR:
4702 /* True if the two operands are identical (since we are
4703 restricted to floating-point inputs). */
4704 op0 = gimple_assign_rhs1 (def_stmt);
4705 op1 = gimple_assign_rhs2 (def_stmt);
4706
4707 if (op0 == op1
4708 || operand_equal_p (op0, op1, 0))
4709 return true;
4710
4711 default:
4712 return false;
4713 }
4714 }
4715 else if (is_gimple_call (def_stmt))
4716 {
4717 tree fndecl = gimple_call_fndecl (def_stmt);
4718 if (fndecl
4719 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
4720 {
4721 tree arg1;
4722
4723 switch (DECL_FUNCTION_CODE (fndecl))
4724 {
4725 CASE_FLT_FN (BUILT_IN_ACOS):
4726 CASE_FLT_FN (BUILT_IN_ACOSH):
4727 CASE_FLT_FN (BUILT_IN_CABS):
4728 CASE_FLT_FN (BUILT_IN_COSH):
4729 CASE_FLT_FN (BUILT_IN_ERFC):
4730 CASE_FLT_FN (BUILT_IN_EXP):
4731 CASE_FLT_FN (BUILT_IN_EXP10):
4732 CASE_FLT_FN (BUILT_IN_EXP2):
4733 CASE_FLT_FN (BUILT_IN_FABS):
4734 CASE_FLT_FN (BUILT_IN_FDIM):
4735 CASE_FLT_FN (BUILT_IN_HYPOT):
4736 CASE_FLT_FN (BUILT_IN_POW10):
4737 return true;
4738
4739 CASE_FLT_FN (BUILT_IN_SQRT):
4740 /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
4741 nonnegative inputs. */
4742 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (val))))
4743 return true;
4744
4745 break;
4746
4747 CASE_FLT_FN (BUILT_IN_POWI):
4748 /* True if the second argument is an even integer. */
4749 arg1 = gimple_call_arg (def_stmt, 1);
4750
4751 if (TREE_CODE (arg1) == INTEGER_CST
4752 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
4753 return true;
4754
4755 break;
4756
4757 CASE_FLT_FN (BUILT_IN_POW):
4758 /* True if the second argument is an even integer-valued
4759 real. */
4760 arg1 = gimple_call_arg (def_stmt, 1);
4761
4762 if (TREE_CODE (arg1) == REAL_CST)
4763 {
4764 REAL_VALUE_TYPE c;
4765 HOST_WIDE_INT n;
4766
4767 c = TREE_REAL_CST (arg1);
4768 n = real_to_integer (&c);
4769
4770 if ((n & 1) == 0)
4771 {
4772 REAL_VALUE_TYPE cint;
807e902e 4773 real_from_integer (&cint, VOIDmode, n, SIGNED);
06bc3ec7
BS
4774 if (real_identical (&c, &cint))
4775 return true;
4776 }
4777 }
4778
4779 break;
4780
4781 default:
4782 return false;
4783 }
4784 }
4785 }
4786
4787 return false;
4788}
b184c8f1
AM
4789
4790/* Given a pointer value OP0, return a simplified version of an
4791 indirection through OP0, or NULL_TREE if no simplification is
4792 possible. Note that the resulting type may be different from
4793 the type pointed to in the sense that it is still compatible
4794 from the langhooks point of view. */
4795
4796tree
4797gimple_fold_indirect_ref (tree t)
4798{
4799 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
4800 tree sub = t;
4801 tree subtype;
4802
4803 STRIP_NOPS (sub);
4804 subtype = TREE_TYPE (sub);
4805 if (!POINTER_TYPE_P (subtype))
4806 return NULL_TREE;
4807
4808 if (TREE_CODE (sub) == ADDR_EXPR)
4809 {
4810 tree op = TREE_OPERAND (sub, 0);
4811 tree optype = TREE_TYPE (op);
4812 /* *&p => p */
4813 if (useless_type_conversion_p (type, optype))
4814 return op;
4815
4816 /* *(foo *)&fooarray => fooarray[0] */
4817 if (TREE_CODE (optype) == ARRAY_TYPE
4818 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
4819 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4820 {
4821 tree type_domain = TYPE_DOMAIN (optype);
4822 tree min_val = size_zero_node;
4823 if (type_domain && TYPE_MIN_VALUE (type_domain))
4824 min_val = TYPE_MIN_VALUE (type_domain);
4825 if (TREE_CODE (min_val) == INTEGER_CST)
4826 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
4827 }
4828 /* *(foo *)&complexfoo => __real__ complexfoo */
4829 else if (TREE_CODE (optype) == COMPLEX_TYPE
4830 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4831 return fold_build1 (REALPART_EXPR, type, op);
4832 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
4833 else if (TREE_CODE (optype) == VECTOR_TYPE
4834 && useless_type_conversion_p (type, TREE_TYPE (optype)))
4835 {
4836 tree part_width = TYPE_SIZE (type);
4837 tree index = bitsize_int (0);
4838 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
4839 }
4840 }
4841
4842 /* *(p + CST) -> ... */
4843 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
4844 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
4845 {
4846 tree addr = TREE_OPERAND (sub, 0);
4847 tree off = TREE_OPERAND (sub, 1);
4848 tree addrtype;
4849
4850 STRIP_NOPS (addr);
4851 addrtype = TREE_TYPE (addr);
4852
4853 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
4854 if (TREE_CODE (addr) == ADDR_EXPR
4855 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
4856 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
cc269bb6 4857 && tree_fits_uhwi_p (off))
b184c8f1 4858 {
ae7e9ddd 4859 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
b184c8f1
AM
4860 tree part_width = TYPE_SIZE (type);
4861 unsigned HOST_WIDE_INT part_widthi
9439e9a1 4862 = tree_to_shwi (part_width) / BITS_PER_UNIT;
b184c8f1
AM
4863 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
4864 tree index = bitsize_int (indexi);
4865 if (offset / part_widthi
e934916c 4866 < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
b184c8f1
AM
4867 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
4868 part_width, index);
4869 }
4870
4871 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
4872 if (TREE_CODE (addr) == ADDR_EXPR
4873 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
4874 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
4875 {
4876 tree size = TYPE_SIZE_UNIT (type);
4877 if (tree_int_cst_equal (size, off))
4878 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
4879 }
4880
4881 /* *(p + CST) -> MEM_REF <p, CST>. */
4882 if (TREE_CODE (addr) != ADDR_EXPR
4883 || DECL_P (TREE_OPERAND (addr, 0)))
4884 return fold_build2 (MEM_REF, type,
4885 addr,
807e902e 4886 wide_int_to_tree (ptype, off));
b184c8f1
AM
4887 }
4888
4889 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
4890 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
4891 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
4892 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
4893 {
4894 tree type_domain;
4895 tree min_val = size_zero_node;
4896 tree osub = sub;
4897 sub = gimple_fold_indirect_ref (sub);
4898 if (! sub)
4899 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
4900 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
4901 if (type_domain && TYPE_MIN_VALUE (type_domain))
4902 min_val = TYPE_MIN_VALUE (type_domain);
4903 if (TREE_CODE (min_val) == INTEGER_CST)
4904 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
4905 }
4906
4907 return NULL_TREE;
4908}
19e51b40
JJ
4909
4910/* Return true if CODE is an operation that when operating on signed
4911 integer types involves undefined behavior on overflow and the
4912 operation can be expressed with unsigned arithmetic. */
4913
4914bool
4915arith_code_with_undefined_signed_overflow (tree_code code)
4916{
4917 switch (code)
4918 {
4919 case PLUS_EXPR:
4920 case MINUS_EXPR:
4921 case MULT_EXPR:
4922 case NEGATE_EXPR:
4923 case POINTER_PLUS_EXPR:
4924 return true;
4925 default:
4926 return false;
4927 }
4928}
4929
4930/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
4931 operation that can be transformed to unsigned arithmetic by converting
4932 its operand, carrying out the operation in the corresponding unsigned
4933 type and converting the result back to the original type.
4934
4935 Returns a sequence of statements that replace STMT and also contain
4936 a modified form of STMT itself. */
4937
4938gimple_seq
4939rewrite_to_defined_overflow (gimple stmt)
4940{
4941 if (dump_file && (dump_flags & TDF_DETAILS))
4942 {
4943 fprintf (dump_file, "rewriting stmt with undefined signed "
4944 "overflow ");
4945 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
4946 }
4947
4948 tree lhs = gimple_assign_lhs (stmt);
4949 tree type = unsigned_type_for (TREE_TYPE (lhs));
4950 gimple_seq stmts = NULL;
4951 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
4952 {
4953 gimple_seq stmts2 = NULL;
4954 gimple_set_op (stmt, i,
4955 force_gimple_operand (fold_convert (type,
4956 gimple_op (stmt, i)),
4957 &stmts2, true, NULL_TREE));
4958 gimple_seq_add_seq (&stmts, stmts2);
4959 }
4960 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
4961 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
4962 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
4963 gimple_seq_add_stmt (&stmts, stmt);
4964 gimple cvt = gimple_build_assign_with_ops
4965 (NOP_EXPR, lhs, gimple_assign_lhs (stmt), NULL_TREE);
4966 gimple_seq_add_stmt (&stmts, cvt);
4967
4968 return stmts;
4969}
This page took 2.100672 seconds and 5 git commands to generate.