]> gcc.gnu.org Git - gcc.git/blame - gcc/gimple-fold.c
Update copyright years.
[gcc.git] / gcc / gimple-fold.c
CommitLineData
cbdd87d4 1/* Statement simplification on GIMPLE.
5624e564 2 Copyright (C) 2010-2015 Free Software Foundation, Inc.
cbdd87d4
RG
3 Split out from tree-ssa-ccp.c.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify it
8under the terms of the GNU General Public License as published by the
9Free Software Foundation; either version 3, or (at your option) any
10later version.
11
12GCC is distributed in the hope that it will be useful, but WITHOUT
13ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
d8a2d370
DN
26#include "stringpool.h"
27#include "expr.h"
28#include "stmt.h"
29#include "stor-layout.h"
cbdd87d4 30#include "flags.h"
83685514
AM
31#include "hashtab.h"
32#include "hash-set.h"
33#include "vec.h"
34#include "machmode.h"
35#include "hard-reg-set.h"
36#include "input.h"
cbdd87d4 37#include "function.h"
7ee2468b 38#include "dumpfile.h"
442b4905 39#include "bitmap.h"
60393bbc
AM
40#include "predict.h"
41#include "dominance.h"
2fb9a547
AM
42#include "basic-block.h"
43#include "tree-ssa-alias.h"
44#include "internal-fn.h"
45#include "gimple-fold.h"
46#include "gimple-expr.h"
47#include "is-a.h"
18f429e2 48#include "gimple.h"
45b0be94 49#include "gimplify.h"
5be5c238 50#include "gimple-iterator.h"
442b4905
AM
51#include "gimple-ssa.h"
52#include "tree-ssanames.h"
53#include "tree-into-ssa.h"
54#include "tree-dfa.h"
7a300452 55#include "tree-ssa.h"
cbdd87d4 56#include "tree-ssa-propagate.h"
cbdd87d4 57#include "target.h"
c582198b
AM
58#include "hash-map.h"
59#include "plugin-api.h"
60#include "ipa-ref.h"
61#include "cgraph.h"
450ad0cd
JH
62#include "ipa-utils.h"
63#include "gimple-pretty-print.h"
4484a35a 64#include "tree-ssa-address.h"
862d0b35 65#include "langhooks.h"
19e51b40 66#include "gimplify-me.h"
2b5f0895 67#include "dbgcnt.h"
9b2b7279 68#include "builtins.h"
fef5a0d9 69#include "output.h"
e0ee10ed
RB
70#include "tree-eh.h"
71#include "gimple-match.h"
45cc9f96
RB
72#include "tree-phinodes.h"
73#include "ssa-iterators.h"
cbdd87d4 74
/* Return true when DECL can be referenced from current unit.
   FROM_DECL (if non-null) specifies constructor of variable DECL was taken
   from.
   We can get declarations that are not possible to reference for various
   reasons:

     1) When analyzing C++ virtual tables.
	C++ virtual tables do have known constructors even
	when they are keyed to other compilation unit.
	Those tables can contain pointers to methods and vars
	in other units.  Those methods have both STATIC and EXTERNAL
	set.
     2) In WHOPR mode devirtualization might lead to reference
	to method that was partitioned elsewhere.
	In this case we have static VAR_DECL or FUNCTION_DECL
	that has no corresponding callgraph/varpool node
	declaring the body.
     3) COMDAT functions referred by external vtables that
	we devirtualize only during final compilation stage.
	At this time we already decided that we will not output
	the function body and thus we can't reference the symbol
	directly.  */

static bool
can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
{
  varpool_node *vnode;
  struct cgraph_node *node;
  symtab_node *snode;

  /* Abstract decls (e.g. cloned-away originals) never have a body to
     reference.  */
  if (DECL_ABSTRACT_P (decl))
    return false;

  /* We are concerned only about static/external vars and functions.  */
  if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
      || (TREE_CODE (decl) != VAR_DECL && TREE_CODE (decl) != FUNCTION_DECL))
    return true;

  /* Static objects can be referred only if they were not optimized out yet.  */
  if (!TREE_PUBLIC (decl) && !DECL_EXTERNAL (decl))
    {
      /* Before we start optimizing unreachable code we can be sure all
	 static objects are defined.  */
      if (symtab->function_flags_ready)
	return true;
      snode = symtab_node::get (decl);
      if (!snode || !snode->definition)
	return false;
      node = dyn_cast <cgraph_node *> (snode);
      /* A function already inlined everywhere has no body to refer to.  */
      return !node || !node->global.inlined_to;
    }

  /* We will later output the initializer, so we can refer to it.
     So we are concerned only when DECL comes from initializer of
     external var or var that has been optimized out.  */
  if (!from_decl
      || TREE_CODE (from_decl) != VAR_DECL
      || (!DECL_EXTERNAL (from_decl)
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->definition)
      || (flag_ltrans
	  && (vnode = varpool_node::get (from_decl)) != NULL
	  && vnode->in_other_partition))
    return true;
  /* We are folding reference from external vtable.  The vtable may refer
     to a symbol keyed to other compilation unit.  The other compilation
     unit may be in separate DSO and the symbol may be hidden.  */
  if (DECL_VISIBILITY_SPECIFIED (decl)
      && DECL_EXTERNAL (decl)
      && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
      && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
    return false;
  /* When function is public, we always can introduce new reference.
     Exception are the COMDAT functions where introducing a direct
     reference imply need to include function body in the current unit.  */
  if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
    return true;
  /* We have COMDAT.  We are going to check if we still have definition
     or if the definition is going to be output in other partition.
     Bypass this when gimplifying; all needed functions will be produced.

     As observed in PR20991 for already optimized out comdat virtual functions
     it may be tempting to not necessarily give up because the copy will be
     output elsewhere when corresponding vtable is output.
     This is however not possible - ABI specify that COMDATs are output in
     units where they are used and when the other unit was compiled with LTO
     it is possible that vtable was kept public while the function itself
     was privatized.  */
  if (!symtab->function_flags_ready)
    return true;

  snode = symtab_node::get (decl);
  if (!snode
      || ((!snode->definition || DECL_EXTERNAL (decl))
	  && (!snode->in_other_partition
	      || (!snode->forced_by_abi && !snode->force_output))))
    return false;
  node = dyn_cast <cgraph_node *> (snode);
  return !node || !node->global.inlined_to;
}
174
/* CVAL is value taken from DECL_INITIAL of variable.  Try to transform it into
   acceptable form for is_gimple_min_invariant.
   FROM_DECL (if non-NULL) specifies variable whose constructor contains CVAL.
   Returns the canonicalized value, or NULL_TREE when the referenced base
   cannot legally be referenced from the current unit.  */

tree
canonicalize_constructor_val (tree cval, tree from_decl)
{
  tree orig_cval = cval;
  STRIP_NOPS (cval);
  /* Turn "&p + CST" into "&MEM[p + CST]" so the address is a single
     invariant ADDR_EXPR.  */
  if (TREE_CODE (cval) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
    {
      tree ptr = TREE_OPERAND (cval, 0);
      if (is_gimple_min_invariant (ptr))
	cval = build1_loc (EXPR_LOCATION (cval),
			   ADDR_EXPR, TREE_TYPE (ptr),
			   fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
					ptr,
					fold_convert (ptr_type_node,
						      TREE_OPERAND (cval, 1))));
    }
  if (TREE_CODE (cval) == ADDR_EXPR)
    {
      tree base = NULL_TREE;
      /* For compound literals, take the address of the underlying decl
	 and rewrite the operand in place.  */
      if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
	{
	  base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
	  if (base)
	    TREE_OPERAND (cval, 0) = base;
	}
      else
	base = get_base_address (TREE_OPERAND (cval, 0));
      if (!base)
	return NULL_TREE;

      /* Give up if the referenced decl is not usable from this unit
	 (see can_refer_decl_in_current_unit_p above).  */
      if ((TREE_CODE (base) == VAR_DECL
	   || TREE_CODE (base) == FUNCTION_DECL)
	  && !can_refer_decl_in_current_unit_p (base, from_decl))
	return NULL_TREE;
      /* Taking the address of a variable makes it addressable.  */
      if (TREE_CODE (base) == VAR_DECL)
	TREE_ADDRESSABLE (base) = 1;
      else if (TREE_CODE (base) == FUNCTION_DECL)
	{
	  /* Make sure we create a cgraph node for functions we'll reference.
	     They can be non-existent if the reference comes from an entry
	     of an external vtable for example.  */
	  cgraph_node::get_create (base);
	}
      /* Fixup types in global initializers.  */
      if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
	cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));

      /* Preserve the type the caller saw before STRIP_NOPS.  */
      if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
	cval = fold_convert (TREE_TYPE (orig_cval), cval);
      return cval;
    }
  if (TREE_OVERFLOW_P (cval))
    return drop_tree_overflow (cval);
  return orig_cval;
}
cbdd87d4
RG
235
236/* If SYM is a constant variable with known value, return the value.
237 NULL_TREE is returned otherwise. */
238
239tree
240get_symbol_constant_value (tree sym)
241{
6a6dac52
JH
242 tree val = ctor_for_folding (sym);
243 if (val != error_mark_node)
cbdd87d4 244 {
cbdd87d4
RG
245 if (val)
246 {
9d60be38 247 val = canonicalize_constructor_val (unshare_expr (val), sym);
1389294c 248 if (val && is_gimple_min_invariant (val))
17f39a39 249 return val;
1389294c
JH
250 else
251 return NULL_TREE;
cbdd87d4
RG
252 }
253 /* Variables declared 'const' without an initializer
254 have zero as the initializer if they may not be
255 overridden at link or run time. */
256 if (!val
b8a8c472 257 && is_gimple_reg_type (TREE_TYPE (sym)))
e8160c9a 258 return build_zero_cst (TREE_TYPE (sym));
cbdd87d4
RG
259 }
260
261 return NULL_TREE;
262}
263
264
cbdd87d4
RG
265
266/* Subroutine of fold_stmt. We perform several simplifications of the
267 memory reference tree EXPR and make sure to re-gimplify them properly
268 after propagation of constant addresses. IS_LHS is true if the
269 reference is supposed to be an lvalue. */
270
271static tree
272maybe_fold_reference (tree expr, bool is_lhs)
273{
17f39a39 274 tree result;
cbdd87d4 275
f0eddb90
RG
276 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
277 || TREE_CODE (expr) == REALPART_EXPR
278 || TREE_CODE (expr) == IMAGPART_EXPR)
279 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
280 return fold_unary_loc (EXPR_LOCATION (expr),
281 TREE_CODE (expr),
282 TREE_TYPE (expr),
283 TREE_OPERAND (expr, 0));
284 else if (TREE_CODE (expr) == BIT_FIELD_REF
285 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
286 return fold_ternary_loc (EXPR_LOCATION (expr),
287 TREE_CODE (expr),
288 TREE_TYPE (expr),
289 TREE_OPERAND (expr, 0),
290 TREE_OPERAND (expr, 1),
291 TREE_OPERAND (expr, 2));
292
f0eddb90
RG
293 if (!is_lhs
294 && (result = fold_const_aggregate_ref (expr))
295 && is_gimple_min_invariant (result))
296 return result;
cbdd87d4 297
cbdd87d4
RG
298 return NULL_TREE;
299}
300
301
/* Attempt to fold an assignment statement pointed-to by SI.  Returns a
   replacement rhs for the statement or NULL_TREE if no simplification
   could be made.  It is assumed that the operands have been previously
   folded.  */

static tree
fold_gimple_assign (gimple_stmt_iterator *si)
{
  gimple stmt = gsi_stmt (*si);
  enum tree_code subcode = gimple_assign_rhs_code (stmt);
  location_t loc = gimple_location (stmt);

  tree result = NULL_TREE;

  /* Dispatch on the shape of the right-hand side.  */
  switch (get_gimple_rhs_class (subcode))
    {
    case GIMPLE_SINGLE_RHS:
      {
	tree rhs = gimple_assign_rhs1 (stmt);

	/* Clobbers carry no value to fold.  */
	if (TREE_CLOBBER_P (rhs))
	  return NULL_TREE;

	if (REFERENCE_CLASS_P (rhs))
	  return maybe_fold_reference (rhs, false);

	else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
	  {
	    tree val = OBJ_TYPE_REF_EXPR (rhs);
	    if (is_gimple_min_invariant (val))
	      return val;
	    /* Try to devirtualize: if the polymorphic call target set is
	       final and has at most one entry, resolve the address.  */
	    else if (flag_devirtualize && virtual_method_call_p (rhs))
	      {
		bool final;
		vec <cgraph_node *>targets
		  = possible_polymorphic_call_targets (rhs, stmt, &final);
		if (final && targets.length () <= 1 && dbg_cnt (devirt))
		  {
		    if (dump_enabled_p ())
		      {
			location_t loc = gimple_location_safe (stmt);
			dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
					 "resolving virtual function address "
					 "reference to function %s\n",
					 targets.length () == 1
					 ? targets[0]->name ()
					 : "NULL");
		      }
		    if (targets.length () == 1)
		      {
			val = fold_convert (TREE_TYPE (val),
					    build_fold_addr_expr_loc
					      (loc, targets[0]->decl));
			STRIP_USELESS_TYPE_CONVERSION (val);
		      }
		    else
		      /* We can not use __builtin_unreachable here because it
			 can not have address taken.  */
		      val = build_int_cst (TREE_TYPE (val), 0);
		    return val;
		  }
	      }

	  }
	else if (TREE_CODE (rhs) == ADDR_EXPR)
	  {
	    /* Fold the address of a folded reference; &MEM[p, 0] becomes
	       just p (modulo type conversion).  */
	    tree ref = TREE_OPERAND (rhs, 0);
	    tree tem = maybe_fold_reference (ref, true);
	    if (tem
		&& TREE_CODE (tem) == MEM_REF
		&& integer_zerop (TREE_OPERAND (tem, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
	    else if (tem)
	      result = fold_convert (TREE_TYPE (rhs),
				     build_fold_addr_expr_loc (loc, tem));
	    else if (TREE_CODE (ref) == MEM_REF
		     && integer_zerop (TREE_OPERAND (ref, 1)))
	      result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
	  }

	else if (TREE_CODE (rhs) == CONSTRUCTOR
		 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
		 && (CONSTRUCTOR_NELTS (rhs)
		     == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
	  {
	    /* Fold a constant vector CONSTRUCTOR to VECTOR_CST.  */
	    unsigned i;
	    tree val;

	    FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
	      if (TREE_CODE (val) != INTEGER_CST
		  && TREE_CODE (val) != REAL_CST
		  && TREE_CODE (val) != FIXED_CST)
		return NULL_TREE;

	    return build_vector_from_ctor (TREE_TYPE (rhs),
					   CONSTRUCTOR_ELTS (rhs));
	  }

	else if (DECL_P (rhs))
	  return get_symbol_constant_value (rhs);

	/* If we couldn't fold the RHS, hand over to the generic
	   fold routines.  */
	if (result == NULL_TREE)
	  result = fold (rhs);

	/* Strip away useless type conversions.  Both the NON_LVALUE_EXPR
	   that may have been added by fold, and "useless" type
	   conversions that might now be apparent due to propagation.  */
	STRIP_USELESS_TYPE_CONVERSION (result);

	if (result != rhs && valid_gimple_rhs_p (result))
	  return result;

	return NULL_TREE;
      }
      break;

    case GIMPLE_UNARY_RHS:
      break;

    case GIMPLE_BINARY_RHS:
      /* Try to canonicalize for boolean-typed X the comparisons
	 X == 0, X == 1, X != 0, and X != 1.  */
      if (gimple_assign_rhs_code (stmt) == EQ_EXPR
	  || gimple_assign_rhs_code (stmt) == NE_EXPR)
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  tree op1 = gimple_assign_rhs1 (stmt);
	  tree op2 = gimple_assign_rhs2 (stmt);
	  tree type = TREE_TYPE (op1);

	  /* Check whether the comparison operands are of the same boolean
	     type as the result type is.
	     Check that second operand is an integer-constant with value
	     one or zero.  */
	  if (TREE_CODE (op2) == INTEGER_CST
	      && (integer_zerop (op2) || integer_onep (op2))
	      && useless_type_conversion_p (TREE_TYPE (lhs), type))
	    {
	      enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
	      bool is_logical_not = false;

	      /* X == 0 and X != 1 is a logical-not of X
		 X == 1 and X != 0 is X  */
	      if ((cmp_code == EQ_EXPR && integer_zerop (op2))
		  || (cmp_code == NE_EXPR && integer_onep (op2)))
		is_logical_not = true;

	      if (is_logical_not == false)
		result = op1;
	      /* Only for one-bit precision typed X the transformation
		 !X -> ~X is valid.  */
	      else if (TYPE_PRECISION (type) == 1)
		result = build1_loc (gimple_location (stmt), BIT_NOT_EXPR,
				     type, op1);
	      /* Otherwise we use !X -> X ^ 1.  */
	      else
		result = build2_loc (gimple_location (stmt), BIT_XOR_EXPR,
				     type, op1, build_int_cst (type, 1));

	    }
	}

      if (!result)
	result = fold_binary_loc (loc, subcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  gimple_assign_rhs1 (stmt),
				  gimple_assign_rhs2 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_TERNARY_RHS:
      /* Try to fold a conditional expression.  */
      if (gimple_assign_rhs_code (stmt) == COND_EXPR)
	{
	  tree op0 = gimple_assign_rhs1 (stmt);
	  tree tem;
	  bool set = false;
	  location_t cond_loc = gimple_location (stmt);

	  if (COMPARISON_CLASS_P (op0))
	    {
	      /* Defer overflow warnings until we know the fold is used.  */
	      fold_defer_overflow_warnings ();
	      tem = fold_binary_loc (cond_loc,
				     TREE_CODE (op0), TREE_TYPE (op0),
				     TREE_OPERAND (op0, 0),
				     TREE_OPERAND (op0, 1));
	      /* This is actually a conditional expression, not a GIMPLE
		 conditional statement, however, the valid_gimple_rhs_p
		 test still applies.  */
	      set = (tem && is_gimple_condexpr (tem)
		     && valid_gimple_rhs_p (tem));
	      fold_undefer_overflow_warnings (set, stmt, 0);
	    }
	  else if (is_gimple_min_invariant (op0))
	    {
	      tem = op0;
	      set = true;
	    }
	  else
	    return NULL_TREE;

	  if (set)
	    result = fold_build3_loc (cond_loc, COND_EXPR,
				      TREE_TYPE (gimple_assign_lhs (stmt)), tem,
				      gimple_assign_rhs2 (stmt),
				      gimple_assign_rhs3 (stmt));
	}

      if (!result)
	result = fold_ternary_loc (loc, subcode,
				   TREE_TYPE (gimple_assign_lhs (stmt)),
				   gimple_assign_rhs1 (stmt),
				   gimple_assign_rhs2 (stmt),
				   gimple_assign_rhs3 (stmt));

      if (result)
	{
	  STRIP_USELESS_TYPE_CONVERSION (result);
	  if (valid_gimple_rhs_p (result))
	    return result;
	}
      break;

    case GIMPLE_INVALID_RHS:
      gcc_unreachable ();
    }

  return NULL_TREE;
}
540
541/* Attempt to fold a conditional statement. Return true if any changes were
542 made. We only attempt to fold the condition expression, and do not perform
543 any transformation that would require alteration of the cfg. It is
544 assumed that the operands have been previously folded. */
545
546static bool
538dd0b7 547fold_gimple_cond (gcond *stmt)
cbdd87d4
RG
548{
549 tree result = fold_binary_loc (gimple_location (stmt),
550 gimple_cond_code (stmt),
551 boolean_type_node,
552 gimple_cond_lhs (stmt),
553 gimple_cond_rhs (stmt));
554
555 if (result)
556 {
557 STRIP_USELESS_TYPE_CONVERSION (result);
558 if (is_gimple_condexpr (result) && valid_gimple_rhs_p (result))
559 {
560 gimple_cond_set_condition_from_tree (stmt, result);
561 return true;
562 }
563 }
564
565 return false;
566}
567
fef5a0d9
RB
568
/* Replace a statement at *SI_P with a sequence of statements in STMTS,
   adjusting the replacement stmts location and virtual operands.
   If the statement has a lhs the last stmt in the sequence is expected
   to assign to that lhs.  */

static void
gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
{
  gimple stmt = gsi_stmt (*si_p);

  if (gimple_has_location (stmt))
    annotate_all_with_location (stmts, gimple_location (stmt));

  /* First iterate over the replacement statements backward, assigning
     virtual operands to their defining statements.  The last store in
     the sequence inherits the replaced statement's VDEF; earlier stores
     get fresh SSA names.  */
  gimple laststore = NULL;
  for (gimple_stmt_iterator i = gsi_last (stmts);
       !gsi_end_p (i); gsi_prev (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* A statement needs a VDEF when it is a non-register store or a
	 call that can clobber memory.  */
      if ((gimple_assign_single_p (new_stmt)
	   && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
	  || (is_gimple_call (new_stmt)
	      && (gimple_call_flags (new_stmt)
		  & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
	{
	  tree vdef;
	  if (!laststore)
	    vdef = gimple_vdef (stmt);
	  else
	    vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
	  gimple_set_vdef (new_stmt, vdef);
	  if (vdef && TREE_CODE (vdef) == SSA_NAME)
	    SSA_NAME_DEF_STMT (vdef) = new_stmt;
	  laststore = new_stmt;
	}
    }

  /* Second iterate over the statements forward, assigning virtual
     operands to their uses.  */
  tree reaching_vuse = gimple_vuse (stmt);
  for (gimple_stmt_iterator i = gsi_start (stmts);
       !gsi_end_p (i); gsi_next (&i))
    {
      gimple new_stmt = gsi_stmt (i);
      /* If the new statement possibly has a VUSE, update it with exact SSA
	 name we know will reach this one.  */
      if (gimple_has_mem_ops (new_stmt))
	gimple_set_vuse (new_stmt, reaching_vuse);
      gimple_set_modified (new_stmt, true);
      if (gimple_vdef (new_stmt))
	reaching_vuse = gimple_vdef (new_stmt);
    }

  /* If the new sequence does not do a store release the virtual
     definition of the original statement.  */
  if (reaching_vuse
      && reaching_vuse == gimple_vuse (stmt))
    {
      tree vdef = gimple_vdef (stmt);
      if (vdef
	  && TREE_CODE (vdef) == SSA_NAME)
	{
	  unlink_stmt_vdef (stmt);
	  release_ssa_name (vdef);
	}
    }

  /* Finally replace the original statement with the sequence.  */
  gsi_replace_with_seq (si_p, stmts, false);
}
640
cbdd87d4
RG
/* Convert EXPR into a GIMPLE value suitable for substitution on the
   RHS of an assignment.  Insert the necessary statements before
   iterator *SI_P.  The statement at *SI_P, which must be a GIMPLE_CALL
   is replaced.  If the call is expected to produces a result, then it
   is replaced by an assignment of the new RHS to the result variable.
   If the result is to be ignored, then the call is replaced by a
   GIMPLE_NOP.  A proper VDEF chain is retained by making the first
   VUSE and the last VDEF of the whole sequence be the same as the replaced
   statement and using new SSA names for stores in between.  */

void
gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
{
  tree lhs;
  gimple stmt, new_stmt;
  gimple_stmt_iterator i;
  gimple_seq stmts = NULL;

  stmt = gsi_stmt (*si_p);

  gcc_assert (is_gimple_call (stmt));

  push_gimplify_context (gimple_in_ssa_p (cfun));

  lhs = gimple_call_lhs (stmt);
  if (lhs == NULL_TREE)
    {
      /* No result is used: gimplify EXPR purely for its side effects.  */
      gimplify_and_add (expr, &stmts);
      /* We can end up with folding a memcpy of an empty class assignment
	 which gets optimized away by C++ gimplification.  */
      if (gimple_seq_empty_p (stmts))
	{
	  /* Nothing to insert; drop the call entirely, releasing its
	     virtual defs when in SSA form.  */
	  pop_gimplify_context (NULL);
	  if (gimple_in_ssa_p (cfun))
	    {
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	    }
	  gsi_replace (si_p, gimple_build_nop (), true);
	  return;
	}
    }
  else
    {
      /* Gimplify EXPR into a temporary and assign that to the call's
	 original LHS as the last statement of the sequence.  */
      tree tmp = get_initialized_tmp_var (expr, &stmts, NULL);
      new_stmt = gimple_build_assign (lhs, tmp);
      i = gsi_last (stmts);
      gsi_insert_after_without_update (&i, new_stmt,
				       GSI_CONTINUE_LINKING);
    }

  pop_gimplify_context (NULL);

  /* Splice the sequence in, rewiring virtual operands.  */
  gsi_replace_with_seq_vops (si_p, stmts);
}
cbdd87d4 696
fef5a0d9
RB
697
698/* Replace the call at *GSI with the gimple value VAL. */
699
700static void
701replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
702{
703 gimple stmt = gsi_stmt (*gsi);
704 tree lhs = gimple_call_lhs (stmt);
705 gimple repl;
706 if (lhs)
e256dfce 707 {
fef5a0d9
RB
708 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
709 val = fold_convert (TREE_TYPE (lhs), val);
710 repl = gimple_build_assign (lhs, val);
711 }
712 else
713 repl = gimple_build_nop ();
714 tree vdef = gimple_vdef (stmt);
715 if (vdef && TREE_CODE (vdef) == SSA_NAME)
716 {
717 unlink_stmt_vdef (stmt);
718 release_ssa_name (vdef);
719 }
720 gsi_replace (gsi, repl, true);
721}
722
/* Replace the call at *GSI with the new call REPL and fold that
   again.  */

static void
replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple repl)
{
  gimple stmt = gsi_stmt (*gsi);
  /* The replacement inherits the original call's LHS and location.  */
  gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
  gimple_set_location (repl, gimple_location (stmt));
  /* Transfer the virtual operands so the VDEF chain stays intact.  */
  if (gimple_vdef (stmt)
      && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
    {
      gimple_set_vdef (repl, gimple_vdef (stmt));
      gimple_set_vuse (repl, gimple_vuse (stmt));
      SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
    }
  gsi_replace (gsi, repl, true);
  /* Give folding another chance on the replacement call.  */
  fold_stmt (gsi);
}
742
743/* Return true if VAR is a VAR_DECL or a component thereof. */
744
745static bool
746var_decl_component_p (tree var)
747{
748 tree inner = var;
749 while (handled_component_p (inner))
750 inner = TREE_OPERAND (inner, 0);
751 return SSA_VAR_P (inner);
752}
753
754/* Fold function call to builtin mem{{,p}cpy,move}. Return
755 NULL_TREE if no simplification can be made.
756 If ENDP is 0, return DEST (like memcpy).
757 If ENDP is 1, return DEST+LEN (like mempcpy).
758 If ENDP is 2, return DEST+LEN-1 (like stpcpy).
759 If ENDP is 3, return DEST, additionally *SRC and *DEST may overlap
760 (memmove). */
761
762static bool
763gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
764 tree dest, tree src, int endp)
765{
766 gimple stmt = gsi_stmt (*gsi);
767 tree lhs = gimple_call_lhs (stmt);
768 tree len = gimple_call_arg (stmt, 2);
769 tree destvar, srcvar;
770 location_t loc = gimple_location (stmt);
771
772 /* If the LEN parameter is zero, return DEST. */
773 if (integer_zerop (len))
774 {
775 gimple repl;
776 if (gimple_call_lhs (stmt))
777 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
778 else
779 repl = gimple_build_nop ();
780 tree vdef = gimple_vdef (stmt);
781 if (vdef && TREE_CODE (vdef) == SSA_NAME)
e256dfce 782 {
fef5a0d9
RB
783 unlink_stmt_vdef (stmt);
784 release_ssa_name (vdef);
785 }
786 gsi_replace (gsi, repl, true);
787 return true;
788 }
789
790 /* If SRC and DEST are the same (and not volatile), return
791 DEST{,+LEN,+LEN-1}. */
792 if (operand_equal_p (src, dest, 0))
793 {
794 unlink_stmt_vdef (stmt);
795 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
796 release_ssa_name (gimple_vdef (stmt));
797 if (!lhs)
798 {
799 gsi_replace (gsi, gimple_build_nop (), true);
800 return true;
801 }
802 goto done;
803 }
804 else
805 {
806 tree srctype, desttype;
807 unsigned int src_align, dest_align;
808 tree off0;
809
810 /* Build accesses at offset zero with a ref-all character type. */
811 off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
812 ptr_mode, true), 0);
813
814 /* If we can perform the copy efficiently with first doing all loads
815 and then all stores inline it that way. Currently efficiently
816 means that we can load all the memory into a single integer
817 register which is what MOVE_MAX gives us. */
818 src_align = get_pointer_alignment (src);
819 dest_align = get_pointer_alignment (dest);
820 if (tree_fits_uhwi_p (len)
821 && compare_tree_int (len, MOVE_MAX) <= 0
822 /* ??? Don't transform copies from strings with known length this
823 confuses the tree-ssa-strlen.c. This doesn't handle
824 the case in gcc.dg/strlenopt-8.c which is XFAILed for that
825 reason. */
826 && !c_strlen (src, 2))
827 {
828 unsigned ilen = tree_to_uhwi (len);
829 if (exact_log2 (ilen) != -1)
830 {
831 tree type = lang_hooks.types.type_for_size (ilen * 8, 1);
832 if (type
833 && TYPE_MODE (type) != BLKmode
834 && (GET_MODE_SIZE (TYPE_MODE (type)) * BITS_PER_UNIT
835 == ilen * 8)
836 /* If the destination pointer is not aligned we must be able
837 to emit an unaligned store. */
838 && (dest_align >= GET_MODE_ALIGNMENT (TYPE_MODE (type))
839 || !SLOW_UNALIGNED_ACCESS (TYPE_MODE (type), dest_align)))
840 {
841 tree srctype = type;
842 tree desttype = type;
843 if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
844 srctype = build_aligned_type (type, src_align);
845 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
846 tree tem = fold_const_aggregate_ref (srcmem);
847 if (tem)
848 srcmem = tem;
849 else if (src_align < GET_MODE_ALIGNMENT (TYPE_MODE (type))
850 && SLOW_UNALIGNED_ACCESS (TYPE_MODE (type),
851 src_align))
852 srcmem = NULL_TREE;
853 if (srcmem)
854 {
855 gimple new_stmt;
856 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
857 {
858 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
859 if (gimple_in_ssa_p (cfun))
860 srcmem = make_ssa_name (TREE_TYPE (srcmem),
861 new_stmt);
862 else
b731b390 863 srcmem = create_tmp_reg (TREE_TYPE (srcmem));
fef5a0d9
RB
864 gimple_assign_set_lhs (new_stmt, srcmem);
865 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
866 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
867 }
868 if (dest_align < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
869 desttype = build_aligned_type (type, dest_align);
870 new_stmt
871 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
872 dest, off0),
873 srcmem);
874 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
875 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
876 if (gimple_vdef (new_stmt)
877 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
878 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
879 if (!lhs)
880 {
881 gsi_replace (gsi, new_stmt, true);
882 return true;
883 }
884 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
885 goto done;
886 }
887 }
888 }
889 }
890
891 if (endp == 3)
892 {
893 /* Both DEST and SRC must be pointer types.
894 ??? This is what old code did. Is the testing for pointer types
895 really mandatory?
896
897 If either SRC is readonly or length is 1, we can use memcpy. */
898 if (!dest_align || !src_align)
899 return false;
900 if (readonly_data_expr (src)
901 || (tree_fits_uhwi_p (len)
902 && (MIN (src_align, dest_align) / BITS_PER_UNIT
903 >= tree_to_uhwi (len))))
904 {
905 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
906 if (!fn)
907 return false;
908 gimple_call_set_fndecl (stmt, fn);
909 gimple_call_set_arg (stmt, 0, dest);
910 gimple_call_set_arg (stmt, 1, src);
911 fold_stmt (gsi);
912 return true;
913 }
914
915 /* If *src and *dest can't overlap, optimize into memcpy as well. */
916 if (TREE_CODE (src) == ADDR_EXPR
917 && TREE_CODE (dest) == ADDR_EXPR)
918 {
919 tree src_base, dest_base, fn;
920 HOST_WIDE_INT src_offset = 0, dest_offset = 0;
921 HOST_WIDE_INT size = -1;
922 HOST_WIDE_INT maxsize = -1;
923
924 srcvar = TREE_OPERAND (src, 0);
925 src_base = get_ref_base_and_extent (srcvar, &src_offset,
926 &size, &maxsize);
927 destvar = TREE_OPERAND (dest, 0);
928 dest_base = get_ref_base_and_extent (destvar, &dest_offset,
929 &size, &maxsize);
930 if (tree_fits_uhwi_p (len))
931 maxsize = tree_to_uhwi (len);
932 else
933 maxsize = -1;
934 src_offset /= BITS_PER_UNIT;
935 dest_offset /= BITS_PER_UNIT;
936 if (SSA_VAR_P (src_base)
937 && SSA_VAR_P (dest_base))
938 {
939 if (operand_equal_p (src_base, dest_base, 0)
940 && ranges_overlap_p (src_offset, maxsize,
941 dest_offset, maxsize))
942 return false;
943 }
944 else if (TREE_CODE (src_base) == MEM_REF
945 && TREE_CODE (dest_base) == MEM_REF)
946 {
947 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
948 TREE_OPERAND (dest_base, 0), 0))
949 return false;
950 offset_int off = mem_ref_offset (src_base) + src_offset;
951 if (!wi::fits_shwi_p (off))
952 return false;
953 src_offset = off.to_shwi ();
954
955 off = mem_ref_offset (dest_base) + dest_offset;
956 if (!wi::fits_shwi_p (off))
957 return false;
958 dest_offset = off.to_shwi ();
959 if (ranges_overlap_p (src_offset, maxsize,
960 dest_offset, maxsize))
961 return false;
962 }
963 else
964 return false;
965
966 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
967 if (!fn)
968 return false;
969 gimple_call_set_fndecl (stmt, fn);
970 gimple_call_set_arg (stmt, 0, dest);
971 gimple_call_set_arg (stmt, 1, src);
972 fold_stmt (gsi);
973 return true;
974 }
975
976 /* If the destination and source do not alias optimize into
977 memcpy as well. */
978 if ((is_gimple_min_invariant (dest)
979 || TREE_CODE (dest) == SSA_NAME)
980 && (is_gimple_min_invariant (src)
981 || TREE_CODE (src) == SSA_NAME))
982 {
983 ao_ref destr, srcr;
984 ao_ref_init_from_ptr_and_size (&destr, dest, len);
985 ao_ref_init_from_ptr_and_size (&srcr, src, len);
986 if (!refs_may_alias_p_1 (&destr, &srcr, false))
987 {
988 tree fn;
989 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
990 if (!fn)
991 return false;
992 gimple_call_set_fndecl (stmt, fn);
993 gimple_call_set_arg (stmt, 0, dest);
994 gimple_call_set_arg (stmt, 1, src);
995 fold_stmt (gsi);
996 return true;
997 }
998 }
999
1000 return false;
1001 }
1002
1003 if (!tree_fits_shwi_p (len))
1004 return false;
1005 /* FIXME:
1006 This logic lose for arguments like (type *)malloc (sizeof (type)),
1007 since we strip the casts of up to VOID return value from malloc.
1008 Perhaps we ought to inherit type from non-VOID argument here? */
1009 STRIP_NOPS (src);
1010 STRIP_NOPS (dest);
1011 if (!POINTER_TYPE_P (TREE_TYPE (src))
1012 || !POINTER_TYPE_P (TREE_TYPE (dest)))
1013 return false;
1014 /* In the following try to find a type that is most natural to be
1015 used for the memcpy source and destination and that allows
1016 the most optimization when memcpy is turned into a plain assignment
1017 using that type. In theory we could always use a char[len] type
1018 but that only gains us that the destination and source possibly
1019 no longer will have their address taken. */
1020 /* As we fold (void *)(p + CST) to (void *)p + CST undo this here. */
1021 if (TREE_CODE (src) == POINTER_PLUS_EXPR)
1022 {
1023 tree tem = TREE_OPERAND (src, 0);
1024 STRIP_NOPS (tem);
1025 if (tem != TREE_OPERAND (src, 0))
1026 src = build1 (NOP_EXPR, TREE_TYPE (tem), src);
1027 }
1028 if (TREE_CODE (dest) == POINTER_PLUS_EXPR)
1029 {
1030 tree tem = TREE_OPERAND (dest, 0);
1031 STRIP_NOPS (tem);
1032 if (tem != TREE_OPERAND (dest, 0))
1033 dest = build1 (NOP_EXPR, TREE_TYPE (tem), dest);
1034 }
1035 srctype = TREE_TYPE (TREE_TYPE (src));
1036 if (TREE_CODE (srctype) == ARRAY_TYPE
1037 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1038 {
1039 srctype = TREE_TYPE (srctype);
1040 STRIP_NOPS (src);
1041 src = build1 (NOP_EXPR, build_pointer_type (srctype), src);
1042 }
1043 desttype = TREE_TYPE (TREE_TYPE (dest));
1044 if (TREE_CODE (desttype) == ARRAY_TYPE
1045 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1046 {
1047 desttype = TREE_TYPE (desttype);
1048 STRIP_NOPS (dest);
1049 dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
1050 }
1051 if (TREE_ADDRESSABLE (srctype)
1052 || TREE_ADDRESSABLE (desttype))
1053 return false;
1054
1055 /* Make sure we are not copying using a floating-point mode or
1056 a type whose size possibly does not match its precision. */
1057 if (FLOAT_MODE_P (TYPE_MODE (desttype))
1058 || TREE_CODE (desttype) == BOOLEAN_TYPE
1059 || TREE_CODE (desttype) == ENUMERAL_TYPE)
1060 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1061 if (FLOAT_MODE_P (TYPE_MODE (srctype))
1062 || TREE_CODE (srctype) == BOOLEAN_TYPE
1063 || TREE_CODE (srctype) == ENUMERAL_TYPE)
1064 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1065 if (!srctype)
1066 srctype = desttype;
1067 if (!desttype)
1068 desttype = srctype;
1069 if (!srctype)
1070 return false;
1071
1072 src_align = get_pointer_alignment (src);
1073 dest_align = get_pointer_alignment (dest);
1074 if (dest_align < TYPE_ALIGN (desttype)
1075 || src_align < TYPE_ALIGN (srctype))
1076 return false;
1077
1078 destvar = dest;
1079 STRIP_NOPS (destvar);
1080 if (TREE_CODE (destvar) == ADDR_EXPR
1081 && var_decl_component_p (TREE_OPERAND (destvar, 0))
1082 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1083 destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
1084 else
1085 destvar = NULL_TREE;
1086
1087 srcvar = src;
1088 STRIP_NOPS (srcvar);
1089 if (TREE_CODE (srcvar) == ADDR_EXPR
1090 && var_decl_component_p (TREE_OPERAND (srcvar, 0))
1091 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1092 {
1093 if (!destvar
1094 || src_align >= TYPE_ALIGN (desttype))
1095 srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
1096 srcvar, off0);
1097 else if (!STRICT_ALIGNMENT)
1098 {
1099 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1100 src_align);
1101 srcvar = fold_build2 (MEM_REF, srctype, srcvar, off0);
1102 }
e256dfce 1103 else
fef5a0d9
RB
1104 srcvar = NULL_TREE;
1105 }
1106 else
1107 srcvar = NULL_TREE;
1108
1109 if (srcvar == NULL_TREE && destvar == NULL_TREE)
1110 return false;
1111
1112 if (srcvar == NULL_TREE)
1113 {
1114 STRIP_NOPS (src);
1115 if (src_align >= TYPE_ALIGN (desttype))
1116 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1117 else
1118 {
1119 if (STRICT_ALIGNMENT)
1120 return false;
1121 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1122 src_align);
1123 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1124 }
1125 }
1126 else if (destvar == NULL_TREE)
1127 {
1128 STRIP_NOPS (dest);
1129 if (dest_align >= TYPE_ALIGN (srctype))
1130 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1131 else
1132 {
1133 if (STRICT_ALIGNMENT)
1134 return false;
1135 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1136 dest_align);
1137 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1138 }
1139 }
1140
1141 gimple new_stmt;
1142 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1143 {
1144 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1145 if (gimple_in_ssa_p (cfun))
1146 srcvar = make_ssa_name (TREE_TYPE (srcvar), new_stmt);
1147 else
b731b390 1148 srcvar = create_tmp_reg (TREE_TYPE (srcvar));
fef5a0d9
RB
1149 gimple_assign_set_lhs (new_stmt, srcvar);
1150 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1151 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1152 }
1153 new_stmt = gimple_build_assign (destvar, srcvar);
1154 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1155 gimple_set_vdef (new_stmt, gimple_vdef (stmt));
1156 if (gimple_vdef (new_stmt)
1157 && TREE_CODE (gimple_vdef (new_stmt)) == SSA_NAME)
1158 SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
1159 if (!lhs)
1160 {
1161 gsi_replace (gsi, new_stmt, true);
1162 return true;
1163 }
1164 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1165 }
1166
1167done:
1168 if (endp == 0 || endp == 3)
1169 len = NULL_TREE;
1170 else if (endp == 2)
1171 len = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (len), len,
1172 ssize_int (1));
1173 if (endp == 2 || endp == 1)
1174 dest = fold_build_pointer_plus_loc (loc, dest, len);
1175
1176 dest = force_gimple_operand_gsi (gsi, dest, false, NULL_TREE, true,
1177 GSI_SAME_STMT);
1178 gimple repl = gimple_build_assign (lhs, dest);
1179 gsi_replace (gsi, repl, true);
1180 return true;
1181}
1182
1183/* Fold function call to builtin memset or bzero at *GSI setting the
1184 memory of size LEN to VAL. Return whether a simplification was made. */
1185
1186static bool
1187gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1188{
1189 gimple stmt = gsi_stmt (*gsi);
1190 tree etype;
1191 unsigned HOST_WIDE_INT length, cval;
1192
1193 /* If the LEN parameter is zero, return DEST. */
1194 if (integer_zerop (len))
1195 {
1196 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1197 return true;
1198 }
1199
1200 if (! tree_fits_uhwi_p (len))
1201 return false;
1202
1203 if (TREE_CODE (c) != INTEGER_CST)
1204 return false;
1205
1206 tree dest = gimple_call_arg (stmt, 0);
1207 tree var = dest;
1208 if (TREE_CODE (var) != ADDR_EXPR)
1209 return false;
1210
1211 var = TREE_OPERAND (var, 0);
1212 if (TREE_THIS_VOLATILE (var))
1213 return false;
1214
1215 etype = TREE_TYPE (var);
1216 if (TREE_CODE (etype) == ARRAY_TYPE)
1217 etype = TREE_TYPE (etype);
1218
1219 if (!INTEGRAL_TYPE_P (etype)
1220 && !POINTER_TYPE_P (etype))
1221 return NULL_TREE;
1222
1223 if (! var_decl_component_p (var))
1224 return NULL_TREE;
1225
1226 length = tree_to_uhwi (len);
1227 if (GET_MODE_SIZE (TYPE_MODE (etype)) != length
1228 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1229 return NULL_TREE;
1230
1231 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1232 return NULL_TREE;
1233
1234 if (integer_zerop (c))
1235 cval = 0;
1236 else
1237 {
1238 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1239 return NULL_TREE;
1240
1241 cval = TREE_INT_CST_LOW (c);
1242 cval &= 0xff;
1243 cval |= cval << 8;
1244 cval |= cval << 16;
1245 cval |= (cval << 31) << 1;
1246 }
1247
1248 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1249 gimple store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1250 gimple_set_vuse (store, gimple_vuse (stmt));
1251 tree vdef = gimple_vdef (stmt);
1252 if (vdef && TREE_CODE (vdef) == SSA_NAME)
1253 {
1254 gimple_set_vdef (store, gimple_vdef (stmt));
1255 SSA_NAME_DEF_STMT (gimple_vdef (stmt)) = store;
1256 }
1257 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1258 if (gimple_call_lhs (stmt))
1259 {
1260 gimple asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1261 gsi_replace (gsi, asgn, true);
1262 }
1263 else
1264 {
1265 gimple_stmt_iterator gsi2 = *gsi;
1266 gsi_prev (gsi);
1267 gsi_remove (&gsi2, true);
1268 }
1269
1270 return true;
1271}
1272
1273
1274/* Return the string length, maximum string length or maximum value of
1275 ARG in LENGTH.
1276 If ARG is an SSA name variable, follow its use-def chains. If LENGTH
1277 is not NULL and, for TYPE == 0, its value is not equal to the length
1278 we determine or if we are unable to determine the length or value,
1279 return false. VISITED is a bitmap of visited variables.
1280 TYPE is 0 if string length should be returned, 1 for maximum string
1281 length and 2 for maximum value ARG can have. */
1282
1283static bool
dcb7fae2 1284get_maxval_strlen (tree arg, tree *length, bitmap *visited, int type)
fef5a0d9
RB
1285{
1286 tree var, val;
1287 gimple def_stmt;
1288
1289 if (TREE_CODE (arg) != SSA_NAME)
1290 {
1291 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1292 if (TREE_CODE (arg) == ADDR_EXPR
1293 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF
1294 && integer_zerop (TREE_OPERAND (TREE_OPERAND (arg, 0), 1)))
1295 {
1296 tree aop0 = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1297 if (TREE_CODE (aop0) == INDIRECT_REF
1298 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1299 return get_maxval_strlen (TREE_OPERAND (aop0, 0),
1300 length, visited, type);
1301 }
1302
1303 if (type == 2)
1304 {
1305 val = arg;
1306 if (TREE_CODE (val) != INTEGER_CST
1307 || tree_int_cst_sgn (val) < 0)
1308 return false;
1309 }
1310 else
1311 val = c_strlen (arg, 1);
1312 if (!val)
1313 return false;
1314
1315 if (*length)
1316 {
1317 if (type > 0)
1318 {
1319 if (TREE_CODE (*length) != INTEGER_CST
1320 || TREE_CODE (val) != INTEGER_CST)
1321 return false;
1322
1323 if (tree_int_cst_lt (*length, val))
1324 *length = val;
1325 return true;
1326 }
1327 else if (simple_cst_equal (val, *length) != 1)
1328 return false;
1329 }
1330
1331 *length = val;
1332 return true;
1333 }
1334
1335 /* If ARG is registered for SSA update we cannot look at its defining
1336 statement. */
1337 if (name_registered_for_update_p (arg))
1338 return false;
1339
1340 /* If we were already here, break the infinite cycle. */
dcb7fae2
RB
1341 if (!*visited)
1342 *visited = BITMAP_ALLOC (NULL);
1343 if (!bitmap_set_bit (*visited, SSA_NAME_VERSION (arg)))
fef5a0d9
RB
1344 return true;
1345
1346 var = arg;
1347 def_stmt = SSA_NAME_DEF_STMT (var);
1348
1349 switch (gimple_code (def_stmt))
1350 {
1351 case GIMPLE_ASSIGN:
1352 /* The RHS of the statement defining VAR must either have a
1353 constant length or come from another SSA_NAME with a constant
1354 length. */
1355 if (gimple_assign_single_p (def_stmt)
1356 || gimple_assign_unary_nop_p (def_stmt))
1357 {
1358 tree rhs = gimple_assign_rhs1 (def_stmt);
1359 return get_maxval_strlen (rhs, length, visited, type);
1360 }
1361 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1362 {
1363 tree op2 = gimple_assign_rhs2 (def_stmt);
1364 tree op3 = gimple_assign_rhs3 (def_stmt);
1365 return get_maxval_strlen (op2, length, visited, type)
1366 && get_maxval_strlen (op3, length, visited, type);
1367 }
1368 return false;
1369
1370 case GIMPLE_PHI:
1371 {
1372 /* All the arguments of the PHI node must have the same constant
1373 length. */
1374 unsigned i;
1375
1376 for (i = 0; i < gimple_phi_num_args (def_stmt); i++)
1377 {
1378 tree arg = gimple_phi_arg (def_stmt, i)->def;
1379
1380 /* If this PHI has itself as an argument, we cannot
1381 determine the string length of this argument. However,
1382 if we can find a constant string length for the other
1383 PHI args then we can still be sure that this is a
1384 constant string length. So be optimistic and just
1385 continue with the next argument. */
1386 if (arg == gimple_phi_result (def_stmt))
1387 continue;
1388
1389 if (!get_maxval_strlen (arg, length, visited, type))
1390 return false;
1391 }
1392 }
1393 return true;
1394
1395 default:
1396 return false;
1397 }
1398}
1399
dcb7fae2
RB
1400tree
1401get_maxval_strlen (tree arg, int type)
1402{
1403 bitmap visited = NULL;
1404 tree len = NULL_TREE;
1405 if (!get_maxval_strlen (arg, &len, &visited, type))
1406 len = NULL_TREE;
1407 if (visited)
1408 BITMAP_FREE (visited);
1409
1410 return len;
1411}
1412
fef5a0d9
RB
1413
1414/* Fold function call to builtin strcpy with arguments DEST and SRC.
1415 If LEN is not NULL, it represents the length of the string to be
1416 copied. Return NULL_TREE if no simplification can be made. */
1417
1418static bool
1419gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
dcb7fae2 1420 tree dest, tree src)
fef5a0d9 1421{
dcb7fae2 1422 location_t loc = gimple_location (gsi_stmt (*gsi));
fef5a0d9
RB
1423 tree fn;
1424
1425 /* If SRC and DEST are the same (and not volatile), return DEST. */
1426 if (operand_equal_p (src, dest, 0))
1427 {
1428 replace_call_with_value (gsi, dest);
1429 return true;
1430 }
1431
1432 if (optimize_function_for_size_p (cfun))
1433 return false;
1434
1435 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1436 if (!fn)
1437 return false;
1438
1579e1f8 1439 tree len = get_maxval_strlen (src, 0);
fef5a0d9 1440 if (!len)
dcb7fae2 1441 return false;
fef5a0d9
RB
1442
1443 len = fold_convert_loc (loc, size_type_node, len);
1444 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
1445 len = force_gimple_operand_gsi (gsi, len, true,
1446 NULL_TREE, true, GSI_SAME_STMT);
1447 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1448 replace_call_with_call_and_fold (gsi, repl);
1449 return true;
1450}
1451
1452/* Fold function call to builtin strncpy with arguments DEST, SRC, and LEN.
1453 If SLEN is not NULL, it represents the length of the source string.
1454 Return NULL_TREE if no simplification can be made. */
1455
1456static bool
dcb7fae2
RB
1457gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
1458 tree dest, tree src, tree len)
fef5a0d9 1459{
dcb7fae2 1460 location_t loc = gimple_location (gsi_stmt (*gsi));
fef5a0d9
RB
1461 tree fn;
1462
1463 /* If the LEN parameter is zero, return DEST. */
1464 if (integer_zerop (len))
1465 {
1466 replace_call_with_value (gsi, dest);
1467 return true;
1468 }
1469
1470 /* We can't compare slen with len as constants below if len is not a
1471 constant. */
dcb7fae2 1472 if (TREE_CODE (len) != INTEGER_CST)
fef5a0d9
RB
1473 return false;
1474
fef5a0d9 1475 /* Now, we must be passed a constant src ptr parameter. */
1579e1f8 1476 tree slen = get_maxval_strlen (src, 0);
dcb7fae2 1477 if (!slen || TREE_CODE (slen) != INTEGER_CST)
fef5a0d9
RB
1478 return false;
1479
1480 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
1481
1482 /* We do not support simplification of this case, though we do
1483 support it when expanding trees into RTL. */
1484 /* FIXME: generate a call to __builtin_memset. */
1485 if (tree_int_cst_lt (slen, len))
1486 return false;
1487
1488 /* OK transform into builtin memcpy. */
1489 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1490 if (!fn)
1491 return false;
1492
1493 len = fold_convert_loc (loc, size_type_node, len);
1494 len = force_gimple_operand_gsi (gsi, len, true,
1495 NULL_TREE, true, GSI_SAME_STMT);
1496 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1497 replace_call_with_call_and_fold (gsi, repl);
1498 return true;
1499}
1500
1501/* Simplify a call to the strcat builtin. DST and SRC are the arguments
1502 to the call.
1503
1504 Return NULL_TREE if no simplification was possible, otherwise return the
1505 simplified form of the call as a tree.
1506
1507 The simplified form may be a constant or other expression which
1508 computes the same value, but in a more efficient manner (including
1509 calls to other builtin functions).
1510
1511 The call may contain arguments which need to be evaluated, but
1512 which are not useful to determine the result of the call. In
1513 this case we return a chain of COMPOUND_EXPRs. The LHS of each
1514 COMPOUND_EXPR will be an argument which must be evaluated.
1515 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
1516 COMPOUND_EXPR in the chain will contain the tree for the simplified
1517 form of the builtin function call. */
1518
1519static bool
dcb7fae2 1520gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
fef5a0d9
RB
1521{
1522 gimple stmt = gsi_stmt (*gsi);
dcb7fae2 1523 location_t loc = gimple_location (stmt);
fef5a0d9
RB
1524
1525 const char *p = c_getstr (src);
1526
1527 /* If the string length is zero, return the dst parameter. */
1528 if (p && *p == '\0')
1529 {
1530 replace_call_with_value (gsi, dst);
1531 return true;
1532 }
1533
1534 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
1535 return false;
1536
1537 /* See if we can store by pieces into (dst + strlen(dst)). */
1538 tree newdst;
1539 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
1540 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1541
1542 if (!strlen_fn || !memcpy_fn)
1543 return false;
1544
1545 /* If the length of the source string isn't computable don't
1546 split strcat into strlen and memcpy. */
dcb7fae2 1547 tree len = get_maxval_strlen (src, 0);
fef5a0d9 1548 if (! len)
fef5a0d9
RB
1549 return false;
1550
1551 /* Create strlen (dst). */
1552 gimple_seq stmts = NULL, stmts2;
1553 gimple repl = gimple_build_call (strlen_fn, 1, dst);
1554 gimple_set_location (repl, loc);
1555 if (gimple_in_ssa_p (cfun))
b731b390 1556 newdst = make_ssa_name (size_type_node);
fef5a0d9 1557 else
b731b390 1558 newdst = create_tmp_reg (size_type_node);
fef5a0d9
RB
1559 gimple_call_set_lhs (repl, newdst);
1560 gimple_seq_add_stmt_without_update (&stmts, repl);
1561
1562 /* Create (dst p+ strlen (dst)). */
1563 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
1564 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
1565 gimple_seq_add_seq_without_update (&stmts, stmts2);
1566
1567 len = fold_convert_loc (loc, size_type_node, len);
1568 len = size_binop_loc (loc, PLUS_EXPR, len,
1569 build_int_cst (size_type_node, 1));
1570 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
1571 gimple_seq_add_seq_without_update (&stmts, stmts2);
1572
1573 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
1574 gimple_seq_add_stmt_without_update (&stmts, repl);
1575 if (gimple_call_lhs (stmt))
1576 {
1577 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
1578 gimple_seq_add_stmt_without_update (&stmts, repl);
1579 gsi_replace_with_seq_vops (gsi, stmts);
1580 /* gsi now points at the assignment to the lhs, get a
1581 stmt iterator to the memcpy call.
1582 ??? We can't use gsi_for_stmt as that doesn't work when the
1583 CFG isn't built yet. */
1584 gimple_stmt_iterator gsi2 = *gsi;
1585 gsi_prev (&gsi2);
1586 fold_stmt (&gsi2);
1587 }
1588 else
1589 {
1590 gsi_replace_with_seq_vops (gsi, stmts);
1591 fold_stmt (gsi);
1592 }
1593 return true;
1594}
1595
07f1cf56
RB
1596/* Fold a call to the __strcat_chk builtin FNDECL. DEST, SRC, and SIZE
1597 are the arguments to the call. */
1598
1599static bool
1600gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
1601{
1602 gimple stmt = gsi_stmt (*gsi);
1603 tree dest = gimple_call_arg (stmt, 0);
1604 tree src = gimple_call_arg (stmt, 1);
1605 tree size = gimple_call_arg (stmt, 2);
1606 tree fn;
1607 const char *p;
1608
1609
1610 p = c_getstr (src);
1611 /* If the SRC parameter is "", return DEST. */
1612 if (p && *p == '\0')
1613 {
1614 replace_call_with_value (gsi, dest);
1615 return true;
1616 }
1617
1618 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
1619 return false;
1620
1621 /* If __builtin_strcat_chk is used, assume strcat is available. */
1622 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
1623 if (!fn)
1624 return false;
1625
1626 gimple repl = gimple_build_call (fn, 2, dest, src);
1627 replace_call_with_call_and_fold (gsi, repl);
1628 return true;
1629}
1630
ad03a744
RB
1631/* Simplify a call to the strncat builtin. */
1632
1633static bool
1634gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
1635{
1636 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
1637 tree dst = gimple_call_arg (stmt, 0);
1638 tree src = gimple_call_arg (stmt, 1);
1639 tree len = gimple_call_arg (stmt, 2);
1640
1641 const char *p = c_getstr (src);
1642
1643 /* If the requested length is zero, or the src parameter string
1644 length is zero, return the dst parameter. */
1645 if (integer_zerop (len) || (p && *p == '\0'))
1646 {
1647 replace_call_with_value (gsi, dst);
1648 return true;
1649 }
1650
1651 /* If the requested len is greater than or equal to the string
1652 length, call strcat. */
1653 if (TREE_CODE (len) == INTEGER_CST && p
1654 && compare_tree_int (len, strlen (p)) >= 0)
1655 {
1656 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
1657
1658 /* If the replacement _DECL isn't initialized, don't do the
1659 transformation. */
1660 if (!fn)
1661 return false;
1662
1663 gcall *repl = gimple_build_call (fn, 2, dst, src);
1664 replace_call_with_call_and_fold (gsi, repl);
1665 return true;
1666 }
1667
1668 return false;
1669}
1670
745583f9
RB
1671/* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
1672 LEN, and SIZE. */
1673
1674static bool
1675gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
1676{
1677 gimple stmt = gsi_stmt (*gsi);
1678 tree dest = gimple_call_arg (stmt, 0);
1679 tree src = gimple_call_arg (stmt, 1);
1680 tree len = gimple_call_arg (stmt, 2);
1681 tree size = gimple_call_arg (stmt, 3);
1682 tree fn;
1683 const char *p;
1684
1685 p = c_getstr (src);
1686 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
1687 if ((p && *p == '\0')
1688 || integer_zerop (len))
1689 {
1690 replace_call_with_value (gsi, dest);
1691 return true;
1692 }
1693
1694 if (! tree_fits_uhwi_p (size))
1695 return false;
1696
1697 if (! integer_all_onesp (size))
1698 {
1699 tree src_len = c_strlen (src, 1);
1700 if (src_len
1701 && tree_fits_uhwi_p (src_len)
1702 && tree_fits_uhwi_p (len)
1703 && ! tree_int_cst_lt (len, src_len))
1704 {
1705 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
1706 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
1707 if (!fn)
1708 return false;
1709
1710 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1711 replace_call_with_call_and_fold (gsi, repl);
1712 return true;
1713 }
1714 return false;
1715 }
1716
1717 /* If __builtin_strncat_chk is used, assume strncat is available. */
1718 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
1719 if (!fn)
1720 return false;
1721
1722 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1723 replace_call_with_call_and_fold (gsi, repl);
1724 return true;
1725}
1726
fef5a0d9
RB
1727/* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
1728 to the call. IGNORE is true if the value returned
1729 by the builtin will be ignored. UNLOCKED is true is true if this
1730 actually a call to fputs_unlocked. If LEN in non-NULL, it represents
1731 the known length of the string. Return NULL_TREE if no simplification
1732 was possible. */
1733
1734static bool
1735gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
fef5a0d9 1736 tree arg0, tree arg1,
dcb7fae2 1737 bool unlocked)
fef5a0d9 1738{
dcb7fae2
RB
1739 gimple stmt = gsi_stmt (*gsi);
1740
fef5a0d9
RB
1741 /* If we're using an unlocked function, assume the other unlocked
1742 functions exist explicitly. */
1743 tree const fn_fputc = (unlocked
1744 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
1745 : builtin_decl_implicit (BUILT_IN_FPUTC));
1746 tree const fn_fwrite = (unlocked
1747 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
1748 : builtin_decl_implicit (BUILT_IN_FWRITE));
1749
1750 /* If the return value is used, don't do the transformation. */
dcb7fae2 1751 if (gimple_call_lhs (stmt))
fef5a0d9
RB
1752 return false;
1753
fef5a0d9
RB
1754 /* Get the length of the string passed to fputs. If the length
1755 can't be determined, punt. */
dcb7fae2 1756 tree len = get_maxval_strlen (arg0, 0);
fef5a0d9
RB
1757 if (!len
1758 || TREE_CODE (len) != INTEGER_CST)
1759 return false;
1760
1761 switch (compare_tree_int (len, 1))
1762 {
1763 case -1: /* length is 0, delete the call entirely . */
1764 replace_call_with_value (gsi, integer_zero_node);
1765 return true;
1766
1767 case 0: /* length is 1, call fputc. */
1768 {
1769 const char *p = c_getstr (arg0);
1770 if (p != NULL)
1771 {
1772 if (!fn_fputc)
1773 return false;
1774
1775 gimple repl = gimple_build_call (fn_fputc, 2,
1776 build_int_cst
1777 (integer_type_node, p[0]), arg1);
1778 replace_call_with_call_and_fold (gsi, repl);
1779 return true;
1780 }
1781 }
1782 /* FALLTHROUGH */
1783 case 1: /* length is greater than 1, call fwrite. */
1784 {
1785 /* If optimizing for size keep fputs. */
1786 if (optimize_function_for_size_p (cfun))
1787 return false;
1788 /* New argument list transforming fputs(string, stream) to
1789 fwrite(string, 1, len, stream). */
1790 if (!fn_fwrite)
1791 return false;
1792
1793 gimple repl = gimple_build_call (fn_fwrite, 4, arg0,
1794 size_one_node, len, arg1);
1795 replace_call_with_call_and_fold (gsi, repl);
1796 return true;
1797 }
1798 default:
1799 gcc_unreachable ();
1800 }
1801 return false;
1802}
1803
1804/* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
1805 DEST, SRC, LEN, and SIZE are the arguments to the call.
1806 IGNORE is true, if return value can be ignored. FCODE is the BUILT_IN_*
1807 code of the builtin. If MAXLEN is not NULL, it is maximum length
1808 passed as third argument. */
1809
1810static bool
1811gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
fef5a0d9 1812 tree dest, tree src, tree len, tree size,
fef5a0d9
RB
1813 enum built_in_function fcode)
1814{
dcb7fae2
RB
1815 gimple stmt = gsi_stmt (*gsi);
1816 location_t loc = gimple_location (stmt);
1817 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
1818 tree fn;
1819
1820 /* If SRC and DEST are the same (and not volatile), return DEST
1821 (resp. DEST+LEN for __mempcpy_chk). */
1822 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
1823 {
1824 if (fcode != BUILT_IN_MEMPCPY_CHK)
1825 {
1826 replace_call_with_value (gsi, dest);
1827 return true;
1828 }
1829 else
1830 {
1831 tree temp = fold_build_pointer_plus_loc (loc, dest, len);
1832 temp = force_gimple_operand_gsi (gsi, temp,
1833 false, NULL_TREE, true,
1834 GSI_SAME_STMT);
1835 replace_call_with_value (gsi, temp);
1836 return true;
1837 }
1838 }
1839
1840 if (! tree_fits_uhwi_p (size))
1841 return false;
1842
dcb7fae2 1843 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9
RB
1844 if (! integer_all_onesp (size))
1845 {
1846 if (! tree_fits_uhwi_p (len))
1847 {
1848 /* If LEN is not constant, try MAXLEN too.
1849 For MAXLEN only allow optimizing into non-_ocs function
1850 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1851 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1852 {
1853 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
1854 {
1855 /* (void) __mempcpy_chk () can be optimized into
1856 (void) __memcpy_chk (). */
1857 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1858 if (!fn)
1859 return false;
1860
1861 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1862 replace_call_with_call_and_fold (gsi, repl);
1863 return true;
1864 }
1865 return false;
1866 }
1867 }
1868 else
1869 maxlen = len;
1870
1871 if (tree_int_cst_lt (size, maxlen))
1872 return false;
1873 }
1874
1875 fn = NULL_TREE;
1876 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
1877 mem{cpy,pcpy,move,set} is available. */
1878 switch (fcode)
1879 {
1880 case BUILT_IN_MEMCPY_CHK:
1881 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
1882 break;
1883 case BUILT_IN_MEMPCPY_CHK:
1884 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
1885 break;
1886 case BUILT_IN_MEMMOVE_CHK:
1887 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
1888 break;
1889 case BUILT_IN_MEMSET_CHK:
1890 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
1891 break;
1892 default:
1893 break;
1894 }
1895
1896 if (!fn)
1897 return false;
1898
1899 gimple repl = gimple_build_call (fn, 3, dest, src, len);
1900 replace_call_with_call_and_fold (gsi, repl);
1901 return true;
1902}
1903
1904/* Fold a call to the __st[rp]cpy_chk builtin.
1905 DEST, SRC, and SIZE are the arguments to the call.
1906 IGNORE is true if return value can be ignored. FCODE is the BUILT_IN_*
1907 code of the builtin. If MAXLEN is not NULL, it is maximum length of
1908 strings passed as second argument. */
1909
1910static bool
1911gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
dcb7fae2 1912 tree dest,
fef5a0d9 1913 tree src, tree size,
fef5a0d9
RB
1914 enum built_in_function fcode)
1915{
dcb7fae2
RB
1916 gimple stmt = gsi_stmt (*gsi);
1917 location_t loc = gimple_location (stmt);
1918 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
1919 tree len, fn;
1920
1921 /* If SRC and DEST are the same (and not volatile), return DEST. */
1922 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
1923 {
1924 replace_call_with_value (gsi, dest);
1925 return true;
1926 }
1927
1928 if (! tree_fits_uhwi_p (size))
1929 return false;
1930
dcb7fae2 1931 tree maxlen = get_maxval_strlen (src, 1);
fef5a0d9
RB
1932 if (! integer_all_onesp (size))
1933 {
1934 len = c_strlen (src, 1);
1935 if (! len || ! tree_fits_uhwi_p (len))
1936 {
1937 /* If LEN is not constant, try MAXLEN too.
1938 For MAXLEN only allow optimizing into non-_ocs function
1939 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
1940 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
1941 {
1942 if (fcode == BUILT_IN_STPCPY_CHK)
1943 {
1944 if (! ignore)
1945 return false;
1946
1947 /* If return value of __stpcpy_chk is ignored,
1948 optimize into __strcpy_chk. */
1949 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
1950 if (!fn)
1951 return false;
1952
1953 gimple repl = gimple_build_call (fn, 3, dest, src, size);
1954 replace_call_with_call_and_fold (gsi, repl);
1955 return true;
1956 }
1957
1958 if (! len || TREE_SIDE_EFFECTS (len))
1959 return false;
1960
1961 /* If c_strlen returned something, but not a constant,
1962 transform __strcpy_chk into __memcpy_chk. */
1963 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
1964 if (!fn)
1965 return false;
1966
1967 len = fold_convert_loc (loc, size_type_node, len);
1968 len = size_binop_loc (loc, PLUS_EXPR, len,
1969 build_int_cst (size_type_node, 1));
1970 len = force_gimple_operand_gsi (gsi, len, true, NULL_TREE,
1971 true, GSI_SAME_STMT);
1972 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
1973 replace_call_with_call_and_fold (gsi, repl);
1974 return true;
1975 }
e256dfce 1976 }
fef5a0d9
RB
1977 else
1978 maxlen = len;
1979
1980 if (! tree_int_cst_lt (maxlen, size))
1981 return false;
e256dfce
RG
1982 }
1983
fef5a0d9
RB
1984 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
1985 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK
1986 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
1987 if (!fn)
1988 return false;
1989
1990 gimple repl = gimple_build_call (fn, 2, dest, src);
1991 replace_call_with_call_and_fold (gsi, repl);
1992 return true;
1993}
1994
1995/* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
1996 are the arguments to the call. If MAXLEN is not NULL, it is maximum
1997 length passed as third argument. IGNORE is true if return value can be
1998 ignored. FCODE is the BUILT_IN_* code of the builtin. */
1999
2000static bool
2001gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
2002 tree dest, tree src,
dcb7fae2 2003 tree len, tree size,
fef5a0d9
RB
2004 enum built_in_function fcode)
2005{
dcb7fae2
RB
2006 gimple stmt = gsi_stmt (*gsi);
2007 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
fef5a0d9
RB
2008 tree fn;
2009
2010 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
cbdd87d4 2011 {
fef5a0d9
RB
2012 /* If return value of __stpncpy_chk is ignored,
2013 optimize into __strncpy_chk. */
2014 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
2015 if (fn)
2016 {
2017 gimple repl = gimple_build_call (fn, 4, dest, src, len, size);
2018 replace_call_with_call_and_fold (gsi, repl);
2019 return true;
2020 }
cbdd87d4
RG
2021 }
2022
fef5a0d9
RB
2023 if (! tree_fits_uhwi_p (size))
2024 return false;
2025
dcb7fae2 2026 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9 2027 if (! integer_all_onesp (size))
cbdd87d4 2028 {
fef5a0d9 2029 if (! tree_fits_uhwi_p (len))
fe2ef088 2030 {
fef5a0d9
RB
2031 /* If LEN is not constant, try MAXLEN too.
2032 For MAXLEN only allow optimizing into non-_ocs function
2033 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2034 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
2035 return false;
8a1561bc 2036 }
fef5a0d9
RB
2037 else
2038 maxlen = len;
2039
2040 if (tree_int_cst_lt (size, maxlen))
2041 return false;
cbdd87d4
RG
2042 }
2043
fef5a0d9
RB
2044 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
2045 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK
2046 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
2047 if (!fn)
2048 return false;
2049
2050 gimple repl = gimple_build_call (fn, 3, dest, src, len);
2051 replace_call_with_call_and_fold (gsi, repl);
2052 return true;
cbdd87d4
RG
2053}
2054
2625bb5d
RB
2055/* Fold function call to builtin stpcpy with arguments DEST and SRC.
2056 Return NULL_TREE if no simplification can be made. */
2057
2058static bool
2059gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
2060{
2061 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2062 location_t loc = gimple_location (stmt);
2063 tree dest = gimple_call_arg (stmt, 0);
2064 tree src = gimple_call_arg (stmt, 1);
2065 tree fn, len, lenp1;
2066
2067 /* If the result is unused, replace stpcpy with strcpy. */
2068 if (gimple_call_lhs (stmt) == NULL_TREE)
2069 {
2070 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2071 if (!fn)
2072 return false;
2073 gimple_call_set_fndecl (stmt, fn);
2074 fold_stmt (gsi);
2075 return true;
2076 }
2077
2078 len = c_strlen (src, 1);
2079 if (!len
2080 || TREE_CODE (len) != INTEGER_CST)
2081 return false;
2082
2083 if (optimize_function_for_size_p (cfun)
2084 /* If length is zero it's small enough. */
2085 && !integer_zerop (len))
2086 return false;
2087
2088 /* If the source has a known length replace stpcpy with memcpy. */
2089 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2090 if (!fn)
2091 return false;
2092
2093 gimple_seq stmts = NULL;
2094 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
2095 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
2096 tem, build_int_cst (size_type_node, 1));
2097 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2098 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
2099 gimple_set_vuse (repl, gimple_vuse (stmt));
2100 gimple_set_vdef (repl, gimple_vdef (stmt));
2101 if (gimple_vdef (repl)
2102 && TREE_CODE (gimple_vdef (repl)) == SSA_NAME)
2103 SSA_NAME_DEF_STMT (gimple_vdef (repl)) = repl;
2104 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
2105 /* Replace the result with dest + len. */
2106 stmts = NULL;
2107 tem = gimple_convert (&stmts, loc, sizetype, len);
2108 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
2109 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
2110 POINTER_PLUS_EXPR, dest, tem);
2111 gsi_replace (gsi, ret, true);
2112 /* Finally fold the memcpy call. */
2113 gimple_stmt_iterator gsi2 = *gsi;
2114 gsi_prev (&gsi2);
2115 fold_stmt (&gsi2);
2116 return true;
2117}
2118
fef5a0d9
RB
2119/* Fold a call EXP to {,v}snprintf having NARGS passed as ARGS. Return
2120 NULL_TREE if a normal call should be emitted rather than expanding
2121 the function inline. FCODE is either BUILT_IN_SNPRINTF_CHK or
2122 BUILT_IN_VSNPRINTF_CHK. If MAXLEN is not NULL, it is maximum length
2123 passed as second argument. */
cbdd87d4
RG
2124
2125static bool
fef5a0d9 2126gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
dcb7fae2 2127 enum built_in_function fcode)
cbdd87d4 2128{
538dd0b7 2129 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
2130 tree dest, size, len, fn, fmt, flag;
2131 const char *fmt_str;
cbdd87d4 2132
fef5a0d9
RB
2133 /* Verify the required arguments in the original call. */
2134 if (gimple_call_num_args (stmt) < 5)
2135 return false;
cbdd87d4 2136
fef5a0d9
RB
2137 dest = gimple_call_arg (stmt, 0);
2138 len = gimple_call_arg (stmt, 1);
2139 flag = gimple_call_arg (stmt, 2);
2140 size = gimple_call_arg (stmt, 3);
2141 fmt = gimple_call_arg (stmt, 4);
2142
2143 if (! tree_fits_uhwi_p (size))
2144 return false;
2145
2146 if (! integer_all_onesp (size))
2147 {
dcb7fae2 2148 tree maxlen = get_maxval_strlen (len, 2);
fef5a0d9 2149 if (! tree_fits_uhwi_p (len))
cbdd87d4 2150 {
fef5a0d9
RB
2151 /* If LEN is not constant, try MAXLEN too.
2152 For MAXLEN only allow optimizing into non-_ocs function
2153 if SIZE is >= MAXLEN, never convert to __ocs_fail (). */
2154 if (maxlen == NULL_TREE || ! tree_fits_uhwi_p (maxlen))
cbdd87d4
RG
2155 return false;
2156 }
2157 else
fef5a0d9 2158 maxlen = len;
cbdd87d4 2159
fef5a0d9
RB
2160 if (tree_int_cst_lt (size, maxlen))
2161 return false;
2162 }
cbdd87d4 2163
fef5a0d9
RB
2164 if (!init_target_chars ())
2165 return false;
cbdd87d4 2166
fef5a0d9
RB
2167 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
2168 or if format doesn't contain % chars or is "%s". */
2169 if (! integer_zerop (flag))
2170 {
2171 fmt_str = c_getstr (fmt);
2172 if (fmt_str == NULL)
2173 return false;
2174 if (strchr (fmt_str, target_percent) != NULL
2175 && strcmp (fmt_str, target_percent_s))
2176 return false;
cbdd87d4
RG
2177 }
2178
fef5a0d9
RB
2179 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
2180 available. */
2181 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
2182 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
2183 if (!fn)
491e0b9b
RG
2184 return false;
2185
fef5a0d9
RB
2186 /* Replace the called function and the first 5 argument by 3 retaining
2187 trailing varargs. */
2188 gimple_call_set_fndecl (stmt, fn);
2189 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2190 gimple_call_set_arg (stmt, 0, dest);
2191 gimple_call_set_arg (stmt, 1, len);
2192 gimple_call_set_arg (stmt, 2, fmt);
2193 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
2194 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2195 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2196 fold_stmt (gsi);
2197 return true;
2198}
cbdd87d4 2199
fef5a0d9
RB
2200/* Fold a call EXP to __{,v}sprintf_chk having NARGS passed as ARGS.
2201 Return NULL_TREE if a normal call should be emitted rather than
2202 expanding the function inline. FCODE is either BUILT_IN_SPRINTF_CHK
2203 or BUILT_IN_VSPRINTF_CHK. */
cbdd87d4 2204
fef5a0d9
RB
2205static bool
2206gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
2207 enum built_in_function fcode)
2208{
538dd0b7 2209 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
fef5a0d9
RB
2210 tree dest, size, len, fn, fmt, flag;
2211 const char *fmt_str;
2212 unsigned nargs = gimple_call_num_args (stmt);
cbdd87d4 2213
fef5a0d9
RB
2214 /* Verify the required arguments in the original call. */
2215 if (nargs < 4)
2216 return false;
2217 dest = gimple_call_arg (stmt, 0);
2218 flag = gimple_call_arg (stmt, 1);
2219 size = gimple_call_arg (stmt, 2);
2220 fmt = gimple_call_arg (stmt, 3);
2221
2222 if (! tree_fits_uhwi_p (size))
2223 return false;
2224
2225 len = NULL_TREE;
2226
2227 if (!init_target_chars ())
2228 return false;
2229
2230 /* Check whether the format is a literal string constant. */
2231 fmt_str = c_getstr (fmt);
2232 if (fmt_str != NULL)
2233 {
2234 /* If the format doesn't contain % args or %%, we know the size. */
2235 if (strchr (fmt_str, target_percent) == 0)
cbdd87d4 2236 {
fef5a0d9
RB
2237 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
2238 len = build_int_cstu (size_type_node, strlen (fmt_str));
2239 }
2240 /* If the format is "%s" and first ... argument is a string literal,
2241 we know the size too. */
2242 else if (fcode == BUILT_IN_SPRINTF_CHK
2243 && strcmp (fmt_str, target_percent_s) == 0)
2244 {
2245 tree arg;
cbdd87d4 2246
fef5a0d9
RB
2247 if (nargs == 5)
2248 {
2249 arg = gimple_call_arg (stmt, 4);
2250 if (POINTER_TYPE_P (TREE_TYPE (arg)))
2251 {
2252 len = c_strlen (arg, 1);
2253 if (! len || ! tree_fits_uhwi_p (len))
2254 len = NULL_TREE;
2255 }
2256 }
2257 }
2258 }
cbdd87d4 2259
fef5a0d9
RB
2260 if (! integer_all_onesp (size))
2261 {
2262 if (! len || ! tree_int_cst_lt (len, size))
2263 return false;
2264 }
cbdd87d4 2265
fef5a0d9
RB
2266 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
2267 or if format doesn't contain % chars or is "%s". */
2268 if (! integer_zerop (flag))
2269 {
2270 if (fmt_str == NULL)
2271 return false;
2272 if (strchr (fmt_str, target_percent) != NULL
2273 && strcmp (fmt_str, target_percent_s))
2274 return false;
2275 }
cbdd87d4 2276
fef5a0d9
RB
2277 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
2278 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
2279 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
2280 if (!fn)
2281 return false;
2282
2283 /* Replace the called function and the first 4 argument by 2 retaining
2284 trailing varargs. */
2285 gimple_call_set_fndecl (stmt, fn);
2286 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
2287 gimple_call_set_arg (stmt, 0, dest);
2288 gimple_call_set_arg (stmt, 1, fmt);
2289 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
2290 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
2291 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
2292 fold_stmt (gsi);
2293 return true;
2294}
2295
35770bb2
RB
2296/* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
2297 ORIG may be null if this is a 2-argument call. We don't attempt to
2298 simplify calls with more than 3 arguments.
2299
2300 Return NULL_TREE if no simplification was possible, otherwise return the
2301 simplified form of the call as a tree. If IGNORED is true, it means that
2302 the caller does not use the returned value of the function. */
2303
2304static bool
dcb7fae2 2305gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
35770bb2
RB
2306{
2307 gimple stmt = gsi_stmt (*gsi);
2308 tree dest = gimple_call_arg (stmt, 0);
2309 tree fmt = gimple_call_arg (stmt, 1);
2310 tree orig = NULL_TREE;
2311 const char *fmt_str = NULL;
2312
2313 /* Verify the required arguments in the original call. We deal with two
2314 types of sprintf() calls: 'sprintf (str, fmt)' and
2315 'sprintf (dest, "%s", orig)'. */
2316 if (gimple_call_num_args (stmt) > 3)
2317 return false;
2318
2319 if (gimple_call_num_args (stmt) == 3)
2320 orig = gimple_call_arg (stmt, 2);
2321
2322 /* Check whether the format is a literal string constant. */
2323 fmt_str = c_getstr (fmt);
2324 if (fmt_str == NULL)
2325 return false;
2326
2327 if (!init_target_chars ())
2328 return false;
2329
2330 /* If the format doesn't contain % args or %%, use strcpy. */
2331 if (strchr (fmt_str, target_percent) == NULL)
2332 {
2333 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2334
2335 if (!fn)
2336 return false;
2337
2338 /* Don't optimize sprintf (buf, "abc", ptr++). */
2339 if (orig)
2340 return false;
2341
2342 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
2343 'format' is known to contain no % formats. */
2344 gimple_seq stmts = NULL;
2345 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2346 gimple_seq_add_stmt_without_update (&stmts, repl);
2347 if (gimple_call_lhs (stmt))
2348 {
2349 repl = gimple_build_assign (gimple_call_lhs (stmt),
2350 build_int_cst (integer_type_node,
2351 strlen (fmt_str)));
2352 gimple_seq_add_stmt_without_update (&stmts, repl);
2353 gsi_replace_with_seq_vops (gsi, stmts);
2354 /* gsi now points at the assignment to the lhs, get a
2355 stmt iterator to the memcpy call.
2356 ??? We can't use gsi_for_stmt as that doesn't work when the
2357 CFG isn't built yet. */
2358 gimple_stmt_iterator gsi2 = *gsi;
2359 gsi_prev (&gsi2);
2360 fold_stmt (&gsi2);
2361 }
2362 else
2363 {
2364 gsi_replace_with_seq_vops (gsi, stmts);
2365 fold_stmt (gsi);
2366 }
2367 return true;
2368 }
2369
2370 /* If the format is "%s", use strcpy if the result isn't used. */
2371 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2372 {
2373 tree fn;
2374 fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2375
2376 if (!fn)
2377 return false;
2378
2379 /* Don't crash on sprintf (str1, "%s"). */
2380 if (!orig)
2381 return false;
2382
dcb7fae2
RB
2383 tree orig_len = NULL_TREE;
2384 if (gimple_call_lhs (stmt))
35770bb2 2385 {
dcb7fae2 2386 orig_len = get_maxval_strlen (orig, 0);
d7e78447 2387 if (!orig_len)
35770bb2
RB
2388 return false;
2389 }
2390
2391 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
2392 gimple_seq stmts = NULL;
2393 gimple repl = gimple_build_call (fn, 2, dest, orig);
2394 gimple_seq_add_stmt_without_update (&stmts, repl);
2395 if (gimple_call_lhs (stmt))
2396 {
d7e78447
RB
2397 if (!useless_type_conversion_p (integer_type_node,
2398 TREE_TYPE (orig_len)))
2399 orig_len = fold_convert (integer_type_node, orig_len);
2400 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
35770bb2
RB
2401 gimple_seq_add_stmt_without_update (&stmts, repl);
2402 gsi_replace_with_seq_vops (gsi, stmts);
2403 /* gsi now points at the assignment to the lhs, get a
2404 stmt iterator to the memcpy call.
2405 ??? We can't use gsi_for_stmt as that doesn't work when the
2406 CFG isn't built yet. */
2407 gimple_stmt_iterator gsi2 = *gsi;
2408 gsi_prev (&gsi2);
2409 fold_stmt (&gsi2);
2410 }
2411 else
2412 {
2413 gsi_replace_with_seq_vops (gsi, stmts);
2414 fold_stmt (gsi);
2415 }
2416 return true;
2417 }
2418 return false;
2419}
2420
d7e78447
RB
2421/* Simplify a call to the snprintf builtin with arguments DEST, DESTSIZE,
2422 FMT, and ORIG. ORIG may be null if this is a 3-argument call. We don't
2423 attempt to simplify calls with more than 4 arguments.
35770bb2 2424
d7e78447
RB
2425 Return NULL_TREE if no simplification was possible, otherwise return the
2426 simplified form of the call as a tree. If IGNORED is true, it means that
2427 the caller does not use the returned value of the function. */
2428
2429static bool
dcb7fae2 2430gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
d7e78447 2431{
538dd0b7 2432 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
d7e78447
RB
2433 tree dest = gimple_call_arg (stmt, 0);
2434 tree destsize = gimple_call_arg (stmt, 1);
2435 tree fmt = gimple_call_arg (stmt, 2);
2436 tree orig = NULL_TREE;
2437 const char *fmt_str = NULL;
2438
2439 if (gimple_call_num_args (stmt) > 4)
2440 return false;
2441
2442 if (gimple_call_num_args (stmt) == 4)
2443 orig = gimple_call_arg (stmt, 3);
2444
2445 if (!tree_fits_uhwi_p (destsize))
2446 return false;
2447 unsigned HOST_WIDE_INT destlen = tree_to_uhwi (destsize);
2448
2449 /* Check whether the format is a literal string constant. */
2450 fmt_str = c_getstr (fmt);
2451 if (fmt_str == NULL)
2452 return false;
2453
2454 if (!init_target_chars ())
2455 return false;
2456
2457 /* If the format doesn't contain % args or %%, use strcpy. */
2458 if (strchr (fmt_str, target_percent) == NULL)
2459 {
2460 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2461 if (!fn)
2462 return false;
2463
2464 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
2465 if (orig)
2466 return false;
2467
2468 /* We could expand this as
2469 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
2470 or to
2471 memcpy (str, fmt_with_nul_at_cstm1, cst);
2472 but in the former case that might increase code size
2473 and in the latter case grow .rodata section too much.
2474 So punt for now. */
2475 size_t len = strlen (fmt_str);
2476 if (len >= destlen)
2477 return false;
2478
2479 gimple_seq stmts = NULL;
2480 gimple repl = gimple_build_call (fn, 2, dest, fmt);
2481 gimple_seq_add_stmt_without_update (&stmts, repl);
2482 if (gimple_call_lhs (stmt))
2483 {
2484 repl = gimple_build_assign (gimple_call_lhs (stmt),
2485 build_int_cst (integer_type_node, len));
2486 gimple_seq_add_stmt_without_update (&stmts, repl);
2487 gsi_replace_with_seq_vops (gsi, stmts);
2488 /* gsi now points at the assignment to the lhs, get a
2489 stmt iterator to the memcpy call.
2490 ??? We can't use gsi_for_stmt as that doesn't work when the
2491 CFG isn't built yet. */
2492 gimple_stmt_iterator gsi2 = *gsi;
2493 gsi_prev (&gsi2);
2494 fold_stmt (&gsi2);
2495 }
2496 else
2497 {
2498 gsi_replace_with_seq_vops (gsi, stmts);
2499 fold_stmt (gsi);
2500 }
2501 return true;
2502 }
2503
2504 /* If the format is "%s", use strcpy if the result isn't used. */
2505 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
2506 {
2507 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
2508 if (!fn)
2509 return false;
2510
2511 /* Don't crash on snprintf (str1, cst, "%s"). */
2512 if (!orig)
2513 return false;
2514
dcb7fae2 2515 tree orig_len = get_maxval_strlen (orig, 0);
d7e78447 2516 if (!orig_len)
dcb7fae2 2517 return false;
d7e78447
RB
2518
2519 /* We could expand this as
2520 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
2521 or to
2522 memcpy (str1, str2_with_nul_at_cstm1, cst);
2523 but in the former case that might increase code size
2524 and in the latter case grow .rodata section too much.
2525 So punt for now. */
2526 if (compare_tree_int (orig_len, destlen) >= 0)
2527 return false;
2528
2529 /* Convert snprintf (str1, cst, "%s", str2) into
2530 strcpy (str1, str2) if strlen (str2) < cst. */
2531 gimple_seq stmts = NULL;
2532 gimple repl = gimple_build_call (fn, 2, dest, orig);
2533 gimple_seq_add_stmt_without_update (&stmts, repl);
2534 if (gimple_call_lhs (stmt))
2535 {
2536 if (!useless_type_conversion_p (integer_type_node,
2537 TREE_TYPE (orig_len)))
2538 orig_len = fold_convert (integer_type_node, orig_len);
2539 repl = gimple_build_assign (gimple_call_lhs (stmt), orig_len);
2540 gimple_seq_add_stmt_without_update (&stmts, repl);
2541 gsi_replace_with_seq_vops (gsi, stmts);
2542 /* gsi now points at the assignment to the lhs, get a
2543 stmt iterator to the memcpy call.
2544 ??? We can't use gsi_for_stmt as that doesn't work when the
2545 CFG isn't built yet. */
2546 gimple_stmt_iterator gsi2 = *gsi;
2547 gsi_prev (&gsi2);
2548 fold_stmt (&gsi2);
2549 }
2550 else
2551 {
2552 gsi_replace_with_seq_vops (gsi, stmts);
2553 fold_stmt (gsi);
2554 }
2555 return true;
2556 }
2557 return false;
2558}
35770bb2 2559
edd7ae68
RB
2560/* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}printf_chk builtins.
2561 FP, FMT, and ARG are the arguments to the call. We don't fold calls with
2562 more than 3 arguments, and ARG may be null in the 2-argument case.
2563
2564 Return NULL_TREE if no simplification was possible, otherwise return the
2565 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2566 code of the function to be simplified. */
2567
2568static bool
2569gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
2570 tree fp, tree fmt, tree arg,
2571 enum built_in_function fcode)
2572{
2573 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2574 tree fn_fputc, fn_fputs;
2575 const char *fmt_str = NULL;
2576
2577 /* If the return value is used, don't do the transformation. */
2578 if (gimple_call_lhs (stmt) != NULL_TREE)
2579 return false;
2580
2581 /* Check whether the format is a literal string constant. */
2582 fmt_str = c_getstr (fmt);
2583 if (fmt_str == NULL)
2584 return false;
2585
2586 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
2587 {
2588 /* If we're using an unlocked function, assume the other
2589 unlocked functions exist explicitly. */
2590 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
2591 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
2592 }
2593 else
2594 {
2595 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
2596 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
2597 }
2598
2599 if (!init_target_chars ())
2600 return false;
2601
2602 /* If the format doesn't contain % args or %%, use strcpy. */
2603 if (strchr (fmt_str, target_percent) == NULL)
2604 {
2605 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
2606 && arg)
2607 return false;
2608
2609 /* If the format specifier was "", fprintf does nothing. */
2610 if (fmt_str[0] == '\0')
2611 {
2612 replace_call_with_value (gsi, NULL_TREE);
2613 return true;
2614 }
2615
2616 /* When "string" doesn't contain %, replace all cases of
2617 fprintf (fp, string) with fputs (string, fp). The fputs
2618 builtin will take care of special cases like length == 1. */
2619 if (fn_fputs)
2620 {
2621 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
2622 replace_call_with_call_and_fold (gsi, repl);
2623 return true;
2624 }
2625 }
2626
2627 /* The other optimizations can be done only on the non-va_list variants. */
2628 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
2629 return false;
2630
2631 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
2632 else if (strcmp (fmt_str, target_percent_s) == 0)
2633 {
2634 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2635 return false;
2636 if (fn_fputs)
2637 {
2638 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
2639 replace_call_with_call_and_fold (gsi, repl);
2640 return true;
2641 }
2642 }
2643
2644 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
2645 else if (strcmp (fmt_str, target_percent_c) == 0)
2646 {
2647 if (!arg
2648 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
2649 return false;
2650 if (fn_fputc)
2651 {
2652 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
2653 replace_call_with_call_and_fold (gsi, repl);
2654 return true;
2655 }
2656 }
2657
2658 return false;
2659}
2660
ad03a744
RB
2661/* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
2662 FMT and ARG are the arguments to the call; we don't fold cases with
2663 more than 2 arguments, and ARG may be null if this is a 1-argument case.
2664
2665 Return NULL_TREE if no simplification was possible, otherwise return the
2666 simplified form of the call as a tree. FCODE is the BUILT_IN_*
2667 code of the function to be simplified. */
2668
2669static bool
2670gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
2671 tree arg, enum built_in_function fcode)
2672{
2673 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
2674 tree fn_putchar, fn_puts, newarg;
2675 const char *fmt_str = NULL;
2676
2677 /* If the return value is used, don't do the transformation. */
2678 if (gimple_call_lhs (stmt) != NULL_TREE)
2679 return false;
2680
2681 /* Check whether the format is a literal string constant. */
2682 fmt_str = c_getstr (fmt);
2683 if (fmt_str == NULL)
2684 return false;
2685
2686 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
2687 {
2688 /* If we're using an unlocked function, assume the other
2689 unlocked functions exist explicitly. */
2690 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
2691 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
2692 }
2693 else
2694 {
2695 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
2696 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
2697 }
2698
2699 if (!init_target_chars ())
2700 return false;
2701
2702 if (strcmp (fmt_str, target_percent_s) == 0
2703 || strchr (fmt_str, target_percent) == NULL)
2704 {
2705 const char *str;
2706
2707 if (strcmp (fmt_str, target_percent_s) == 0)
2708 {
2709 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2710 return false;
2711
2712 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2713 return false;
2714
2715 str = c_getstr (arg);
2716 if (str == NULL)
2717 return false;
2718 }
2719 else
2720 {
2721 /* The format specifier doesn't contain any '%' characters. */
2722 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
2723 && arg)
2724 return false;
2725 str = fmt_str;
2726 }
2727
2728 /* If the string was "", printf does nothing. */
2729 if (str[0] == '\0')
2730 {
2731 replace_call_with_value (gsi, NULL_TREE);
2732 return true;
2733 }
2734
2735 /* If the string has length of 1, call putchar. */
2736 if (str[1] == '\0')
2737 {
2738 /* Given printf("c"), (where c is any one character,)
2739 convert "c"[0] to an int and pass that to the replacement
2740 function. */
2741 newarg = build_int_cst (integer_type_node, str[0]);
2742 if (fn_putchar)
2743 {
2744 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
2745 replace_call_with_call_and_fold (gsi, repl);
2746 return true;
2747 }
2748 }
2749 else
2750 {
2751 /* If the string was "string\n", call puts("string"). */
2752 size_t len = strlen (str);
2753 if ((unsigned char)str[len - 1] == target_newline
2754 && (size_t) (int) len == len
2755 && (int) len > 0)
2756 {
2757 char *newstr;
2758 tree offset_node, string_cst;
2759
2760 /* Create a NUL-terminated string that's one char shorter
2761 than the original, stripping off the trailing '\n'. */
2762 newarg = build_string_literal (len, str);
2763 string_cst = string_constant (newarg, &offset_node);
2764 gcc_checking_assert (string_cst
2765 && (TREE_STRING_LENGTH (string_cst)
2766 == (int) len)
2767 && integer_zerop (offset_node)
2768 && (unsigned char)
2769 TREE_STRING_POINTER (string_cst)[len - 1]
2770 == target_newline);
2771 /* build_string_literal creates a new STRING_CST,
2772 modify it in place to avoid double copying. */
2773 newstr = CONST_CAST (char *, TREE_STRING_POINTER (string_cst));
2774 newstr[len - 1] = '\0';
2775 if (fn_puts)
2776 {
2777 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
2778 replace_call_with_call_and_fold (gsi, repl);
2779 return true;
2780 }
2781 }
2782 else
2783 /* We'd like to arrange to call fputs(string,stdout) here,
2784 but we need stdout and don't have a way to get it yet. */
2785 return false;
2786 }
2787 }
2788
2789 /* The other optimizations can be done only on the non-va_list variants. */
2790 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
2791 return false;
2792
2793 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
2794 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
2795 {
2796 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
2797 return false;
2798 if (fn_puts)
2799 {
2800 gcall *repl = gimple_build_call (fn_puts, 1, arg);
2801 replace_call_with_call_and_fold (gsi, repl);
2802 return true;
2803 }
2804 }
2805
2806 /* If the format specifier was "%c", call __builtin_putchar(arg). */
2807 else if (strcmp (fmt_str, target_percent_c) == 0)
2808 {
2809 if (!arg || ! useless_type_conversion_p (integer_type_node,
2810 TREE_TYPE (arg)))
2811 return false;
2812 if (fn_putchar)
2813 {
2814 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
2815 replace_call_with_call_and_fold (gsi, repl);
2816 return true;
2817 }
2818 }
2819
2820 return false;
2821}
2822
edd7ae68 2823
fef5a0d9
RB
2824
2825/* Fold a call to __builtin_strlen with known length LEN. */
2826
2827static bool
dcb7fae2 2828gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
fef5a0d9 2829{
dcb7fae2
RB
2830 gimple stmt = gsi_stmt (*gsi);
2831 tree len = get_maxval_strlen (gimple_call_arg (stmt, 0), 0);
fef5a0d9
RB
2832 if (!len)
2833 return false;
2813904b 2834 len = force_gimple_operand_gsi (gsi, len, true, NULL, true, GSI_SAME_STMT);
fef5a0d9
RB
2835 replace_call_with_value (gsi, len);
2836 return true;
cbdd87d4
RG
2837}
2838
2839
dcb7fae2
RB
2840/* Fold the non-target builtin at *GSI and return whether any simplification
2841 was made. */
cbdd87d4 2842
fef5a0d9 2843static bool
dcb7fae2 2844gimple_fold_builtin (gimple_stmt_iterator *gsi)
cbdd87d4 2845{
538dd0b7 2846 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
fef5a0d9 2847 tree callee = gimple_call_fndecl (stmt);
cbdd87d4 2848
dcb7fae2
RB
2849 /* Give up for always_inline inline builtins until they are
2850 inlined. */
2851 if (avoid_folding_inline_builtin (callee))
2852 return false;
cbdd87d4 2853
edd7ae68
RB
2854 unsigned n = gimple_call_num_args (stmt);
2855 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2856 switch (fcode)
cbdd87d4 2857 {
dcb7fae2
RB
2858 case BUILT_IN_BZERO:
2859 return gimple_fold_builtin_memset (gsi, integer_zero_node,
2860 gimple_call_arg (stmt, 1));
2861 case BUILT_IN_MEMSET:
2862 return gimple_fold_builtin_memset (gsi,
2863 gimple_call_arg (stmt, 1),
2864 gimple_call_arg (stmt, 2));
2865 case BUILT_IN_BCOPY:
2866 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 1),
2867 gimple_call_arg (stmt, 0), 3);
2868 case BUILT_IN_MEMCPY:
2869 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2870 gimple_call_arg (stmt, 1), 0);
2871 case BUILT_IN_MEMPCPY:
2872 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2873 gimple_call_arg (stmt, 1), 1);
2874 case BUILT_IN_MEMMOVE:
2875 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
2876 gimple_call_arg (stmt, 1), 3);
2877 case BUILT_IN_SPRINTF_CHK:
2878 case BUILT_IN_VSPRINTF_CHK:
edd7ae68 2879 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
dcb7fae2
RB
2880 case BUILT_IN_STRCAT_CHK:
2881 return gimple_fold_builtin_strcat_chk (gsi);
745583f9
RB
2882 case BUILT_IN_STRNCAT_CHK:
2883 return gimple_fold_builtin_strncat_chk (gsi);
cbdd87d4 2884 case BUILT_IN_STRLEN:
dcb7fae2 2885 return gimple_fold_builtin_strlen (gsi);
cbdd87d4 2886 case BUILT_IN_STRCPY:
dcb7fae2 2887 return gimple_fold_builtin_strcpy (gsi,
fef5a0d9 2888 gimple_call_arg (stmt, 0),
dcb7fae2 2889 gimple_call_arg (stmt, 1));
cbdd87d4 2890 case BUILT_IN_STRNCPY:
dcb7fae2 2891 return gimple_fold_builtin_strncpy (gsi,
fef5a0d9
RB
2892 gimple_call_arg (stmt, 0),
2893 gimple_call_arg (stmt, 1),
dcb7fae2 2894 gimple_call_arg (stmt, 2));
9a7eefec 2895 case BUILT_IN_STRCAT:
dcb7fae2
RB
2896 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
2897 gimple_call_arg (stmt, 1));
ad03a744
RB
2898 case BUILT_IN_STRNCAT:
2899 return gimple_fold_builtin_strncat (gsi);
cbdd87d4 2900 case BUILT_IN_FPUTS:
dcb7fae2
RB
2901 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2902 gimple_call_arg (stmt, 1), false);
cbdd87d4 2903 case BUILT_IN_FPUTS_UNLOCKED:
dcb7fae2
RB
2904 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
2905 gimple_call_arg (stmt, 1), true);
cbdd87d4
RG
2906 case BUILT_IN_MEMCPY_CHK:
2907 case BUILT_IN_MEMPCPY_CHK:
2908 case BUILT_IN_MEMMOVE_CHK:
2909 case BUILT_IN_MEMSET_CHK:
dcb7fae2 2910 return gimple_fold_builtin_memory_chk (gsi,
fef5a0d9
RB
2911 gimple_call_arg (stmt, 0),
2912 gimple_call_arg (stmt, 1),
2913 gimple_call_arg (stmt, 2),
2914 gimple_call_arg (stmt, 3),
edd7ae68 2915 fcode);
2625bb5d
RB
2916 case BUILT_IN_STPCPY:
2917 return gimple_fold_builtin_stpcpy (gsi);
cbdd87d4
RG
2918 case BUILT_IN_STRCPY_CHK:
2919 case BUILT_IN_STPCPY_CHK:
dcb7fae2 2920 return gimple_fold_builtin_stxcpy_chk (gsi,
fef5a0d9
RB
2921 gimple_call_arg (stmt, 0),
2922 gimple_call_arg (stmt, 1),
2923 gimple_call_arg (stmt, 2),
edd7ae68 2924 fcode);
cbdd87d4 2925 case BUILT_IN_STRNCPY_CHK:
f3fc9b80 2926 case BUILT_IN_STPNCPY_CHK:
fef5a0d9
RB
2927 return gimple_fold_builtin_stxncpy_chk (gsi,
2928 gimple_call_arg (stmt, 0),
2929 gimple_call_arg (stmt, 1),
2930 gimple_call_arg (stmt, 2),
2931 gimple_call_arg (stmt, 3),
edd7ae68 2932 fcode);
cbdd87d4
RG
2933 case BUILT_IN_SNPRINTF_CHK:
2934 case BUILT_IN_VSNPRINTF_CHK:
edd7ae68 2935 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
d7e78447 2936 case BUILT_IN_SNPRINTF:
dcb7fae2 2937 return gimple_fold_builtin_snprintf (gsi);
d7e78447 2938 case BUILT_IN_SPRINTF:
dcb7fae2 2939 return gimple_fold_builtin_sprintf (gsi);
edd7ae68
RB
2940 case BUILT_IN_FPRINTF:
2941 case BUILT_IN_FPRINTF_UNLOCKED:
2942 case BUILT_IN_VFPRINTF:
2943 if (n == 2 || n == 3)
2944 return gimple_fold_builtin_fprintf (gsi,
2945 gimple_call_arg (stmt, 0),
2946 gimple_call_arg (stmt, 1),
2947 n == 3
2948 ? gimple_call_arg (stmt, 2)
2949 : NULL_TREE,
2950 fcode);
2951 break;
2952 case BUILT_IN_FPRINTF_CHK:
2953 case BUILT_IN_VFPRINTF_CHK:
2954 if (n == 3 || n == 4)
2955 return gimple_fold_builtin_fprintf (gsi,
2956 gimple_call_arg (stmt, 0),
2957 gimple_call_arg (stmt, 2),
2958 n == 4
2959 ? gimple_call_arg (stmt, 3)
2960 : NULL_TREE,
2961 fcode);
2962 break;
ad03a744
RB
2963 case BUILT_IN_PRINTF:
2964 case BUILT_IN_PRINTF_UNLOCKED:
2965 case BUILT_IN_VPRINTF:
2966 if (n == 1 || n == 2)
2967 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
2968 n == 2
2969 ? gimple_call_arg (stmt, 1)
2970 : NULL_TREE, fcode);
2971 break;
2972 case BUILT_IN_PRINTF_CHK:
2973 case BUILT_IN_VPRINTF_CHK:
2974 if (n == 2 || n == 3)
2975 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
2976 n == 3
2977 ? gimple_call_arg (stmt, 2)
2978 : NULL_TREE, fcode);
fef5a0d9
RB
2979 default:;
2980 }
2981
2982 /* Try the generic builtin folder. */
2983 bool ignore = (gimple_call_lhs (stmt) == NULL);
2984 tree result = fold_call_stmt (stmt, ignore);
2985 if (result)
2986 {
2987 if (ignore)
2988 STRIP_NOPS (result);
2989 else
2990 result = fold_convert (gimple_call_return_type (stmt), result);
2991 if (!update_call_from_tree (gsi, result))
2992 gimplify_and_update_call_from_tree (gsi, result);
2993 return true;
2994 }
2995
2996 return false;
2997}
2998
/* Return true if ARG0 CODE ARG1 in infinite signed precision operation
   doesn't fit into TYPE.  The test for overflow should be regardless of
   -fwrapv, and even for unsigned types.  */

bool
arith_overflowed_p (enum tree_code code, const_tree type,
		    const_tree arg0, const_tree arg1)
{
  /* Compute in double-width precision so that PLUS/MINUS/MULT on any
     two operands of at most WIDE_INT_MAX_PRECISION bits cannot itself
     overflow; overflow is then detected by range-checking the result
     against TYPE below.  */
  typedef FIXED_WIDE_INT (WIDE_INT_MAX_PRECISION * 2) widest2_int;
  typedef generic_wide_int <wi::extended_tree <WIDE_INT_MAX_PRECISION * 2> >
    widest2_int_cst;
  widest2_int warg0 = widest2_int_cst (arg0);
  widest2_int warg1 = widest2_int_cst (arg1);
  widest2_int wres;
  switch (code)
    {
    case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
    case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
    case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
    default: gcc_unreachable ();
    }
  signop sign = TYPE_SIGN (type);
  /* A negative infinite-precision result can never be represented in
     an unsigned type.  */
  if (sign == UNSIGNED && wi::neg_p (wres))
    return true;
  /* Otherwise the result overflows iff it needs more bits than TYPE has.  */
  return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
}
3025
/* Attempt to fold a call statement referenced by the statement iterator GSI.
   The statement may be replaced by another statement, e.g., if the call
   simplifies to a constant value.  Return true if any changes were made.
   It is assumed that the operands have been previously folded.
   If INPLACE is true, no new statements may be created and the existing
   statement may not be replaced; only in-place simplifications are done.  */

static bool
gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
{
  gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
  tree callee;
  bool changed = false;
  unsigned i;

  /* Fold *& in call arguments.  */
  for (i = 0; i < gimple_call_num_args (stmt); ++i)
    if (REFERENCE_CLASS_P (gimple_call_arg (stmt, i)))
      {
	tree tmp = maybe_fold_reference (gimple_call_arg (stmt, i), false);
	if (tmp)
	  {
	    gimple_call_set_arg (stmt, i, tmp);
	    changed = true;
	  }
      }

  /* Check for virtual calls that became direct calls.  */
  callee = gimple_call_fn (stmt);
  if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
    {
      if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
	{
	  /* The OBJ_TYPE_REF already wraps a known function address;
	     strip the wrapper.  Warn in the dump file if the target is
	     inconsistent with the type-inheritance analysis.  */
	  if (dump_file && virtual_method_call_p (callee)
	      && !possible_polymorphic_call_target_p
		    (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
						       (OBJ_TYPE_REF_EXPR (callee)))))
	    {
	      fprintf (dump_file,
		       "Type inheritance inconsistent devirtualization of ");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, " to ");
	      print_generic_expr (dump_file, callee, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }

	  gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
	  changed = true;
	}
      else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
	{
	  /* Try speculative devirtualization via the type-inheritance
	     graph; only done when we may create new statements.  */
	  bool final;
	  vec <cgraph_node *>targets
	    = possible_polymorphic_call_targets (callee, stmt, &final);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree lhs = gimple_call_lhs (stmt);
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "folding virtual function call to %s\n",
				   targets.length () == 1
				   ? targets[0]->name ()
				   : "__builtin_unreachable");
		}
	      if (targets.length () == 1)
		{
		  gimple_call_set_fndecl (stmt, targets[0]->decl);
		  changed = true;
		  /* If the call becomes noreturn, remove the lhs.  */
		  if (lhs && (gimple_call_flags (stmt) & ECF_NORETURN))
		    {
		      if (TREE_CODE (lhs) == SSA_NAME)
			{
			  /* Keep the SSA name defined by assigning it an
			     undefined default definition.  */
			  tree var = create_tmp_var (TREE_TYPE (lhs));
			  tree def = get_or_create_ssa_default_def (cfun, var);
			  gimple new_stmt = gimple_build_assign (lhs, def);
			  gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
			}
		      gimple_call_set_lhs (stmt, NULL_TREE);
		    }
		}
	      else
		{
		  /* Zero possible targets: the call is unreachable.  */
		  tree fndecl = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
		  gimple new_stmt = gimple_build_call (fndecl, 0);
		  gimple_set_location (new_stmt, gimple_location (stmt));
		  if (lhs && TREE_CODE (lhs) == SSA_NAME)
		    {
		      tree var = create_tmp_var (TREE_TYPE (lhs));
		      tree def = get_or_create_ssa_default_def (cfun, var);

		      /* To satisfy condition for
			 cgraph_update_edges_for_call_stmt_node,
			 we need to preserve GIMPLE_CALL statement
			 at position of GSI iterator.  */
		      update_call_from_tree (gsi, def);
		      gsi_insert_before (gsi, new_stmt, GSI_NEW_STMT);
		    }
		  else
		    {
		      /* Carry over the virtual operands when replacing the
			 call outright.  */
		      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
		      gimple_set_vdef (new_stmt, gimple_vdef (stmt));
		      gsi_replace (gsi, new_stmt, false);
		    }
		  return true;
		}
	    }
	}
    }

  /* Check for indirect calls that became direct calls, and then
     no longer require a static chain.  */
  if (gimple_call_chain (stmt))
    {
      tree fn = gimple_call_fndecl (stmt);
      if (fn && !DECL_STATIC_CHAIN (fn))
	{
	  gimple_call_set_chain (stmt, NULL);
	  changed = true;
	}
      else
	{
	  /* Otherwise at least fold *& in the static chain operand.  */
	  tree tmp = maybe_fold_reference (gimple_call_chain (stmt), false);
	  if (tmp)
	    {
	      gimple_call_set_chain (stmt, tmp);
	      changed = true;
	    }
	}
    }

  /* Everything below may replace the statement; stop here for
     in-place folding.  */
  if (inplace)
    return changed;

  /* Check for builtins that CCP can handle using information not
     available in the generic fold routines.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      if (gimple_fold_builtin (gsi))
	changed = true;
    }
  else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
    {
      /* Machine-dependent builtins are folded by the target hook.  */
      changed |= targetm.gimple_fold_builtin (gsi);
    }
  else if (gimple_call_internal_p (stmt))
    {
      /* Fold internal functions: __builtin_expect, UBSan checks and
	 the IFN_*_OVERFLOW family.  For the latter, SUBCODE is the
	 corresponding arithmetic tree code and CPLX_RESULT notes that
	 the result is a complex value of (result, overflow-flag).  */
      enum tree_code subcode = ERROR_MARK;
      tree result = NULL_TREE;
      bool cplx_result = false;
      tree overflow = NULL_TREE;
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_BUILTIN_EXPECT:
	  result = fold_builtin_expect (gimple_location (stmt),
					gimple_call_arg (stmt, 0),
					gimple_call_arg (stmt, 1),
					gimple_call_arg (stmt, 2));
	  break;
	case IFN_UBSAN_OBJECT_SIZE:
	  /* Drop the check when the bound is unknown (all-ones) or the
	     access is known to be in bounds.  */
	  if (integer_all_onesp (gimple_call_arg (stmt, 2))
	      || (TREE_CODE (gimple_call_arg (stmt, 1)) == INTEGER_CST
		  && TREE_CODE (gimple_call_arg (stmt, 2)) == INTEGER_CST
		  && tree_int_cst_le (gimple_call_arg (stmt, 1),
				      gimple_call_arg (stmt, 2))))
	    {
	      gsi_replace (gsi, gimple_build_nop (), true);
	      unlink_stmt_vdef (stmt);
	      release_defs (stmt);
	      return true;
	    }
	  break;
	case IFN_UBSAN_CHECK_ADD:
	  subcode = PLUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_SUB:
	  subcode = MINUS_EXPR;
	  break;
	case IFN_UBSAN_CHECK_MUL:
	  subcode = MULT_EXPR;
	  break;
	case IFN_ADD_OVERFLOW:
	  subcode = PLUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_SUB_OVERFLOW:
	  subcode = MINUS_EXPR;
	  cplx_result = true;
	  break;
	case IFN_MUL_OVERFLOW:
	  subcode = MULT_EXPR;
	  cplx_result = true;
	  break;
	default:
	  break;
	}
      if (subcode != ERROR_MARK)
	{
	  tree arg0 = gimple_call_arg (stmt, 0);
	  tree arg1 = gimple_call_arg (stmt, 1);
	  tree type = TREE_TYPE (arg0);
	  if (cplx_result)
	    {
	      /* For IFN_*_OVERFLOW the result type is the element type
		 of the complex lhs; without a lhs there is nothing to
		 fold to.  */
	      tree lhs = gimple_call_lhs (stmt);
	      if (lhs == NULL_TREE)
		type = NULL_TREE;
	      else
		type = TREE_TYPE (TREE_TYPE (lhs));
	    }
	  if (type == NULL_TREE)
	    ;
	  /* x = y + 0; x = y - 0; x = y * 0; */
	  else if (integer_zerop (arg1))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg0;
	  /* x = 0 + y; x = 0 * y; */
	  else if (subcode != MINUS_EXPR && integer_zerop (arg0))
	    result = subcode == MULT_EXPR ? integer_zero_node : arg1;
	  /* x = y - y; */
	  else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
	    result = integer_zero_node;
	  /* x = y * 1; x = 1 * y; */
	  else if (subcode == MULT_EXPR && integer_onep (arg1))
	    result = arg0;
	  else if (subcode == MULT_EXPR && integer_onep (arg0))
	    result = arg1;
	  else if (TREE_CODE (arg0) == INTEGER_CST
		   && TREE_CODE (arg1) == INTEGER_CST)
	    {
	      if (cplx_result)
		result = int_const_binop (subcode, fold_convert (type, arg0),
					  fold_convert (type, arg1));
	      else
		result = int_const_binop (subcode, arg0, arg1);
	      if (result && arith_overflowed_p (subcode, type, arg0, arg1))
		{
		  if (cplx_result)
		    overflow = build_one_cst (type);
		  else
		    /* UBSan checks must stay when they would trigger.  */
		    result = NULL_TREE;
		}
	    }
	  if (result)
	    {
	      if (result == integer_zero_node)
		result = build_zero_cst (type);
	      else if (cplx_result && TREE_TYPE (result) != type)
		{
		  if (TREE_CODE (result) == INTEGER_CST)
		    {
		      if (arith_overflowed_p (PLUS_EXPR, type, result,
					      integer_zero_node))
			overflow = build_one_cst (type);
		    }
		  /* Give up if converting the operand to TYPE could
		     change its value (sign change or truncation).  */
		  else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
			    && TYPE_UNSIGNED (type))
			   || (TYPE_PRECISION (type)
			       < (TYPE_PRECISION (TREE_TYPE (result))
				  + (TYPE_UNSIGNED (TREE_TYPE (result))
				     && !TYPE_UNSIGNED (type)))))
		    result = NULL_TREE;
		  if (result)
		    result = fold_convert (type, result);
		}
	    }
	}

      if (result)
	{
	  if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
	    result = drop_tree_overflow (result);
	  if (cplx_result)
	    {
	      /* Package (result, overflow-flag) into the complex value
		 the IFN_*_OVERFLOW lhs expects.  */
	      if (overflow == NULL_TREE)
		overflow = build_zero_cst (TREE_TYPE (result));
	      tree ctype = build_complex_type (TREE_TYPE (result));
	      if (TREE_CODE (result) == INTEGER_CST
		  && TREE_CODE (overflow) == INTEGER_CST)
		result = build_complex (ctype, result, overflow);
	      else
		result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
				     ctype, result, overflow);
	    }
	  if (!update_call_from_tree (gsi, result))
	    gimplify_and_update_call_from_tree (gsi, result);
	  changed = true;
	}
    }

  return changed;
}
3316
/* Worker for fold_stmt_1 dispatch to pattern based folding with
   gimple_simplify.

   Replaces *GSI with the simplification result in RCODE and OPS
   and the associated statements in *SEQ.  Does the replacement
   according to INPLACE and returns true if the operation succeeded.  */

static bool
replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
				  code_helper rcode, tree *ops,
				  gimple_seq *seq, bool inplace)
{
  gimple stmt = gsi_stmt (*gsi);

  /* Play safe and do not allow abnormals to be mentioned in
     newly created statements.  See also maybe_push_res_to_seq.  */
  if ((TREE_CODE (ops[0]) == SSA_NAME
       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[0]))
      || (ops[1]
	  && TREE_CODE (ops[1]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[1]))
      || (ops[2]
	  && TREE_CODE (ops[2]) == SSA_NAME
	  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[2])))
    return false;

  if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
    {
      gcc_assert (rcode.is_tree_code ());
      if (TREE_CODE_CLASS ((enum tree_code)rcode) == tcc_comparison
	  /* GIMPLE_CONDs condition may not throw.  */
	  && (!flag_exceptions
	      || !cfun->can_throw_non_call_exceptions
	      || !operation_could_trap_p (rcode,
					  FLOAT_TYPE_P (TREE_TYPE (ops[0])),
					  false, NULL_TREE)))
	gimple_cond_set_condition (cond_stmt, rcode, ops[0], ops[1]);
      else if (rcode == SSA_NAME)
	/* The condition simplified to a bare SSA name; test it
	   against zero.  */
	gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
				   build_zero_cst (TREE_TYPE (ops[0])));
      else if (rcode == INTEGER_CST)
	{
	  /* Constant condition: make the branch unconditional.  */
	  if (integer_zerop (ops[0]))
	    gimple_cond_make_false (cond_stmt);
	  else
	    gimple_cond_make_true (cond_stmt);
	}
      else if (!inplace)
	{
	  /* Materialize the simplified expression into SEQ and test
	     its result against zero.  */
	  tree res = maybe_push_res_to_seq (rcode, boolean_type_node,
					    ops, seq);
	  if (!res)
	    return false;
	  gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
				     build_zero_cst (TREE_TYPE (res)));
	}
      else
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "gimple_simplified to ");
	  if (!gimple_seq_empty_p (*seq))
	    print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	  print_gimple_stmt (dump_file, gsi_stmt (*gsi),
			     0, TDF_SLIM);
	}
      gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
      return true;
    }
  else if (is_gimple_assign (stmt)
	   && rcode.is_tree_code ())
    {
      /* In-place folding may not grow the number of operands of the
	 statement.  */
      if (!inplace
	  || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (rcode))
	{
	  maybe_build_generic_op (rcode,
				  TREE_TYPE (gimple_assign_lhs (stmt)),
				  &ops[0], ops[1], ops[2]);
	  gimple_assign_set_rhs_with_ops (gsi, rcode, ops[0], ops[1], ops[2]);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      if (!gimple_seq_empty_p (*seq))
		print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	      print_gimple_stmt (dump_file, gsi_stmt (*gsi),
				 0, TDF_SLIM);
	    }
	  gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
	  return true;
	}
    }
  else if (!inplace)
    {
      /* Other statements (e.g. calls) with a lhs: replace the whole
	 statement by the sequence computing the simplified value.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  if (!maybe_push_res_to_seq (rcode, TREE_TYPE (lhs),
				      ops, seq, lhs))
	    return false;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "gimple_simplified to ");
	      print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
	    }
	  gsi_replace_with_seq_vops (gsi, *seq);
	  return true;
	}
      else
	gcc_unreachable ();
    }

  return false;
}
3431
/* Canonicalize MEM_REFs invariant address operand after propagation.
   *T is the reference (or address) to canonicalize; returns true if
   anything was changed.  */

static bool
maybe_canonicalize_mem_ref_addr (tree *t)
{
  bool res = false;

  /* Look through an outer ADDR_EXPR and any component references to
     the base memory reference.  */
  if (TREE_CODE (*t) == ADDR_EXPR)
    t = &TREE_OPERAND (*t, 0);

  while (handled_component_p (*t))
    t = &TREE_OPERAND (*t, 0);

  /* Canonicalize MEM [&foo.bar, 0] which appears after propagating
     of invariant addresses into a SSA name MEM_REF address.  */
  if (TREE_CODE (*t) == MEM_REF
      || TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (*t, 0);
      if (TREE_CODE (addr) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
	      || handled_component_p (TREE_OPERAND (addr, 0))))
	{
	  tree base;
	  HOST_WIDE_INT coffset;
	  base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						&coffset);
	  if (!base)
	    gcc_unreachable ();

	  /* Fold the component offset into the MEM_REF's constant
	     offset operand.  */
	  TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
	  TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
						  TREE_OPERAND (*t, 1),
						  size_int (coffset));
	  res = true;
	}
      gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
			   || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
    }

  /* Canonicalize back MEM_REFs to plain reference trees if the object
     accessed is a decl that has the same access semantics as the MEM_REF.  */
  if (TREE_CODE (*t) == MEM_REF
      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
      && integer_zerop (TREE_OPERAND (*t, 1))
      && MR_DEPENDENCE_CLIQUE (*t) == 0)
    {
      tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
      tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
      if (/* Same volatile qualification.  */
	  TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
	  /* Same TBAA behavior with -fstrict-aliasing.  */
	  && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
	  && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
	      == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
	  /* Same alignment.  */
	  && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
	  /* We have to look out here to not drop a required conversion
	     from the rhs to the lhs if *t appears on the lhs or vice-versa
	     if it appears on the rhs.  Thus require strict type
	     compatibility.  */
	  && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
	{
	  *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
	  res = true;
	}
    }

  /* Canonicalize TARGET_MEM_REF in particular with respect to
     the indexes becoming constant.  */
  else if (TREE_CODE (*t) == TARGET_MEM_REF)
    {
      tree tem = maybe_fold_tmr (*t);
      if (tem)
	{
	  *t = tem;
	  res = true;
	}
    }

  return res;
}
3514
/* Worker for both fold_stmt and fold_stmt_inplace.  The INPLACE argument
   distinguishes both cases.  VALUEIZE is used by gimple_simplify to look
   through SSA edges.  Returns true if the statement was changed.  */

static bool
fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
{
  bool changed = false;
  gimple stmt = gsi_stmt (*gsi);
  unsigned i;

  /* First do required canonicalization of [TARGET_]MEM_REF addresses
     after propagation.
     ??? This shouldn't be done in generic folding but in the
     propagation helpers which also know whether an address was
     propagated.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
	{
	  tree *rhs = gimple_assign_rhs1_ptr (stmt);
	  if ((REFERENCE_CLASS_P (*rhs)
	       || TREE_CODE (*rhs) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (rhs))
	    changed = true;
	  tree *lhs = gimple_assign_lhs_ptr (stmt);
	  if (REFERENCE_CLASS_P (*lhs)
	      && maybe_canonicalize_mem_ref_addr (lhs))
	    changed = true;
	}
      break;
    case GIMPLE_CALL:
      {
	for (i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree *arg = gimple_call_arg_ptr (stmt, i);
	    if (REFERENCE_CLASS_P (*arg)
		&& maybe_canonicalize_mem_ref_addr (arg))
	      changed = true;
	  }
	tree *lhs = gimple_call_lhs_ptr (stmt);
	if (*lhs
	    && REFERENCE_CLASS_P (*lhs)
	    && maybe_canonicalize_mem_ref_addr (lhs))
	  changed = true;
	break;
      }
    case GIMPLE_ASM:
      {
	/* Canonicalize addresses in asm output and input operands.  */
	gasm *asm_stmt = as_a <gasm *> (stmt);
	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if (REFERENCE_CLASS_P (op)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    if ((REFERENCE_CLASS_P (op)
		 || TREE_CODE (op) == ADDR_EXPR)
		&& maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
	      changed = true;
	  }
      }
      break;
    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree *val = gimple_debug_bind_get_value_ptr (stmt);
	  if (*val
	      && (REFERENCE_CLASS_P (*val)
		  || TREE_CODE (*val) == ADDR_EXPR)
	      && maybe_canonicalize_mem_ref_addr (val))
	    changed = true;
	}
      break;
    default:;
    }

  /* Dispatch to pattern-based folding.  In-place folding can only
     handle assignments and conditions whose form gimple_simplify
     may update without creating new statements.  */
  if (!inplace
      || is_gimple_assign (stmt)
      || gimple_code (stmt) == GIMPLE_COND)
    {
      gimple_seq seq = NULL;
      code_helper rcode;
      tree ops[3] = {};
      if (gimple_simplify (stmt, &rcode, ops, inplace ? NULL : &seq, valueize))
	{
	  if (replace_stmt_with_simplification (gsi, rcode, ops, &seq, inplace))
	    changed = true;
	  else
	    gimple_seq_discard (seq);
	}
    }

  /* The statement may have been replaced above; re-fetch it.  */
  stmt = gsi_stmt (*gsi);

  /* Fold the main computation performed by the statement.  */
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	unsigned old_num_ops = gimple_num_ops (stmt);
	enum tree_code subcode = gimple_assign_rhs_code (stmt);
	tree lhs = gimple_assign_lhs (stmt);
	tree new_rhs;
	/* First canonicalize operand order.  This avoids building new
	   trees if this is the only thing fold would later do.  */
	if ((commutative_tree_code (subcode)
	     || commutative_ternary_tree_code (subcode))
	    && tree_swap_operands_p (gimple_assign_rhs1 (stmt),
				     gimple_assign_rhs2 (stmt), false))
	  {
	    tree tem = gimple_assign_rhs1 (stmt);
	    gimple_assign_set_rhs1 (stmt, gimple_assign_rhs2 (stmt));
	    gimple_assign_set_rhs2 (stmt, tem);
	    changed = true;
	  }
	new_rhs = fold_gimple_assign (gsi);
	if (new_rhs
	    && !useless_type_conversion_p (TREE_TYPE (lhs),
					   TREE_TYPE (new_rhs)))
	  new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
	/* In-place folding may only shrink the number of operands.  */
	if (new_rhs
	    && (!inplace
		|| get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
	  {
	    gimple_assign_set_rhs_from_tree (gsi, new_rhs);
	    changed = true;
	  }
	break;
      }

    case GIMPLE_COND:
      changed |= fold_gimple_cond (as_a <gcond *> (stmt));
      break;

    case GIMPLE_CALL:
      changed |= gimple_fold_call (gsi, inplace);
      break;

    case GIMPLE_ASM:
      /* Fold *& in asm operands.  */
      {
	gasm *asm_stmt = as_a <gasm *> (stmt);
	size_t noutputs;
	const char **oconstraints;
	const char *constraint;
	bool allows_mem, allows_reg;

	noutputs = gimple_asm_noutputs (asm_stmt);
	oconstraints = XALLOCAVEC (const char *, noutputs);

	for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_output_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    oconstraints[i]
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, true)) != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
	for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
	  {
	    tree link = gimple_asm_input_op (asm_stmt, i);
	    tree op = TREE_VALUE (link);
	    constraint
	      = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (link)));
	    parse_input_constraint (&constraint, 0, 0, noutputs, 0,
				    oconstraints, &allows_mem, &allows_reg);
	    /* An input that must be in memory is folded as an lvalue.  */
	    if (REFERENCE_CLASS_P (op)
		&& (op = maybe_fold_reference (op, !allows_reg && allows_mem))
		   != NULL_TREE)
	      {
		TREE_VALUE (link) = op;
		changed = true;
	      }
	  }
      }
      break;

    case GIMPLE_DEBUG:
      if (gimple_debug_bind_p (stmt))
	{
	  tree val = gimple_debug_bind_get_value (stmt);
	  if (val
	      && REFERENCE_CLASS_P (val))
	    {
	      tree tem = maybe_fold_reference (val, false);
	      if (tem)
		{
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	  else if (val
		   && TREE_CODE (val) == ADDR_EXPR)
	    {
	      /* Fold the reference the debug bind takes the address of
		 and re-wrap it, preserving the original type.  */
	      tree ref = TREE_OPERAND (val, 0);
	      tree tem = maybe_fold_reference (ref, false);
	      if (tem)
		{
		  tem = build_fold_addr_expr_with_type (tem, TREE_TYPE (val));
		  gimple_debug_bind_set_value (stmt, tem);
		  changed = true;
		}
	    }
	}
      break;

    default:;
    }

  stmt = gsi_stmt (*gsi);

  /* Fold *& on the lhs.  */
  if (gimple_has_lhs (stmt))
    {
      tree lhs = gimple_get_lhs (stmt);
      if (lhs && REFERENCE_CLASS_P (lhs))
	{
	  tree new_lhs = maybe_fold_reference (lhs, true);
	  if (new_lhs)
	    {
	      gimple_set_lhs (stmt, new_lhs);
	      changed = true;
	    }
	}
    }

  return changed;
}
3756
/* Valueization callback that ends up not following SSA edges.  Passed
   to fold_stmt_1 / gimple_simplify when folding must look only at the
   statement itself.  */

tree
no_follow_ssa_edges (tree)
{
  return NULL_TREE;
}
3764
45cc9f96
RB
3765/* Valueization callback that ends up following single-use SSA edges only. */
3766
3767tree
3768follow_single_use_edges (tree val)
3769{
3770 if (TREE_CODE (val) == SSA_NAME
3771 && !has_single_use (val))
3772 return NULL_TREE;
3773 return val;
3774}
3775
/* Fold the statement pointed to by GSI.  In some cases, this function may
   replace the whole statement with a new one.  Returns true iff folding
   makes any changes.
   The statement pointed to by GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt (gimple_stmt_iterator *gsi)
{
  /* By default do not follow SSA edges during simplification.  */
  return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
}

/* Overload of the above that valueizes SSA operands through VALUEIZE
   when dispatching to gimple_simplify.  */

bool
fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
{
  return fold_stmt_1 (gsi, false, valueize);
}
3794
/* Perform the minimal folding on statement *GSI.  Only operations like
   *&x created by constant propagation are handled.  The statement cannot
   be replaced with a new one.  Return true if the statement was
   changed, false otherwise.
   The statement *GSI should be in valid gimple form but may
   be in unfolded state as resulting from for example constant propagation
   which can produce *&x = 0.  */

bool
fold_stmt_inplace (gimple_stmt_iterator *gsi)
{
  gimple stmt = gsi_stmt (*gsi);
  bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
  /* In-place folding must keep the very same statement object.  */
  gcc_assert (gsi_stmt (*gsi) == stmt);
  return changed;
}
3811
e89065a1
SL
3812/* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
3813 if EXPR is null or we don't know how.
3814 If non-null, the result always has boolean type. */
3815
3816static tree
3817canonicalize_bool (tree expr, bool invert)
3818{
3819 if (!expr)
3820 return NULL_TREE;
3821 else if (invert)
3822 {
3823 if (integer_nonzerop (expr))
3824 return boolean_false_node;
3825 else if (integer_zerop (expr))
3826 return boolean_true_node;
3827 else if (TREE_CODE (expr) == SSA_NAME)
3828 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
3829 build_int_cst (TREE_TYPE (expr), 0));
3830 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3831 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
3832 boolean_type_node,
3833 TREE_OPERAND (expr, 0),
3834 TREE_OPERAND (expr, 1));
3835 else
3836 return NULL_TREE;
3837 }
3838 else
3839 {
3840 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
3841 return expr;
3842 if (integer_nonzerop (expr))
3843 return boolean_true_node;
3844 else if (integer_zerop (expr))
3845 return boolean_false_node;
3846 else if (TREE_CODE (expr) == SSA_NAME)
3847 return fold_build2 (NE_EXPR, boolean_type_node, expr,
3848 build_int_cst (TREE_TYPE (expr), 0));
3849 else if (TREE_CODE_CLASS (TREE_CODE (expr)) == tcc_comparison)
3850 return fold_build2 (TREE_CODE (expr),
3851 boolean_type_node,
3852 TREE_OPERAND (expr, 0),
3853 TREE_OPERAND (expr, 1));
3854 else
3855 return NULL_TREE;
3856 }
3857}
3858
/* Check to see if a boolean expression EXPR is logically equivalent to the
   comparison (OP1 CODE OP2).  Check for various identities involving
   SSA_NAMEs.  */

static bool
same_bool_comparison_p (const_tree expr, enum tree_code code,
			const_tree op1, const_tree op2)
{
  gimple s;

  /* The obvious case.  */
  if (TREE_CODE (expr) == code
      && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
      && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
    return true;

  /* Check for comparing (name, name != 0) and the case where expr
     is an SSA_NAME with a definition matching the comparison.  */
  if (TREE_CODE (expr) == SSA_NAME
      && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
    {
      if (operand_equal_p (expr, op1, 0))
	/* A boolean name equals (name != 0) and differs from
	   (name == 0) only when the constant is on the right.  */
	return ((code == NE_EXPR && integer_zerop (op2))
		|| (code == EQ_EXPR && integer_nonzerop (op2)));
      s = SSA_NAME_DEF_STMT (expr);
      if (is_gimple_assign (s)
	  && gimple_assign_rhs_code (s) == code
	  && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
	  && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
	return true;
    }

  /* If op1 is of the form (name != 0) or (name == 0), and the definition
     of name is a comparison, recurse.  */
  if (TREE_CODE (op1) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
    {
      s = SSA_NAME_DEF_STMT (op1);
      if (is_gimple_assign (s)
	  && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
	{
	  enum tree_code c = gimple_assign_rhs_code (s);
	  /* (name != 0) / (name == 1) keep the defining comparison's
	     sense; (name == 0) / (name != 1) invert it.  */
	  if ((c == NE_EXPR && integer_zerop (op2))
	      || (c == EQ_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr, c,
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	  if ((c == EQ_EXPR && integer_zerop (op2))
	      || (c == NE_EXPR && integer_nonzerop (op2)))
	    return same_bool_comparison_p (expr,
					   invert_tree_comparison (c, false),
					   gimple_assign_rhs1 (s),
					   gimple_assign_rhs2 (s));
	}
    }
  return false;
}
3916
3917/* Check to see if two boolean expressions OP1 and OP2 are logically
3918 equivalent. */
3919
3920static bool
3921same_bool_result_p (const_tree op1, const_tree op2)
3922{
3923 /* Simple cases first. */
3924 if (operand_equal_p (op1, op2, 0))
3925 return true;
3926
3927 /* Check the cases where at least one of the operands is a comparison.
3928 These are a bit smarter than operand_equal_p in that they apply some
3929 identifies on SSA_NAMEs. */
3930 if (TREE_CODE_CLASS (TREE_CODE (op2)) == tcc_comparison
3931 && same_bool_comparison_p (op1, TREE_CODE (op2),
3932 TREE_OPERAND (op2, 0),
3933 TREE_OPERAND (op2, 1)))
3934 return true;
3935 if (TREE_CODE_CLASS (TREE_CODE (op1)) == tcc_comparison
3936 && same_bool_comparison_p (op2, TREE_CODE (op1),
3937 TREE_OPERAND (op1, 0),
3938 TREE_OPERAND (op1, 1)))
3939 return true;
3940
3941 /* Default case. */
3942 return false;
3943}
3944
3945/* Forward declarations for some mutually recursive functions. */
3946
3947static tree
3948and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3949 enum tree_code code2, tree op2a, tree op2b);
3950static tree
3951and_var_with_comparison (tree var, bool invert,
3952 enum tree_code code2, tree op2a, tree op2b);
3953static tree
3954and_var_with_comparison_1 (gimple stmt,
3955 enum tree_code code2, tree op2a, tree op2b);
3956static tree
3957or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
3958 enum tree_code code2, tree op2a, tree op2b);
3959static tree
3960or_var_with_comparison (tree var, bool invert,
3961 enum tree_code code2, tree op2a, tree op2b);
3962static tree
3963or_var_with_comparison_1 (gimple stmt,
3964 enum tree_code code2, tree op2a, tree op2b);
3965
3966/* Helper function for and_comparisons_1: try to simplify the AND of the
3967 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
3968 If INVERT is true, invert the value of the VAR before doing the AND.
3969 Return NULL_EXPR if we can't simplify this to a single expression. */
3970
3971static tree
3972and_var_with_comparison (tree var, bool invert,
3973 enum tree_code code2, tree op2a, tree op2b)
3974{
3975 tree t;
3976 gimple stmt = SSA_NAME_DEF_STMT (var);
3977
3978 /* We can only deal with variables whose definitions are assignments. */
3979 if (!is_gimple_assign (stmt))
3980 return NULL_TREE;
3981
3982 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
3983 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
3984 Then we only have to consider the simpler non-inverted cases. */
3985 if (invert)
3986 t = or_var_with_comparison_1 (stmt,
3987 invert_tree_comparison (code2, false),
3988 op2a, op2b);
3989 else
3990 t = and_var_with_comparison_1 (stmt, code2, op2a, op2b);
3991 return canonicalize_bool (t, invert);
3992}
3993
3994/* Try to simplify the AND of the ssa variable defined by the assignment
3995 STMT with the comparison specified by (OP2A CODE2 OP2B).
3996 Return NULL_EXPR if we can't simplify this to a single expression. */
3997
3998static tree
3999and_var_with_comparison_1 (gimple stmt,
4000 enum tree_code code2, tree op2a, tree op2b)
4001{
4002 tree var = gimple_assign_lhs (stmt);
4003 tree true_test_var = NULL_TREE;
4004 tree false_test_var = NULL_TREE;
4005 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4006
4007 /* Check for identities like (var AND (var == 0)) => false. */
4008 if (TREE_CODE (op2a) == SSA_NAME
4009 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4010 {
4011 if ((code2 == NE_EXPR && integer_zerop (op2b))
4012 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4013 {
4014 true_test_var = op2a;
4015 if (var == true_test_var)
4016 return var;
4017 }
4018 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4019 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4020 {
4021 false_test_var = op2a;
4022 if (var == false_test_var)
4023 return boolean_false_node;
4024 }
4025 }
4026
4027 /* If the definition is a comparison, recurse on it. */
4028 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4029 {
4030 tree t = and_comparisons_1 (innercode,
4031 gimple_assign_rhs1 (stmt),
4032 gimple_assign_rhs2 (stmt),
4033 code2,
4034 op2a,
4035 op2b);
4036 if (t)
4037 return t;
4038 }
4039
4040 /* If the definition is an AND or OR expression, we may be able to
4041 simplify by reassociating. */
eb9820c0
KT
4042 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4043 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
4044 {
4045 tree inner1 = gimple_assign_rhs1 (stmt);
4046 tree inner2 = gimple_assign_rhs2 (stmt);
4047 gimple s;
4048 tree t;
4049 tree partial = NULL_TREE;
eb9820c0 4050 bool is_and = (innercode == BIT_AND_EXPR);
e89065a1
SL
4051
4052 /* Check for boolean identities that don't require recursive examination
4053 of inner1/inner2:
4054 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
4055 inner1 AND (inner1 OR inner2) => inner1
4056 !inner1 AND (inner1 AND inner2) => false
4057 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
4058 Likewise for similar cases involving inner2. */
4059 if (inner1 == true_test_var)
4060 return (is_and ? var : inner1);
4061 else if (inner2 == true_test_var)
4062 return (is_and ? var : inner2);
4063 else if (inner1 == false_test_var)
4064 return (is_and
4065 ? boolean_false_node
4066 : and_var_with_comparison (inner2, false, code2, op2a, op2b));
4067 else if (inner2 == false_test_var)
4068 return (is_and
4069 ? boolean_false_node
4070 : and_var_with_comparison (inner1, false, code2, op2a, op2b));
4071
4072 /* Next, redistribute/reassociate the AND across the inner tests.
4073 Compute the first partial result, (inner1 AND (op2a code op2b)) */
4074 if (TREE_CODE (inner1) == SSA_NAME
4075 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4076 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4077 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4078 gimple_assign_rhs1 (s),
4079 gimple_assign_rhs2 (s),
4080 code2, op2a, op2b)))
4081 {
4082 /* Handle the AND case, where we are reassociating:
4083 (inner1 AND inner2) AND (op2a code2 op2b)
4084 => (t AND inner2)
4085 If the partial result t is a constant, we win. Otherwise
4086 continue on to try reassociating with the other inner test. */
4087 if (is_and)
4088 {
4089 if (integer_onep (t))
4090 return inner2;
4091 else if (integer_zerop (t))
4092 return boolean_false_node;
4093 }
4094
4095 /* Handle the OR case, where we are redistributing:
4096 (inner1 OR inner2) AND (op2a code2 op2b)
4097 => (t OR (inner2 AND (op2a code2 op2b))) */
8236c8eb
JJ
4098 else if (integer_onep (t))
4099 return boolean_true_node;
4100
4101 /* Save partial result for later. */
4102 partial = t;
e89065a1
SL
4103 }
4104
4105 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
4106 if (TREE_CODE (inner2) == SSA_NAME
4107 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4108 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4109 && (t = maybe_fold_and_comparisons (gimple_assign_rhs_code (s),
4110 gimple_assign_rhs1 (s),
4111 gimple_assign_rhs2 (s),
4112 code2, op2a, op2b)))
4113 {
4114 /* Handle the AND case, where we are reassociating:
4115 (inner1 AND inner2) AND (op2a code2 op2b)
4116 => (inner1 AND t) */
4117 if (is_and)
4118 {
4119 if (integer_onep (t))
4120 return inner1;
4121 else if (integer_zerop (t))
4122 return boolean_false_node;
8236c8eb
JJ
4123 /* If both are the same, we can apply the identity
4124 (x AND x) == x. */
4125 else if (partial && same_bool_result_p (t, partial))
4126 return t;
e89065a1
SL
4127 }
4128
4129 /* Handle the OR case. where we are redistributing:
4130 (inner1 OR inner2) AND (op2a code2 op2b)
4131 => (t OR (inner1 AND (op2a code2 op2b)))
4132 => (t OR partial) */
4133 else
4134 {
4135 if (integer_onep (t))
4136 return boolean_true_node;
4137 else if (partial)
4138 {
4139 /* We already got a simplification for the other
4140 operand to the redistributed OR expression. The
4141 interesting case is when at least one is false.
4142 Or, if both are the same, we can apply the identity
4143 (x OR x) == x. */
4144 if (integer_zerop (partial))
4145 return t;
4146 else if (integer_zerop (t))
4147 return partial;
4148 else if (same_bool_result_p (t, partial))
4149 return t;
4150 }
4151 }
4152 }
4153 }
4154 return NULL_TREE;
4155}
4156
4157/* Try to simplify the AND of two comparisons defined by
4158 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4159 If this can be done without constructing an intermediate value,
4160 return the resulting tree; otherwise NULL_TREE is returned.
4161 This function is deliberately asymmetric as it recurses on SSA_DEFs
4162 in the first comparison but not the second. */
4163
4164static tree
4165and_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4166 enum tree_code code2, tree op2a, tree op2b)
4167{
ae22ac3c 4168 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 4169
e89065a1
SL
4170 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
4171 if (operand_equal_p (op1a, op2a, 0)
4172 && operand_equal_p (op1b, op2b, 0))
4173 {
eb9820c0 4174 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
4175 tree t = combine_comparisons (UNKNOWN_LOCATION,
4176 TRUTH_ANDIF_EXPR, code1, code2,
31ed6226 4177 truth_type, op1a, op1b);
e89065a1
SL
4178 if (t)
4179 return t;
4180 }
4181
4182 /* Likewise the swapped case of the above. */
4183 if (operand_equal_p (op1a, op2b, 0)
4184 && operand_equal_p (op1b, op2a, 0))
4185 {
eb9820c0 4186 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
4187 tree t = combine_comparisons (UNKNOWN_LOCATION,
4188 TRUTH_ANDIF_EXPR, code1,
4189 swap_tree_comparison (code2),
31ed6226 4190 truth_type, op1a, op1b);
e89065a1
SL
4191 if (t)
4192 return t;
4193 }
4194
4195 /* If both comparisons are of the same value against constants, we might
4196 be able to merge them. */
4197 if (operand_equal_p (op1a, op2a, 0)
4198 && TREE_CODE (op1b) == INTEGER_CST
4199 && TREE_CODE (op2b) == INTEGER_CST)
4200 {
4201 int cmp = tree_int_cst_compare (op1b, op2b);
4202
4203 /* If we have (op1a == op1b), we should either be able to
4204 return that or FALSE, depending on whether the constant op1b
4205 also satisfies the other comparison against op2b. */
4206 if (code1 == EQ_EXPR)
4207 {
4208 bool done = true;
4209 bool val;
4210 switch (code2)
4211 {
4212 case EQ_EXPR: val = (cmp == 0); break;
4213 case NE_EXPR: val = (cmp != 0); break;
4214 case LT_EXPR: val = (cmp < 0); break;
4215 case GT_EXPR: val = (cmp > 0); break;
4216 case LE_EXPR: val = (cmp <= 0); break;
4217 case GE_EXPR: val = (cmp >= 0); break;
4218 default: done = false;
4219 }
4220 if (done)
4221 {
4222 if (val)
4223 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4224 else
4225 return boolean_false_node;
4226 }
4227 }
4228 /* Likewise if the second comparison is an == comparison. */
4229 else if (code2 == EQ_EXPR)
4230 {
4231 bool done = true;
4232 bool val;
4233 switch (code1)
4234 {
4235 case EQ_EXPR: val = (cmp == 0); break;
4236 case NE_EXPR: val = (cmp != 0); break;
4237 case LT_EXPR: val = (cmp > 0); break;
4238 case GT_EXPR: val = (cmp < 0); break;
4239 case LE_EXPR: val = (cmp >= 0); break;
4240 case GE_EXPR: val = (cmp <= 0); break;
4241 default: done = false;
4242 }
4243 if (done)
4244 {
4245 if (val)
4246 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4247 else
4248 return boolean_false_node;
4249 }
4250 }
4251
4252 /* Same business with inequality tests. */
4253 else if (code1 == NE_EXPR)
4254 {
4255 bool val;
4256 switch (code2)
4257 {
4258 case EQ_EXPR: val = (cmp != 0); break;
4259 case NE_EXPR: val = (cmp == 0); break;
4260 case LT_EXPR: val = (cmp >= 0); break;
4261 case GT_EXPR: val = (cmp <= 0); break;
4262 case LE_EXPR: val = (cmp > 0); break;
4263 case GE_EXPR: val = (cmp < 0); break;
4264 default:
4265 val = false;
4266 }
4267 if (val)
4268 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4269 }
4270 else if (code2 == NE_EXPR)
4271 {
4272 bool val;
4273 switch (code1)
4274 {
4275 case EQ_EXPR: val = (cmp == 0); break;
4276 case NE_EXPR: val = (cmp != 0); break;
4277 case LT_EXPR: val = (cmp <= 0); break;
4278 case GT_EXPR: val = (cmp >= 0); break;
4279 case LE_EXPR: val = (cmp < 0); break;
4280 case GE_EXPR: val = (cmp > 0); break;
4281 default:
4282 val = false;
4283 }
4284 if (val)
4285 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4286 }
4287
4288 /* Chose the more restrictive of two < or <= comparisons. */
4289 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4290 && (code2 == LT_EXPR || code2 == LE_EXPR))
4291 {
4292 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4293 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4294 else
4295 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4296 }
4297
4298 /* Likewise chose the more restrictive of two > or >= comparisons. */
4299 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4300 && (code2 == GT_EXPR || code2 == GE_EXPR))
4301 {
4302 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4303 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4304 else
4305 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4306 }
4307
4308 /* Check for singleton ranges. */
4309 else if (cmp == 0
4310 && ((code1 == LE_EXPR && code2 == GE_EXPR)
4311 || (code1 == GE_EXPR && code2 == LE_EXPR)))
4312 return fold_build2 (EQ_EXPR, boolean_type_node, op1a, op2b);
4313
4314 /* Check for disjoint ranges. */
4315 else if (cmp <= 0
4316 && (code1 == LT_EXPR || code1 == LE_EXPR)
4317 && (code2 == GT_EXPR || code2 == GE_EXPR))
4318 return boolean_false_node;
4319 else if (cmp >= 0
4320 && (code1 == GT_EXPR || code1 == GE_EXPR)
4321 && (code2 == LT_EXPR || code2 == LE_EXPR))
4322 return boolean_false_node;
4323 }
4324
4325 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4326 NAME's definition is a truth value. See if there are any simplifications
4327 that can be done against the NAME's definition. */
4328 if (TREE_CODE (op1a) == SSA_NAME
4329 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4330 && (integer_zerop (op1b) || integer_onep (op1b)))
4331 {
4332 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4333 || (code1 == NE_EXPR && integer_onep (op1b)));
4334 gimple stmt = SSA_NAME_DEF_STMT (op1a);
4335 switch (gimple_code (stmt))
4336 {
4337 case GIMPLE_ASSIGN:
4338 /* Try to simplify by copy-propagating the definition. */
4339 return and_var_with_comparison (op1a, invert, code2, op2a, op2b);
4340
4341 case GIMPLE_PHI:
4342 /* If every argument to the PHI produces the same result when
4343 ANDed with the second comparison, we win.
4344 Do not do this unless the type is bool since we need a bool
4345 result here anyway. */
4346 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4347 {
4348 tree result = NULL_TREE;
4349 unsigned i;
4350 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4351 {
4352 tree arg = gimple_phi_arg_def (stmt, i);
4353
4354 /* If this PHI has itself as an argument, ignore it.
4355 If all the other args produce the same result,
4356 we're still OK. */
4357 if (arg == gimple_phi_result (stmt))
4358 continue;
4359 else if (TREE_CODE (arg) == INTEGER_CST)
4360 {
4361 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
4362 {
4363 if (!result)
4364 result = boolean_false_node;
4365 else if (!integer_zerop (result))
4366 return NULL_TREE;
4367 }
4368 else if (!result)
4369 result = fold_build2 (code2, boolean_type_node,
4370 op2a, op2b);
4371 else if (!same_bool_comparison_p (result,
4372 code2, op2a, op2b))
4373 return NULL_TREE;
4374 }
0e8b84ec
JJ
4375 else if (TREE_CODE (arg) == SSA_NAME
4376 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 4377 {
6c66f733
JJ
4378 tree temp;
4379 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
4380 /* In simple cases we can look through PHI nodes,
4381 but we have to be careful with loops.
4382 See PR49073. */
4383 if (! dom_info_available_p (CDI_DOMINATORS)
4384 || gimple_bb (def_stmt) == gimple_bb (stmt)
4385 || dominated_by_p (CDI_DOMINATORS,
4386 gimple_bb (def_stmt),
4387 gimple_bb (stmt)))
4388 return NULL_TREE;
4389 temp = and_var_with_comparison (arg, invert, code2,
4390 op2a, op2b);
e89065a1
SL
4391 if (!temp)
4392 return NULL_TREE;
4393 else if (!result)
4394 result = temp;
4395 else if (!same_bool_result_p (result, temp))
4396 return NULL_TREE;
4397 }
4398 else
4399 return NULL_TREE;
4400 }
4401 return result;
4402 }
4403
4404 default:
4405 break;
4406 }
4407 }
4408 return NULL_TREE;
4409}
4410
4411/* Try to simplify the AND of two comparisons, specified by
4412 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4413 If this can be simplified to a single expression (without requiring
4414 introducing more SSA variables to hold intermediate values),
4415 return the resulting tree. Otherwise return NULL_TREE.
4416 If the result expression is non-null, it has boolean type. */
4417
4418tree
4419maybe_fold_and_comparisons (enum tree_code code1, tree op1a, tree op1b,
4420 enum tree_code code2, tree op2a, tree op2b)
4421{
4422 tree t = and_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4423 if (t)
4424 return t;
4425 else
4426 return and_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4427}
4428
4429/* Helper function for or_comparisons_1: try to simplify the OR of the
4430 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
4431 If INVERT is true, invert the value of VAR before doing the OR.
4432 Return NULL_EXPR if we can't simplify this to a single expression. */
4433
4434static tree
4435or_var_with_comparison (tree var, bool invert,
4436 enum tree_code code2, tree op2a, tree op2b)
4437{
4438 tree t;
4439 gimple stmt = SSA_NAME_DEF_STMT (var);
4440
4441 /* We can only deal with variables whose definitions are assignments. */
4442 if (!is_gimple_assign (stmt))
4443 return NULL_TREE;
4444
4445 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
4446 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
4447 Then we only have to consider the simpler non-inverted cases. */
4448 if (invert)
4449 t = and_var_with_comparison_1 (stmt,
4450 invert_tree_comparison (code2, false),
4451 op2a, op2b);
4452 else
4453 t = or_var_with_comparison_1 (stmt, code2, op2a, op2b);
4454 return canonicalize_bool (t, invert);
4455}
4456
4457/* Try to simplify the OR of the ssa variable defined by the assignment
4458 STMT with the comparison specified by (OP2A CODE2 OP2B).
4459 Return NULL_EXPR if we can't simplify this to a single expression. */
4460
4461static tree
4462or_var_with_comparison_1 (gimple stmt,
4463 enum tree_code code2, tree op2a, tree op2b)
4464{
4465 tree var = gimple_assign_lhs (stmt);
4466 tree true_test_var = NULL_TREE;
4467 tree false_test_var = NULL_TREE;
4468 enum tree_code innercode = gimple_assign_rhs_code (stmt);
4469
4470 /* Check for identities like (var OR (var != 0)) => true . */
4471 if (TREE_CODE (op2a) == SSA_NAME
4472 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
4473 {
4474 if ((code2 == NE_EXPR && integer_zerop (op2b))
4475 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
4476 {
4477 true_test_var = op2a;
4478 if (var == true_test_var)
4479 return var;
4480 }
4481 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
4482 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
4483 {
4484 false_test_var = op2a;
4485 if (var == false_test_var)
4486 return boolean_true_node;
4487 }
4488 }
4489
4490 /* If the definition is a comparison, recurse on it. */
4491 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
4492 {
4493 tree t = or_comparisons_1 (innercode,
4494 gimple_assign_rhs1 (stmt),
4495 gimple_assign_rhs2 (stmt),
4496 code2,
4497 op2a,
4498 op2b);
4499 if (t)
4500 return t;
4501 }
4502
4503 /* If the definition is an AND or OR expression, we may be able to
4504 simplify by reassociating. */
eb9820c0
KT
4505 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
4506 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
e89065a1
SL
4507 {
4508 tree inner1 = gimple_assign_rhs1 (stmt);
4509 tree inner2 = gimple_assign_rhs2 (stmt);
4510 gimple s;
4511 tree t;
4512 tree partial = NULL_TREE;
eb9820c0 4513 bool is_or = (innercode == BIT_IOR_EXPR);
e89065a1
SL
4514
4515 /* Check for boolean identities that don't require recursive examination
4516 of inner1/inner2:
4517 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
4518 inner1 OR (inner1 AND inner2) => inner1
4519 !inner1 OR (inner1 OR inner2) => true
4520 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
4521 */
4522 if (inner1 == true_test_var)
4523 return (is_or ? var : inner1);
4524 else if (inner2 == true_test_var)
4525 return (is_or ? var : inner2);
4526 else if (inner1 == false_test_var)
4527 return (is_or
4528 ? boolean_true_node
4529 : or_var_with_comparison (inner2, false, code2, op2a, op2b));
4530 else if (inner2 == false_test_var)
4531 return (is_or
4532 ? boolean_true_node
4533 : or_var_with_comparison (inner1, false, code2, op2a, op2b));
4534
4535 /* Next, redistribute/reassociate the OR across the inner tests.
4536 Compute the first partial result, (inner1 OR (op2a code op2b)) */
4537 if (TREE_CODE (inner1) == SSA_NAME
4538 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
4539 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4540 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4541 gimple_assign_rhs1 (s),
4542 gimple_assign_rhs2 (s),
4543 code2, op2a, op2b)))
4544 {
4545 /* Handle the OR case, where we are reassociating:
4546 (inner1 OR inner2) OR (op2a code2 op2b)
4547 => (t OR inner2)
4548 If the partial result t is a constant, we win. Otherwise
4549 continue on to try reassociating with the other inner test. */
8236c8eb 4550 if (is_or)
e89065a1
SL
4551 {
4552 if (integer_onep (t))
4553 return boolean_true_node;
4554 else if (integer_zerop (t))
4555 return inner2;
4556 }
4557
4558 /* Handle the AND case, where we are redistributing:
4559 (inner1 AND inner2) OR (op2a code2 op2b)
4560 => (t AND (inner2 OR (op2a code op2b))) */
8236c8eb
JJ
4561 else if (integer_zerop (t))
4562 return boolean_false_node;
4563
4564 /* Save partial result for later. */
4565 partial = t;
e89065a1
SL
4566 }
4567
4568 /* Compute the second partial result, (inner2 OR (op2a code op2b)) */
4569 if (TREE_CODE (inner2) == SSA_NAME
4570 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
4571 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
4572 && (t = maybe_fold_or_comparisons (gimple_assign_rhs_code (s),
4573 gimple_assign_rhs1 (s),
4574 gimple_assign_rhs2 (s),
4575 code2, op2a, op2b)))
4576 {
4577 /* Handle the OR case, where we are reassociating:
4578 (inner1 OR inner2) OR (op2a code2 op2b)
8236c8eb
JJ
4579 => (inner1 OR t)
4580 => (t OR partial) */
4581 if (is_or)
e89065a1
SL
4582 {
4583 if (integer_zerop (t))
4584 return inner1;
4585 else if (integer_onep (t))
4586 return boolean_true_node;
8236c8eb
JJ
4587 /* If both are the same, we can apply the identity
4588 (x OR x) == x. */
4589 else if (partial && same_bool_result_p (t, partial))
4590 return t;
e89065a1
SL
4591 }
4592
4593 /* Handle the AND case, where we are redistributing:
4594 (inner1 AND inner2) OR (op2a code2 op2b)
4595 => (t AND (inner1 OR (op2a code2 op2b)))
4596 => (t AND partial) */
4597 else
4598 {
4599 if (integer_zerop (t))
4600 return boolean_false_node;
4601 else if (partial)
4602 {
4603 /* We already got a simplification for the other
4604 operand to the redistributed AND expression. The
4605 interesting case is when at least one is true.
4606 Or, if both are the same, we can apply the identity
8236c8eb 4607 (x AND x) == x. */
e89065a1
SL
4608 if (integer_onep (partial))
4609 return t;
4610 else if (integer_onep (t))
4611 return partial;
4612 else if (same_bool_result_p (t, partial))
8236c8eb 4613 return t;
e89065a1
SL
4614 }
4615 }
4616 }
4617 }
4618 return NULL_TREE;
4619}
4620
4621/* Try to simplify the OR of two comparisons defined by
4622 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
4623 If this can be done without constructing an intermediate value,
4624 return the resulting tree; otherwise NULL_TREE is returned.
4625 This function is deliberately asymmetric as it recurses on SSA_DEFs
4626 in the first comparison but not the second. */
4627
4628static tree
4629or_comparisons_1 (enum tree_code code1, tree op1a, tree op1b,
4630 enum tree_code code2, tree op2a, tree op2b)
4631{
ae22ac3c 4632 tree truth_type = truth_type_for (TREE_TYPE (op1a));
31ed6226 4633
e89065a1
SL
4634 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
4635 if (operand_equal_p (op1a, op2a, 0)
4636 && operand_equal_p (op1b, op2b, 0))
4637 {
eb9820c0 4638 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
4639 tree t = combine_comparisons (UNKNOWN_LOCATION,
4640 TRUTH_ORIF_EXPR, code1, code2,
31ed6226 4641 truth_type, op1a, op1b);
e89065a1
SL
4642 if (t)
4643 return t;
4644 }
4645
4646 /* Likewise the swapped case of the above. */
4647 if (operand_equal_p (op1a, op2b, 0)
4648 && operand_equal_p (op1b, op2a, 0))
4649 {
eb9820c0 4650 /* Result will be either NULL_TREE, or a combined comparison. */
e89065a1
SL
4651 tree t = combine_comparisons (UNKNOWN_LOCATION,
4652 TRUTH_ORIF_EXPR, code1,
4653 swap_tree_comparison (code2),
31ed6226 4654 truth_type, op1a, op1b);
e89065a1
SL
4655 if (t)
4656 return t;
4657 }
4658
4659 /* If both comparisons are of the same value against constants, we might
4660 be able to merge them. */
4661 if (operand_equal_p (op1a, op2a, 0)
4662 && TREE_CODE (op1b) == INTEGER_CST
4663 && TREE_CODE (op2b) == INTEGER_CST)
4664 {
4665 int cmp = tree_int_cst_compare (op1b, op2b);
4666
4667 /* If we have (op1a != op1b), we should either be able to
4668 return that or TRUE, depending on whether the constant op1b
4669 also satisfies the other comparison against op2b. */
4670 if (code1 == NE_EXPR)
4671 {
4672 bool done = true;
4673 bool val;
4674 switch (code2)
4675 {
4676 case EQ_EXPR: val = (cmp == 0); break;
4677 case NE_EXPR: val = (cmp != 0); break;
4678 case LT_EXPR: val = (cmp < 0); break;
4679 case GT_EXPR: val = (cmp > 0); break;
4680 case LE_EXPR: val = (cmp <= 0); break;
4681 case GE_EXPR: val = (cmp >= 0); break;
4682 default: done = false;
4683 }
4684 if (done)
4685 {
4686 if (val)
4687 return boolean_true_node;
4688 else
4689 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4690 }
4691 }
4692 /* Likewise if the second comparison is a != comparison. */
4693 else if (code2 == NE_EXPR)
4694 {
4695 bool done = true;
4696 bool val;
4697 switch (code1)
4698 {
4699 case EQ_EXPR: val = (cmp == 0); break;
4700 case NE_EXPR: val = (cmp != 0); break;
4701 case LT_EXPR: val = (cmp > 0); break;
4702 case GT_EXPR: val = (cmp < 0); break;
4703 case LE_EXPR: val = (cmp >= 0); break;
4704 case GE_EXPR: val = (cmp <= 0); break;
4705 default: done = false;
4706 }
4707 if (done)
4708 {
4709 if (val)
4710 return boolean_true_node;
4711 else
4712 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4713 }
4714 }
4715
4716 /* See if an equality test is redundant with the other comparison. */
4717 else if (code1 == EQ_EXPR)
4718 {
4719 bool val;
4720 switch (code2)
4721 {
4722 case EQ_EXPR: val = (cmp == 0); break;
4723 case NE_EXPR: val = (cmp != 0); break;
4724 case LT_EXPR: val = (cmp < 0); break;
4725 case GT_EXPR: val = (cmp > 0); break;
4726 case LE_EXPR: val = (cmp <= 0); break;
4727 case GE_EXPR: val = (cmp >= 0); break;
4728 default:
4729 val = false;
4730 }
4731 if (val)
4732 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4733 }
4734 else if (code2 == EQ_EXPR)
4735 {
4736 bool val;
4737 switch (code1)
4738 {
4739 case EQ_EXPR: val = (cmp == 0); break;
4740 case NE_EXPR: val = (cmp != 0); break;
4741 case LT_EXPR: val = (cmp > 0); break;
4742 case GT_EXPR: val = (cmp < 0); break;
4743 case LE_EXPR: val = (cmp >= 0); break;
4744 case GE_EXPR: val = (cmp <= 0); break;
4745 default:
4746 val = false;
4747 }
4748 if (val)
4749 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4750 }
4751
4752 /* Chose the less restrictive of two < or <= comparisons. */
4753 else if ((code1 == LT_EXPR || code1 == LE_EXPR)
4754 && (code2 == LT_EXPR || code2 == LE_EXPR))
4755 {
4756 if ((cmp < 0) || (cmp == 0 && code1 == LT_EXPR))
4757 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4758 else
4759 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4760 }
4761
4762 /* Likewise chose the less restrictive of two > or >= comparisons. */
4763 else if ((code1 == GT_EXPR || code1 == GE_EXPR)
4764 && (code2 == GT_EXPR || code2 == GE_EXPR))
4765 {
4766 if ((cmp > 0) || (cmp == 0 && code1 == GT_EXPR))
4767 return fold_build2 (code2, boolean_type_node, op2a, op2b);
4768 else
4769 return fold_build2 (code1, boolean_type_node, op1a, op1b);
4770 }
4771
4772 /* Check for singleton ranges. */
4773 else if (cmp == 0
4774 && ((code1 == LT_EXPR && code2 == GT_EXPR)
4775 || (code1 == GT_EXPR && code2 == LT_EXPR)))
4776 return fold_build2 (NE_EXPR, boolean_type_node, op1a, op2b);
4777
4778 /* Check for less/greater pairs that don't restrict the range at all. */
4779 else if (cmp >= 0
4780 && (code1 == LT_EXPR || code1 == LE_EXPR)
4781 && (code2 == GT_EXPR || code2 == GE_EXPR))
4782 return boolean_true_node;
4783 else if (cmp <= 0
4784 && (code1 == GT_EXPR || code1 == GE_EXPR)
4785 && (code2 == LT_EXPR || code2 == LE_EXPR))
4786 return boolean_true_node;
4787 }
4788
4789 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
4790 NAME's definition is a truth value. See if there are any simplifications
4791 that can be done against the NAME's definition. */
4792 if (TREE_CODE (op1a) == SSA_NAME
4793 && (code1 == NE_EXPR || code1 == EQ_EXPR)
4794 && (integer_zerop (op1b) || integer_onep (op1b)))
4795 {
4796 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
4797 || (code1 == NE_EXPR && integer_onep (op1b)));
4798 gimple stmt = SSA_NAME_DEF_STMT (op1a);
4799 switch (gimple_code (stmt))
4800 {
4801 case GIMPLE_ASSIGN:
4802 /* Try to simplify by copy-propagating the definition. */
4803 return or_var_with_comparison (op1a, invert, code2, op2a, op2b);
4804
4805 case GIMPLE_PHI:
4806 /* If every argument to the PHI produces the same result when
4807 ORed with the second comparison, we win.
4808 Do not do this unless the type is bool since we need a bool
4809 result here anyway. */
4810 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
4811 {
4812 tree result = NULL_TREE;
4813 unsigned i;
4814 for (i = 0; i < gimple_phi_num_args (stmt); i++)
4815 {
4816 tree arg = gimple_phi_arg_def (stmt, i);
4817
4818 /* If this PHI has itself as an argument, ignore it.
4819 If all the other args produce the same result,
4820 we're still OK. */
4821 if (arg == gimple_phi_result (stmt))
4822 continue;
4823 else if (TREE_CODE (arg) == INTEGER_CST)
4824 {
4825 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
4826 {
4827 if (!result)
4828 result = boolean_true_node;
4829 else if (!integer_onep (result))
4830 return NULL_TREE;
4831 }
4832 else if (!result)
4833 result = fold_build2 (code2, boolean_type_node,
4834 op2a, op2b);
4835 else if (!same_bool_comparison_p (result,
4836 code2, op2a, op2b))
4837 return NULL_TREE;
4838 }
0e8b84ec
JJ
4839 else if (TREE_CODE (arg) == SSA_NAME
4840 && !SSA_NAME_IS_DEFAULT_DEF (arg))
e89065a1 4841 {
6c66f733
JJ
4842 tree temp;
4843 gimple def_stmt = SSA_NAME_DEF_STMT (arg);
4844 /* In simple cases we can look through PHI nodes,
4845 but we have to be careful with loops.
4846 See PR49073. */
4847 if (! dom_info_available_p (CDI_DOMINATORS)
4848 || gimple_bb (def_stmt) == gimple_bb (stmt)
4849 || dominated_by_p (CDI_DOMINATORS,
4850 gimple_bb (def_stmt),
4851 gimple_bb (stmt)))
4852 return NULL_TREE;
4853 temp = or_var_with_comparison (arg, invert, code2,
4854 op2a, op2b);
e89065a1
SL
4855 if (!temp)
4856 return NULL_TREE;
4857 else if (!result)
4858 result = temp;
4859 else if (!same_bool_result_p (result, temp))
4860 return NULL_TREE;
4861 }
4862 else
4863 return NULL_TREE;
4864 }
4865 return result;
4866 }
4867
4868 default:
4869 break;
4870 }
4871 }
4872 return NULL_TREE;
4873}
4874
4875/* Try to simplify the OR of two comparisons, specified by
4876 (OP1A CODE1 OP1B) and (OP2B CODE2 OP2B), respectively.
4877 If this can be simplified to a single expression (without requiring
4878 introducing more SSA variables to hold intermediate values),
4879 return the resulting tree. Otherwise return NULL_TREE.
4880 If the result expression is non-null, it has boolean type. */
4881
4882tree
4883maybe_fold_or_comparisons (enum tree_code code1, tree op1a, tree op1b,
4884 enum tree_code code2, tree op2a, tree op2b)
4885{
4886 tree t = or_comparisons_1 (code1, op1a, op1b, code2, op2a, op2b);
4887 if (t)
4888 return t;
4889 else
4890 return or_comparisons_1 (code2, op2a, op2b, code1, op1a, op1b);
4891}
cfef45c8
RG
4892
4893
/* Fold STMT to a constant using VALUEIZE to valueize SSA names.

   Either NULL_TREE, a simplified but non-constant or a constant
   is returned.

   GVALUEIZE is handed to gimple_simplify for the match-and-simplify
   machinery; VALUEIZE is used for direct operand lookups below.

   ??? This should go into a gimple-fold-inline.h file to be eventually
   privatized with the single valueize function used in the various TUs
   to avoid the indirect function call overhead.  */

tree
gimple_fold_stmt_to_constant_1 (gimple stmt, tree (*valueize) (tree),
				tree (*gvalueize) (tree))
{
  code_helper rcode;
  tree ops[3] = {};
  /* ??? The SSA propagators do not correctly deal with following SSA use-def
     edges if there are intermediate VARYING defs.  For this reason
     do not follow SSA edges here even though SCCVN can technically
     just deal fine with that.  */
  /* First give the generic match-and-simplify machinery a shot; accept
     the result only if it is a bare GIMPLE value (leaf code or
     ADDR_EXPR).  */
  if (gimple_simplify (stmt, &rcode, ops, NULL, gvalueize)
      && rcode.is_tree_code ()
      && (TREE_CODE_LENGTH ((tree_code) rcode) == 0
	  || ((tree_code) rcode) == ADDR_EXPR)
      && is_gimple_val (ops[0]))
    {
      tree res = ops[0];
      if (dump_file && dump_flags & TDF_DETAILS)
	{
	  fprintf (dump_file, "Match-and-simplified ");
	  print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, res, 0);
	  fprintf (dump_file, "\n");
	}
      return res;
    }

  location_t loc = gimple_location (stmt);
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        enum tree_code subcode = gimple_assign_rhs_code (stmt);

        switch (get_gimple_rhs_class (subcode))
          {
          case GIMPLE_SINGLE_RHS:
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              enum tree_code_class kind = TREE_CODE_CLASS (subcode);

              if (TREE_CODE (rhs) == SSA_NAME)
                {
                  /* If the RHS is an SSA_NAME, return its known constant value,
                     if any.  */
                  return (*valueize) (rhs);
                }
              /* Handle propagating invariant addresses into address
                 operations.  */
              else if (TREE_CODE (rhs) == ADDR_EXPR
                       && !is_gimple_min_invariant (rhs))
                {
                  HOST_WIDE_INT offset = 0;
                  tree base;
                  base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
							  &offset,
							  valueize);
                  if (base
                      && (CONSTANT_CLASS_P (base)
                          || decl_address_invariant_p (base)))
                    return build_invariant_address (TREE_TYPE (rhs),
                                                    base, offset);
                }
              /* Fold a vector CONSTRUCTOR whose elements all valueize to
                 constants into a VECTOR_CST.  */
              else if (TREE_CODE (rhs) == CONSTRUCTOR
                       && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
                       && (CONSTRUCTOR_NELTS (rhs)
                           == TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
                {
                  unsigned i;
                  tree val, *vec;

                  vec = XALLOCAVEC (tree,
                                    TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs)));
                  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
                    {
                      val = (*valueize) (val);
                      if (TREE_CODE (val) == INTEGER_CST
                          || TREE_CODE (val) == REAL_CST
                          || TREE_CODE (val) == FIXED_CST)
                        vec[i] = val;
                      else
                        return NULL_TREE;
                    }

                  return build_vector (TREE_TYPE (rhs), vec);
                }
              if (subcode == OBJ_TYPE_REF)
                {
                  tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
                  /* If callee is constant, we can fold away the wrapper.  */
                  if (is_gimple_min_invariant (val))
                    return val;
                }

              if (kind == tcc_reference)
                {
                  if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
                       || TREE_CODE (rhs) == REALPART_EXPR
                       || TREE_CODE (rhs) == IMAGPART_EXPR)
                      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_unary_loc (EXPR_LOCATION (rhs),
                                             TREE_CODE (rhs),
                                             TREE_TYPE (rhs), val);
                    }
                  else if (TREE_CODE (rhs) == BIT_FIELD_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      return fold_ternary_loc (EXPR_LOCATION (rhs),
                                               TREE_CODE (rhs),
                                               TREE_TYPE (rhs), val,
                                               TREE_OPERAND (rhs, 1),
                                               TREE_OPERAND (rhs, 2));
                    }
                  else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      /* Rewrite *&x-style MEM_REFs with a constant address
                         so that fold_const_aggregate_ref_1 below can see
                         through them.  */
                      tree val = (*valueize) (TREE_OPERAND (rhs, 0));
                      if (TREE_CODE (val) == ADDR_EXPR
                          && is_gimple_min_invariant (val))
                        {
                          tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
                                                  unshare_expr (val),
                                                  TREE_OPERAND (rhs, 1));
                          if (tem)
                            rhs = tem;
                        }
                    }
                  return fold_const_aggregate_ref_1 (rhs, valueize);
                }
              else if (kind == tcc_declaration)
                return get_symbol_constant_value (rhs);
              return rhs;
            }

          case GIMPLE_UNARY_RHS:
            return NULL_TREE;

          case GIMPLE_BINARY_RHS:
            {
              /* Handle binary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));

              /* Translate &x + CST into an invariant form suitable for
                 further propagation.  */
              if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                  && TREE_CODE (op0) == ADDR_EXPR
                  && TREE_CODE (op1) == INTEGER_CST)
                {
                  tree off = fold_convert (ptr_type_node, op1);
                  return build_fold_addr_expr_loc
		           (loc,
			    fold_build2 (MEM_REF,
                                         TREE_TYPE (TREE_TYPE (op0)),
                                         unshare_expr (op0), off));
                }

              return fold_binary_loc (loc, subcode,
				      gimple_expr_type (stmt), op0, op1);
            }

          case GIMPLE_TERNARY_RHS:
            {
              /* Handle ternary operators that can appear in GIMPLE form.  */
              tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
              tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
              tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));

              /* Fold embedded expressions in ternary codes.  */
              if ((subcode == COND_EXPR
                   || subcode == VEC_COND_EXPR)
                  && COMPARISON_CLASS_P (op0))
                {
                  tree op00 = (*valueize) (TREE_OPERAND (op0, 0));
                  tree op01 = (*valueize) (TREE_OPERAND (op0, 1));
                  tree tem = fold_binary_loc (loc, TREE_CODE (op0),
                                              TREE_TYPE (op0), op00, op01);
                  if (tem)
                    op0 = tem;
                }

              return fold_ternary_loc (loc, subcode,
                                       gimple_expr_type (stmt), op0, op1, op2);
            }

          default:
            gcc_unreachable ();
          }
      }

    case GIMPLE_CALL:
      {
	tree fn;
	gcall *call_stmt = as_a <gcall *> (stmt);

	if (gimple_call_internal_p (stmt))
	  {
	    /* Only the UBSAN overflow-checking internal functions are
	       folded here; other internal calls are left alone.  */
	    enum tree_code subcode = ERROR_MARK;
	    switch (gimple_call_internal_fn (stmt))
	      {
	      case IFN_UBSAN_CHECK_ADD:
		subcode = PLUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_SUB:
		subcode = MINUS_EXPR;
		break;
	      case IFN_UBSAN_CHECK_MUL:
		subcode = MULT_EXPR;
		break;
	      default:
		return NULL_TREE;
	      }
	    tree arg0 = gimple_call_arg (stmt, 0);
	    tree arg1 = gimple_call_arg (stmt, 1);
	    tree op0 = (*valueize) (arg0);
	    tree op1 = (*valueize) (arg1);

	    if (TREE_CODE (op0) != INTEGER_CST
		|| TREE_CODE (op1) != INTEGER_CST)
	      {
		switch (subcode)
		  {
		  case MULT_EXPR:
		    /* x * 0 = 0 * x = 0 without overflow.  */
		    if (integer_zerop (op0) || integer_zerop (op1))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  case MINUS_EXPR:
		    /* y - y = 0 without overflow.  */
		    if (operand_equal_p (op0, op1, 0))
		      return build_zero_cst (TREE_TYPE (arg0));
		    break;
		  default:
		    break;
		  }
	      }
	    /* Accept the folded result only when it did not overflow,
	       since these internal functions check for overflow.  */
	    tree res
	      = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
	    if (res
		&& TREE_CODE (res) == INTEGER_CST
		&& !TREE_OVERFLOW (res))
	      return res;
	    return NULL_TREE;
	  }

	fn = (*valueize) (gimple_call_fn (stmt));
	if (TREE_CODE (fn) == ADDR_EXPR
	    && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
	    && DECL_BUILT_IN (TREE_OPERAND (fn, 0))
	    && gimple_builtin_call_types_compatible_p (stmt,
						       TREE_OPERAND (fn, 0)))
	  {
	    tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
	    tree retval;
	    unsigned i;
	    for (i = 0; i < gimple_call_num_args (stmt); ++i)
	      args[i] = (*valueize) (gimple_call_arg (stmt, i));
	    retval = fold_builtin_call_array (loc,
					      gimple_call_return_type (call_stmt),
					      fn, gimple_call_num_args (stmt), args);
	    if (retval)
	      {
		/* fold_call_expr wraps the result inside a NOP_EXPR.  */
		STRIP_NOPS (retval);
		retval = fold_convert (gimple_call_return_type (call_stmt),
				       retval);
	      }
	    return retval;
	  }
	return NULL_TREE;
      }

    default:
      return NULL_TREE;
    }
}
5183
5184/* Fold STMT to a constant using VALUEIZE to valueize SSA names.
5185 Returns NULL_TREE if folding to a constant is not possible, otherwise
5186 returns a constant according to is_gimple_min_invariant. */
5187
5188tree
5189gimple_fold_stmt_to_constant (gimple stmt, tree (*valueize) (tree))
5190{
5191 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
5192 if (res && is_gimple_min_invariant (res))
5193 return res;
5194 return NULL_TREE;
5195}
5196
5197
5198/* The following set of functions are supposed to fold references using
5199 their constant initializers. */
5200
cfef45c8
RG
/* See if we can find constructor defining value of BASE.
   When we know the constructor with constant offset (such as
   base is array[40] and we do know constructor of array), then
   BIT_OFFSET is adjusted accordingly.

   As a special case, return error_mark_node when constructor
   is not explicitly available, but it is known to be zero
   such as 'static const int a;'.  */
static tree
get_base_constructor (tree base, HOST_WIDE_INT *bit_offset,
		      tree (*valueize)(tree))
{
  HOST_WIDE_INT bit_offset2, size, max_size;
  if (TREE_CODE (base) == MEM_REF)
    {
      /* Fold the MEM_REF's constant byte offset into *BIT_OFFSET;
	 give up on offsets that do not fit a signed HWI.  */
      if (!integer_zerop (TREE_OPERAND (base, 1)))
	{
	  if (!tree_fits_shwi_p (TREE_OPERAND (base, 1)))
	    return NULL_TREE;
	  *bit_offset += (mem_ref_offset (base).to_short_addr ()
			  * BITS_PER_UNIT);
	}

      if (valueize
	  && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
	base = valueize (TREE_OPERAND (base, 0));
      if (!base || TREE_CODE (base) != ADDR_EXPR)
	return NULL_TREE;
      base = TREE_OPERAND (base, 0);
    }

  /* Get a CONSTRUCTOR.  If BASE is a VAR_DECL, get its
     DECL_INITIAL.  If BASE is a nested reference into another
     ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
     the inner reference.  */
  switch (TREE_CODE (base))
    {
    case VAR_DECL:
    case CONST_DECL:
      {
	tree init = ctor_for_folding (base);

	/* Our semantic is exact opposite of ctor_for_folding;
	   NULL means unknown, while error_mark_node is 0.  */
	if (init == error_mark_node)
	  return NULL_TREE;
	if (!init)
	  return error_mark_node;
	return init;
      }

    case ARRAY_REF:
    case COMPONENT_REF:
      /* Only exact-size references are acceptable; a variable-sized
	 access cannot be resolved to a unique constructor element.  */
      base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size);
      if (max_size == -1 || size != max_size)
	return NULL_TREE;
      *bit_offset += bit_offset2;
      return get_base_constructor (base, bit_offset, valueize);

    case STRING_CST:
    case CONSTRUCTOR:
      return base;

    default:
      return NULL_TREE;
    }
}
5268
cfef45c8
RG
/* CTOR is CONSTRUCTOR of an array type.  Fold reference of type TYPE and size
   SIZE to the memory at bit OFFSET.  FROM_DECL, if non-NULL, is the decl the
   constructor comes from (passed through to fold_ctor_reference).  */

static tree
fold_array_ctor_reference (tree type, tree ctor,
			   unsigned HOST_WIDE_INT offset,
			   unsigned HOST_WIDE_INT size,
			   tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;
  offset_int low_bound;
  offset_int elt_size;
  offset_int index, max_index;
  offset_int access_index;
  tree domain_type = NULL_TREE, index_type = NULL_TREE;
  HOST_WIDE_INT inner_offset;

  /* Compute low bound and elt size.  */
  if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
    domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
  if (domain_type && TYPE_MIN_VALUE (domain_type))
    {
      /* Static constructors for variably sized objects makes no sense.  */
      gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
      index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
      low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
    }
  else
    low_bound = 0;
  /* Static constructors for variably sized objects makes no sense.  */
  gcc_assert (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))))
	      == INTEGER_CST);
  elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));

  /* We can handle only constantly sized accesses that are known to not
     be larger than size of array element.  */
  if (!TYPE_SIZE_UNIT (type)
      || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST
      || wi::lts_p (elt_size, wi::to_offset (TYPE_SIZE_UNIT (type)))
      || elt_size == 0)
    return NULL_TREE;

  /* Compute the array index we look for.  */
  access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
				 elt_size);
  access_index += low_bound;
  /* Wrap the index into the domain's index type, matching how the
     constructor's own indices are represented.  */
  if (index_type)
    access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
			    TYPE_SIGN (index_type));

  /* And offset within the access.  */
  inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);

  /* See if the array field is large enough to span whole access.  We do not
     care to fold accesses spanning multiple array indexes.  */
  if (inner_offset + size > elt_size.to_uhwi () * BITS_PER_UNIT)
    return NULL_TREE;

  /* Start one below LOW_BOUND so the "next index" rule below yields
     LOW_BOUND for the first unindexed element.  */
  index = low_bound - 1;
  if (index_type)
    index = wi::ext (index, TYPE_PRECISION (index_type),
		     TYPE_SIGN (index_type));

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
    {
      /* Array constructor might explicitely set index, or specify range
	 or leave index NULL meaning that it is next index after previous
	 one.  */
      if (cfield)
	{
	  if (TREE_CODE (cfield) == INTEGER_CST)
	    max_index = index = wi::to_offset (cfield);
	  else
	    {
	      gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
	      index = wi::to_offset (TREE_OPERAND (cfield, 0));
	      max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
	    }
	}
      else
	{
	  index += 1;
	  if (index_type)
	    index = wi::ext (index, TYPE_PRECISION (index_type),
			     TYPE_SIGN (index_type));
	  max_index = index;
	}

      /* Do we have match?  */
      if (wi::cmpu (access_index, index) >= 0
	  && wi::cmpu (access_index, max_index) <= 0)
	return fold_ctor_reference (type, cval, inner_offset, size,
				    from_decl);
    }
  /* When memory is not explicitely mentioned in constructor,
     it is 0 (or out of range).  */
  return build_zero_cst (type);
}
5368
/* CTOR is CONSTRUCTOR of an aggregate or vector.
   Fold reference of type TYPE and size SIZE to the memory at bit OFFSET.
   FROM_DECL, if non-NULL, is the decl the constructor comes from (passed
   through to fold_ctor_reference).  */

static tree
fold_nonarray_ctor_reference (tree type, tree ctor,
			      unsigned HOST_WIDE_INT offset,
			      unsigned HOST_WIDE_INT size,
			      tree from_decl)
{
  unsigned HOST_WIDE_INT cnt;
  tree cfield, cval;

  FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
			    cval)
    {
      tree byte_offset = DECL_FIELD_OFFSET (cfield);
      tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
      tree field_size = DECL_SIZE (cfield);
      offset_int bitoffset;
      offset_int bitoffset_end, access_end;

      /* Variable sized objects in static constructors makes no sense,
	 but field_size can be NULL for flexible array members.  */
      gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
		  && TREE_CODE (byte_offset) == INTEGER_CST
		  && (field_size != NULL_TREE
		      ? TREE_CODE (field_size) == INTEGER_CST
		      : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));

      /* Compute bit offset of the field.  */
      bitoffset = (wi::to_offset (field_offset)
		   + wi::lshift (wi::to_offset (byte_offset),
				 LOG2_BITS_PER_UNIT));
      /* Compute bit offset where the field ends.  */
      if (field_size != NULL_TREE)
	bitoffset_end = bitoffset + wi::to_offset (field_size);
      else
	/* Flexible array member: treated as extending to the end, so
	   the end-of-field check below is skipped.  */
	bitoffset_end = 0;

      access_end = offset_int (offset) + size;

      /* Is there any overlap between [OFFSET, OFFSET+SIZE) and
	 [BITOFFSET, BITOFFSET_END)?  */
      if (wi::cmps (access_end, bitoffset) > 0
	  && (field_size == NULL_TREE
	      || wi::lts_p (offset, bitoffset_end)))
	{
	  offset_int inner_offset = offset_int (offset) - bitoffset;
	  /* We do have overlap.  Now see if field is large enough to
	     cover the access.  Give up for accesses spanning multiple
	     fields.  */
	  if (wi::cmps (access_end, bitoffset_end) > 0)
	    return NULL_TREE;
	  if (wi::lts_p (offset, bitoffset))
	    return NULL_TREE;
	  return fold_ctor_reference (type, cval,
				      inner_offset.to_uhwi (), size,
				      from_decl);
	}
    }
  /* When memory is not explicitely mentioned in constructor, it is 0.  */
  return build_zero_cst (type);
}
5432
/* CTOR is value initializing memory, fold reference of type TYPE and size SIZE
   to the memory at bit OFFSET.  FROM_DECL, if non-NULL, is the decl the
   value comes from and is used when canonicalizing the result.
   Returns NULL_TREE when the reference cannot be folded.  */

tree
fold_ctor_reference (tree type, tree ctor, unsigned HOST_WIDE_INT offset,
		     unsigned HOST_WIDE_INT size, tree from_decl)
{
  tree ret;

  /* We found the field with exact match.  */
  if (useless_type_conversion_p (type, TREE_TYPE (ctor))
      && !offset)
    return canonicalize_constructor_val (unshare_expr (ctor), from_decl);

  /* We are at the end of walk, see if we can view convert the
     result.  */
  if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
      /* VIEW_CONVERT_EXPR is defined only for matching sizes.  */
      && !compare_tree_int (TYPE_SIZE (type), size)
      && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
    {
      ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
      ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
      if (ret)
	STRIP_NOPS (ret);
      return ret;
    }
  /* For constants and byte-aligned/sized reads try to go through
     native_encode/interpret.  */
  if (CONSTANT_CLASS_P (ctor)
      && BITS_PER_UNIT == 8
      && offset % BITS_PER_UNIT == 0
      && size % BITS_PER_UNIT == 0
      && size <= MAX_BITSIZE_MODE_ANY_MODE)
    {
      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
      if (native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
			      offset / BITS_PER_UNIT) > 0)
	return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
    }
  /* Recurse into CONSTRUCTORs, dispatching on array vs. record/union
     element lookup.  */
  if (TREE_CODE (ctor) == CONSTRUCTOR)
    {

      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
	  || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
	return fold_array_ctor_reference (type, ctor, offset, size,
					  from_decl);
      else
	return fold_nonarray_ctor_reference (type, ctor, offset, size,
					     from_decl);
    }

  return NULL_TREE;
}
5487
/* Return the tree representing the element referenced by T if T is an
   ARRAY_REF or COMPONENT_REF into constant aggregates valuezing SSA
   names using VALUEIZE.  Return NULL_TREE otherwise.  */

tree
fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
{
  tree ctor, idx, base;
  HOST_WIDE_INT offset, size, max_size;
  tree tem;

  /* Volatile accesses must be preserved, never folded away.  */
  if (TREE_THIS_VOLATILE (t))
    return NULL_TREE;

  if (TREE_CODE_CLASS (TREE_CODE (t)) == tcc_declaration)
    return get_symbol_constant_value (t);

  tem = fold_read_from_constant_string (t);
  if (tem)
    return tem;

  switch (TREE_CODE (t))
    {
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      /* Constant indexes are handled well by get_base_constructor.
	 Only special case variable offsets.
	 FIXME: This code can't handle nested references with variable indexes
	 (they will be handled only by iteration of ccp).  Perhaps we can bring
	 get_ref_base_and_extent here and make it use a valueize callback.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
	  && valueize
	  && (idx = (*valueize) (TREE_OPERAND (t, 1)))
	  && TREE_CODE (idx) == INTEGER_CST)
	{
	  tree low_bound, unit_size;

	  /* If the resulting bit-offset is constant, track it.  */
	  if ((low_bound = array_ref_low_bound (t),
	       TREE_CODE (low_bound) == INTEGER_CST)
	      && (unit_size = array_ref_element_size (t),
		  tree_fits_uhwi_p (unit_size)))
	    {
	      offset_int woffset
		= wi::sext (wi::to_offset (idx) - wi::to_offset (low_bound),
			    TYPE_PRECISION (TREE_TYPE (idx)));

	      if (wi::fits_shwi_p (woffset))
		{
		  offset = woffset.to_shwi ();
		  /* TODO: This code seems wrong, multiply then check
		     to see if it fits.  */
		  offset *= tree_to_uhwi (unit_size);
		  offset *= BITS_PER_UNIT;

		  base = TREE_OPERAND (t, 0);
		  ctor = get_base_constructor (base, &offset, valueize);
		  /* Empty constructor.  Always fold to 0.  */
		  if (ctor == error_mark_node)
		    return build_zero_cst (TREE_TYPE (t));
		  /* Out of bound array access.  Value is undefined,
		     but don't fold.  */
		  if (offset < 0)
		    return NULL_TREE;
		  /* We can not determine ctor.  */
		  if (!ctor)
		    return NULL_TREE;
		  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
					      tree_to_uhwi (unit_size)
					      * BITS_PER_UNIT,
					      base);
		}
	    }
	}
      /* Fallthru.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case TARGET_MEM_REF:
    case MEM_REF:
      base = get_ref_base_and_extent (t, &offset, &size, &max_size);
      ctor = get_base_constructor (base, &offset, valueize);

      /* Empty constructor.  Always fold to 0.  */
      if (ctor == error_mark_node)
	return build_zero_cst (TREE_TYPE (t));
      /* We do not know precise address.  */
      if (max_size == -1 || max_size != size)
	return NULL_TREE;
      /* We can not determine ctor.  */
      if (!ctor)
	return NULL_TREE;

      /* Out of bound array access.  Value is undefined, but don't fold.  */
      if (offset < 0)
	return NULL_TREE;

      return fold_ctor_reference (TREE_TYPE (t), ctor, offset, size,
				  base);

    case REALPART_EXPR:
    case IMAGPART_EXPR:
      {
	/* Fold the operand first; extract the requested part if it
	   folds to a complex constant.  */
	tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
	if (c && TREE_CODE (c) == COMPLEX_CST)
	  return fold_build1_loc (EXPR_LOCATION (t),
				  TREE_CODE (t), TREE_TYPE (t), c);
	break;
      }

    default:
      break;
    }

  return NULL_TREE;
}
5604
/* Like fold_const_aggregate_ref_1 but without valueization of SSA names:
   fold reference T into constant aggregates using constant indexes only.  */

tree
fold_const_aggregate_ref (tree t)
{
  return fold_const_aggregate_ref_1 (t, NULL);
}
06bc3ec7 5610
85942f45 5611/* Lookup virtual method with index TOKEN in a virtual table V
ec77d61f
JH
5612 at OFFSET.
5613 Set CAN_REFER if non-NULL to false if method
5614 is not referable or if the virtual table is ill-formed (such as rewriten
5615 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
81fa35bd
MJ
5616
5617tree
85942f45
JH
5618gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
5619 tree v,
ec77d61f
JH
5620 unsigned HOST_WIDE_INT offset,
5621 bool *can_refer)
81fa35bd 5622{
85942f45
JH
5623 tree vtable = v, init, fn;
5624 unsigned HOST_WIDE_INT size;
8c311b50
JH
5625 unsigned HOST_WIDE_INT elt_size, access_index;
5626 tree domain_type;
81fa35bd 5627
ec77d61f
JH
5628 if (can_refer)
5629 *can_refer = true;
5630
9de2f554 5631 /* First of all double check we have virtual table. */
81fa35bd 5632 if (TREE_CODE (v) != VAR_DECL
2aa3da06 5633 || !DECL_VIRTUAL_P (v))
ec77d61f
JH
5634 {
5635 gcc_assert (in_lto_p);
5636 /* Pass down that we lost track of the target. */
5637 if (can_refer)
5638 *can_refer = false;
5639 return NULL_TREE;
5640 }
9de2f554 5641
2aa3da06
JH
5642 init = ctor_for_folding (v);
5643
9de2f554 5644 /* The virtual tables should always be born with constructors
2aa3da06
JH
5645 and we always should assume that they are avaialble for
5646 folding. At the moment we do not stream them in all cases,
5647 but it should never happen that ctor seem unreachable. */
5648 gcc_assert (init);
5649 if (init == error_mark_node)
5650 {
5651 gcc_assert (in_lto_p);
ec77d61f
JH
5652 /* Pass down that we lost track of the target. */
5653 if (can_refer)
5654 *can_refer = false;
2aa3da06
JH
5655 return NULL_TREE;
5656 }
81fa35bd 5657 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
ae7e9ddd 5658 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
85942f45 5659 offset *= BITS_PER_UNIT;
81fa35bd 5660 offset += token * size;
9de2f554 5661
8c311b50
JH
5662 /* Lookup the value in the constructor that is assumed to be array.
5663 This is equivalent to
5664 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
5665 offset, size, NULL);
5666 but in a constant time. We expect that frontend produced a simple
5667 array without indexed initializers. */
5668
5669 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
5670 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
5671 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
5672 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
5673
5674 access_index = offset / BITS_PER_UNIT / elt_size;
5675 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
5676
5677 /* This code makes an assumption that there are no
5678 indexed fileds produced by C++ FE, so we can directly index the array. */
5679 if (access_index < CONSTRUCTOR_NELTS (init))
5680 {
5681 fn = CONSTRUCTOR_ELT (init, access_index)->value;
5682 gcc_checking_assert (!CONSTRUCTOR_ELT (init, access_index)->index);
5683 STRIP_NOPS (fn);
5684 }
5685 else
5686 fn = NULL;
9de2f554
JH
5687
5688 /* For type inconsistent program we may end up looking up virtual method
5689 in virtual table that does not contain TOKEN entries. We may overrun
5690 the virtual table and pick up a constant or RTTI info pointer.
5691 In any case the call is undefined. */
5692 if (!fn
5693 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
5694 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
5695 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5696 else
5697 {
5698 fn = TREE_OPERAND (fn, 0);
5699
5700 /* When cgraph node is missing and function is not public, we cannot
5701 devirtualize. This can happen in WHOPR when the actual method
5702 ends up in other partition, because we found devirtualization
5703 possibility too late. */
5704 if (!can_refer_decl_in_current_unit_p (fn, vtable))
ec77d61f
JH
5705 {
5706 if (can_refer)
5707 {
5708 *can_refer = false;
5709 return fn;
5710 }
5711 return NULL_TREE;
5712 }
9de2f554 5713 }
81fa35bd 5714
7501ca28
RG
5715 /* Make sure we create a cgraph node for functions we'll reference.
5716 They can be non-existent if the reference comes from an entry
5717 of an external vtable for example. */
d52f5295 5718 cgraph_node::get_create (fn);
7501ca28 5719
81fa35bd
MJ
5720 return fn;
5721}
5722
85942f45
JH
5723/* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
5724 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
5725 KNOWN_BINFO carries the binfo describing the true type of
ec77d61f
JH
5726 OBJ_TYPE_REF_OBJECT(REF).
5727 Set CAN_REFER if non-NULL to false if method
5728 is not referable or if the virtual table is ill-formed (such as rewriten
5729 by non-C++ produced symbol). Otherwise just return NULL in that calse. */
85942f45
JH
5730
5731tree
ec77d61f
JH
5732gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
5733 bool *can_refer)
85942f45
JH
5734{
5735 unsigned HOST_WIDE_INT offset;
5736 tree v;
5737
5738 v = BINFO_VTABLE (known_binfo);
5739 /* If there is no virtual methods table, leave the OBJ_TYPE_REF alone. */
5740 if (!v)
5741 return NULL_TREE;
5742
5743 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
ec77d61f
JH
5744 {
5745 if (can_refer)
5746 *can_refer = false;
5747 return NULL_TREE;
5748 }
5749 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
85942f45
JH
5750}
5751
06bc3ec7
BS
/* Return true iff VAL is a gimple expression that is known to be
   non-negative.  Restricted to floating-point inputs.  */

bool
gimple_val_nonnegative_real_p (tree val)
{
  gimple def_stmt;

  gcc_assert (val && SCALAR_FLOAT_TYPE_P (TREE_TYPE (val)));

  /* Use existing logic for non-gimple trees.  */
  if (tree_expr_nonnegative_p (val))
    return true;

  /* Anything else we can reason about must be an SSA name defined by a
     statement we can inspect.  */
  if (TREE_CODE (val) != SSA_NAME)
    return false;

  /* Currently we look only at the immediately defining statement
     to make this determination, since recursion on defining
     statements of operands can lead to quadratic behavior in the
     worst case.  This is expected to catch almost all occurrences
     in practice.  It would be possible to implement limited-depth
     recursion if important cases are lost.  Alternatively, passes
     that need this information (such as the pow/powi lowering code
     in the cse_sincos pass) could be revised to provide it through
     dataflow propagation.  */

  def_stmt = SSA_NAME_DEF_STMT (val);

  if (is_gimple_assign (def_stmt))
    {
      tree op0, op1;

      /* See fold-const.c:tree_expr_nonnegative_p for additional
	 cases that could be handled with recursion.  */

      switch (gimple_assign_rhs_code (def_stmt))
	{
	case ABS_EXPR:
	  /* Always true for floating-point operands.  */
	  return true;

	case MULT_EXPR:
	  /* True if the two operands are identical (since we are
	     restricted to floating-point inputs).  */
	  op0 = gimple_assign_rhs1 (def_stmt);
	  op1 = gimple_assign_rhs2 (def_stmt);

	  if (op0 == op1
	      || operand_equal_p (op0, op1, 0))
	    return true;

	  /* Fall through: distinct operands give no information.  */
	default:
	  return false;
	}
    }
  else if (is_gimple_call (def_stmt))
    {
      tree fndecl = gimple_call_fndecl (def_stmt);
      if (fndecl
	  && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
	{
	  tree arg1;

	  switch (DECL_FUNCTION_CODE (fndecl))
	    {
	    /* These math builtins have non-negative ranges by
	       definition, for all float widths (CASE_FLT_FN expands to
	       the float/double/long double variants).  */
	    CASE_FLT_FN (BUILT_IN_ACOS):
	    CASE_FLT_FN (BUILT_IN_ACOSH):
	    CASE_FLT_FN (BUILT_IN_CABS):
	    CASE_FLT_FN (BUILT_IN_COSH):
	    CASE_FLT_FN (BUILT_IN_ERFC):
	    CASE_FLT_FN (BUILT_IN_EXP):
	    CASE_FLT_FN (BUILT_IN_EXP10):
	    CASE_FLT_FN (BUILT_IN_EXP2):
	    CASE_FLT_FN (BUILT_IN_FABS):
	    CASE_FLT_FN (BUILT_IN_FDIM):
	    CASE_FLT_FN (BUILT_IN_HYPOT):
	    CASE_FLT_FN (BUILT_IN_POW10):
	      return true;

	    CASE_FLT_FN (BUILT_IN_SQRT):
	      /* sqrt(-0.0) is -0.0, and sqrt is not defined over other
		 negative inputs; so sqrt is non-negative only when signed
		 zeros need not be honored for VAL's mode.  */
	      if (!HONOR_SIGNED_ZEROS (val))
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POWI):
	      /* True if the second argument is an even integer.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == INTEGER_CST
		  && (TREE_INT_CST_LOW (arg1) & 1) == 0)
		return true;

	      break;

	    CASE_FLT_FN (BUILT_IN_POW):
	      /* True if the second argument is an even integer-valued
		 real.  */
	      arg1 = gimple_call_arg (def_stmt, 1);

	      if (TREE_CODE (arg1) == REAL_CST)
		{
		  REAL_VALUE_TYPE c;
		  HOST_WIDE_INT n;

		  c = TREE_REAL_CST (arg1);
		  n = real_to_integer (&c);

		  if ((n & 1) == 0)
		    {
		      REAL_VALUE_TYPE cint;
		      real_from_integer (&cint, VOIDmode, n, SIGNED);
		      /* Accept only if the real exponent round-trips
			 through integer conversion, i.e. is exactly an
			 even integer.  */
		      if (real_identical (&c, &cint))
			return true;
		    }
		}

	      break;

	    default:
	      return false;
	    }
	}
    }

  return false;
}
b184c8f1
AM
5882
/* Given a pointer value OP0, return a simplified version of an
   indirection through OP0, or NULL_TREE if no simplification is
   possible.  Note that the resulting type may be different from
   the type pointed to in the sense that it is still compatible
   from the langhooks point of view.  */

tree
gimple_fold_indirect_ref (tree t)
{
  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
  tree sub = t;
  tree subtype;

  /* Look through no-op conversions to the underlying pointer value.  */
  STRIP_NOPS (sub);
  subtype = TREE_TYPE (sub);
  if (!POINTER_TYPE_P (subtype))
    return NULL_TREE;

  if (TREE_CODE (sub) == ADDR_EXPR)
    {
      tree op = TREE_OPERAND (sub, 0);
      tree optype = TREE_TYPE (op);
      /* *&p => p */
      if (useless_type_conversion_p (type, optype))
	return op;

      /* *(foo *)&fooarray => fooarray[0] */
      if (TREE_CODE (optype) == ARRAY_TYPE
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
	  && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree type_domain = TYPE_DOMAIN (optype);
	  tree min_val = size_zero_node;
	  /* Index the array at its lower bound, which need not be zero
	     (e.g. Fortran and Ada arrays).  */
	  if (type_domain && TYPE_MIN_VALUE (type_domain))
	    min_val = TYPE_MIN_VALUE (type_domain);
	  if (TREE_CODE (min_val) == INTEGER_CST)
	    return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
	}
      /* *(foo *)&complexfoo => __real__ complexfoo */
      else if (TREE_CODE (optype) == COMPLEX_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	return fold_build1 (REALPART_EXPR, type, op);
      /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
      else if (TREE_CODE (optype) == VECTOR_TYPE
	       && useless_type_conversion_p (type, TREE_TYPE (optype)))
	{
	  tree part_width = TYPE_SIZE (type);
	  tree index = bitsize_int (0);
	  return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
	}
    }

  /* *(p + CST) -> ... */
  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
    {
      tree addr = TREE_OPERAND (sub, 0);
      tree off = TREE_OPERAND (sub, 1);
      tree addrtype;

      STRIP_NOPS (addr);
      addrtype = TREE_TYPE (addr);

      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
	  && tree_fits_uhwi_p (off))
	{
	  unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
	  tree part_width = TYPE_SIZE (type);
	  tree part_widthi
	    = tree_to_shwi (part_width) / BITS_PER_UNIT;
	  unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
	  tree index = bitsize_int (indexi);
	  /* Only fold when the offset addresses an element inside the
	     vector.  */
	  if (offset / part_widthi
	      < TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
	    return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
				part_width, index);
	}

      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
      if (TREE_CODE (addr) == ADDR_EXPR
	  && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
	  && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
	{
	  tree size = TYPE_SIZE_UNIT (type);
	  /* The imaginary part lives exactly one element past the real
	     part, so the offset must equal the element size.  */
	  if (tree_int_cst_equal (size, off))
	    return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
	}

      /* *(p + CST) -> MEM_REF <p, CST>.  */
      if (TREE_CODE (addr) != ADDR_EXPR
	  || DECL_P (TREE_OPERAND (addr, 0)))
	return fold_build2 (MEM_REF, type,
			    addr,
			    wide_int_to_tree (ptype, off));
    }

  /* *(foo *)fooarrptr => (*fooarrptr)[0] */
  if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
      && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
    {
      tree type_domain;
      tree min_val = size_zero_node;
      tree osub = sub;
      /* Recurse to simplify the inner indirection; fall back to a plain
	 INDIRECT_REF of the original pointer when that fails.  */
      sub = gimple_fold_indirect_ref (sub);
      if (! sub)
	sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
      type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
      if (type_domain && TYPE_MIN_VALUE (type_domain))
	min_val = TYPE_MIN_VALUE (type_domain);
      if (TREE_CODE (min_val) == INTEGER_CST)
	return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
    }

  return NULL_TREE;
}
19e51b40
JJ
6002
6003/* Return true if CODE is an operation that when operating on signed
6004 integer types involves undefined behavior on overflow and the
6005 operation can be expressed with unsigned arithmetic. */
6006
6007bool
6008arith_code_with_undefined_signed_overflow (tree_code code)
6009{
6010 switch (code)
6011 {
6012 case PLUS_EXPR:
6013 case MINUS_EXPR:
6014 case MULT_EXPR:
6015 case NEGATE_EXPR:
6016 case POINTER_PLUS_EXPR:
6017 return true;
6018 default:
6019 return false;
6020 }
6021}
6022
/* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
   operation that can be transformed to unsigned arithmetic by converting
   its operand, carrying out the operation in the corresponding unsigned
   type and converting the result back to the original type.

   Returns a sequence of statements that replace STMT and also contain
   a modified form of STMT itself.  */

gimple_seq
rewrite_to_defined_overflow (gimple stmt)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "rewriting stmt with undefined signed "
	       "overflow ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
    }

  tree lhs = gimple_assign_lhs (stmt);
  /* The unsigned counterpart of the lhs type; unsigned arithmetic wraps,
     so the rewritten operation has fully defined overflow behavior.  */
  tree type = unsigned_type_for (TREE_TYPE (lhs));
  gimple_seq stmts = NULL;
  /* Convert every rhs operand (ops 1..n-1; op 0 is the lhs) to the
     unsigned type, collecting any conversion statements.  */
  for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
    {
      gimple_seq stmts2 = NULL;
      gimple_set_op (stmt, i,
		     force_gimple_operand (fold_convert (type,
							 gimple_op (stmt, i)),
					   &stmts2, true, NULL_TREE));
      gimple_seq_add_seq (&stmts, stmts2);
    }
  /* Give STMT a fresh unsigned lhs; the original lhs is reassigned by
     the conversion statement appended below.  */
  gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
  /* POINTER_PLUS_EXPR has no meaning on unsigned integers; it becomes a
     plain addition.  */
  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
    gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
  gimple_seq_add_stmt (&stmts, stmt);
  /* Convert the unsigned result back to the original (signed or pointer)
     type of LHS.  */
  gimple cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
  gimple_seq_add_stmt (&stmts, cvt);

  return stmts;
}
d4f5cd5e 6062
3d2cf79f
RB
6063
6064/* Build the expression CODE OP0 of type TYPE with location LOC,
6065 simplifying it first if possible using VALUEIZE if not NULL.
6066 OP0 is expected to be valueized already. Returns the built
6067 expression value and appends statements possibly defining it
6068 to SEQ. */
6069
6070tree
6071gimple_build (gimple_seq *seq, location_t loc,
6072 enum tree_code code, tree type, tree op0,
6073 tree (*valueize)(tree))
6074{
6075 tree res = gimple_simplify (code, type, op0, seq, valueize);
6076 if (!res)
6077 {
6078 if (gimple_in_ssa_p (cfun))
b731b390 6079 res = make_ssa_name (type);
3d2cf79f 6080 else
b731b390 6081 res = create_tmp_reg (type);
3d2cf79f
RB
6082 gimple stmt;
6083 if (code == REALPART_EXPR
6084 || code == IMAGPART_EXPR
6085 || code == VIEW_CONVERT_EXPR)
0d0e4a03 6086 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
3d2cf79f 6087 else
0d0e4a03 6088 stmt = gimple_build_assign (res, code, op0);
3d2cf79f
RB
6089 gimple_set_location (stmt, loc);
6090 gimple_seq_add_stmt_without_update (seq, stmt);
6091 }
6092 return res;
6093}
6094
6095/* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
6096 simplifying it first if possible using VALUEIZE if not NULL.
6097 OP0 and OP1 are expected to be valueized already. Returns the built
6098 expression value and appends statements possibly defining it
6099 to SEQ. */
6100
6101tree
6102gimple_build (gimple_seq *seq, location_t loc,
6103 enum tree_code code, tree type, tree op0, tree op1,
6104 tree (*valueize)(tree))
6105{
6106 tree res = gimple_simplify (code, type, op0, op1, seq, valueize);
6107 if (!res)
6108 {
6109 if (gimple_in_ssa_p (cfun))
b731b390 6110 res = make_ssa_name (type);
3d2cf79f 6111 else
b731b390 6112 res = create_tmp_reg (type);
0d0e4a03 6113 gimple stmt = gimple_build_assign (res, code, op0, op1);
3d2cf79f
RB
6114 gimple_set_location (stmt, loc);
6115 gimple_seq_add_stmt_without_update (seq, stmt);
6116 }
6117 return res;
6118}
6119
6120/* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
6121 simplifying it first if possible using VALUEIZE if not NULL.
6122 OP0, OP1 and OP2 are expected to be valueized already. Returns the built
6123 expression value and appends statements possibly defining it
6124 to SEQ. */
6125
6126tree
6127gimple_build (gimple_seq *seq, location_t loc,
6128 enum tree_code code, tree type, tree op0, tree op1, tree op2,
6129 tree (*valueize)(tree))
6130{
6131 tree res = gimple_simplify (code, type, op0, op1, op2,
6132 seq, valueize);
6133 if (!res)
6134 {
6135 if (gimple_in_ssa_p (cfun))
b731b390 6136 res = make_ssa_name (type);
3d2cf79f 6137 else
b731b390 6138 res = create_tmp_reg (type);
3d2cf79f
RB
6139 gimple stmt;
6140 if (code == BIT_FIELD_REF)
0d0e4a03
JJ
6141 stmt = gimple_build_assign (res, code,
6142 build3 (code, type, op0, op1, op2));
3d2cf79f 6143 else
0d0e4a03 6144 stmt = gimple_build_assign (res, code, op0, op1, op2);
3d2cf79f
RB
6145 gimple_set_location (stmt, loc);
6146 gimple_seq_add_stmt_without_update (seq, stmt);
6147 }
6148 return res;
6149}
6150
6151/* Build the call FN (ARG0) with a result of type TYPE
6152 (or no result if TYPE is void) with location LOC,
6153 simplifying it first if possible using VALUEIZE if not NULL.
6154 ARG0 is expected to be valueized already. Returns the built
6155 expression value (or NULL_TREE if TYPE is void) and appends
6156 statements possibly defining it to SEQ. */
6157
6158tree
6159gimple_build (gimple_seq *seq, location_t loc,
6160 enum built_in_function fn, tree type, tree arg0,
6161 tree (*valueize)(tree))
6162{
6163 tree res = gimple_simplify (fn, type, arg0, seq, valueize);
6164 if (!res)
6165 {
6166 tree decl = builtin_decl_implicit (fn);
6167 gimple stmt = gimple_build_call (decl, 1, arg0);
6168 if (!VOID_TYPE_P (type))
6169 {
6170 if (gimple_in_ssa_p (cfun))
b731b390 6171 res = make_ssa_name (type);
3d2cf79f 6172 else
b731b390 6173 res = create_tmp_reg (type);
3d2cf79f
RB
6174 gimple_call_set_lhs (stmt, res);
6175 }
6176 gimple_set_location (stmt, loc);
6177 gimple_seq_add_stmt_without_update (seq, stmt);
6178 }
6179 return res;
6180}
6181
6182/* Build the call FN (ARG0, ARG1) with a result of type TYPE
6183 (or no result if TYPE is void) with location LOC,
6184 simplifying it first if possible using VALUEIZE if not NULL.
6185 ARG0 is expected to be valueized already. Returns the built
6186 expression value (or NULL_TREE if TYPE is void) and appends
6187 statements possibly defining it to SEQ. */
6188
6189tree
6190gimple_build (gimple_seq *seq, location_t loc,
6191 enum built_in_function fn, tree type, tree arg0, tree arg1,
6192 tree (*valueize)(tree))
6193{
6194 tree res = gimple_simplify (fn, type, arg0, arg1, seq, valueize);
6195 if (!res)
6196 {
6197 tree decl = builtin_decl_implicit (fn);
6198 gimple stmt = gimple_build_call (decl, 2, arg0, arg1);
6199 if (!VOID_TYPE_P (type))
6200 {
6201 if (gimple_in_ssa_p (cfun))
b731b390 6202 res = make_ssa_name (type);
3d2cf79f 6203 else
b731b390 6204 res = create_tmp_reg (type);
3d2cf79f
RB
6205 gimple_call_set_lhs (stmt, res);
6206 }
6207 gimple_set_location (stmt, loc);
6208 gimple_seq_add_stmt_without_update (seq, stmt);
6209 }
6210 return res;
6211}
6212
6213/* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
6214 (or no result if TYPE is void) with location LOC,
6215 simplifying it first if possible using VALUEIZE if not NULL.
6216 ARG0 is expected to be valueized already. Returns the built
6217 expression value (or NULL_TREE if TYPE is void) and appends
6218 statements possibly defining it to SEQ. */
6219
6220tree
6221gimple_build (gimple_seq *seq, location_t loc,
6222 enum built_in_function fn, tree type,
6223 tree arg0, tree arg1, tree arg2,
6224 tree (*valueize)(tree))
6225{
6226 tree res = gimple_simplify (fn, type, arg0, arg1, arg2, seq, valueize);
6227 if (!res)
6228 {
6229 tree decl = builtin_decl_implicit (fn);
6230 gimple stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
6231 if (!VOID_TYPE_P (type))
6232 {
6233 if (gimple_in_ssa_p (cfun))
b731b390 6234 res = make_ssa_name (type);
3d2cf79f 6235 else
b731b390 6236 res = create_tmp_reg (type);
3d2cf79f
RB
6237 gimple_call_set_lhs (stmt, res);
6238 }
6239 gimple_set_location (stmt, loc);
6240 gimple_seq_add_stmt_without_update (seq, stmt);
6241 }
6242 return res;
6243}
6244
6245/* Build the conversion (TYPE) OP with a result of type TYPE
6246 with location LOC if such conversion is neccesary in GIMPLE,
6247 simplifying it first.
6248 Returns the built expression value and appends
6249 statements possibly defining it to SEQ. */
d4f5cd5e
RB
6250
6251tree
6252gimple_convert (gimple_seq *seq, location_t loc, tree type, tree op)
6253{
6254 if (useless_type_conversion_p (type, TREE_TYPE (op)))
6255 return op;
3d2cf79f 6256 return gimple_build (seq, loc, NOP_EXPR, type, op);
d4f5cd5e 6257}
This page took 2.769319 seconds and 5 git commands to generate.