1 /* Statement simplification on GIMPLE.
2 Copyright (C) 2010-2022 Free Software Foundation, Inc.
3 Split out from tree-ssa-ccp.cc.
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
10 later version.
11
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "target.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "predict.h"
30 #include "ssa.h"
31 #include "cgraph.h"
32 #include "gimple-pretty-print.h"
33 #include "gimple-ssa-warn-access.h"
34 #include "gimple-ssa-warn-restrict.h"
35 #include "fold-const.h"
36 #include "stmt.h"
37 #include "expr.h"
38 #include "stor-layout.h"
39 #include "dumpfile.h"
40 #include "gimple-iterator.h"
41 #include "gimple-fold.h"
42 #include "gimplify.h"
43 #include "tree-into-ssa.h"
44 #include "tree-dfa.h"
45 #include "tree-object-size.h"
46 #include "tree-ssa.h"
47 #include "tree-ssa-propagate.h"
48 #include "ipa-utils.h"
49 #include "tree-ssa-address.h"
50 #include "langhooks.h"
51 #include "gimplify-me.h"
52 #include "dbgcnt.h"
53 #include "builtins.h"
54 #include "tree-eh.h"
55 #include "gimple-match.h"
56 #include "gomp-constants.h"
57 #include "optabs-query.h"
58 #include "omp-general.h"
59 #include "tree-cfg.h"
60 #include "fold-const-call.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "asan.h"
64 #include "diagnostic-core.h"
65 #include "intl.h"
66 #include "calls.h"
67 #include "tree-vector-builder.h"
68 #include "tree-ssa-strlen.h"
69 #include "varasm.h"
70 #include "internal-fn.h"
71
72 enum strlen_range_kind {
73 /* Compute the exact constant string length. */
74 SRK_STRLEN,
75 /* Compute the maximum constant string length. */
76 SRK_STRLENMAX,
77 /* Compute a range of string lengths bounded by object sizes. When
78 the length of a string cannot be determined, consider as the upper
79 bound the size of the enclosing object the string may be a member
80 or element of. Also determine the size of the largest character
81 array the string may refer to. */
82 SRK_LENRANGE,
83 /* Determine the integer value of the argument (not string length). */
84 SRK_INT_VALUE
85 };
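/* Illustration (editor's note): for 'char a[8]' whose contents are unknown,
   SRK_STRLEN fails (there is no exact constant length), while SRK_LENRANGE
   yields the range [0, 7], using the array size minus 1 (for the
   terminating nul) as the upper bound.  */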
86
87 static bool
88 get_range_strlen (tree, bitmap, strlen_range_kind, c_strlen_data *, unsigned);
89
90 /* Return true when DECL can be referenced from the current unit.
91 FROM_DECL (if non-null) specifies the constructor of the variable DECL
92 was taken from. We can get declarations that are not possible to
93 reference for various reasons:
94 
95 1) When analyzing C++ virtual tables.
96 C++ virtual tables do have known constructors even
97 when they are keyed to another compilation unit.
98 Those tables can contain pointers to methods and vars
99 in other units. Those methods have both STATIC and EXTERNAL
100 set.
101 2) In WHOPR mode devirtualization might lead to a reference
102 to a method that was partitioned elsewhere.
103 In this case we have a static VAR_DECL or FUNCTION_DECL
104 that has no corresponding callgraph/varpool node
105 declaring the body.
106 3) COMDAT functions referred to by external vtables that
107 we devirtualize only during the final compilation stage.
108 At this point we have already decided that we will not output
109 the function body, and thus we can't reference the symbol
110 directly. */
111
112 static bool
113 can_refer_decl_in_current_unit_p (tree decl, tree from_decl)
114 {
115 varpool_node *vnode;
116 struct cgraph_node *node;
117 symtab_node *snode;
118
119 if (DECL_ABSTRACT_P (decl))
120 return false;
121
122 /* We are concerned only about static/external vars and functions. */
123 if ((!TREE_STATIC (decl) && !DECL_EXTERNAL (decl))
124 || !VAR_OR_FUNCTION_DECL_P (decl))
125 return true;
126
127 /* Static objects can be referred to only if they are defined and not
128 optimized out yet. */
129 if (!TREE_PUBLIC (decl))
130 {
131 if (DECL_EXTERNAL (decl))
132 return false;
133 /* Before we start optimizing unreachable code we can be sure all
134 static objects are defined. */
135 if (symtab->function_flags_ready)
136 return true;
137 snode = symtab_node::get (decl);
138 if (!snode || !snode->definition)
139 return false;
140 node = dyn_cast <cgraph_node *> (snode);
141 return !node || !node->inlined_to;
142 }
143
144 /* We will later output the initializer, so we can refer to it.
145 Thus we are concerned only when DECL comes from the initializer of
146 an external var, or of a var that has been optimized out. */
147 if (!from_decl
148 || !VAR_P (from_decl)
149 || (!DECL_EXTERNAL (from_decl)
150 && (vnode = varpool_node::get (from_decl)) != NULL
151 && vnode->definition)
152 || (flag_ltrans
153 && (vnode = varpool_node::get (from_decl)) != NULL
154 && vnode->in_other_partition))
155 return true;
156 /* We are folding a reference from an external vtable. The vtable may
157 refer to a symbol keyed to another compilation unit. The other
158 compilation unit may be in a separate DSO and the symbol may be hidden. */
159 if (DECL_VISIBILITY_SPECIFIED (decl)
160 && DECL_EXTERNAL (decl)
161 && DECL_VISIBILITY (decl) != VISIBILITY_DEFAULT
162 && (!(snode = symtab_node::get (decl)) || !snode->in_other_partition))
163 return false;
164 /* When the function is public, we can always introduce a new reference.
165 The exception is COMDAT functions, where introducing a direct
166 reference implies the need to include the function body in the current unit. */
167 if (TREE_PUBLIC (decl) && !DECL_COMDAT (decl))
168 return true;
169 /* We have a COMDAT. We are going to check whether we still have a
170 definition or whether the definition is going to be output in another
171 partition. Bypass this when gimplifying; all needed functions will be produced.
172 
173 As observed in PR20991, for already optimized-out COMDAT virtual functions
174 it may be tempting not to give up, because the copy will be
175 output elsewhere when the corresponding vtable is output.
176 This is, however, not possible: the ABI specifies that COMDATs are output in
177 the units where they are used, and when the other unit was compiled with LTO
178 it is possible that the vtable was kept public while the function itself
179 was privatized. */
180 if (!symtab->function_flags_ready)
181 return true;
182
183 snode = symtab_node::get (decl);
184 if (!snode
185 || ((!snode->definition || DECL_EXTERNAL (decl))
186 && (!snode->in_other_partition
187 || (!snode->forced_by_abi && !snode->force_output))))
188 return false;
189 node = dyn_cast <cgraph_node *> (snode);
190 return !node || !node->inlined_to;
191 }
192
193 /* Create a temporary for TYPE for a statement STMT. If the current function
194 is in SSA form, an SSA name is created. Otherwise a temporary register
195 is made. */
196
197 tree
198 create_tmp_reg_or_ssa_name (tree type, gimple *stmt)
199 {
200 if (gimple_in_ssa_p (cfun))
201 return make_ssa_name (type, stmt);
202 else
203 return create_tmp_reg (type);
204 }
205
206 /* CVAL is a value taken from the DECL_INITIAL of a variable. Try to
207 transform it into an acceptable form for is_gimple_min_invariant.
208 FROM_DECL (if non-NULL) specifies the variable whose constructor contains CVAL. */
209
210 tree
211 canonicalize_constructor_val (tree cval, tree from_decl)
212 {
213 if (CONSTANT_CLASS_P (cval))
214 return cval;
215
216 tree orig_cval = cval;
217 STRIP_NOPS (cval);
218 if (TREE_CODE (cval) == POINTER_PLUS_EXPR
219 && TREE_CODE (TREE_OPERAND (cval, 1)) == INTEGER_CST)
220 {
221 tree ptr = TREE_OPERAND (cval, 0);
222 if (is_gimple_min_invariant (ptr))
223 cval = build1_loc (EXPR_LOCATION (cval),
224 ADDR_EXPR, TREE_TYPE (ptr),
225 fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (ptr)),
226 ptr,
227 fold_convert (ptr_type_node,
228 TREE_OPERAND (cval, 1))));
229 }
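/* For example (editor's note), an unfolded constant like
   '(char *) &v + 4' (a POINTER_PLUS_EXPR with a constant offset) is
   rewritten above into an ADDR_EXPR of a MEM_REF -- printed in GIMPLE
   dumps roughly as '&MEM[(char *)&v + 4B]' -- which
   is_gimple_min_invariant accepts.  */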
230 if (TREE_CODE (cval) == ADDR_EXPR)
231 {
232 tree base = NULL_TREE;
233 if (TREE_CODE (TREE_OPERAND (cval, 0)) == COMPOUND_LITERAL_EXPR)
234 {
235 base = COMPOUND_LITERAL_EXPR_DECL (TREE_OPERAND (cval, 0));
236 if (base)
237 TREE_OPERAND (cval, 0) = base;
238 }
239 else
240 base = get_base_address (TREE_OPERAND (cval, 0));
241 if (!base)
242 return NULL_TREE;
243
244 if (VAR_OR_FUNCTION_DECL_P (base)
245 && !can_refer_decl_in_current_unit_p (base, from_decl))
246 return NULL_TREE;
247 if (TREE_TYPE (base) == error_mark_node)
248 return NULL_TREE;
249 if (VAR_P (base))
250 /* ??? We should be able to assert that TREE_ADDRESSABLE is set,
251 but since the use can be in a debug stmt we can't. */
252 ;
253 else if (TREE_CODE (base) == FUNCTION_DECL)
254 {
255 /* Make sure we create a cgraph node for functions we'll reference.
256 They can be non-existent if the reference comes from an entry
257 of an external vtable for example. */
258 cgraph_node::get_create (base);
259 }
260 /* Fixup types in global initializers. */
261 if (TREE_TYPE (TREE_TYPE (cval)) != TREE_TYPE (TREE_OPERAND (cval, 0)))
262 cval = build_fold_addr_expr (TREE_OPERAND (cval, 0));
263
264 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
265 cval = fold_convert (TREE_TYPE (orig_cval), cval);
266 return cval;
267 }
268 /* In CONSTRUCTORs we may see unfolded constants like (int (*) ()) 0. */
269 if (TREE_CODE (cval) == INTEGER_CST)
270 {
271 if (TREE_OVERFLOW_P (cval))
272 cval = drop_tree_overflow (cval);
273 if (!useless_type_conversion_p (TREE_TYPE (orig_cval), TREE_TYPE (cval)))
274 cval = fold_convert (TREE_TYPE (orig_cval), cval);
275 return cval;
276 }
277 return orig_cval;
278 }
279
280 /* If SYM is a constant variable with known value, return the value.
281 NULL_TREE is returned otherwise. */
282
283 tree
284 get_symbol_constant_value (tree sym)
285 {
286 tree val = ctor_for_folding (sym);
287 if (val != error_mark_node)
288 {
289 if (val)
290 {
291 val = canonicalize_constructor_val (unshare_expr (val), sym);
292 if (val
293 && is_gimple_min_invariant (val)
294 && useless_type_conversion_p (TREE_TYPE (sym), TREE_TYPE (val)))
295 return val;
296 else
297 return NULL_TREE;
298 }
299 /* Variables declared 'const' without an initializer
300 have zero as the initializer if they cannot be
301 overridden at link or run time. */
302 if (!val
303 && is_gimple_reg_type (TREE_TYPE (sym)))
304 return build_zero_cst (TREE_TYPE (sym));
305 }
306
307 return NULL_TREE;
308 }
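/* Example (editor's note): given 'static const int answer = 42;', a load
   of 'answer' folds to the constant 42 via the function above, while a
   'const' register-type variable with no initializer that binds locally
   folds to zero via build_zero_cst.  */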
309
310
311
312 /* Subroutine of fold_stmt. We perform constant folding of the
313 memory reference tree EXPR. */
314
315 static tree
316 maybe_fold_reference (tree expr)
317 {
318 tree result = NULL_TREE;
319
320 if ((TREE_CODE (expr) == VIEW_CONVERT_EXPR
321 || TREE_CODE (expr) == REALPART_EXPR
322 || TREE_CODE (expr) == IMAGPART_EXPR)
323 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
324 result = fold_unary_loc (EXPR_LOCATION (expr),
325 TREE_CODE (expr),
326 TREE_TYPE (expr),
327 TREE_OPERAND (expr, 0));
328 else if (TREE_CODE (expr) == BIT_FIELD_REF
329 && CONSTANT_CLASS_P (TREE_OPERAND (expr, 0)))
330 result = fold_ternary_loc (EXPR_LOCATION (expr),
331 TREE_CODE (expr),
332 TREE_TYPE (expr),
333 TREE_OPERAND (expr, 0),
334 TREE_OPERAND (expr, 1),
335 TREE_OPERAND (expr, 2));
336 else
337 result = fold_const_aggregate_ref (expr);
338
339 if (result && is_gimple_min_invariant (result))
340 return result;
341
342 return NULL_TREE;
343 }
344
345 /* Return true if EXPR is an acceptable right-hand-side for a
346 GIMPLE assignment. We validate the entire tree, not just
347 the root node, thus catching expressions that embed complex
348 operands that are not permitted in GIMPLE. This function
349 is needed because the folding routines in fold-const.cc
350 may return such expressions in some cases, e.g., an array
351 access with an embedded index addition. It may make more
352 sense to have folding routines that are sensitive to the
353 constraints on GIMPLE operands, rather than abandoning
354 any attempt to fold if the usual folding turns out to be too
355 aggressive. */
356
357 bool
358 valid_gimple_rhs_p (tree expr)
359 {
360 enum tree_code code = TREE_CODE (expr);
361
362 switch (TREE_CODE_CLASS (code))
363 {
364 case tcc_declaration:
365 if (!is_gimple_variable (expr))
366 return false;
367 break;
368
369 case tcc_constant:
370 /* All constants are ok. */
371 break;
372
373 case tcc_comparison:
374 /* GENERIC allows comparisons with non-boolean types, reject
375 those for GIMPLE. Let vector-typed comparisons pass - rules
376 for GENERIC and GIMPLE are the same here. */
377 if (!(INTEGRAL_TYPE_P (TREE_TYPE (expr))
378 && (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE
379 || TYPE_PRECISION (TREE_TYPE (expr)) == 1))
380 && ! VECTOR_TYPE_P (TREE_TYPE (expr)))
381 return false;
382
383 /* Fallthru. */
384 case tcc_binary:
385 if (!is_gimple_val (TREE_OPERAND (expr, 0))
386 || !is_gimple_val (TREE_OPERAND (expr, 1)))
387 return false;
388 break;
389
390 case tcc_unary:
391 if (!is_gimple_val (TREE_OPERAND (expr, 0)))
392 return false;
393 break;
394
395 case tcc_expression:
396 switch (code)
397 {
398 case ADDR_EXPR:
399 {
400 tree t;
401 if (is_gimple_min_invariant (expr))
402 return true;
403 t = TREE_OPERAND (expr, 0);
404 while (handled_component_p (t))
405 {
406 /* ??? More checks needed, see the GIMPLE verifier. */
407 if ((TREE_CODE (t) == ARRAY_REF
408 || TREE_CODE (t) == ARRAY_RANGE_REF)
409 && !is_gimple_val (TREE_OPERAND (t, 1)))
410 return false;
411 t = TREE_OPERAND (t, 0);
412 }
413 if (!is_gimple_id (t))
414 return false;
415 }
416 break;
417
418 default:
419 if (get_gimple_rhs_class (code) == GIMPLE_TERNARY_RHS)
420 {
421 if (!is_gimple_val (TREE_OPERAND (expr, 0))
422 || !is_gimple_val (TREE_OPERAND (expr, 1))
423 || !is_gimple_val (TREE_OPERAND (expr, 2)))
424 return false;
425 break;
426 }
427 return false;
428 }
429 break;
430
431 case tcc_vl_exp:
432 return false;
433
434 case tcc_exceptional:
435 if (code == CONSTRUCTOR)
436 {
437 unsigned i;
438 tree elt;
439 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (expr), i, elt)
440 if (!is_gimple_val (elt))
441 return false;
442 return true;
443 }
444 if (code != SSA_NAME)
445 return false;
446 break;
447
448 case tcc_reference:
449 if (code == BIT_FIELD_REF)
450 return is_gimple_val (TREE_OPERAND (expr, 0));
451 return false;
452
453 default:
454 return false;
455 }
456
457 return true;
458 }
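/* Examples (editor's note): 'a_1 + b_2' is a valid GIMPLE rhs, while
   'a_1 + (b_2 * c_3)' is not: the nested multiplication is not an
   is_gimple_val operand, so fold results of that shape must be rejected
   (or regimplified) rather than substituted directly.  */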
459
460
461 /* Attempt to fold an assignment statement pointed-to by SI. Returns a
462 replacement rhs for the statement or NULL_TREE if no simplification
463 could be made. It is assumed that the operands have been previously
464 folded. */
465
466 static tree
467 fold_gimple_assign (gimple_stmt_iterator *si)
468 {
469 gimple *stmt = gsi_stmt (*si);
470 enum tree_code subcode = gimple_assign_rhs_code (stmt);
471 location_t loc = gimple_location (stmt);
472
473 tree result = NULL_TREE;
474
475 switch (get_gimple_rhs_class (subcode))
476 {
477 case GIMPLE_SINGLE_RHS:
478 {
479 tree rhs = gimple_assign_rhs1 (stmt);
480
481 if (TREE_CLOBBER_P (rhs))
482 return NULL_TREE;
483
484 if (REFERENCE_CLASS_P (rhs))
485 return maybe_fold_reference (rhs);
486
487 else if (TREE_CODE (rhs) == OBJ_TYPE_REF)
488 {
489 tree val = OBJ_TYPE_REF_EXPR (rhs);
490 if (is_gimple_min_invariant (val))
491 return val;
492 else if (flag_devirtualize && virtual_method_call_p (rhs))
493 {
494 bool final;
495 vec <cgraph_node *>targets
496 = possible_polymorphic_call_targets (rhs, stmt, &final);
497 if (final && targets.length () <= 1 && dbg_cnt (devirt))
498 {
499 if (dump_enabled_p ())
500 {
501 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
502 "resolving virtual function address "
503 "reference to function %s\n",
504 targets.length () == 1
505 ? targets[0]->name ()
506 : "NULL");
507 }
508 if (targets.length () == 1)
509 {
510 val = fold_convert (TREE_TYPE (val),
511 build_fold_addr_expr_loc
512 (loc, targets[0]->decl));
513 STRIP_USELESS_TYPE_CONVERSION (val);
514 }
515 else
516 /* We cannot use __builtin_unreachable here because it
517 cannot have its address taken. */
518 val = build_int_cst (TREE_TYPE (val), 0);
519 return val;
520 }
521 }
522 }
523
524 else if (TREE_CODE (rhs) == ADDR_EXPR)
525 {
526 tree ref = TREE_OPERAND (rhs, 0);
527 if (TREE_CODE (ref) == MEM_REF
528 && integer_zerop (TREE_OPERAND (ref, 1)))
529 {
530 result = TREE_OPERAND (ref, 0);
531 if (!useless_type_conversion_p (TREE_TYPE (rhs),
532 TREE_TYPE (result)))
533 result = build1 (NOP_EXPR, TREE_TYPE (rhs), result);
534 return result;
535 }
536 }
537
538 else if (TREE_CODE (rhs) == CONSTRUCTOR
539 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE)
540 {
541 /* Fold a constant vector CONSTRUCTOR to VECTOR_CST. */
542 unsigned i;
543 tree val;
544
545 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
546 if (! CONSTANT_CLASS_P (val))
547 return NULL_TREE;
548
549 return build_vector_from_ctor (TREE_TYPE (rhs),
550 CONSTRUCTOR_ELTS (rhs));
551 }
552
553 else if (DECL_P (rhs)
554 && is_gimple_reg_type (TREE_TYPE (rhs)))
555 return get_symbol_constant_value (rhs);
556 }
557 break;
558
559 case GIMPLE_UNARY_RHS:
560 break;
561
562 case GIMPLE_BINARY_RHS:
563 break;
564
565 case GIMPLE_TERNARY_RHS:
566 result = fold_ternary_loc (loc, subcode,
567 TREE_TYPE (gimple_assign_lhs (stmt)),
568 gimple_assign_rhs1 (stmt),
569 gimple_assign_rhs2 (stmt),
570 gimple_assign_rhs3 (stmt));
571
572 if (result)
573 {
574 STRIP_USELESS_TYPE_CONVERSION (result);
575 if (valid_gimple_rhs_p (result))
576 return result;
577 }
578 break;
579
580 case GIMPLE_INVALID_RHS:
581 gcc_unreachable ();
582 }
583
584 return NULL_TREE;
585 }
586
587
588 /* Replace the statement at *SI_P with the sequence of statements in STMTS,
589 adjusting the replacement stmts' locations and virtual operands.
590 If the statement has an lhs, the last stmt in the sequence is expected
591 to assign to that lhs. */
592
593 static void
594 gsi_replace_with_seq_vops (gimple_stmt_iterator *si_p, gimple_seq stmts)
595 {
596 gimple *stmt = gsi_stmt (*si_p);
597
598 if (gimple_has_location (stmt))
599 annotate_all_with_location (stmts, gimple_location (stmt));
600
601 /* First iterate over the replacement statements backward, assigning
602 virtual operands to their defining statements. */
603 gimple *laststore = NULL;
604 for (gimple_stmt_iterator i = gsi_last (stmts);
605 !gsi_end_p (i); gsi_prev (&i))
606 {
607 gimple *new_stmt = gsi_stmt (i);
608 if ((gimple_assign_single_p (new_stmt)
609 && !is_gimple_reg (gimple_assign_lhs (new_stmt)))
610 || (is_gimple_call (new_stmt)
611 && (gimple_call_flags (new_stmt)
612 & (ECF_NOVOPS | ECF_PURE | ECF_CONST | ECF_NORETURN)) == 0))
613 {
614 tree vdef;
615 if (!laststore)
616 vdef = gimple_vdef (stmt);
617 else
618 vdef = make_ssa_name (gimple_vop (cfun), new_stmt);
619 gimple_set_vdef (new_stmt, vdef);
620 if (vdef && TREE_CODE (vdef) == SSA_NAME)
621 SSA_NAME_DEF_STMT (vdef) = new_stmt;
622 laststore = new_stmt;
623 }
624 }
625
626 /* Second iterate over the statements forward, assigning virtual
627 operands to their uses. */
628 tree reaching_vuse = gimple_vuse (stmt);
629 for (gimple_stmt_iterator i = gsi_start (stmts);
630 !gsi_end_p (i); gsi_next (&i))
631 {
632 gimple *new_stmt = gsi_stmt (i);
633 /* If the new statement possibly has a VUSE, update it with the exact SSA
634 name we know will reach this one. */
635 if (gimple_has_mem_ops (new_stmt))
636 gimple_set_vuse (new_stmt, reaching_vuse);
637 gimple_set_modified (new_stmt, true);
638 if (gimple_vdef (new_stmt))
639 reaching_vuse = gimple_vdef (new_stmt);
640 }
641
642 /* If the new sequence does not do a store, release the virtual
643 definition of the original statement. */
644 if (reaching_vuse
645 && reaching_vuse == gimple_vuse (stmt))
646 {
647 tree vdef = gimple_vdef (stmt);
648 if (vdef
649 && TREE_CODE (vdef) == SSA_NAME)
650 {
651 unlink_stmt_vdef (stmt);
652 release_ssa_name (vdef);
653 }
654 }
655
656 /* Finally replace the original statement with the sequence. */
657 gsi_replace_with_seq (si_p, stmts, false);
658 }
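/* Editor's sketch of the virtual-operand bookkeeping above: replacing a
   statement with VUSE .MEM_3 / VDEF .MEM_4 by two stores S1 and S2 gives
     S1: VUSE .MEM_3, VDEF .MEM_5  (a freshly made SSA name)
     S2: VUSE .MEM_5, VDEF .MEM_4  (reuses the original VDEF)
   so code after the sequence still sees .MEM_4, exactly as before.  */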
659
660 /* Helper function for update_gimple_call and
661 gimplify_and_update_call_from_tree. A GIMPLE_CALL STMT is being replaced
662 with GIMPLE_CALL NEW_STMT. */
663
664 static void
665 finish_update_gimple_call (gimple_stmt_iterator *si_p, gimple *new_stmt,
666 gimple *stmt)
667 {
668 tree lhs = gimple_call_lhs (stmt);
669 gimple_call_set_lhs (new_stmt, lhs);
670 if (lhs && TREE_CODE (lhs) == SSA_NAME)
671 SSA_NAME_DEF_STMT (lhs) = new_stmt;
672 gimple_move_vops (new_stmt, stmt);
673 gimple_set_location (new_stmt, gimple_location (stmt));
674 if (gimple_block (new_stmt) == NULL_TREE)
675 gimple_set_block (new_stmt, gimple_block (stmt));
676 gsi_replace (si_p, new_stmt, false);
677 }
678
679 /* Update the GIMPLE_CALL statement at iterator *SI_P to call FN with
680 NARGS arguments; the arguments, already in GIMPLE form, follow the
681 NARGS argument. */
682
683 bool
684 update_gimple_call (gimple_stmt_iterator *si_p, tree fn, int nargs, ...)
685 {
686 va_list ap;
687 gcall *new_stmt, *stmt = as_a <gcall *> (gsi_stmt (*si_p));
688
689 gcc_assert (is_gimple_call (stmt));
690 va_start (ap, nargs);
691 new_stmt = gimple_build_call_valist (fn, nargs, ap);
692 finish_update_gimple_call (si_p, new_stmt, stmt);
693 va_end (ap);
694 return true;
695 }
696
697 /* Return true if EXPR is a CALL_EXPR suitable for representation
698 as a single GIMPLE_CALL statement. If the arguments require
699 further gimplification, return false. */
700
701 static bool
702 valid_gimple_call_p (tree expr)
703 {
704 unsigned i, nargs;
705
706 if (TREE_CODE (expr) != CALL_EXPR)
707 return false;
708
709 nargs = call_expr_nargs (expr);
710 for (i = 0; i < nargs; i++)
711 {
712 tree arg = CALL_EXPR_ARG (expr, i);
713 if (is_gimple_reg_type (TREE_TYPE (arg)))
714 {
715 if (!is_gimple_val (arg))
716 return false;
717 }
718 else
719 if (!is_gimple_lvalue (arg))
720 return false;
721 }
722
723 return true;
724 }
725
726 /* Convert EXPR into a GIMPLE value suitable for substitution on the
727 RHS of an assignment. Insert the necessary statements before
728 iterator *SI_P. The statement at *SI_P, which must be a GIMPLE_CALL,
729 is replaced. If the call is expected to produce a result, then it
730 is replaced by an assignment of the new RHS to the result variable.
731 If the result is to be ignored, then the call is replaced by a
732 GIMPLE_NOP. A proper VDEF chain is retained by making the first
733 VUSE and the last VDEF of the whole sequence be the same as the replaced
734 statement and using new SSA names for stores in between. */
735
736 void
737 gimplify_and_update_call_from_tree (gimple_stmt_iterator *si_p, tree expr)
738 {
739 tree lhs;
740 gimple *stmt, *new_stmt;
741 gimple_stmt_iterator i;
742 gimple_seq stmts = NULL;
743
744 stmt = gsi_stmt (*si_p);
745
746 gcc_assert (is_gimple_call (stmt));
747
748 if (valid_gimple_call_p (expr))
749 {
750 /* The call has simplified to another call. */
751 tree fn = CALL_EXPR_FN (expr);
752 unsigned i;
753 unsigned nargs = call_expr_nargs (expr);
754 vec<tree> args = vNULL;
755 gcall *new_stmt;
756
757 if (nargs > 0)
758 {
759 args.create (nargs);
760 args.safe_grow_cleared (nargs, true);
761
762 for (i = 0; i < nargs; i++)
763 args[i] = CALL_EXPR_ARG (expr, i);
764 }
765
766 new_stmt = gimple_build_call_vec (fn, args);
767 finish_update_gimple_call (si_p, new_stmt, stmt);
768 args.release ();
769 return;
770 }
771
772 lhs = gimple_call_lhs (stmt);
773 if (lhs == NULL_TREE)
774 {
775 push_gimplify_context (gimple_in_ssa_p (cfun));
776 gimplify_and_add (expr, &stmts);
777 pop_gimplify_context (NULL);
778
779 /* We can end up folding a memcpy of an empty class assignment,
780 which gets optimized away by C++ gimplification. */
781 if (gimple_seq_empty_p (stmts))
782 {
783 if (gimple_in_ssa_p (cfun))
784 {
785 unlink_stmt_vdef (stmt);
786 release_defs (stmt);
787 }
788 gsi_replace (si_p, gimple_build_nop (), false);
789 return;
790 }
791 }
792 else
793 {
794 tree tmp = force_gimple_operand (expr, &stmts, false, NULL_TREE);
795 new_stmt = gimple_build_assign (lhs, tmp);
796 i = gsi_last (stmts);
797 gsi_insert_after_without_update (&i, new_stmt,
798 GSI_CONTINUE_LINKING);
799 }
800
801 gsi_replace_with_seq_vops (si_p, stmts);
802 }
803
804
805 /* Replace the call at *GSI with the gimple value VAL. */
806
807 void
808 replace_call_with_value (gimple_stmt_iterator *gsi, tree val)
809 {
810 gimple *stmt = gsi_stmt (*gsi);
811 tree lhs = gimple_call_lhs (stmt);
812 gimple *repl;
813 if (lhs)
814 {
815 if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (val)))
816 val = fold_convert (TREE_TYPE (lhs), val);
817 repl = gimple_build_assign (lhs, val);
818 }
819 else
820 repl = gimple_build_nop ();
821 tree vdef = gimple_vdef (stmt);
822 if (vdef && TREE_CODE (vdef) == SSA_NAME)
823 {
824 unlink_stmt_vdef (stmt);
825 release_ssa_name (vdef);
826 }
827 gsi_replace (gsi, repl, false);
828 }
829
830 /* Replace the call at *GSI with the new call REPL and fold that
831 again. */
832
833 static void
834 replace_call_with_call_and_fold (gimple_stmt_iterator *gsi, gimple *repl)
835 {
836 gimple *stmt = gsi_stmt (*gsi);
837 gimple_call_set_lhs (repl, gimple_call_lhs (stmt));
838 gimple_set_location (repl, gimple_location (stmt));
839 gimple_move_vops (repl, stmt);
840 gsi_replace (gsi, repl, false);
841 fold_stmt (gsi);
842 }
843
844 /* Return true if VAR is a VAR_DECL or a component thereof. */
845
846 static bool
847 var_decl_component_p (tree var)
848 {
849 tree inner = var;
850 while (handled_component_p (inner))
851 inner = TREE_OPERAND (inner, 0);
852 return (DECL_P (inner)
853 || (TREE_CODE (inner) == MEM_REF
854 && TREE_CODE (TREE_OPERAND (inner, 0)) == ADDR_EXPR));
855 }
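/* For instance (editor's note), for 'x.a.b[i]' the loop above strips the
   handled components down to the VAR_DECL 'x', so the predicate returns
   true; for '*p' with an SSA_NAME pointer it returns false.  */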
856
857 /* Return TRUE if the SIZE argument, representing the size of an
858 object, is in a range of values of which exactly zero is valid. */
859
860 static bool
861 size_must_be_zero_p (tree size)
862 {
863 if (integer_zerop (size))
864 return true;
865
866 if (TREE_CODE (size) != SSA_NAME || !INTEGRAL_TYPE_P (TREE_TYPE (size)))
867 return false;
868
869 tree type = TREE_TYPE (size);
870 int prec = TYPE_PRECISION (type);
871
872 /* Compute the value of SSIZE_MAX, the largest positive value that
873 can be stored in ssize_t, the signed counterpart of size_t. */
874 wide_int ssize_max = wi::lshift (wi::one (prec), prec - 1) - 1;
875 value_range valid_range (build_int_cst (type, 0),
876 wide_int_to_tree (type, ssize_max));
877 value_range vr;
878 if (cfun)
879 get_range_query (cfun)->range_of_expr (vr, size);
880 else
881 get_global_range_query ()->range_of_expr (vr, size);
882 if (vr.undefined_p ())
883 vr.set_varying (TREE_TYPE (size));
884 vr.intersect (valid_range);
885 return vr.zero_p ();
886 }
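/* Worked example (editor's note): with a 64-bit size_t, prec == 64 and
   ssize_max == (1 << 63) - 1 == 0x7fffffffffffffff.  For
     size_t n = cond ? 0 : (size_t) -1;
   the computed range of n is {0, SIZE_MAX}; intersecting it with the
   valid range [0, SSIZE_MAX] leaves just {0}, so the function returns
   true and e.g. memcpy (d, s, n) can be folded to d.  */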
887
888 /* Fold function call to builtin mem{{,p}cpy,move}. Try to detect and
889 diagnose (otherwise undefined) overlapping copies without preventing
890 folding. When folded, GCC guarantees that overlapping memcpy has
891 the same semantics as memmove. A call to the library memcpy need not
892 provide the same guarantee. Return false if no simplification can
893 be made. */
894
895 static bool
896 gimple_fold_builtin_memory_op (gimple_stmt_iterator *gsi,
897 tree dest, tree src, enum built_in_function code)
898 {
899 gimple *stmt = gsi_stmt (*gsi);
900 tree lhs = gimple_call_lhs (stmt);
901 tree len = gimple_call_arg (stmt, 2);
902 location_t loc = gimple_location (stmt);
903
904 /* If the LEN parameter is a constant zero, or is in a range where
905 the only valid value is zero, return DEST. */
906 if (size_must_be_zero_p (len))
907 {
908 gimple *repl;
909 if (gimple_call_lhs (stmt))
910 repl = gimple_build_assign (gimple_call_lhs (stmt), dest);
911 else
912 repl = gimple_build_nop ();
913 tree vdef = gimple_vdef (stmt);
914 if (vdef && TREE_CODE (vdef) == SSA_NAME)
915 {
916 unlink_stmt_vdef (stmt);
917 release_ssa_name (vdef);
918 }
919 gsi_replace (gsi, repl, false);
920 return true;
921 }
922
923 /* If SRC and DEST are the same (and not volatile), return
924 DEST{,+LEN,+LEN-1}. */
925 if (operand_equal_p (src, dest, 0))
926 {
927 /* Avoid diagnosing exact overlap in calls to __builtin_memcpy.
928 It's safe and may even be emitted by GCC itself (see bug
929 32667). */
930 unlink_stmt_vdef (stmt);
931 if (gimple_vdef (stmt) && TREE_CODE (gimple_vdef (stmt)) == SSA_NAME)
932 release_ssa_name (gimple_vdef (stmt));
933 if (!lhs)
934 {
935 gsi_replace (gsi, gimple_build_nop (), false);
936 return true;
937 }
938 goto done;
939 }
940 else
941 {
942 /* We cannot (easily) change the type of the copy if it is a storage
943 order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that can
944 modify the storage order of objects (see storage_order_barrier_p). */
945 tree srctype
946 = POINTER_TYPE_P (TREE_TYPE (src))
947 ? TREE_TYPE (TREE_TYPE (src)) : NULL_TREE;
948 tree desttype
949 = POINTER_TYPE_P (TREE_TYPE (dest))
950 ? TREE_TYPE (TREE_TYPE (dest)) : NULL_TREE;
951 tree destvar, srcvar, srcoff;
952 unsigned int src_align, dest_align;
953 unsigned HOST_WIDE_INT tmp_len;
954 const char *tmp_str;
955
956 /* Build accesses at offset zero with a ref-all character type. */
957 tree off0
958 = build_int_cst (build_pointer_type_for_mode (char_type_node,
959 ptr_mode, true), 0);
960
961 /* If we can perform the copy efficiently by first doing all loads and
962 then all stores, inline it that way. Currently "efficiently"
963 means that we can load all the memory into a single integer
964 register, which is what MOVE_MAX gives us. */
965 src_align = get_pointer_alignment (src);
966 dest_align = get_pointer_alignment (dest);
967 if (tree_fits_uhwi_p (len)
968 && compare_tree_int (len, MOVE_MAX) <= 0
969 /* FIXME: Don't transform copies from strings with known length.
970 Until GCC 9 this prevented a case in gcc.dg/strlenopt-8.c
971 from being handled, and the case was XFAILed for that reason.
972 Now that it is handled and the XFAIL removed, as soon as other
973 strlenopt tests that rely on it for passing are adjusted, this
974 hack can be removed. */
975 && !c_strlen (src, 1)
976 && !((tmp_str = getbyterep (src, &tmp_len)) != NULL
977 && memchr (tmp_str, 0, tmp_len) == NULL)
978 && !(srctype
979 && AGGREGATE_TYPE_P (srctype)
980 && TYPE_REVERSE_STORAGE_ORDER (srctype))
981 && !(desttype
982 && AGGREGATE_TYPE_P (desttype)
983 && TYPE_REVERSE_STORAGE_ORDER (desttype)))
984 {
985 unsigned ilen = tree_to_uhwi (len);
986 if (pow2p_hwi (ilen))
987 {
988 /* Detect out-of-bounds accesses without issuing warnings.
989 Avoid folding out-of-bounds copies but to avoid false
990 positives for unreachable code defer warning until after
991 DCE has worked its magic.
992 -Wrestrict is still diagnosed. */
993 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
994 dest, src, len, len,
995 false, false))
996 if (warning != OPT_Wrestrict)
997 return false;
998
999 scalar_int_mode mode;
1000 if (int_mode_for_size (ilen * 8, 0).exists (&mode)
1001 && GET_MODE_SIZE (mode) * BITS_PER_UNIT == ilen * 8
1002 /* If the destination pointer is not aligned we must be able
1003 to emit an unaligned store. */
1004 && (dest_align >= GET_MODE_ALIGNMENT (mode)
1005 || !targetm.slow_unaligned_access (mode, dest_align)
1006 || (optab_handler (movmisalign_optab, mode)
1007 != CODE_FOR_nothing)))
1008 {
1009 tree type = build_nonstandard_integer_type (ilen * 8, 1);
1010 tree srctype = type;
1011 tree desttype = type;
1012 if (src_align < GET_MODE_ALIGNMENT (mode))
1013 srctype = build_aligned_type (type, src_align);
1014 tree srcmem = fold_build2 (MEM_REF, srctype, src, off0);
1015 tree tem = fold_const_aggregate_ref (srcmem);
1016 if (tem)
1017 srcmem = tem;
1018 else if (src_align < GET_MODE_ALIGNMENT (mode)
1019 && targetm.slow_unaligned_access (mode, src_align)
1020 && (optab_handler (movmisalign_optab, mode)
1021 == CODE_FOR_nothing))
1022 srcmem = NULL_TREE;
1023 if (srcmem)
1024 {
1025 gimple *new_stmt;
1026 if (is_gimple_reg_type (TREE_TYPE (srcmem)))
1027 {
1028 new_stmt = gimple_build_assign (NULL_TREE, srcmem);
1029 srcmem
1030 = create_tmp_reg_or_ssa_name (TREE_TYPE (srcmem),
1031 new_stmt);
1032 gimple_assign_set_lhs (new_stmt, srcmem);
1033 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1034 gimple_set_location (new_stmt, loc);
1035 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1036 }
1037 if (dest_align < GET_MODE_ALIGNMENT (mode))
1038 desttype = build_aligned_type (type, dest_align);
1039 new_stmt
1040 = gimple_build_assign (fold_build2 (MEM_REF, desttype,
1041 dest, off0),
1042 srcmem);
1043 gimple_move_vops (new_stmt, stmt);
1044 if (!lhs)
1045 {
1046 gsi_replace (gsi, new_stmt, false);
1047 return true;
1048 }
1049 gimple_set_location (new_stmt, loc);
1050 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1051 goto done;
1052 }
1053 }
1054 }
1055 }
1056
1057 if (code == BUILT_IN_MEMMOVE)
1058 {
1059 /* Both DEST and SRC must be pointer types.
1060 ??? This is what old code did. Is the testing for pointer types
1061 really mandatory?
1062
1063 If either SRC is readonly or length is 1, we can use memcpy. */
1064 if (!dest_align || !src_align)
1065 return false;
1066 if (readonly_data_expr (src)
1067 || (tree_fits_uhwi_p (len)
1068 && (MIN (src_align, dest_align) / BITS_PER_UNIT
1069 >= tree_to_uhwi (len))))
1070 {
1071 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1072 if (!fn)
1073 return false;
1074 gimple_call_set_fndecl (stmt, fn);
1075 gimple_call_set_arg (stmt, 0, dest);
1076 gimple_call_set_arg (stmt, 1, src);
1077 fold_stmt (gsi);
1078 return true;
1079 }
1080
1081 /* If *src and *dest can't overlap, optimize into memcpy as well. */
1082 if (TREE_CODE (src) == ADDR_EXPR
1083 && TREE_CODE (dest) == ADDR_EXPR)
1084 {
1085 tree src_base, dest_base, fn;
1086 poly_int64 src_offset = 0, dest_offset = 0;
1087 poly_uint64 maxsize;
1088
1089 srcvar = TREE_OPERAND (src, 0);
1090 src_base = get_addr_base_and_unit_offset (srcvar, &src_offset);
1091 if (src_base == NULL)
1092 src_base = srcvar;
1093 destvar = TREE_OPERAND (dest, 0);
1094 dest_base = get_addr_base_and_unit_offset (destvar,
1095 &dest_offset);
1096 if (dest_base == NULL)
1097 dest_base = destvar;
1098 if (!poly_int_tree_p (len, &maxsize))
1099 maxsize = -1;
1100 if (SSA_VAR_P (src_base)
1101 && SSA_VAR_P (dest_base))
1102 {
1103 if (operand_equal_p (src_base, dest_base, 0)
1104 && ranges_maybe_overlap_p (src_offset, maxsize,
1105 dest_offset, maxsize))
1106 return false;
1107 }
1108 else if (TREE_CODE (src_base) == MEM_REF
1109 && TREE_CODE (dest_base) == MEM_REF)
1110 {
1111 if (! operand_equal_p (TREE_OPERAND (src_base, 0),
1112 TREE_OPERAND (dest_base, 0), 0))
1113 return false;
1114 poly_offset_int full_src_offset
1115 = mem_ref_offset (src_base) + src_offset;
1116 poly_offset_int full_dest_offset
1117 = mem_ref_offset (dest_base) + dest_offset;
1118 if (ranges_maybe_overlap_p (full_src_offset, maxsize,
1119 full_dest_offset, maxsize))
1120 return false;
1121 }
1122 else
1123 return false;
1124
1125 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1126 if (!fn)
1127 return false;
1128 gimple_call_set_fndecl (stmt, fn);
1129 gimple_call_set_arg (stmt, 0, dest);
1130 gimple_call_set_arg (stmt, 1, src);
1131 fold_stmt (gsi);
1132 return true;
1133 }
1134
1135 /* If the destination and source do not alias, optimize into
1136 memcpy as well. */
1137 if ((is_gimple_min_invariant (dest)
1138 || TREE_CODE (dest) == SSA_NAME)
1139 && (is_gimple_min_invariant (src)
1140 || TREE_CODE (src) == SSA_NAME))
1141 {
1142 ao_ref destr, srcr;
1143 ao_ref_init_from_ptr_and_size (&destr, dest, len);
1144 ao_ref_init_from_ptr_and_size (&srcr, src, len);
1145 if (!refs_may_alias_p_1 (&destr, &srcr, false))
1146 {
1147 tree fn;
1148 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
1149 if (!fn)
1150 return false;
1151 gimple_call_set_fndecl (stmt, fn);
1152 gimple_call_set_arg (stmt, 0, dest);
1153 gimple_call_set_arg (stmt, 1, src);
1154 fold_stmt (gsi);
1155 return true;
1156 }
1157 }
1158
1159 return false;
1160 }
1161
1162 if (!tree_fits_shwi_p (len))
1163 return false;
1164 if (!srctype
1165 || (AGGREGATE_TYPE_P (srctype)
1166 && TYPE_REVERSE_STORAGE_ORDER (srctype)))
1167 return false;
1168 if (!desttype
1169 || (AGGREGATE_TYPE_P (desttype)
1170 && TYPE_REVERSE_STORAGE_ORDER (desttype)))
1171 return false;
1172 /* In the following try to find a type that is most natural to be
1173 used for the memcpy source and destination and that allows
1174 the most optimization when memcpy is turned into a plain assignment
1175 using that type. In theory we could always use a char[len] type
1176 but that only gains us that the destination and source possibly
1177 no longer will have their address taken. */
1178 if (TREE_CODE (srctype) == ARRAY_TYPE
1179 && !tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
1180 srctype = TREE_TYPE (srctype);
1181 if (TREE_CODE (desttype) == ARRAY_TYPE
1182 && !tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
1183 desttype = TREE_TYPE (desttype);
1184 if (TREE_ADDRESSABLE (srctype)
1185 || TREE_ADDRESSABLE (desttype))
1186 return false;
1187
1188 /* Make sure we are not copying using a floating-point mode or
1189 a type whose size possibly does not match its precision. */
1190 if (FLOAT_MODE_P (TYPE_MODE (desttype))
1191 || TREE_CODE (desttype) == BOOLEAN_TYPE
1192 || TREE_CODE (desttype) == ENUMERAL_TYPE)
1193 desttype = bitwise_type_for_mode (TYPE_MODE (desttype));
1194 if (FLOAT_MODE_P (TYPE_MODE (srctype))
1195 || TREE_CODE (srctype) == BOOLEAN_TYPE
1196 || TREE_CODE (srctype) == ENUMERAL_TYPE)
1197 srctype = bitwise_type_for_mode (TYPE_MODE (srctype));
1198 if (!srctype)
1199 srctype = desttype;
1200 if (!desttype)
1201 desttype = srctype;
1202 if (!srctype)
1203 return false;
1204
1205 src_align = get_pointer_alignment (src);
1206 dest_align = get_pointer_alignment (dest);
1207
1208 /* Choose between src and destination type for the access based
1209 on alignment, whether the access constitutes a register access
1210 and whether it may actually expose a declaration for SSA rewrite
1211 or SRA decomposition. Also try to expose a string constant, we
1212 might be able to concatenate several of them later into a single
1213 string store. */
1214 destvar = NULL_TREE;
1215 srcvar = NULL_TREE;
1216 if (TREE_CODE (dest) == ADDR_EXPR
1217 && var_decl_component_p (TREE_OPERAND (dest, 0))
1218 && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len)
1219 && dest_align >= TYPE_ALIGN (desttype)
1220 && (is_gimple_reg_type (desttype)
1221 || src_align >= TYPE_ALIGN (desttype)))
1222 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1223 else if (TREE_CODE (src) == ADDR_EXPR
1224 && var_decl_component_p (TREE_OPERAND (src, 0))
1225 && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len)
1226 && src_align >= TYPE_ALIGN (srctype)
1227 && (is_gimple_reg_type (srctype)
1228 || dest_align >= TYPE_ALIGN (srctype)))
1229 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1230 /* FIXME: Don't transform copies from strings with known original length.
1231 As soon as strlenopt tests that rely on it for passing are adjusted,
1232 this hack can be removed. */
1233 else if (gimple_call_alloca_for_var_p (stmt)
1234 && (srcvar = string_constant (src, &srcoff, NULL, NULL))
1235 && integer_zerop (srcoff)
1236 && tree_int_cst_equal (TYPE_SIZE_UNIT (TREE_TYPE (srcvar)), len)
1237 && dest_align >= TYPE_ALIGN (TREE_TYPE (srcvar)))
1238 srctype = TREE_TYPE (srcvar);
1239 else
1240 return false;
1241
1242 /* Now that we have chosen an access type, express the other side in
1243 terms of it, if the target allows that with respect to alignment
1244 constraints. */
1245 if (srcvar == NULL_TREE)
1246 {
1247 if (src_align >= TYPE_ALIGN (desttype))
1248 srcvar = fold_build2 (MEM_REF, desttype, src, off0);
1249 else
1250 {
1251 enum machine_mode mode = TYPE_MODE (desttype);
1252 if ((mode == BLKmode && STRICT_ALIGNMENT)
1253 || (targetm.slow_unaligned_access (mode, src_align)
1254 && (optab_handler (movmisalign_optab, mode)
1255 == CODE_FOR_nothing)))
1256 return false;
1257 srctype = build_aligned_type (TYPE_MAIN_VARIANT (desttype),
1258 src_align);
1259 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1260 }
1261 }
1262 else if (destvar == NULL_TREE)
1263 {
1264 if (dest_align >= TYPE_ALIGN (srctype))
1265 destvar = fold_build2 (MEM_REF, srctype, dest, off0);
1266 else
1267 {
1268 enum machine_mode mode = TYPE_MODE (srctype);
1269 if ((mode == BLKmode && STRICT_ALIGNMENT)
1270 || (targetm.slow_unaligned_access (mode, dest_align)
1271 && (optab_handler (movmisalign_optab, mode)
1272 == CODE_FOR_nothing)))
1273 return false;
1274 desttype = build_aligned_type (TYPE_MAIN_VARIANT (srctype),
1275 dest_align);
1276 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1277 }
1278 }
1279
1280 /* Same as above, detect out-of-bounds accesses without issuing
1281 warnings. Avoid folding out-of-bounds copies but to avoid
1282 false positives for unreachable code defer warning until
1283 after DCE has worked its magic.
1284 -Wrestrict is still diagnosed. */
1285 if (int warning = check_bounds_or_overlap (as_a <gcall *>(stmt),
1286 dest, src, len, len,
1287 false, false))
1288 if (warning != OPT_Wrestrict)
1289 return false;
1290
1291 gimple *new_stmt;
1292 if (is_gimple_reg_type (TREE_TYPE (srcvar)))
1293 {
1294 tree tem = fold_const_aggregate_ref (srcvar);
1295 if (tem)
1296 srcvar = tem;
1297 if (! is_gimple_min_invariant (srcvar))
1298 {
1299 new_stmt = gimple_build_assign (NULL_TREE, srcvar);
1300 srcvar = create_tmp_reg_or_ssa_name (TREE_TYPE (srcvar),
1301 new_stmt);
1302 gimple_assign_set_lhs (new_stmt, srcvar);
1303 gimple_set_vuse (new_stmt, gimple_vuse (stmt));
1304 gimple_set_location (new_stmt, loc);
1305 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1306 }
1307 new_stmt = gimple_build_assign (destvar, srcvar);
1308 goto set_vop_and_replace;
1309 }
1310
1311 /* We get an aggregate copy. If the source is a STRING_CST, then
1312 directly use its type to perform the copy. */
1313 if (TREE_CODE (srcvar) == STRING_CST)
1314 desttype = srctype;
1315
1316 /* Or else, use an unsigned char[] type to perform the copy in order
1317 to preserve padding and to avoid any issues with TREE_ADDRESSABLE
1318 types or float modes behavior on copying. */
1319 else
1320 {
1321 desttype = build_array_type_nelts (unsigned_char_type_node,
1322 tree_to_uhwi (len));
1323 srctype = desttype;
1324 if (src_align > TYPE_ALIGN (srctype))
1325 srctype = build_aligned_type (srctype, src_align);
1326 srcvar = fold_build2 (MEM_REF, srctype, src, off0);
1327 }
1328
1329 if (dest_align > TYPE_ALIGN (desttype))
1330 desttype = build_aligned_type (desttype, dest_align);
1331 destvar = fold_build2 (MEM_REF, desttype, dest, off0);
1332 new_stmt = gimple_build_assign (destvar, srcvar);
1333
1334 set_vop_and_replace:
1335 gimple_move_vops (new_stmt, stmt);
1336 if (!lhs)
1337 {
1338 gsi_replace (gsi, new_stmt, false);
1339 return true;
1340 }
1341 gimple_set_location (new_stmt, loc);
1342 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
1343 }
1344
1345 done:
1346 gimple_seq stmts = NULL;
1347 if (code == BUILT_IN_MEMCPY || code == BUILT_IN_MEMMOVE)
1348 len = NULL_TREE;
1349 else if (code == BUILT_IN_MEMPCPY)
1350 {
1351 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
1352 dest = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
1353 TREE_TYPE (dest), dest, len);
1354 }
1355 else
1356 gcc_unreachable ();
1357
1358 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
1359 gimple *repl = gimple_build_assign (lhs, dest);
1360 gsi_replace (gsi, repl, false);
1361 return true;
1362 }
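/* Example of the tail above (editor's note): for
     p = __builtin_mempcpy (d, s, 16);
   the copy itself is emitted as an assignment and the lhs is then set to
   'd + 16' (a POINTER_PLUS_EXPR built via gimple_build), while for plain
   memcpy/memmove the 'lhs = dest' assignment simply returns d.  */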
1363
1364 /* Transform a call to built-in bcmp(a, b, len) at *GSI into one
1365 to built-in memcmp (a, b, len). */
1366
1367 static bool
1368 gimple_fold_builtin_bcmp (gimple_stmt_iterator *gsi)
1369 {
1370 tree fn = builtin_decl_implicit (BUILT_IN_MEMCMP);
1371
1372 if (!fn)
1373 return false;
1374
1375 /* Transform bcmp (a, b, len) into memcmp (a, b, len). */
1376
1377 gimple *stmt = gsi_stmt (*gsi);
1378 tree a = gimple_call_arg (stmt, 0);
1379 tree b = gimple_call_arg (stmt, 1);
1380 tree len = gimple_call_arg (stmt, 2);
1381
1382 gimple *repl = gimple_build_call (fn, 3, a, b, len);
1383 replace_call_with_call_and_fold (gsi, repl);
1384
1385 return true;
1386 }
1387
1388 /* Transform a call to built-in bcopy (src, dest, len) at *GSI into one
1389 to built-in memmove (dest, src, len). */
1390
1391 static bool
1392 gimple_fold_builtin_bcopy (gimple_stmt_iterator *gsi)
1393 {
1394 tree fn = builtin_decl_implicit (BUILT_IN_MEMMOVE);
1395
1396 if (!fn)
1397 return false;
1398
1399 /* bcopy has been removed from POSIX in Issue 7, but Issue 6 specifies
1400 that it's equivalent to memmove (not memcpy). Transform bcopy (src,
1401 dest, len) into memmove (dest, src, len). */
1402
1403 gimple *stmt = gsi_stmt (*gsi);
1404 tree src = gimple_call_arg (stmt, 0);
1405 tree dest = gimple_call_arg (stmt, 1);
1406 tree len = gimple_call_arg (stmt, 2);
1407
1408 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
1409 gimple_call_set_fntype (as_a <gcall *> (stmt), TREE_TYPE (fn));
1410 replace_call_with_call_and_fold (gsi, repl);
1411
1412 return true;
1413 }
1414
1415 /* Transform a call to built-in bzero (dest, len) at *GSI into one
1416 to built-in memset (dest, 0, len). */
1417
1418 static bool
1419 gimple_fold_builtin_bzero (gimple_stmt_iterator *gsi)
1420 {
1421 tree fn = builtin_decl_implicit (BUILT_IN_MEMSET);
1422
1423 if (!fn)
1424 return false;
1425
1426 /* Transform bzero (dest, len) into memset (dest, 0, len). */
1427
1428 gimple *stmt = gsi_stmt (*gsi);
1429 tree dest = gimple_call_arg (stmt, 0);
1430 tree len = gimple_call_arg (stmt, 1);
1431
1432 gimple_seq seq = NULL;
1433 gimple *repl = gimple_build_call (fn, 3, dest, integer_zero_node, len);
1434 gimple_seq_add_stmt_without_update (&seq, repl);
1435 gsi_replace_with_seq_vops (gsi, seq);
1436 fold_stmt (gsi);
1437
1438 return true;
1439 }
1440
1441 /* Fold a function call to builtin memset or bzero at *GSI, setting the
1442 memory of size LEN to the value C. Return whether a simplification was made. */
1443
1444 static bool
1445 gimple_fold_builtin_memset (gimple_stmt_iterator *gsi, tree c, tree len)
1446 {
1447 gimple *stmt = gsi_stmt (*gsi);
1448 tree etype;
1449 unsigned HOST_WIDE_INT length, cval;
1450
1451 /* If the LEN parameter is zero, return DEST. */
1452 if (integer_zerop (len))
1453 {
1454 replace_call_with_value (gsi, gimple_call_arg (stmt, 0));
1455 return true;
1456 }
1457
1458 if (! tree_fits_uhwi_p (len))
1459 return false;
1460
1461 if (TREE_CODE (c) != INTEGER_CST)
1462 return false;
1463
1464 tree dest = gimple_call_arg (stmt, 0);
1465 tree var = dest;
1466 if (TREE_CODE (var) != ADDR_EXPR)
1467 return false;
1468
1469 var = TREE_OPERAND (var, 0);
1470 if (TREE_THIS_VOLATILE (var))
1471 return false;
1472
1473 etype = TREE_TYPE (var);
1474 if (TREE_CODE (etype) == ARRAY_TYPE)
1475 etype = TREE_TYPE (etype);
1476
1477 if (!INTEGRAL_TYPE_P (etype)
1478 && !POINTER_TYPE_P (etype))
1479 return false;
1480
1481 if (! var_decl_component_p (var))
1482 return false;
1483
1484 length = tree_to_uhwi (len);
1485 if (GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (etype)) != length
1486 || (GET_MODE_PRECISION (SCALAR_INT_TYPE_MODE (etype))
1487 != GET_MODE_BITSIZE (SCALAR_INT_TYPE_MODE (etype)))
1488 || get_pointer_alignment (dest) / BITS_PER_UNIT < length)
1489 return false;
1490
1491 if (length > HOST_BITS_PER_WIDE_INT / BITS_PER_UNIT)
1492 return false;
1493
1494 if (!type_has_mode_precision_p (etype))
1495 etype = lang_hooks.types.type_for_mode (SCALAR_INT_TYPE_MODE (etype),
1496 TYPE_UNSIGNED (etype));
1497
1498 if (integer_zerop (c))
1499 cval = 0;
1500 else
1501 {
1502 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8 || HOST_BITS_PER_WIDE_INT > 64)
1503 return false;
1504
1505 cval = TREE_INT_CST_LOW (c);
1506 cval &= 0xff;
1507 cval |= cval << 8;
1508 cval |= cval << 16;
1509 cval |= (cval << 31) << 1;
1510 }
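/* Editor's note on the splat above: the constant byte C is replicated
   into every byte of the word, e.g. 0xAB becomes 0xABABABABABABABAB.
   The final '(cval << 31) << 1' is a way to write 'cval << 32' that stays
   well defined even if HOST_WIDE_INT were only 32 bits wide (where a
   single shift by the full type width would be undefined).  A minimal
   standalone sketch of the same computation, assuming a 64-bit word
   (illustration only, not part of this file):  */
#if 0
#include <stdint.h>
static uint64_t
splat_byte (uint64_t c)
{
  c &= 0xff;            /* keep the low byte, e.g. 0xAB  */
  c |= c << 8;          /* 0xABAB                        */
  c |= c << 16;         /* 0xABABABAB                    */
  c |= (c << 31) << 1;  /* 0xABABABABABABABAB            */
  return c;
}
#endif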
1511
1512 var = fold_build2 (MEM_REF, etype, dest, build_int_cst (ptr_type_node, 0));
1513 gimple *store = gimple_build_assign (var, build_int_cst_type (etype, cval));
1514 gimple_move_vops (store, stmt);
1515 gimple_set_location (store, gimple_location (stmt));
1516 gsi_insert_before (gsi, store, GSI_SAME_STMT);
1517 if (gimple_call_lhs (stmt))
1518 {
1519 gimple *asgn = gimple_build_assign (gimple_call_lhs (stmt), dest);
1520 gsi_replace (gsi, asgn, false);
1521 }
1522 else
1523 {
1524 gimple_stmt_iterator gsi2 = *gsi;
1525 gsi_prev (gsi);
1526 gsi_remove (&gsi2, true);
1527 }
1528
1529 return true;
1530 }
1531
1532 /* Helper of get_range_strlen for ARG that is not an SSA_NAME. */
1533
1534 static bool
1535 get_range_strlen_tree (tree arg, bitmap visited, strlen_range_kind rkind,
1536 c_strlen_data *pdata, unsigned eltsize)
1537 {
1538 gcc_assert (TREE_CODE (arg) != SSA_NAME);
1539
1540 /* The length computed by this invocation of the function. */
1541 tree val = NULL_TREE;
1542
1543 /* True if VAL is an optimistic (tight) bound determined from
1544 the size of the character array in which the string may be
1545 stored. In that case, the computed VAL is used to set
1546 PDATA->MAXBOUND. */
1547 bool tight_bound = false;
1548
1549 /* We can end up with &(*iftmp_1)[0] here as well, so handle it. */
1550 if (TREE_CODE (arg) == ADDR_EXPR
1551 && TREE_CODE (TREE_OPERAND (arg, 0)) == ARRAY_REF)
1552 {
1553 tree op = TREE_OPERAND (arg, 0);
1554 if (integer_zerop (TREE_OPERAND (op, 1)))
1555 {
1556 tree aop0 = TREE_OPERAND (op, 0);
1557 if (TREE_CODE (aop0) == INDIRECT_REF
1558 && TREE_CODE (TREE_OPERAND (aop0, 0)) == SSA_NAME)
1559 return get_range_strlen (TREE_OPERAND (aop0, 0), visited, rkind,
1560 pdata, eltsize);
1561 }
1562 else if (TREE_CODE (TREE_OPERAND (op, 0)) == COMPONENT_REF
1563 && rkind == SRK_LENRANGE)
1564 {
1565 /* Fail if an array is the last member of a struct object
1566 since it could be treated as a (fake) flexible array
1567 member. */
1568 tree idx = TREE_OPERAND (op, 1);
1569
1570 arg = TREE_OPERAND (op, 0);
1571 tree optype = TREE_TYPE (arg);
1572 if (tree dom = TYPE_DOMAIN (optype))
1573 if (tree bound = TYPE_MAX_VALUE (dom))
1574 if (TREE_CODE (bound) == INTEGER_CST
1575 && TREE_CODE (idx) == INTEGER_CST
1576 && tree_int_cst_lt (bound, idx))
1577 return false;
1578 }
1579 }
1580
1581 if (rkind == SRK_INT_VALUE)
1582 {
1583 /* We are computing the maximum value (not string length). */
1584 val = arg;
1585 if (TREE_CODE (val) != INTEGER_CST
1586 || tree_int_cst_sgn (val) < 0)
1587 return false;
1588 }
1589 else
1590 {
1591 c_strlen_data lendata = { };
1592 val = c_strlen (arg, 1, &lendata, eltsize);
1593
1594 if (!val && lendata.decl)
1595 {
1596 /* ARG refers to an unterminated const character array,
1597 LENDATA.DECL, with size LENDATA.LEN. */
1598 val = lendata.minlen;
1599 pdata->decl = lendata.decl;
1600 }
1601 }
1602
1603 /* Set if VAL represents the maximum length based on array size (set
1604 when exact length cannot be determined). */
1605 bool maxbound = false;
1606
1607 if (!val && rkind == SRK_LENRANGE)
1608 {
1609 if (TREE_CODE (arg) == ADDR_EXPR)
1610 return get_range_strlen (TREE_OPERAND (arg, 0), visited, rkind,
1611 pdata, eltsize);
1612
1613 if (TREE_CODE (arg) == ARRAY_REF)
1614 {
1615 tree optype = TREE_TYPE (TREE_OPERAND (arg, 0));
1616
1617 /* Determine the "innermost" array type. */
1618 while (TREE_CODE (optype) == ARRAY_TYPE
1619 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1620 optype = TREE_TYPE (optype);
1621
1622 /* Avoid arrays of pointers. */
1623 tree eltype = TREE_TYPE (optype);
1624 if (TREE_CODE (optype) != ARRAY_TYPE
1625 || !INTEGRAL_TYPE_P (eltype))
1626 return false;
1627
1628 /* Fail when the array bound is unknown or zero. */
1629 val = TYPE_SIZE_UNIT (optype);
1630 if (!val
1631 || TREE_CODE (val) != INTEGER_CST
1632 || integer_zerop (val))
1633 return false;
1634
1635 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1636 integer_one_node);
1637
1638 /* Set the minimum size to zero since the string in
1639 the array could have zero length. */
1640 pdata->minlen = ssize_int (0);
1641
1642 tight_bound = true;
1643 }
1644 else if (TREE_CODE (arg) == COMPONENT_REF
1645 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (arg, 1)))
1646 == ARRAY_TYPE))
1647 {
1648 /* Use the type of the member array to determine the upper
1649 bound on the length of the array. This may be overly
1650 optimistic if the array itself isn't NUL-terminated and
1651 the caller relies on the subsequent member to contain
1652 the NUL, but that would only be considered valid if
1653 the array were the last member of a struct. */
1654
1655 tree fld = TREE_OPERAND (arg, 1);
1656
1657 tree optype = TREE_TYPE (fld);
1658
1659 /* Determine the "innermost" array type. */
1660 while (TREE_CODE (optype) == ARRAY_TYPE
1661 && TREE_CODE (TREE_TYPE (optype)) == ARRAY_TYPE)
1662 optype = TREE_TYPE (optype);
1663
1664 /* Fail when the array bound is unknown or zero. */
1665 val = TYPE_SIZE_UNIT (optype);
1666 if (!val
1667 || TREE_CODE (val) != INTEGER_CST
1668 || integer_zerop (val))
1669 return false;
1670 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1671 integer_one_node);
1672
1673 /* Set the minimum size to zero since the string in
1674 the array could have zero length. */
1675 pdata->minlen = ssize_int (0);
1676
1677 /* The array size determined above is an optimistic bound
1678 on the length. If the array isn't nul-terminated the
1679 length computed by the library function would be greater.
1680 Even though using strlen to cross the subobject boundary
1681 is undefined, avoid drawing conclusions from the member
1682 type about the length here. */
1683 tight_bound = true;
1684 }
1685 else if (TREE_CODE (arg) == MEM_REF
1686 && TREE_CODE (TREE_TYPE (arg)) == ARRAY_TYPE
1687 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == INTEGER_TYPE
1688 && TREE_CODE (TREE_OPERAND (arg, 0)) == ADDR_EXPR)
1689 {
1690 /* Handle a MEM_REF into a DECL accessing an array of integers,
1691 being conservative about references to extern structures with
1692 flexible array members that can be initialized to arbitrary
1693 numbers of elements as an extension (static structs are okay).
1694 FIXME: Make this less conservative -- see
1695 component_ref_size in tree.cc. */
1696 tree ref = TREE_OPERAND (TREE_OPERAND (arg, 0), 0);
1697 if ((TREE_CODE (ref) == PARM_DECL || VAR_P (ref))
1698 && (decl_binds_to_current_def_p (ref)
1699 || !array_at_struct_end_p (arg)))
1700 {
1701 /* Fail if the offset is out of bounds. Such accesses
1702 should be diagnosed at some point. */
1703 val = DECL_SIZE_UNIT (ref);
1704 if (!val
1705 || TREE_CODE (val) != INTEGER_CST
1706 || integer_zerop (val))
1707 return false;
1708
1709 poly_offset_int psiz = wi::to_offset (val);
1710 poly_offset_int poff = mem_ref_offset (arg);
1711 if (known_le (psiz, poff))
1712 return false;
1713
1714 pdata->minlen = ssize_int (0);
1715
1716 /* Subtract the offset and one for the terminating nul. */
1717 psiz -= poff;
1718 psiz -= 1;
1719 val = wide_int_to_tree (TREE_TYPE (val), psiz);
1720 /* Since VAL reflects the size of a declared object rather
1721 than the type of the access, it is not a tight bound. */
1722 }
1723 }
1724 else if (TREE_CODE (arg) == PARM_DECL || VAR_P (arg))
1725 {
1726 /* Avoid handling pointers to arrays. GCC might misuse
1727 a pointer to an array of one bound to point to an array
1728 object of a greater bound. */
1729 tree argtype = TREE_TYPE (arg);
1730 if (TREE_CODE (argtype) == ARRAY_TYPE)
1731 {
1732 val = TYPE_SIZE_UNIT (argtype);
1733 if (!val
1734 || TREE_CODE (val) != INTEGER_CST
1735 || integer_zerop (val))
1736 return false;
1737 val = wide_int_to_tree (TREE_TYPE (val),
1738 wi::sub (wi::to_wide (val), 1));
1739
1740 /* Set the minimum size to zero since the string in
1741 the array could have zero length. */
1742 pdata->minlen = ssize_int (0);
1743 }
1744 }
1745 maxbound = true;
1746 }
1747
1748 if (!val)
1749 return false;
1750
1751 /* Adjust the lower bound on the string length as necessary. */
1752 if (!pdata->minlen
1753 || (rkind != SRK_STRLEN
1754 && TREE_CODE (pdata->minlen) == INTEGER_CST
1755 && TREE_CODE (val) == INTEGER_CST
1756 && tree_int_cst_lt (val, pdata->minlen)))
1757 pdata->minlen = val;
1758
1759 if (pdata->maxbound && TREE_CODE (pdata->maxbound) == INTEGER_CST)
1760 {
1761 /* Adjust the tighter (more optimistic) string length bound
1762 if necessary and proceed to adjust the more conservative
1763 bound. */
1764 if (TREE_CODE (val) == INTEGER_CST)
1765 {
1766 if (tree_int_cst_lt (pdata->maxbound, val))
1767 pdata->maxbound = val;
1768 }
1769 else
1770 pdata->maxbound = val;
1771 }
1772 else if (pdata->maxbound || maxbound)
1773 /* Set PDATA->MAXBOUND only if it either isn't INTEGER_CST or
1774 if VAL corresponds to the maximum length determined based
1775 on the type of the object. */
1776 pdata->maxbound = val;
1777
1778 if (tight_bound)
1779 {
1780 /* VAL computed above represents an optimistically tight bound
1781 on the length of the string based on the referenced object's
1782 or subobject's type. Determine the conservative upper bound
1783 based on the enclosing object's size if possible. */
1784 if (rkind == SRK_LENRANGE)
1785 {
1786 poly_int64 offset;
1787 tree base = get_addr_base_and_unit_offset (arg, &offset);
1788 if (!base)
1789 {
1790 /* When the call above fails due to a non-constant offset
1791 assume the offset is zero and use the size of the whole
1792 enclosing object instead. */
1793 base = get_base_address (arg);
1794 offset = 0;
1795 }
1796 /* If the base object is a pointer no upper bound on the length
1797 can be determined. Otherwise the maximum length is equal to
1798 the size of the enclosing object minus the offset of
1799 the referenced subobject minus 1 (for the terminating nul). */
1800 tree type = TREE_TYPE (base);
1801 if (TREE_CODE (type) == POINTER_TYPE
1802 || (TREE_CODE (base) != PARM_DECL && !VAR_P (base))
1803 || !(val = DECL_SIZE_UNIT (base)))
1804 val = build_all_ones_cst (size_type_node);
1805 else
1806 {
1807 val = DECL_SIZE_UNIT (base);
1808 val = fold_build2 (MINUS_EXPR, TREE_TYPE (val), val,
1809 size_int (offset + 1));
1810 }
1811 }
1812 else
1813 return false;
1814 }
1815
1816 if (pdata->maxlen)
1817 {
1818 /* Adjust the more conservative bound if possible/necessary
1819 and fail otherwise. */
1820 if (rkind != SRK_STRLEN)
1821 {
1822 if (TREE_CODE (pdata->maxlen) != INTEGER_CST
1823 || TREE_CODE (val) != INTEGER_CST)
1824 return false;
1825
1826 if (tree_int_cst_lt (pdata->maxlen, val))
1827 pdata->maxlen = val;
1828 return true;
1829 }
1830 else if (simple_cst_equal (val, pdata->maxlen) != 1)
1831 {
1832 /* Fail if the length of this ARG is different from that
1833 previously determined from another ARG. */
1834 return false;
1835 }
1836 }
1837
1838 pdata->maxlen = val;
1839 return rkind == SRK_LENRANGE || !integer_all_onesp (val);
1840 }
1841
1842 /* For an ARG referencing one or more strings, try to obtain the range
1843 of their lengths, or the size of the largest array ARG refers to if
1844 the range of lengths cannot be determined, and store all in *PDATA.
1845 For an integer ARG (when RKIND == SRK_INT_VALUE), try to determine
1846 the maximum constant value.
1847 If ARG is an SSA_NAME, follow its use-def chains. When RKIND ==
1848 SRK_STRLEN, then if PDATA->MAXLEN is not equal to the determined
1849 length or if we are unable to determine the length, return false.
1850 VISITED is a bitmap of visited variables.
1851 RKIND determines the kind of value or range to obtain (see
1852 strlen_range_kind).
1853 Set PDATA->DECL if ARG refers to an unterminated constant array.
1854 On input, set ELTSIZE to 1 for normal single byte character strings,
1855 and either 2 or 4 for wide character strings (the size of wchar_t).
1856 Return true if *PDATA was successfully populated and false otherwise. */
1857
1858 static bool
1859 get_range_strlen (tree arg, bitmap visited,
1860 strlen_range_kind rkind,
1861 c_strlen_data *pdata, unsigned eltsize)
1862 {
1863
1864 if (TREE_CODE (arg) != SSA_NAME)
1865 return get_range_strlen_tree (arg, visited, rkind, pdata, eltsize);
1866
1867 /* If ARG is registered for SSA update we cannot look at its defining
1868 statement. */
1869 if (name_registered_for_update_p (arg))
1870 return false;
1871
1872 /* If we were already here, break the infinite cycle. */
1873 if (!bitmap_set_bit (visited, SSA_NAME_VERSION (arg)))
1874 return true;
1875
1876 tree var = arg;
1877 gimple *def_stmt = SSA_NAME_DEF_STMT (var);
1878
1879 switch (gimple_code (def_stmt))
1880 {
1881 case GIMPLE_ASSIGN:
1882 /* The RHS of the statement defining VAR must either have a
1883 constant length or come from another SSA_NAME with a constant
1884 length. */
1885 if (gimple_assign_single_p (def_stmt)
1886 || gimple_assign_unary_nop_p (def_stmt))
1887 {
1888 tree rhs = gimple_assign_rhs1 (def_stmt);
1889 return get_range_strlen (rhs, visited, rkind, pdata, eltsize);
1890 }
1891 else if (gimple_assign_rhs_code (def_stmt) == COND_EXPR)
1892 {
1893 tree ops[2] = { gimple_assign_rhs2 (def_stmt),
1894 gimple_assign_rhs3 (def_stmt) };
1895
1896 for (unsigned int i = 0; i < 2; i++)
1897 if (!get_range_strlen (ops[i], visited, rkind, pdata, eltsize))
1898 {
1899 if (rkind != SRK_LENRANGE)
1900 return false;
1901 /* Set the upper bound to the maximum to prevent
1902 it from being adjusted in the next iteration but
1903 leave MINLEN and the more conservative MAXBOUND
1904 determined so far alone (or leave them null if
1905 they haven't been set yet). That the MINLEN is
1906 in fact zero can be determined from MAXLEN being
1907 unbounded but the discovered minimum is used for
1908 diagnostics. */
1909 pdata->maxlen = build_all_ones_cst (size_type_node);
1910 }
1911 return true;
1912 }
1913 return false;
1914
1915 case GIMPLE_PHI:
1916 /* Unless RKIND == SRK_LENRANGE, all arguments of the PHI node
1917 must have a constant length. */
1918 for (unsigned i = 0; i < gimple_phi_num_args (def_stmt); i++)
1919 {
1920 tree arg = gimple_phi_arg (def_stmt, i)->def;
1921
1922 /* If this PHI has itself as an argument, we cannot
1923 determine the string length of this argument. However,
1924 if we can find a constant string length for the other
1925 PHI args then we can still be sure that this is a
1926 constant string length. So be optimistic and just
1927 continue with the next argument. */
1928 if (arg == gimple_phi_result (def_stmt))
1929 continue;
1930
1931 if (!get_range_strlen (arg, visited, rkind, pdata, eltsize))
1932 {
1933 if (rkind != SRK_LENRANGE)
1934 return false;
1935 /* Set the upper bound to the maximum to prevent
1936 it from being adjusted in the next iteration but
1937 leave MINLEN and the more conservative MAXBOUND
1938 determined so far alone (or leave them null if
1939 they haven't been set yet). That the MINLEN is
1940 in fact zero can be determined from MAXLEN being
1941 unbounded, but the discovered minimum is used for
1942 diagnostics. */
1943 pdata->maxlen = build_all_ones_cst (size_type_node);
1944 }
1945 }
1946 return true;
1947
1948 default:
1949 return false;
1950 }
1951 }
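
/* As an illustrative sketch of the PHI/COND handling above (names and
   bounds are examples, not real compiler output): with SRK_LENRANGE,
   for

     char a[4], b[8];
     const char *p = cond ? a : b;

   the ranges determined for both arguments are merged, giving
   PDATA->MINLEN 0 and PDATA->MAXLEN 7 (the larger array size less one
   for the terminating nul).  */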
1952
1953 /* Try to obtain the range of the lengths of the string(s) referenced
1954 by ARG, or the size of the largest array ARG refers to if the range
1955 of lengths cannot be determined, and store all in *PDATA which must
1956 be zero-initialized on input except PDATA->MAXBOUND may be set to
1957 a non-null tree node other than INTEGER_CST to request to have it
1958 set to the length of the longest string in a PHI. ELTSIZE is
1959 the expected size of the string element in bytes: 1 for char and
1960 some power of 2 for wide characters.
1961 Return true if the range [PDATA->MINLEN, PDATA->MAXLEN] is suitable
1962 for optimization. Returning false means that a nonzero PDATA->MINLEN
1963 doesn't reflect the true lower bound of the range when PDATA->MAXLEN
1964 is -1 (in that case, the actual range is indeterminate, i.e.,
1965 [0, PTRDIFF_MAX - 2]). */
1966
1967 bool
1968 get_range_strlen (tree arg, c_strlen_data *pdata, unsigned eltsize)
1969 {
1970 auto_bitmap visited;
1971 tree maxbound = pdata->maxbound;
1972
1973 if (!get_range_strlen (arg, visited, SRK_LENRANGE, pdata, eltsize))
1974 {
1975 /* On failure extend the length range to an impossible maximum
1976 (a valid MAXLEN must be less than PTRDIFF_MAX - 1). Other
1977 members can stay unchanged regardless. */
1978 pdata->minlen = ssize_int (0);
1979 pdata->maxlen = build_all_ones_cst (size_type_node);
1980 }
1981 else if (!pdata->minlen)
1982 pdata->minlen = ssize_int (0);
1983
1984 /* If it's unchanged from its initial non-null value, set the conservative
1985 MAXBOUND to SIZE_MAX. Otherwise leave it null (if it is null). */
1986 if (maxbound && pdata->maxbound == maxbound)
1987 pdata->maxbound = build_all_ones_cst (size_type_node);
1988
1989 return !integer_all_onesp (pdata->maxlen);
1990 }
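
/* A minimal usage sketch of the entry point above (ARG and the
   handling of the result are illustrative):

     c_strlen_data lendata = { };
     if (get_range_strlen (arg, &lendata, 1))
       use [lendata.minlen, lendata.maxlen] to bound strlen (ARG);
     else
       only the failure values set above (MINLEN 0, MAXLEN all ones)
       are available.  */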
1991
1992 /* Return the maximum value for ARG given RKIND (see strlen_range_kind).
1993 For ARG of pointer types, NONSTR indicates if the caller is prepared
1994 to handle unterminated strings. For integer ARG and when RKIND ==
1995 SRK_INT_VALUE, NONSTR must be null.
1996
1997 If an unterminated array is discovered and our caller handles
1998 unterminated arrays, then bubble up the offending DECL and
1999 return the maximum size. Otherwise return NULL. */
2000
2001 static tree
2002 get_maxval_strlen (tree arg, strlen_range_kind rkind, tree *nonstr = NULL)
2003 {
2004 /* A non-null NONSTR is meaningless when determining the maximum
2005 value of an integer ARG. */
2006 gcc_assert (rkind != SRK_INT_VALUE || nonstr == NULL);
2007 /* ARG must have an integral type when RKIND says so. */
2008 gcc_assert (rkind != SRK_INT_VALUE || INTEGRAL_TYPE_P (TREE_TYPE (arg)));
2009
2010 auto_bitmap visited;
2011
2012 /* Reset DATA.MAXLEN if the call fails or when DATA.MAXLEN
2013 is unbounded. */
2014 c_strlen_data lendata = { };
2015 if (!get_range_strlen (arg, visited, rkind, &lendata, /* eltsize = */1))
2016 lendata.maxlen = NULL_TREE;
2017 else if (lendata.maxlen && integer_all_onesp (lendata.maxlen))
2018 lendata.maxlen = NULL_TREE;
2019
2020 if (nonstr)
2021 {
2022 /* For callers prepared to handle unterminated arrays set
2023 *NONSTR to point to the declaration of the array and return
2024 the maximum length/size. */
2025 *nonstr = lendata.decl;
2026 return lendata.maxlen;
2027 }
2028
2029 /* Fail if the constant array isn't nul-terminated. */
2030 return lendata.decl ? NULL_TREE : lendata.maxlen;
2031 }
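
/* For example, get_maxval_strlen (src, SRK_STRLEN) is expected to
   yield the constant 3 when SRC is known to be "abc", and NULL_TREE
   when the length is unknown or unbounded, or when the array is not
   nul-terminated and the caller passed no NONSTR.  */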
2032
2033 /* Return true if LEN is known to be less than or equal to (or if STRICT is
2034 true, strictly less than) the lower bound of SIZE at compile time and false
2035 otherwise. */
2036
2037 static bool
2038 known_lower (gimple *stmt, tree len, tree size, bool strict = false)
2039 {
2040 if (len == NULL_TREE)
2041 return false;
2042
2043 wide_int size_range[2];
2044 wide_int len_range[2];
2045 if (get_range (len, stmt, len_range) && get_range (size, stmt, size_range))
2046 {
2047 if (strict)
2048 return wi::ltu_p (len_range[1], size_range[0]);
2049 else
2050 return wi::leu_p (len_range[1], size_range[0]);
2051 }
2052
2053 return false;
2054 }
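
/* For instance, known_lower (stmt, len, size) with LEN == 3 and
   SIZE == 8 (both INTEGER_CSTs) returns true; with STRICT it tests
   3 < 8 instead.  For SSA_NAMEs the upper bound of LEN's value range
   is compared against the lower bound of SIZE's.  */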
2055
2056 /* Fold a function call to the builtin strcpy with arguments DEST
2057 and SRC. Replace the call and return true if a simplification
2058 was made; return false otherwise. */
2059
2060 static bool
2061 gimple_fold_builtin_strcpy (gimple_stmt_iterator *gsi,
2062 tree dest, tree src)
2063 {
2064 gimple *stmt = gsi_stmt (*gsi);
2065 location_t loc = gimple_location (stmt);
2066 tree fn;
2067
2068 /* If SRC and DEST are the same (and not volatile), return DEST. */
2069 if (operand_equal_p (src, dest, 0))
2070 {
2071 /* Issue -Wrestrict unless the pointers are null (those do
2072 not point to objects and so do not indicate an overlap;
2073 such calls could be the result of sanitization and jump
2074 threading). */
2075 if (!integer_zerop (dest) && !warning_suppressed_p (stmt, OPT_Wrestrict))
2076 {
2077 tree func = gimple_call_fndecl (stmt);
2078
2079 warning_at (loc, OPT_Wrestrict,
2080 "%qD source argument is the same as destination",
2081 func);
2082 }
2083
2084 replace_call_with_value (gsi, dest);
2085 return true;
2086 }
2087
2088 if (optimize_function_for_size_p (cfun))
2089 return false;
2090
2091 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2092 if (!fn)
2093 return false;
2094
2095 /* Set to non-null if ARG refers to an unterminated array. */
2096 tree nonstr = NULL;
2097 tree len = get_maxval_strlen (src, SRK_STRLEN, &nonstr);
2098
2099 if (nonstr)
2100 {
2101 /* Avoid folding calls with unterminated arrays. */
2102 if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
2103 warn_string_no_nul (loc, stmt, "strcpy", src, nonstr);
2104 suppress_warning (stmt, OPT_Wstringop_overread);
2105 return false;
2106 }
2107
2108 if (!len)
2109 return false;
2110
2111 len = fold_convert_loc (loc, size_type_node, len);
2112 len = size_binop_loc (loc, PLUS_EXPR, len, build_int_cst (size_type_node, 1));
2113 len = force_gimple_operand_gsi (gsi, len, true,
2114 NULL_TREE, true, GSI_SAME_STMT);
2115 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2116 replace_call_with_call_and_fold (gsi, repl);
2117 return true;
2118 }
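
/* A source-level sketch of the folding above, assuming the source is
   a known nul-terminated string:

     strcpy (d, "abc");
   becomes
     memcpy (d, "abc", 4);      strlen ("abc") + 1 bytes  */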
2119
2120 /* Fold a function call to the builtin strncpy with arguments DEST,
2121 SRC, and LEN. Replace the call and return true if a simplification
2122 was made; return false otherwise. */
2123
2124 static bool
2125 gimple_fold_builtin_strncpy (gimple_stmt_iterator *gsi,
2126 tree dest, tree src, tree len)
2127 {
2128 gimple *stmt = gsi_stmt (*gsi);
2129 location_t loc = gimple_location (stmt);
2130 bool nonstring = get_attr_nonstring_decl (dest) != NULL_TREE;
2131
2132 /* If the LEN parameter is zero, return DEST. */
2133 if (integer_zerop (len))
2134 {
2135 /* Avoid warning if the destination refers to an array/pointer
2136 decorated with attribute nonstring. */
2137 if (!nonstring)
2138 {
2139 tree fndecl = gimple_call_fndecl (stmt);
2140
2141 /* Warn about the lack of nul termination: the result is not
2142 a (nul-terminated) string. */
2143 tree slen = get_maxval_strlen (src, SRK_STRLEN);
2144 if (slen && !integer_zerop (slen))
2145 warning_at (loc, OPT_Wstringop_truncation,
2146 "%qD destination unchanged after copying no bytes "
2147 "from a string of length %E",
2148 fndecl, slen);
2149 else
2150 warning_at (loc, OPT_Wstringop_truncation,
2151 "%qD destination unchanged after copying no bytes",
2152 fndecl);
2153 }
2154
2155 replace_call_with_value (gsi, dest);
2156 return true;
2157 }
2158
2159 /* We can't compare slen with len as constants below if len is not a
2160 constant. */
2161 if (TREE_CODE (len) != INTEGER_CST)
2162 return false;
2163
2164 /* Now, we must be passed a constant src ptr parameter. */
2165 tree slen = get_maxval_strlen (src, SRK_STRLEN);
2166 if (!slen || TREE_CODE (slen) != INTEGER_CST)
2167 return false;
2168
2169 /* The size of the source string including the terminating nul. */
2170 tree ssize = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
2171
2172 /* We do not support simplification of this case, though we do
2173 support it when expanding trees into RTL. */
2174 /* FIXME: generate a call to __builtin_memset. */
2175 if (tree_int_cst_lt (ssize, len))
2176 return false;
2177
2178 /* Diagnose truncation that leaves the copy unterminated. */
2179 maybe_diag_stxncpy_trunc (*gsi, src, len);
2180
2181 /* OK transform into builtin memcpy. */
2182 tree fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2183 if (!fn)
2184 return false;
2185
2186 len = fold_convert_loc (loc, size_type_node, len);
2187 len = force_gimple_operand_gsi (gsi, len, true,
2188 NULL_TREE, true, GSI_SAME_STMT);
2189 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2190 replace_call_with_call_and_fold (gsi, repl);
2191
2192 return true;
2193 }
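
/* A source-level sketch of the strncpy folding above:

     strncpy (d, "abc", 4);     SSIZE == 4 == LEN
   becomes
     memcpy (d, "abc", 4);

   while strncpy (d, "abc", 8) is left alone because the required
   trailing zero padding is not expressible as a single memcpy.  */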
2194
2195 /* Fold function call to builtin strchr or strrchr.
2196 If both arguments are constant, evaluate and fold the result,
2197 otherwise simplify str(r)chr (str, 0) into str + strlen (str).
2198 In general strlen is significantly faster than strchr
2199 due to being a simpler operation. */
2200 static bool
2201 gimple_fold_builtin_strchr (gimple_stmt_iterator *gsi, bool is_strrchr)
2202 {
2203 gimple *stmt = gsi_stmt (*gsi);
2204 tree str = gimple_call_arg (stmt, 0);
2205 tree c = gimple_call_arg (stmt, 1);
2206 location_t loc = gimple_location (stmt);
2207 const char *p;
2208 char ch;
2209
2210 if (!gimple_call_lhs (stmt))
2211 return false;
2212
2213 /* Avoid folding if the first argument is not a nul-terminated array.
2214 Defer warning until later. */
2215 if (!check_nul_terminated_array (NULL_TREE, str))
2216 return false;
2217
2218 if ((p = c_getstr (str)) && target_char_cst_p (c, &ch))
2219 {
2220 const char *p1 = is_strrchr ? strrchr (p, ch) : strchr (p, ch);
2221
2222 if (p1 == NULL)
2223 {
2224 replace_call_with_value (gsi, integer_zero_node);
2225 return true;
2226 }
2227
2228 tree len = build_int_cst (size_type_node, p1 - p);
2229 gimple_seq stmts = NULL;
2230 gimple *new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2231 POINTER_PLUS_EXPR, str, len);
2232 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2233 gsi_replace_with_seq_vops (gsi, stmts);
2234 return true;
2235 }
2236
2237 if (!integer_zerop (c))
2238 return false;
2239
2240 /* Transform strrchr (s, 0) to strchr (s, 0) when optimizing for size. */
2241 if (is_strrchr && optimize_function_for_size_p (cfun))
2242 {
2243 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2244
2245 if (strchr_fn)
2246 {
2247 gimple *repl = gimple_build_call (strchr_fn, 2, str, c);
2248 replace_call_with_call_and_fold (gsi, repl);
2249 return true;
2250 }
2251
2252 return false;
2253 }
2254
2255 tree len;
2256 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2257
2258 if (!strlen_fn)
2259 return false;
2260
2261 /* Create newstr = strlen (str). */
2262 gimple_seq stmts = NULL;
2263 gimple *new_stmt = gimple_build_call (strlen_fn, 1, str);
2264 gimple_set_location (new_stmt, loc);
2265 len = create_tmp_reg_or_ssa_name (size_type_node);
2266 gimple_call_set_lhs (new_stmt, len);
2267 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2268
2269 /* Create (str p+ strlen (str)). */
2270 new_stmt = gimple_build_assign (gimple_call_lhs (stmt),
2271 POINTER_PLUS_EXPR, str, len);
2272 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2273 gsi_replace_with_seq_vops (gsi, stmts);
2274 /* gsi now points at the assignment to the lhs, get a
2275 stmt iterator to the strlen.
2276 ??? We can't use gsi_for_stmt as that doesn't work when the
2277 CFG isn't built yet. */
2278 gimple_stmt_iterator gsi2 = *gsi;
2279 gsi_prev (&gsi2);
2280 fold_stmt (&gsi2);
2281 return true;
2282 }
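
/* Sketches of the str(r)chr foldings above (constants illustrative):

     strchr ("abcd", 'c')   becomes   "abcd" + 2
     strchr (s, 0)          becomes   s + strlen (s)
     strrchr (s, 0)         becomes   strchr (s, 0)   when optimizing
                                                      for size  */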
2283
2284 /* Fold function call to builtin strstr.
2285 If both arguments are constant, evaluate and fold the result,
2286 additionally fold strstr (x, "") into x and strstr (x, "c")
2287 into strchr (x, 'c'). */
2288 static bool
2289 gimple_fold_builtin_strstr (gimple_stmt_iterator *gsi)
2290 {
2291 gimple *stmt = gsi_stmt (*gsi);
2292 if (!gimple_call_lhs (stmt))
2293 return false;
2294
2295 tree haystack = gimple_call_arg (stmt, 0);
2296 tree needle = gimple_call_arg (stmt, 1);
2297
2298 /* Avoid folding if either argument is not a nul-terminated array.
2299 Defer warning until later. */
2300 if (!check_nul_terminated_array (NULL_TREE, haystack)
2301 || !check_nul_terminated_array (NULL_TREE, needle))
2302 return false;
2303
2304 const char *q = c_getstr (needle);
2305 if (q == NULL)
2306 return false;
2307
2308 if (const char *p = c_getstr (haystack))
2309 {
2310 const char *r = strstr (p, q);
2311
2312 if (r == NULL)
2313 {
2314 replace_call_with_value (gsi, integer_zero_node);
2315 return true;
2316 }
2317
2318 tree len = build_int_cst (size_type_node, r - p);
2319 gimple_seq stmts = NULL;
2320 gimple *new_stmt
2321 = gimple_build_assign (gimple_call_lhs (stmt), POINTER_PLUS_EXPR,
2322 haystack, len);
2323 gimple_seq_add_stmt_without_update (&stmts, new_stmt);
2324 gsi_replace_with_seq_vops (gsi, stmts);
2325 return true;
2326 }
2327
2328 /* For strstr (x, "") return x. */
2329 if (q[0] == '\0')
2330 {
2331 replace_call_with_value (gsi, haystack);
2332 return true;
2333 }
2334
2335 /* Transform strstr (x, "c") into strchr (x, 'c'). */
2336 if (q[1] == '\0')
2337 {
2338 tree strchr_fn = builtin_decl_implicit (BUILT_IN_STRCHR);
2339 if (strchr_fn)
2340 {
2341 tree c = build_int_cst (integer_type_node, q[0]);
2342 gimple *repl = gimple_build_call (strchr_fn, 2, haystack, c);
2343 replace_call_with_call_and_fold (gsi, repl);
2344 return true;
2345 }
2346 }
2347
2348 return false;
2349 }
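
/* Sketches of the strstr foldings above:

     strstr ("abcde", "cd")   becomes   "abcde" + 2
     strstr (x, "")           becomes   x
     strstr (x, "c")          becomes   strchr (x, 'c')  */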
2350
2351 /* Simplify a call to the strcat builtin. DST and SRC are the arguments
2352 to the call.
2353
2354 Return false if no simplification was possible; otherwise replace
2355 the call and return true.
2356
2357 The simplified form may be a constant or other expression which
2358 computes the same value, but in a more efficient manner (including
2359 calls to other builtin functions such as strlen and memcpy). */
2368
2369 static bool
2370 gimple_fold_builtin_strcat (gimple_stmt_iterator *gsi, tree dst, tree src)
2371 {
2372 gimple *stmt = gsi_stmt (*gsi);
2373 location_t loc = gimple_location (stmt);
2374
2375 const char *p = c_getstr (src);
2376
2377 /* If the string length is zero, return the dst parameter. */
2378 if (p && *p == '\0')
2379 {
2380 replace_call_with_value (gsi, dst);
2381 return true;
2382 }
2383
2384 if (!optimize_bb_for_speed_p (gimple_bb (stmt)))
2385 return false;
2386
2387 /* See if we can store by pieces into (dst + strlen(dst)). */
2388 tree newdst;
2389 tree strlen_fn = builtin_decl_implicit (BUILT_IN_STRLEN);
2390 tree memcpy_fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
2391
2392 if (!strlen_fn || !memcpy_fn)
2393 return false;
2394
2395 /* If the length of the source string isn't computable don't
2396 split strcat into strlen and memcpy. */
2397 tree len = get_maxval_strlen (src, SRK_STRLEN);
2398 if (! len)
2399 return false;
2400
2401 /* Create strlen (dst). */
2402 gimple_seq stmts = NULL, stmts2;
2403 gimple *repl = gimple_build_call (strlen_fn, 1, dst);
2404 gimple_set_location (repl, loc);
2405 newdst = create_tmp_reg_or_ssa_name (size_type_node);
2406 gimple_call_set_lhs (repl, newdst);
2407 gimple_seq_add_stmt_without_update (&stmts, repl);
2408
2409 /* Create (dst p+ strlen (dst)). */
2410 newdst = fold_build_pointer_plus_loc (loc, dst, newdst);
2411 newdst = force_gimple_operand (newdst, &stmts2, true, NULL_TREE);
2412 gimple_seq_add_seq_without_update (&stmts, stmts2);
2413
2414 len = fold_convert_loc (loc, size_type_node, len);
2415 len = size_binop_loc (loc, PLUS_EXPR, len,
2416 build_int_cst (size_type_node, 1));
2417 len = force_gimple_operand (len, &stmts2, true, NULL_TREE);
2418 gimple_seq_add_seq_without_update (&stmts, stmts2);
2419
2420 repl = gimple_build_call (memcpy_fn, 3, newdst, src, len);
2421 gimple_seq_add_stmt_without_update (&stmts, repl);
2422 if (gimple_call_lhs (stmt))
2423 {
2424 repl = gimple_build_assign (gimple_call_lhs (stmt), dst);
2425 gimple_seq_add_stmt_without_update (&stmts, repl);
2426 gsi_replace_with_seq_vops (gsi, stmts);
2427 /* gsi now points at the assignment to the lhs, get a
2428 stmt iterator to the memcpy call.
2429 ??? We can't use gsi_for_stmt as that doesn't work when the
2430 CFG isn't built yet. */
2431 gimple_stmt_iterator gsi2 = *gsi;
2432 gsi_prev (&gsi2);
2433 fold_stmt (&gsi2);
2434 }
2435 else
2436 {
2437 gsi_replace_with_seq_vops (gsi, stmts);
2438 fold_stmt (gsi);
2439 }
2440 return true;
2441 }
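
/* A source-level sketch of the strcat split above, assuming
   strlen (SRC) is known to be 3:

     strcat (d, s);
   becomes
     tmp = strlen (d);
     memcpy (d + tmp, s, 4);    3 + 1 bytes including the nul  */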
2442
2443 /* Fold a call to the __strcat_chk builtin. DEST, SRC, and SIZE
2444 are the arguments to the call. */
2445
2446 static bool
2447 gimple_fold_builtin_strcat_chk (gimple_stmt_iterator *gsi)
2448 {
2449 gimple *stmt = gsi_stmt (*gsi);
2450 tree dest = gimple_call_arg (stmt, 0);
2451 tree src = gimple_call_arg (stmt, 1);
2452 tree size = gimple_call_arg (stmt, 2);
2453 tree fn;
2454 const char *p;
2455
2456
2457 p = c_getstr (src);
2458 /* If the SRC parameter is "", return DEST. */
2459 if (p && *p == '\0')
2460 {
2461 replace_call_with_value (gsi, dest);
2462 return true;
2463 }
2464
2465 if (! tree_fits_uhwi_p (size) || ! integer_all_onesp (size))
2466 return false;
2467
2468 /* If __builtin_strcat_chk is used, assume strcat is available. */
2469 fn = builtin_decl_explicit (BUILT_IN_STRCAT);
2470 if (!fn)
2471 return false;
2472
2473 gimple *repl = gimple_build_call (fn, 2, dest, src);
2474 replace_call_with_call_and_fold (gsi, repl);
2475 return true;
2476 }
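
/* A sketch of the lowering above: with no object-size information,
   i.e. SIZE == (size_t) -1,

     __strcat_chk (d, s, -1)   becomes   strcat (d, s)  */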
2477
2478 /* Simplify a call to the strncat builtin. */
2479
2480 static bool
2481 gimple_fold_builtin_strncat (gimple_stmt_iterator *gsi)
2482 {
2483 gimple *stmt = gsi_stmt (*gsi);
2484 tree dst = gimple_call_arg (stmt, 0);
2485 tree src = gimple_call_arg (stmt, 1);
2486 tree len = gimple_call_arg (stmt, 2);
2487 tree src_len = c_strlen (src, 1);
2488
2489 /* If the requested length is zero, or the src parameter string
2490 length is zero, return the dst parameter. */
2491 if (integer_zerop (len) || (src_len && integer_zerop (src_len)))
2492 {
2493 replace_call_with_value (gsi, dst);
2494 return true;
2495 }
2496
2497 /* Return early if the requested len is less than the string length.
2498 Warnings will be issued elsewhere later. */
2499 if (!src_len || known_lower (stmt, len, src_len, true))
2500 return false;
2501
2502 /* Warn on constant LEN. */
2503 if (TREE_CODE (len) == INTEGER_CST)
2504 {
2505 bool nowarn = warning_suppressed_p (stmt, OPT_Wstringop_overflow_);
2506 tree dstsize;
2507
2508 if (!nowarn && compute_builtin_object_size (dst, 1, &dstsize)
2509 && TREE_CODE (dstsize) == INTEGER_CST)
2510 {
2511 int cmpdst = tree_int_cst_compare (len, dstsize);
2512
2513 if (cmpdst >= 0)
2514 {
2515 tree fndecl = gimple_call_fndecl (stmt);
2516
2517 /* Strncat copies (at most) LEN bytes and always appends
2518 the terminating NUL so the specified bound should never
2519 be equal to (or greater than) the size of the destination.
2520 If it is, the copy could overflow. */
2521 location_t loc = gimple_location (stmt);
2522 nowarn = warning_at (loc, OPT_Wstringop_overflow_,
2523 cmpdst == 0
2524 ? G_("%qD specified bound %E equals "
2525 "destination size")
2526 : G_("%qD specified bound %E exceeds "
2527 "destination size %E"),
2528 fndecl, len, dstsize);
2529 if (nowarn)
2530 suppress_warning (stmt, OPT_Wstringop_overflow_);
2531 }
2532 }
2533
2534 if (!nowarn && TREE_CODE (src_len) == INTEGER_CST
2535 && tree_int_cst_compare (src_len, len) == 0)
2536 {
2537 tree fndecl = gimple_call_fndecl (stmt);
2538 location_t loc = gimple_location (stmt);
2539
2540 /* To avoid possible overflow the specified bound should also
2541 not be equal to the length of the source, even when the size
2542 of the destination is unknown (it's not an uncommon mistake
2543 to specify the length of the source as the bound to strncat). */
2544 if (warning_at (loc, OPT_Wstringop_overflow_,
2545 "%qD specified bound %E equals source length",
2546 fndecl, len))
2547 suppress_warning (stmt, OPT_Wstringop_overflow_);
2548 }
2549 }
2550
2551 if (!known_lower (stmt, src_len, len))
2552 return false;
2553
2554 tree fn = builtin_decl_implicit (BUILT_IN_STRCAT);
2555
2556 /* If the replacement _DECL isn't initialized, don't do the
2557 transformation. */
2558 if (!fn)
2559 return false;
2560
2561 /* Otherwise, emit a call to strcat. */
2562 gcall *repl = gimple_build_call (fn, 2, dst, src);
2563 replace_call_with_call_and_fold (gsi, repl);
2564 return true;
2565 }
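
/* A sketch of the strncat folding above: when the bound is known to
   be at least the source length, e.g.

     strncat (d, "ab", 5);
   it becomes
     strcat (d, "ab");

   since at most strlen ("ab") bytes are appended either way.  */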
2566
2567 /* Fold a call to the __strncat_chk builtin with arguments DEST, SRC,
2568 LEN, and SIZE. */
2569
2570 static bool
2571 gimple_fold_builtin_strncat_chk (gimple_stmt_iterator *gsi)
2572 {
2573 gimple *stmt = gsi_stmt (*gsi);
2574 tree dest = gimple_call_arg (stmt, 0);
2575 tree src = gimple_call_arg (stmt, 1);
2576 tree len = gimple_call_arg (stmt, 2);
2577 tree size = gimple_call_arg (stmt, 3);
2578 tree fn;
2579 const char *p;
2580
2581 p = c_getstr (src);
2582 /* If the SRC parameter is "" or if LEN is 0, return DEST. */
2583 if ((p && *p == '\0')
2584 || integer_zerop (len))
2585 {
2586 replace_call_with_value (gsi, dest);
2587 return true;
2588 }
2589
2590 if (! integer_all_onesp (size))
2591 {
2592 tree src_len = c_strlen (src, 1);
2593 if (known_lower (stmt, src_len, len))
2594 {
2595 /* If LEN >= strlen (SRC), optimize into __strcat_chk. */
2596 fn = builtin_decl_explicit (BUILT_IN_STRCAT_CHK);
2597 if (!fn)
2598 return false;
2599
2600 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
2601 replace_call_with_call_and_fold (gsi, repl);
2602 return true;
2603 }
2604 return false;
2605 }
2606
2607 /* If __builtin_strncat_chk is used, assume strncat is available. */
2608 fn = builtin_decl_explicit (BUILT_IN_STRNCAT);
2609 if (!fn)
2610 return false;
2611
2612 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
2613 replace_call_with_call_and_fold (gsi, repl);
2614 return true;
2615 }
2616
2617 /* Build and append gimple statements to STMTS that load the first
2618 character of the memory location identified by STR. LOC is the
2619 location of the statement. */
2620
2621 static tree
2622 gimple_load_first_char (location_t loc, tree str, gimple_seq *stmts)
2623 {
2624 tree var;
2625
2626 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
2627 tree cst_uchar_ptr_node
2628 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
2629 tree off0 = build_int_cst (cst_uchar_ptr_node, 0);
2630
2631 tree temp = fold_build2_loc (loc, MEM_REF, cst_uchar_node, str, off0);
2632 gassign *stmt = gimple_build_assign (NULL_TREE, temp);
2633 var = create_tmp_reg_or_ssa_name (cst_uchar_node, stmt);
2634
2635 gimple_assign_set_lhs (stmt, var);
2636 gimple_seq_add_stmt_without_update (stmts, stmt);
2637
2638 return var;
2639 }
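
/* The helper above emits GIMPLE corresponding roughly to

     var = *(const unsigned char *) str;

   and returns VAR so callers can compare or subtract first
   characters directly.  */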
2640
2641 /* Fold a call to the str{n}{case}cmp builtin pointed by GSI iterator. */
2642
2643 static bool
2644 gimple_fold_builtin_string_compare (gimple_stmt_iterator *gsi)
2645 {
2646 gimple *stmt = gsi_stmt (*gsi);
2647 tree callee = gimple_call_fndecl (stmt);
2648 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
2649
2650 tree type = integer_type_node;
2651 tree str1 = gimple_call_arg (stmt, 0);
2652 tree str2 = gimple_call_arg (stmt, 1);
2653 tree lhs = gimple_call_lhs (stmt);
2654
2655 tree bound_node = NULL_TREE;
2656 unsigned HOST_WIDE_INT bound = HOST_WIDE_INT_M1U;
2657
2658 /* Handle strncmp and strncasecmp functions. */
2659 if (gimple_call_num_args (stmt) == 3)
2660 {
2661 bound_node = gimple_call_arg (stmt, 2);
2662 if (tree_fits_uhwi_p (bound_node))
2663 bound = tree_to_uhwi (bound_node);
2664 }
2665
2666 /* If the BOUND parameter is zero, return zero. */
2667 if (bound == 0)
2668 {
2669 replace_call_with_value (gsi, integer_zero_node);
2670 return true;
2671 }
2672
2673 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
2674 if (operand_equal_p (str1, str2, 0))
2675 {
2676 replace_call_with_value (gsi, integer_zero_node);
2677 return true;
2678 }
2679
2680 /* Initially set to the number of characters, including the terminating
2681 nul if each array has one. LENx == strnlen (Sx, LENx) implies that
2682 the array Sx is not terminated by a nul.
2683 For nul-terminated strings LENx is then adjusted down to the
2684 string length so that LENx == NULPOSx holds. */
2685 unsigned HOST_WIDE_INT len1 = HOST_WIDE_INT_MAX, len2 = len1;
2686 const char *p1 = getbyterep (str1, &len1);
2687 const char *p2 = getbyterep (str2, &len2);
2688
2689 /* The position of the terminating nul character if one exists, otherwise
2690 a value greater than LENx. */
2691 unsigned HOST_WIDE_INT nulpos1 = HOST_WIDE_INT_MAX, nulpos2 = nulpos1;
2692
2693 if (p1)
2694 {
2695 size_t n = strnlen (p1, len1);
2696 if (n < len1)
2697 len1 = nulpos1 = n;
2698 }
2699
2700 if (p2)
2701 {
2702 size_t n = strnlen (p2, len2);
2703 if (n < len2)
2704 len2 = nulpos2 = n;
2705 }
2706
2707 /* For known strings, return an immediate value. */
2708 if (p1 && p2)
2709 {
2710 int r = 0;
2711 bool known_result = false;
2712
2713 switch (fcode)
2714 {
2715 case BUILT_IN_STRCMP:
2716 case BUILT_IN_STRCMP_EQ:
2717 if (len1 != nulpos1 || len2 != nulpos2)
2718 break;
2719
2720 r = strcmp (p1, p2);
2721 known_result = true;
2722 break;
2723
2724 case BUILT_IN_STRNCMP:
2725 case BUILT_IN_STRNCMP_EQ:
2726 {
2727 if (bound == HOST_WIDE_INT_M1U)
2728 break;
2729
2730 /* Reduce the bound to be no more than the length
2731 of the shorter of the two strings, or the sizes
2732 of the unterminated arrays. */
2733 unsigned HOST_WIDE_INT n = bound;
2734
2735 if (len1 == nulpos1 && len1 < n)
2736 n = len1 + 1;
2737 if (len2 == nulpos2 && len2 < n)
2738 n = len2 + 1;
2739
2740 if (MIN (nulpos1, nulpos2) + 1 < n)
2741 break;
2742
2743 r = strncmp (p1, p2, n);
2744 known_result = true;
2745 break;
2746 }
2747 /* The only handleable situation is where the strings are equal (result
2748 0), which is already handled by the operand_equal_p case above. */
2749 case BUILT_IN_STRCASECMP:
2750 break;
2751 case BUILT_IN_STRNCASECMP:
2752 {
2753 if (bound == HOST_WIDE_INT_M1U)
2754 break;
2755 r = strncmp (p1, p2, bound);
2756 if (r == 0)
2757 known_result = true;
2758 break;
2759 }
2760 default:
2761 gcc_unreachable ();
2762 }
2763
2764 if (known_result)
2765 {
2766 replace_call_with_value (gsi, build_cmp_result (type, r));
2767 return true;
2768 }
2769 }
2770
2771 bool nonzero_bound = (bound >= 1 && bound < HOST_WIDE_INT_M1U)
2772 || fcode == BUILT_IN_STRCMP
2773 || fcode == BUILT_IN_STRCMP_EQ
2774 || fcode == BUILT_IN_STRCASECMP;
2775
2776 location_t loc = gimple_location (stmt);
2777
2778 /* If the second arg is "", return *(const unsigned char*)arg1. */
2779 if (p2 && *p2 == '\0' && nonzero_bound)
2780 {
2781 gimple_seq stmts = NULL;
2782 tree var = gimple_load_first_char (loc, str1, &stmts);
2783 if (lhs)
2784 {
2785 stmt = gimple_build_assign (lhs, NOP_EXPR, var);
2786 gimple_seq_add_stmt_without_update (&stmts, stmt);
2787 }
2788
2789 gsi_replace_with_seq_vops (gsi, stmts);
2790 return true;
2791 }
2792
2793 /* If the first arg is "", return -*(const unsigned char*)arg2. */
2794 if (p1 && *p1 == '\0' && nonzero_bound)
2795 {
2796 gimple_seq stmts = NULL;
2797 tree var = gimple_load_first_char (loc, str2, &stmts);
2798
2799 if (lhs)
2800 {
2801 tree c = create_tmp_reg_or_ssa_name (integer_type_node);
2802 stmt = gimple_build_assign (c, NOP_EXPR, var);
2803 gimple_seq_add_stmt_without_update (&stmts, stmt);
2804
2805 stmt = gimple_build_assign (lhs, NEGATE_EXPR, c);
2806 gimple_seq_add_stmt_without_update (&stmts, stmt);
2807 }
2808
2809 gsi_replace_with_seq_vops (gsi, stmts);
2810 return true;
2811 }
2812
2813 /* If BOUND is one, return an expression corresponding to
2814 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
2815 if (fcode == BUILT_IN_STRNCMP && bound == 1)
2816 {
2817 gimple_seq stmts = NULL;
2818 tree temp1 = gimple_load_first_char (loc, str1, &stmts);
2819 tree temp2 = gimple_load_first_char (loc, str2, &stmts);
2820
2821 if (lhs)
2822 {
2823 tree c1 = create_tmp_reg_or_ssa_name (integer_type_node);
2824 gassign *convert1 = gimple_build_assign (c1, NOP_EXPR, temp1);
2825 gimple_seq_add_stmt_without_update (&stmts, convert1);
2826
2827 tree c2 = create_tmp_reg_or_ssa_name (integer_type_node);
2828 gassign *convert2 = gimple_build_assign (c2, NOP_EXPR, temp2);
2829 gimple_seq_add_stmt_without_update (&stmts, convert2);
2830
2831 stmt = gimple_build_assign (lhs, MINUS_EXPR, c1, c2);
2832 gimple_seq_add_stmt_without_update (&stmts, stmt);
2833 }
2834
2835 gsi_replace_with_seq_vops (gsi, stmts);
2836 return true;
2837 }
2838
2839 /* If BOUND is greater than the length of one constant string,
2840 and the other argument is also a nul-terminated string, replace
2841 strncmp with strcmp. */
2842 if (fcode == BUILT_IN_STRNCMP
2843 && bound > 0 && bound < HOST_WIDE_INT_M1U
2844 && ((p2 && len2 < bound && len2 == nulpos2)
2845 || (p1 && len1 < bound && len1 == nulpos1)))
2846 {
2847 tree fn = builtin_decl_implicit (BUILT_IN_STRCMP);
2848 if (!fn)
2849 return false;
2850 gimple *repl = gimple_build_call (fn, 2, str1, str2);
2851 replace_call_with_call_and_fold (gsi, repl);
2852 return true;
2853 }
2854
2855 return false;
2856 }
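
/* Sketches of the comparison foldings above:

     strcmp (s, s)          becomes   0
     strcmp (s, "")         becomes   *(const unsigned char *) s
     strncmp (s, t, 1)      becomes   *(const unsigned char *) s
                                      - *(const unsigned char *) t
     strncmp (s, "ab", 4)   becomes   strcmp (s, "ab")  */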
2857
2858 /* Fold a call to the memchr pointed by GSI iterator. */
2859
2860 static bool
2861 gimple_fold_builtin_memchr (gimple_stmt_iterator *gsi)
2862 {
2863 gimple *stmt = gsi_stmt (*gsi);
2864 tree lhs = gimple_call_lhs (stmt);
2865 tree arg1 = gimple_call_arg (stmt, 0);
2866 tree arg2 = gimple_call_arg (stmt, 1);
2867 tree len = gimple_call_arg (stmt, 2);
2868
2869 /* If the LEN parameter is zero, return zero. */
2870 if (integer_zerop (len))
2871 {
2872 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2873 return true;
2874 }
2875
2876 char c;
2877 if (TREE_CODE (arg2) != INTEGER_CST
2878 || !tree_fits_uhwi_p (len)
2879 || !target_char_cst_p (arg2, &c))
2880 return false;
2881
2882 unsigned HOST_WIDE_INT length = tree_to_uhwi (len);
2883 unsigned HOST_WIDE_INT string_length;
2884 const char *p1 = getbyterep (arg1, &string_length);
2885
2886 if (p1)
2887 {
2888 const char *r = (const char *)memchr (p1, c, MIN (length, string_length));
2889 if (r == NULL)
2890 {
2891 tree mem_size, offset_node;
2892 byte_representation (arg1, &offset_node, &mem_size, NULL);
2893 unsigned HOST_WIDE_INT offset = (offset_node == NULL_TREE)
2894 ? 0 : tree_to_uhwi (offset_node);
2895 /* MEM_SIZE is the size of the array the string literal
2896 is stored in. */
2897 unsigned HOST_WIDE_INT string_size = tree_to_uhwi (mem_size) - offset;
2898 gcc_checking_assert (string_length <= string_size);
2899 if (length <= string_size)
2900 {
2901 replace_call_with_value (gsi, build_int_cst (ptr_type_node, 0));
2902 return true;
2903 }
2904 }
2905 else
2906 {
2907 unsigned HOST_WIDE_INT offset = r - p1;
2908 gimple_seq stmts = NULL;
2909 if (lhs != NULL_TREE)
2910 {
2911 tree offset_cst = build_int_cst (sizetype, offset);
2912 gassign *stmt = gimple_build_assign (lhs, POINTER_PLUS_EXPR,
2913 arg1, offset_cst);
2914 gimple_seq_add_stmt_without_update (&stmts, stmt);
2915 }
2916 else
2917 gimple_seq_add_stmt_without_update (&stmts,
2918 gimple_build_nop ());
2919
2920 gsi_replace_with_seq_vops (gsi, stmts);
2921 return true;
2922 }
2923 }
2924
2925 return false;
2926 }
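
/* Sketches of the memchr foldings above:

     memchr ("abcd", 'c', 4)   becomes   "abcd" + 2
     memchr ("abcd", 'e', 4)   becomes   (void *) 0

   the not-found case only when the bound does not read past the
   underlying array.  */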
2927
2928 /* Fold a call to the fputs builtin. ARG0 and ARG1 are the arguments
2929 to the call. UNLOCKED is true if this is actually a call to
2930 fputs_unlocked, in which case the unlocked variants of fputc and
2931 fwrite are assumed to be available. The transformation is skipped
2932 when the call's return value is used. Return false if no
2933 simplification was possible. */
2934
2935 static bool
2936 gimple_fold_builtin_fputs (gimple_stmt_iterator *gsi,
2937 tree arg0, tree arg1,
2938 bool unlocked)
2939 {
2940 gimple *stmt = gsi_stmt (*gsi);
2941
2942 /* If we're using an unlocked function, assume the other unlocked
2943 functions exist explicitly. */
2944 tree const fn_fputc = (unlocked
2945 ? builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED)
2946 : builtin_decl_implicit (BUILT_IN_FPUTC));
2947 tree const fn_fwrite = (unlocked
2948 ? builtin_decl_explicit (BUILT_IN_FWRITE_UNLOCKED)
2949 : builtin_decl_implicit (BUILT_IN_FWRITE));
2950
2951 /* If the return value is used, don't do the transformation. */
2952 if (gimple_call_lhs (stmt))
2953 return false;
2954
2955 /* Get the length of the string passed to fputs. If the length
2956 can't be determined, punt. */
2957 tree len = get_maxval_strlen (arg0, SRK_STRLEN);
2958 if (!len
2959 || TREE_CODE (len) != INTEGER_CST)
2960 return false;
2961
2962 switch (compare_tree_int (len, 1))
2963 {
2964 case -1: /* Length is 0, delete the call entirely. */
2965 replace_call_with_value (gsi, integer_zero_node);
2966 return true;
2967
2968 case 0: /* length is 1, call fputc. */
2969 {
2970 const char *p = c_getstr (arg0);
2971 if (p != NULL)
2972 {
2973 if (!fn_fputc)
2974 return false;
2975
2976 gimple *repl = gimple_build_call (fn_fputc, 2,
2977 build_int_cst
2978 (integer_type_node, p[0]), arg1);
2979 replace_call_with_call_and_fold (gsi, repl);
2980 return true;
2981 }
2982 }
2983 /* FALLTHROUGH */
2984 case 1: /* length is greater than 1, call fwrite. */
2985 {
2986 /* If optimizing for size keep fputs. */
2987 if (optimize_function_for_size_p (cfun))
2988 return false;
2989 /* New argument list transforming fputs(string, stream) to
2990 fwrite(string, 1, len, stream). */
2991 if (!fn_fwrite)
2992 return false;
2993
2994 gimple *repl = gimple_build_call (fn_fwrite, 4, arg0,
2995 size_one_node, len, arg1);
2996 replace_call_with_call_and_fold (gsi, repl);
2997 return true;
2998 }
2999 default:
3000 gcc_unreachable ();
3001 }
3002 }
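
/* Sketches of the fputs foldings above (return value unused):

     fputs ("", f)      is removed
     fputs ("a", f)     becomes   fputc ('a', f)
     fputs ("abc", f)   becomes   fwrite ("abc", 1, 3, f)   unless
                                  optimizing for size  */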
3003
3004 /* Fold a call to the __mem{cpy,pcpy,move,set}_chk builtin.
3005 DEST, SRC, LEN, and SIZE are the arguments to the call.
3006 FCODE is the BUILT_IN_* code of the builtin. The call is lowered
3007 to the unchecked variant when LEN (or its maximum value) is known
3008 not to exceed SIZE. */
3009
3010 static bool
3011 gimple_fold_builtin_memory_chk (gimple_stmt_iterator *gsi,
3012 tree dest, tree src, tree len, tree size,
3013 enum built_in_function fcode)
3014 {
3015 gimple *stmt = gsi_stmt (*gsi);
3016 location_t loc = gimple_location (stmt);
3017 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3018 tree fn;
3019
3020 /* If SRC and DEST are the same (and not volatile), return DEST
3021 (resp. DEST+LEN for __mempcpy_chk). */
3022 if (fcode != BUILT_IN_MEMSET_CHK && operand_equal_p (src, dest, 0))
3023 {
3024 if (fcode != BUILT_IN_MEMPCPY_CHK)
3025 {
3026 replace_call_with_value (gsi, dest);
3027 return true;
3028 }
3029 else
3030 {
3031 gimple_seq stmts = NULL;
3032 len = gimple_convert_to_ptrofftype (&stmts, loc, len);
3033 tree temp = gimple_build (&stmts, loc, POINTER_PLUS_EXPR,
3034 TREE_TYPE (dest), dest, len);
3035 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3036 replace_call_with_value (gsi, temp);
3037 return true;
3038 }
3039 }
3040
3041 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3042 if (! integer_all_onesp (size)
3043 && !known_lower (stmt, len, size)
3044 && !known_lower (stmt, maxlen, size))
3045 {
3046 /* MAXLEN and LEN both cannot be proved to be less than SIZE, at
3047 least try to optimize (void) __mempcpy_chk () into
3048 (void) __memcpy_chk () */
3049 if (fcode == BUILT_IN_MEMPCPY_CHK && ignore)
3050 {
3051 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
3052 if (!fn)
3053 return false;
3054
3055 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3056 replace_call_with_call_and_fold (gsi, repl);
3057 return true;
3058 }
3059 return false;
3060 }
3061
3062 fn = NULL_TREE;
3063 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
3064 mem{cpy,pcpy,move,set} is available. */
3065 switch (fcode)
3066 {
3067 case BUILT_IN_MEMCPY_CHK:
3068 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
3069 break;
3070 case BUILT_IN_MEMPCPY_CHK:
3071 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
3072 break;
3073 case BUILT_IN_MEMMOVE_CHK:
3074 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
3075 break;
3076 case BUILT_IN_MEMSET_CHK:
3077 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
3078 break;
3079 default:
3080 break;
3081 }
3082
3083 if (!fn)
3084 return false;
3085
3086 gimple *repl = gimple_build_call (fn, 3, dest, src, len);
3087 replace_call_with_call_and_fold (gsi, repl);
3088 return true;
3089 }
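
/* A sketch of the _chk lowering above, with an 8-byte destination:

     __memcpy_chk (d, s, 4, 8)   becomes   memcpy (d, s, 4)

   since LEN 4 is provably within the object size 8; when that cannot
   be shown the checking call is kept.  */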
3090
3091 /* Print a message in the dump file recording transformation of FROM to TO. */
3092
3093 static void
3094 dump_transformation (gcall *from, gcall *to)
3095 {
3096 if (dump_enabled_p ())
3097 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, from, "simplified %T to %T\n",
3098 gimple_call_fn (from), gimple_call_fn (to));
3099 }
3100
3101 /* Fold a call to the __st[rp]cpy_chk builtin.
3102 DEST, SRC, and SIZE are the arguments to the call.
3103 FCODE is the BUILT_IN_* code of the builtin. The call is lowered
3104 to the unchecked variant when the length of SRC (or its maximum
3105 value) is known to be less than SIZE. */
3106
3107 static bool
3108 gimple_fold_builtin_stxcpy_chk (gimple_stmt_iterator *gsi,
3109 tree dest,
3110 tree src, tree size,
3111 enum built_in_function fcode)
3112 {
3113 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3114 location_t loc = gimple_location (stmt);
3115 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3116 tree len, fn;
3117
3118 /* If SRC and DEST are the same (and not volatile), return DEST. */
3119 if (fcode == BUILT_IN_STRCPY_CHK && operand_equal_p (src, dest, 0))
3120 {
3121 /* Issue -Wrestrict unless the pointers are null (those do
3122 not point to objects and so do not indicate an overlap;
3123 such calls could be the result of sanitization and jump
3124 threading). */
3125 if (!integer_zerop (dest)
3126 && !warning_suppressed_p (stmt, OPT_Wrestrict))
3127 {
3128 tree func = gimple_call_fndecl (stmt);
3129
3130 warning_at (loc, OPT_Wrestrict,
3131 "%qD source argument is the same as destination",
3132 func);
3133 }
3134
3135 replace_call_with_value (gsi, dest);
3136 return true;
3137 }
3138
3139 tree maxlen = get_maxval_strlen (src, SRK_STRLENMAX);
3140 if (! integer_all_onesp (size))
3141 {
3142 len = c_strlen (src, 1);
3143 if (!known_lower (stmt, len, size, true)
3144 && !known_lower (stmt, maxlen, size, true))
3145 {
3146 if (fcode == BUILT_IN_STPCPY_CHK)
3147 {
3148 if (! ignore)
3149 return false;
3150
3151 /* If return value of __stpcpy_chk is ignored,
3152 optimize into __strcpy_chk. */
3153 fn = builtin_decl_explicit (BUILT_IN_STRCPY_CHK);
3154 if (!fn)
3155 return false;
3156
3157 gimple *repl = gimple_build_call (fn, 3, dest, src, size);
3158 replace_call_with_call_and_fold (gsi, repl);
3159 return true;
3160 }
3161
3162 if (! len || TREE_SIDE_EFFECTS (len))
3163 return false;
3164
3165 /* If c_strlen returned something, but not provably less than size,
3166 transform __strcpy_chk into __memcpy_chk. */
3167 fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
3168 if (!fn)
3169 return false;
3170
3171 gimple_seq stmts = NULL;
3172 len = force_gimple_operand (len, &stmts, true, NULL_TREE);
3173 len = gimple_convert (&stmts, loc, size_type_node, len);
3174 len = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node, len,
3175 build_int_cst (size_type_node, 1));
3176 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3177 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3178 replace_call_with_call_and_fold (gsi, repl);
3179 return true;
3180 }
3181 }
3182
3183 /* If __builtin_st{r,p}cpy_chk is used, assume st{r,p}cpy is available. */
3184 fn = builtin_decl_explicit (fcode == BUILT_IN_STPCPY_CHK && !ignore
3185 ? BUILT_IN_STPCPY : BUILT_IN_STRCPY);
3186 if (!fn)
3187 return false;
3188
3189 gcall *repl = gimple_build_call (fn, 2, dest, src);
3190 dump_transformation (stmt, repl);
3191 replace_call_with_call_and_fold (gsi, repl);
3192 return true;
3193 }
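
/* Sketches of the __strcpy_chk lowerings above:

     __strcpy_chk (d, "abc", 8)   becomes   strcpy (d, "abc")

   and when c_strlen (SRC) is known but not provably below SIZE the
   call becomes __memcpy_chk (dest, src, len + 1, size) so the object
   size check is preserved.  */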
3194
3195 /* Fold a call to the __st{r,p}ncpy_chk builtin. DEST, SRC, LEN, and SIZE
3196 are the arguments to the call. FCODE is the BUILT_IN_* code of
3197 the builtin. The call is lowered to the unchecked variant when
3198 LEN (or its maximum value) is known not to exceed SIZE. */
3199
3200 static bool
3201 gimple_fold_builtin_stxncpy_chk (gimple_stmt_iterator *gsi,
3202 tree dest, tree src,
3203 tree len, tree size,
3204 enum built_in_function fcode)
3205 {
3206 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3207 bool ignore = gimple_call_lhs (stmt) == NULL_TREE;
3208 tree fn;
3209
3210 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3211 if (! integer_all_onesp (size)
3212 && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
3213 {
3214 if (fcode == BUILT_IN_STPNCPY_CHK && ignore)
3215 {
3216 /* If return value of __stpncpy_chk is ignored,
3217 optimize into __strncpy_chk. */
3218 fn = builtin_decl_explicit (BUILT_IN_STRNCPY_CHK);
3219 if (fn)
3220 {
3221 gimple *repl = gimple_build_call (fn, 4, dest, src, len, size);
3222 replace_call_with_call_and_fold (gsi, repl);
3223 return true;
3224 }
3225 }
3226 return false;
3227 }
3228
3229 /* If __builtin_st{r,p}ncpy_chk is used, assume st{r,p}ncpy is available. */
3230 fn = builtin_decl_explicit (fcode == BUILT_IN_STPNCPY_CHK && !ignore
3231 ? BUILT_IN_STPNCPY : BUILT_IN_STRNCPY);
3232 if (!fn)
3233 return false;
3234
3235 gcall *repl = gimple_build_call (fn, 3, dest, src, len);
3236 dump_transformation (stmt, repl);
3237 replace_call_with_call_and_fold (gsi, repl);
3238 return true;
3239 }
3240
3241 /* Fold a function call to the builtin stpcpy with arguments DEST and
3242 SRC. Return false if no simplification can be made. */
3243
3244 static bool
3245 gimple_fold_builtin_stpcpy (gimple_stmt_iterator *gsi)
3246 {
3247 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3248 location_t loc = gimple_location (stmt);
3249 tree dest = gimple_call_arg (stmt, 0);
3250 tree src = gimple_call_arg (stmt, 1);
3251 tree fn, lenp1;
3252
3253 /* If the result is unused, replace stpcpy with strcpy. */
3254 if (gimple_call_lhs (stmt) == NULL_TREE)
3255 {
3256 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3257 if (!fn)
3258 return false;
3259 gimple_call_set_fndecl (stmt, fn);
3260 fold_stmt (gsi);
3261 return true;
3262 }
3263
3264 /* Set to non-null if SRC refers to an unterminated array. */
3265 c_strlen_data data = { };
3266 /* The size of the unterminated array if SRC refers to one. */
3267 tree size;
3268 /* True if the size is exact/constant, false if it's the lower bound
3269 of a range. */
3270 bool exact;
3271 tree len = c_strlen (src, 1, &data, 1);
3272 if (!len
3273 || TREE_CODE (len) != INTEGER_CST)
3274 {
3275 data.decl = unterminated_array (src, &size, &exact);
3276 if (!data.decl)
3277 return false;
3278 }
3279
3280 if (data.decl)
3281 {
3282 /* Avoid folding calls with unterminated arrays. */
3283 if (!warning_suppressed_p (stmt, OPT_Wstringop_overread))
3284 warn_string_no_nul (loc, stmt, "stpcpy", src, data.decl, size,
3285 exact);
3286 suppress_warning (stmt, OPT_Wstringop_overread);
3287 return false;
3288 }
3289
3290 if (optimize_function_for_size_p (cfun)
3291 /* If length is zero it's small enough. */
3292 && !integer_zerop (len))
3293 return false;
3294
3295 /* If the source has a known length replace stpcpy with memcpy. */
3296 fn = builtin_decl_implicit (BUILT_IN_MEMCPY);
3297 if (!fn)
3298 return false;
3299
3300 gimple_seq stmts = NULL;
3301 tree tem = gimple_convert (&stmts, loc, size_type_node, len);
3302 lenp1 = gimple_build (&stmts, loc, PLUS_EXPR, size_type_node,
3303 tem, build_int_cst (size_type_node, 1));
3304 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3305 gcall *repl = gimple_build_call (fn, 3, dest, src, lenp1);
3306 gimple_move_vops (repl, stmt);
3307 gsi_insert_before (gsi, repl, GSI_SAME_STMT);
3308 /* Replace the result with dest + len. */
3309 stmts = NULL;
3310 tem = gimple_convert (&stmts, loc, sizetype, len);
3311 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
3312 gassign *ret = gimple_build_assign (gimple_call_lhs (stmt),
3313 POINTER_PLUS_EXPR, dest, tem);
3314 gsi_replace (gsi, ret, false);
3315 /* Finally fold the memcpy call. */
3316 gimple_stmt_iterator gsi2 = *gsi;
3317 gsi_prev (&gsi2);
3318 fold_stmt (&gsi2);
3319 return true;
3320 }
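
/* A source-level sketch of the stpcpy folding above with a known
   source length:

     q = stpcpy (d, "abc");
   becomes
     memcpy (d, "abc", 4);
     q = d + 3;                 q points at the copied nul  */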
3321
3322 /* Fold a call to the __{,v}snprintf_chk builtin pointed to by the GSI
3323 iterator. Return false if a normal call should be emitted rather
3324 than simplifying the call inline. FCODE is either
3325 BUILT_IN_SNPRINTF_CHK or BUILT_IN_VSNPRINTF_CHK. */
3327
3328 static bool
3329 gimple_fold_builtin_snprintf_chk (gimple_stmt_iterator *gsi,
3330 enum built_in_function fcode)
3331 {
3332 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3333 tree dest, size, len, fn, fmt, flag;
3334 const char *fmt_str;
3335
3336 /* Verify the required arguments in the original call. */
3337 if (gimple_call_num_args (stmt) < 5)
3338 return false;
3339
3340 dest = gimple_call_arg (stmt, 0);
3341 len = gimple_call_arg (stmt, 1);
3342 flag = gimple_call_arg (stmt, 2);
3343 size = gimple_call_arg (stmt, 3);
3344 fmt = gimple_call_arg (stmt, 4);
3345
3346 tree maxlen = get_maxval_strlen (len, SRK_INT_VALUE);
3347 if (! integer_all_onesp (size)
3348 && !known_lower (stmt, len, size) && !known_lower (stmt, maxlen, size))
3349 return false;
3350
3351 if (!init_target_chars ())
3352 return false;
3353
3354 /* Only convert __{,v}snprintf_chk to {,v}snprintf if flag is 0
3355 or if format doesn't contain % chars or is "%s". */
3356 if (! integer_zerop (flag))
3357 {
3358 fmt_str = c_getstr (fmt);
3359 if (fmt_str == NULL)
3360 return false;
3361 if (strchr (fmt_str, target_percent) != NULL
3362 && strcmp (fmt_str, target_percent_s))
3363 return false;
3364 }
3365
3366 /* If __builtin_{,v}snprintf_chk is used, assume {,v}snprintf is
3367 available. */
3368 fn = builtin_decl_explicit (fcode == BUILT_IN_VSNPRINTF_CHK
3369 ? BUILT_IN_VSNPRINTF : BUILT_IN_SNPRINTF);
3370 if (!fn)
3371 return false;
3372
3373 /* Replace the called function and the first 5 arguments by 3,
3374 retaining the trailing varargs. */
3375 gimple_call_set_fndecl (stmt, fn);
3376 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3377 gimple_call_set_arg (stmt, 0, dest);
3378 gimple_call_set_arg (stmt, 1, len);
3379 gimple_call_set_arg (stmt, 2, fmt);
3380 for (unsigned i = 3; i < gimple_call_num_args (stmt) - 2; ++i)
3381 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3382 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3383 fold_stmt (gsi);
3384 return true;
3385 }
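
/* A sketch of the conversion above (FLAG 0, bound provably within
   the object size):

     __snprintf_chk (d, 4, 0, 8, "%i", x)
   becomes
     snprintf (d, 4, "%i", x);  */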
3386
3387 /* Fold a call to the __{,v}sprintf_chk builtin pointed to by the GSI
3388 iterator. Return false if a normal call should be emitted rather
3389 than simplifying the call inline. FCODE is either BUILT_IN_SPRINTF_CHK
3390 or BUILT_IN_VSPRINTF_CHK. */
3391
3392 static bool
3393 gimple_fold_builtin_sprintf_chk (gimple_stmt_iterator *gsi,
3394 enum built_in_function fcode)
3395 {
3396 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3397 tree dest, size, len, fn, fmt, flag;
3398 const char *fmt_str;
3399 unsigned nargs = gimple_call_num_args (stmt);
3400
3401 /* Verify the required arguments in the original call. */
3402 if (nargs < 4)
3403 return false;
3404 dest = gimple_call_arg (stmt, 0);
3405 flag = gimple_call_arg (stmt, 1);
3406 size = gimple_call_arg (stmt, 2);
3407 fmt = gimple_call_arg (stmt, 3);
3408
3409 len = NULL_TREE;
3410
3411 if (!init_target_chars ())
3412 return false;
3413
3414 /* Check whether the format is a literal string constant. */
3415 fmt_str = c_getstr (fmt);
3416 if (fmt_str != NULL)
3417 {
3418 /* If the format doesn't contain % args or %%, we know the size. */
3419 if (strchr (fmt_str, target_percent) == 0)
3420 {
3421 if (fcode != BUILT_IN_SPRINTF_CHK || nargs == 4)
3422 len = build_int_cstu (size_type_node, strlen (fmt_str));
3423 }
3424 /* If the format is "%s" and first ... argument is a string literal,
3425 we know the size too. */
3426 else if (fcode == BUILT_IN_SPRINTF_CHK
3427 && strcmp (fmt_str, target_percent_s) == 0)
3428 {
3429 tree arg;
3430
3431 if (nargs == 5)
3432 {
3433 arg = gimple_call_arg (stmt, 4);
3434 if (POINTER_TYPE_P (TREE_TYPE (arg)))
3435 len = c_strlen (arg, 1);
3436 }
3437 }
3438 }
3439
3440 if (! integer_all_onesp (size) && !known_lower (stmt, len, size, true))
3441 return false;
3442
3443 /* Only convert __{,v}sprintf_chk to {,v}sprintf if flag is 0
3444 or if format doesn't contain % chars or is "%s". */
3445 if (! integer_zerop (flag))
3446 {
3447 if (fmt_str == NULL)
3448 return false;
3449 if (strchr (fmt_str, target_percent) != NULL
3450 && strcmp (fmt_str, target_percent_s))
3451 return false;
3452 }
3453
3454 /* If __builtin_{,v}sprintf_chk is used, assume {,v}sprintf is available. */
3455 fn = builtin_decl_explicit (fcode == BUILT_IN_VSPRINTF_CHK
3456 ? BUILT_IN_VSPRINTF : BUILT_IN_SPRINTF);
3457 if (!fn)
3458 return false;
3459
3460 /* Replace the called function and the first 4 arguments by 2,
3461 retaining the trailing varargs. */
3462 gimple_call_set_fndecl (stmt, fn);
3463 gimple_call_set_fntype (stmt, TREE_TYPE (fn));
3464 gimple_call_set_arg (stmt, 0, dest);
3465 gimple_call_set_arg (stmt, 1, fmt);
3466 for (unsigned i = 2; i < gimple_call_num_args (stmt) - 2; ++i)
3467 gimple_call_set_arg (stmt, i, gimple_call_arg (stmt, i + 2));
3468 gimple_set_num_ops (stmt, gimple_num_ops (stmt) - 2);
3469 fold_stmt (gsi);
3470 return true;
3471 }
3472
3473 /* Simplify a call to the sprintf builtin with arguments DEST, FMT, and ORIG.
3474 ORIG may be null if this is a 2-argument call. We don't attempt to
3475 simplify calls with more than 3 arguments.
3476
3477 Return true if simplification was possible, otherwise false. */
3478
3479 bool
3480 gimple_fold_builtin_sprintf (gimple_stmt_iterator *gsi)
3481 {
3482 gimple *stmt = gsi_stmt (*gsi);
3483
3484 /* Verify the required arguments in the original call. We deal with two
3485 types of sprintf() calls: 'sprintf (dest, fmt)' and
3486 'sprintf (dest, "%s", orig)'. */
3487 if (gimple_call_num_args (stmt) > 3)
3488 return false;
3489
3490 tree orig = NULL_TREE;
3491 if (gimple_call_num_args (stmt) == 3)
3492 orig = gimple_call_arg (stmt, 2);
3493
3494 /* Check whether the format is a literal string constant. */
3495 tree fmt = gimple_call_arg (stmt, 1);
3496 const char *fmt_str = c_getstr (fmt);
3497 if (fmt_str == NULL)
3498 return false;
3499
3500 tree dest = gimple_call_arg (stmt, 0);
3501
3502 if (!init_target_chars ())
3503 return false;
3504
3505 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3506 if (!fn)
3507 return false;
3508
3509 /* If the format doesn't contain % args or %%, use strcpy. */
3510 if (strchr (fmt_str, target_percent) == NULL)
3511 {
3512 /* Don't optimize sprintf (buf, "abc", ptr++). */
3513 if (orig)
3514 return false;
3515
3516 /* Convert sprintf (str, fmt) into strcpy (str, fmt) when
3517 'format' is known to contain no % formats. */
3518 gimple_seq stmts = NULL;
3519 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3520
3521 /* Propagate the NO_WARNING bit to avoid issuing the same
3522 warning more than once. */
3523 copy_warning (repl, stmt);
3524
3525 gimple_seq_add_stmt_without_update (&stmts, repl);
3526 if (tree lhs = gimple_call_lhs (stmt))
3527 {
3528 repl = gimple_build_assign (lhs, build_int_cst (TREE_TYPE (lhs),
3529 strlen (fmt_str)));
3530 gimple_seq_add_stmt_without_update (&stmts, repl);
3531 gsi_replace_with_seq_vops (gsi, stmts);
3532 /* gsi now points at the assignment to the lhs; get a
3533    stmt iterator to the strcpy call.
3534 ??? We can't use gsi_for_stmt as that doesn't work when the
3535 CFG isn't built yet. */
3536 gimple_stmt_iterator gsi2 = *gsi;
3537 gsi_prev (&gsi2);
3538 fold_stmt (&gsi2);
3539 }
3540 else
3541 {
3542 gsi_replace_with_seq_vops (gsi, stmts);
3543 fold_stmt (gsi);
3544 }
3545 return true;
3546 }
3547
3548 /* If the format is "%s", use strcpy; a used result needs ORIG's length known. */
3549 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3550 {
3551 /* Don't crash on sprintf (str1, "%s"). */
3552 if (!orig)
3553 return false;
3554
3555 /* Don't fold calls with source arguments of invalid (nonpointer)
3556 types. */
3557 if (!POINTER_TYPE_P (TREE_TYPE (orig)))
3558 return false;
3559
3560 tree orig_len = NULL_TREE;
3561 if (gimple_call_lhs (stmt))
3562 {
3563 orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3564 if (!orig_len)
3565 return false;
3566 }
3567
3568 /* Convert sprintf (str1, "%s", str2) into strcpy (str1, str2). */
3569 gimple_seq stmts = NULL;
3570 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3571
3572 /* Propagate the NO_WARNING bit to avoid issuing the same
3573 warning more than once. */
3574 copy_warning (repl, stmt);
3575
3576 gimple_seq_add_stmt_without_update (&stmts, repl);
3577 if (tree lhs = gimple_call_lhs (stmt))
3578 {
3579 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3580 TREE_TYPE (orig_len)))
3581 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3582 repl = gimple_build_assign (lhs, orig_len);
3583 gimple_seq_add_stmt_without_update (&stmts, repl);
3584 gsi_replace_with_seq_vops (gsi, stmts);
3585 /* gsi now points at the assignment to the lhs; get a
3586    stmt iterator to the strcpy call.
3587 ??? We can't use gsi_for_stmt as that doesn't work when the
3588 CFG isn't built yet. */
3589 gimple_stmt_iterator gsi2 = *gsi;
3590 gsi_prev (&gsi2);
3591 fold_stmt (&gsi2);
3592 }
3593 else
3594 {
3595 gsi_replace_with_seq_vops (gsi, stmts);
3596 fold_stmt (gsi);
3597 }
3598 return true;
3599 }
3600 return false;
3601 }
3602
3603 /* Simplify the snprintf call at *GSI, whose arguments are DEST, DESTSIZE,
3604    FMT, and optionally ORIG; ORIG is absent in a 3-argument call. We don't
3605    attempt to simplify calls with more than 4 arguments.
3606
3607 Return true if simplification was possible, otherwise false. */
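/* For illustration (hypothetical user code), the folds below rewrite

     snprintf (buf, 32, "abc");          =>  strcpy (buf, "abc");
     n = snprintf (buf, sz, "%s", str);  =>  strcpy (buf, str), n = LEN;

   but only when the copied length (3, resp. the compile-time length LEN
   of STR) is known to be below the destination size, so that no
   truncation can occur.  */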
3608
3609 bool
3610 gimple_fold_builtin_snprintf (gimple_stmt_iterator *gsi)
3611 {
3612 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3613 tree dest = gimple_call_arg (stmt, 0);
3614 tree destsize = gimple_call_arg (stmt, 1);
3615 tree fmt = gimple_call_arg (stmt, 2);
3616 tree orig = NULL_TREE;
3617 const char *fmt_str = NULL;
3618
3619 if (gimple_call_num_args (stmt) > 4)
3620 return false;
3621
3622 if (gimple_call_num_args (stmt) == 4)
3623 orig = gimple_call_arg (stmt, 3);
3624
3625 /* Check whether the format is a literal string constant. */
3626 fmt_str = c_getstr (fmt);
3627 if (fmt_str == NULL)
3628 return false;
3629
3630 if (!init_target_chars ())
3631 return false;
3632
3633 /* If the format doesn't contain % args or %%, use strcpy. */
3634 if (strchr (fmt_str, target_percent) == NULL)
3635 {
3636 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3637 if (!fn)
3638 return false;
3639
3640 /* Don't optimize snprintf (buf, 4, "abc", ptr++). */
3641 if (orig)
3642 return false;
3643
3644 tree len = build_int_cstu (TREE_TYPE (destsize), strlen (fmt_str));
3645
3646 /* We could expand this as
3647 memcpy (str, fmt, cst - 1); str[cst - 1] = '\0';
3648 or to
3649 memcpy (str, fmt_with_nul_at_cstm1, cst);
3650 but in the former case that might increase code size
3651 and in the latter case grow .rodata section too much.
3652 So punt for now. */
3653 if (!known_lower (stmt, len, destsize, true))
3654 return false;
3655
3656 gimple_seq stmts = NULL;
3657 gimple *repl = gimple_build_call (fn, 2, dest, fmt);
3658 gimple_seq_add_stmt_without_update (&stmts, repl);
3659 if (tree lhs = gimple_call_lhs (stmt))
3660 {
3661 repl = gimple_build_assign (lhs,
3662 fold_convert (TREE_TYPE (lhs), len));
3663 gimple_seq_add_stmt_without_update (&stmts, repl);
3664 gsi_replace_with_seq_vops (gsi, stmts);
3665 /* gsi now points at the assignment to the lhs; get a
3666    stmt iterator to the strcpy call.
3667 ??? We can't use gsi_for_stmt as that doesn't work when the
3668 CFG isn't built yet. */
3669 gimple_stmt_iterator gsi2 = *gsi;
3670 gsi_prev (&gsi2);
3671 fold_stmt (&gsi2);
3672 }
3673 else
3674 {
3675 gsi_replace_with_seq_vops (gsi, stmts);
3676 fold_stmt (gsi);
3677 }
3678 return true;
3679 }
3680
3681 /* If the format is "%s", use strcpy when the source length is known to fit. */
3682 else if (fmt_str && strcmp (fmt_str, target_percent_s) == 0)
3683 {
3684 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
3685 if (!fn)
3686 return false;
3687
3688 /* Don't crash on snprintf (str1, cst, "%s"). */
3689 if (!orig)
3690 return false;
3691
3692 tree orig_len = get_maxval_strlen (orig, SRK_STRLEN);
3693
3694 /* We could expand this as
3695 memcpy (str1, str2, cst - 1); str1[cst - 1] = '\0';
3696 or to
3697 memcpy (str1, str2_with_nul_at_cstm1, cst);
3698 but in the former case that might increase code size
3699 and in the latter case grow .rodata section too much.
3700 So punt for now. */
3701 if (!known_lower (stmt, orig_len, destsize, true))
3702 return false;
3703
3704 /* Convert snprintf (str1, cst, "%s", str2) into
3705 strcpy (str1, str2) if strlen (str2) < cst. */
3706 gimple_seq stmts = NULL;
3707 gimple *repl = gimple_build_call (fn, 2, dest, orig);
3708 gimple_seq_add_stmt_without_update (&stmts, repl);
3709 if (tree lhs = gimple_call_lhs (stmt))
3710 {
3711 if (!useless_type_conversion_p (TREE_TYPE (lhs),
3712 TREE_TYPE (orig_len)))
3713 orig_len = fold_convert (TREE_TYPE (lhs), orig_len);
3714 repl = gimple_build_assign (lhs, orig_len);
3715 gimple_seq_add_stmt_without_update (&stmts, repl);
3716 gsi_replace_with_seq_vops (gsi, stmts);
3717 /* gsi now points at the assignment to the lhs; get a
3718    stmt iterator to the strcpy call.
3719 ??? We can't use gsi_for_stmt as that doesn't work when the
3720 CFG isn't built yet. */
3721 gimple_stmt_iterator gsi2 = *gsi;
3722 gsi_prev (&gsi2);
3723 fold_stmt (&gsi2);
3724 }
3725 else
3726 {
3727 gsi_replace_with_seq_vops (gsi, stmts);
3728 fold_stmt (gsi);
3729 }
3730 return true;
3731 }
3732 return false;
3733 }
3734
3735 /* Fold a call to the {,v}fprintf{,_unlocked} and __{,v}fprintf_chk builtins.
3736    FP, FMT, and ARG are the arguments to the call. We don't fold calls with
3737    more than 3 arguments, and ARG may be null in the 2-argument case.
3738
3739    Return false if no simplification was possible, otherwise true after
3740    replacing the call. FCODE is the BUILT_IN_*
3741    code of the function to be simplified. */
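/* For illustration (hypothetical user code), the folds below rewrite

     fprintf (fp, "hello");    =>  fputs ("hello", fp);
     fprintf (fp, "%s", str);  =>  fputs (str, fp);
     fprintf (fp, "%c", c);    =>  fputc (c, fp);

   and remove fprintf (fp, "") entirely; all of these require that the
   return value be unused.  */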
3742
3743 static bool
3744 gimple_fold_builtin_fprintf (gimple_stmt_iterator *gsi,
3745 tree fp, tree fmt, tree arg,
3746 enum built_in_function fcode)
3747 {
3748 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3749 tree fn_fputc, fn_fputs;
3750 const char *fmt_str = NULL;
3751
3752 /* If the return value is used, don't do the transformation. */
3753 if (gimple_call_lhs (stmt) != NULL_TREE)
3754 return false;
3755
3756 /* Check whether the format is a literal string constant. */
3757 fmt_str = c_getstr (fmt);
3758 if (fmt_str == NULL)
3759 return false;
3760
3761 if (fcode == BUILT_IN_FPRINTF_UNLOCKED)
3762 {
3763 /* If we're using an unlocked function, assume the other
3764 unlocked functions exist explicitly. */
3765 fn_fputc = builtin_decl_explicit (BUILT_IN_FPUTC_UNLOCKED);
3766 fn_fputs = builtin_decl_explicit (BUILT_IN_FPUTS_UNLOCKED);
3767 }
3768 else
3769 {
3770 fn_fputc = builtin_decl_implicit (BUILT_IN_FPUTC);
3771 fn_fputs = builtin_decl_implicit (BUILT_IN_FPUTS);
3772 }
3773
3774 if (!init_target_chars ())
3775 return false;
3776
3777 /* If the format doesn't contain % args or %%, use fputs. */
3778 if (strchr (fmt_str, target_percent) == NULL)
3779 {
3780 if (fcode != BUILT_IN_VFPRINTF && fcode != BUILT_IN_VFPRINTF_CHK
3781 && arg)
3782 return false;
3783
3784 /* If the format specifier was "", fprintf does nothing. */
3785 if (fmt_str[0] == '\0')
3786 {
3787 replace_call_with_value (gsi, NULL_TREE);
3788 return true;
3789 }
3790
3791 /* When "string" doesn't contain %, replace all cases of
3792 fprintf (fp, string) with fputs (string, fp). The fputs
3793 builtin will take care of special cases like length == 1. */
3794 if (fn_fputs)
3795 {
3796 gcall *repl = gimple_build_call (fn_fputs, 2, fmt, fp);
3797 replace_call_with_call_and_fold (gsi, repl);
3798 return true;
3799 }
3800 }
3801
3802 /* The other optimizations can be done only on the non-va_list variants. */
3803 else if (fcode == BUILT_IN_VFPRINTF || fcode == BUILT_IN_VFPRINTF_CHK)
3804 return false;
3805
3806 /* If the format specifier was "%s", call __builtin_fputs (arg, fp). */
3807 else if (strcmp (fmt_str, target_percent_s) == 0)
3808 {
3809 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3810 return false;
3811 if (fn_fputs)
3812 {
3813 gcall *repl = gimple_build_call (fn_fputs, 2, arg, fp);
3814 replace_call_with_call_and_fold (gsi, repl);
3815 return true;
3816 }
3817 }
3818
3819 /* If the format specifier was "%c", call __builtin_fputc (arg, fp). */
3820 else if (strcmp (fmt_str, target_percent_c) == 0)
3821 {
3822 if (!arg
3823 || ! useless_type_conversion_p (integer_type_node, TREE_TYPE (arg)))
3824 return false;
3825 if (fn_fputc)
3826 {
3827 gcall *repl = gimple_build_call (fn_fputc, 2, arg, fp);
3828 replace_call_with_call_and_fold (gsi, repl);
3829 return true;
3830 }
3831 }
3832
3833 return false;
3834 }
3835
3836 /* Fold a call to the {,v}printf{,_unlocked} and __{,v}printf_chk builtins.
3837 FMT and ARG are the arguments to the call; we don't fold cases with
3838 more than 2 arguments, and ARG may be null if this is a 1-argument case.
3839
3840    Return false if no simplification was possible, otherwise true after
3841    replacing the call. FCODE is the BUILT_IN_*
3842    code of the function to be simplified. */
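/* For illustration (hypothetical user code), the folds below rewrite

     printf ("x");          =>  putchar ('x');
     printf ("hello\n");    =>  puts ("hello");
     printf ("%s\n", str);  =>  puts (str);
     printf ("%c", c);      =>  putchar (c);

   and remove printf ("") entirely; all of these require that the
   return value be unused.  */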
3843
3844 static bool
3845 gimple_fold_builtin_printf (gimple_stmt_iterator *gsi, tree fmt,
3846 tree arg, enum built_in_function fcode)
3847 {
3848 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
3849 tree fn_putchar, fn_puts, newarg;
3850 const char *fmt_str = NULL;
3851
3852 /* If the return value is used, don't do the transformation. */
3853 if (gimple_call_lhs (stmt) != NULL_TREE)
3854 return false;
3855
3856 /* Check whether the format is a literal string constant. */
3857 fmt_str = c_getstr (fmt);
3858 if (fmt_str == NULL)
3859 return false;
3860
3861 if (fcode == BUILT_IN_PRINTF_UNLOCKED)
3862 {
3863 /* If we're using an unlocked function, assume the other
3864 unlocked functions exist explicitly. */
3865 fn_putchar = builtin_decl_explicit (BUILT_IN_PUTCHAR_UNLOCKED);
3866 fn_puts = builtin_decl_explicit (BUILT_IN_PUTS_UNLOCKED);
3867 }
3868 else
3869 {
3870 fn_putchar = builtin_decl_implicit (BUILT_IN_PUTCHAR);
3871 fn_puts = builtin_decl_implicit (BUILT_IN_PUTS);
3872 }
3873
3874 if (!init_target_chars ())
3875 return false;
3876
3877 if (strcmp (fmt_str, target_percent_s) == 0
3878 || strchr (fmt_str, target_percent) == NULL)
3879 {
3880 const char *str;
3881
3882 if (strcmp (fmt_str, target_percent_s) == 0)
3883 {
3884 if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3885 return false;
3886
3887 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3888 return false;
3889
3890 str = c_getstr (arg);
3891 if (str == NULL)
3892 return false;
3893 }
3894 else
3895 {
3896 /* The format specifier doesn't contain any '%' characters. */
3897 if (fcode != BUILT_IN_VPRINTF && fcode != BUILT_IN_VPRINTF_CHK
3898 && arg)
3899 return false;
3900 str = fmt_str;
3901 }
3902
3903 /* If the string was "", printf does nothing. */
3904 if (str[0] == '\0')
3905 {
3906 replace_call_with_value (gsi, NULL_TREE);
3907 return true;
3908 }
3909
3910 /* If the string has length of 1, call putchar. */
3911 if (str[1] == '\0')
3912 {
3913 /* Given printf ("c"), where c is any single character,
3914    convert "c"[0] to an int and pass that to the replacement
3915    function. */
3916 newarg = build_int_cst (integer_type_node, str[0]);
3917 if (fn_putchar)
3918 {
3919 gcall *repl = gimple_build_call (fn_putchar, 1, newarg);
3920 replace_call_with_call_and_fold (gsi, repl);
3921 return true;
3922 }
3923 }
3924 else
3925 {
3926 /* If the string was "string\n", call puts("string"). */
3927 size_t len = strlen (str);
3928 if ((unsigned char)str[len - 1] == target_newline
3929 && (size_t) (int) len == len
3930 && (int) len > 0)
3931 {
3932 char *newstr;
3933
3934 /* Create a NUL-terminated string that's one char shorter
3935 than the original, stripping off the trailing '\n'. */
3936 newstr = xstrdup (str);
3937 newstr[len - 1] = '\0';
3938 newarg = build_string_literal (len, newstr);
3939 free (newstr);
3940 if (fn_puts)
3941 {
3942 gcall *repl = gimple_build_call (fn_puts, 1, newarg);
3943 replace_call_with_call_and_fold (gsi, repl);
3944 return true;
3945 }
3946 }
3947 else
3948 /* We'd like to arrange to call fputs(string,stdout) here,
3949 but we need stdout and don't have a way to get it yet. */
3950 return false;
3951 }
3952 }
3953
3954 /* The other optimizations can be done only on the non-va_list variants. */
3955 else if (fcode == BUILT_IN_VPRINTF || fcode == BUILT_IN_VPRINTF_CHK)
3956 return false;
3957
3958 /* If the format specifier was "%s\n", call __builtin_puts(arg). */
3959 else if (strcmp (fmt_str, target_percent_s_newline) == 0)
3960 {
3961 if (!arg || ! POINTER_TYPE_P (TREE_TYPE (arg)))
3962 return false;
3963 if (fn_puts)
3964 {
3965 gcall *repl = gimple_build_call (fn_puts, 1, arg);
3966 replace_call_with_call_and_fold (gsi, repl);
3967 return true;
3968 }
3969 }
3970
3971 /* If the format specifier was "%c", call __builtin_putchar(arg). */
3972 else if (strcmp (fmt_str, target_percent_c) == 0)
3973 {
3974 if (!arg || ! useless_type_conversion_p (integer_type_node,
3975 TREE_TYPE (arg)))
3976 return false;
3977 if (fn_putchar)
3978 {
3979 gcall *repl = gimple_build_call (fn_putchar, 1, arg);
3980 replace_call_with_call_and_fold (gsi, repl);
3981 return true;
3982 }
3983 }
3984
3985 return false;
3986 }
3987
3988
3989
3990 /* Fold a call to __builtin_strlen to a constant when possible, otherwise record the computed length range. */
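/* For example, strlen ("abc") folds to the constant 3, while for
   hypothetical user code such as

     n = strlen (flag ? "ab" : "wxyz");

   no constant results, but the range [2, 4] can still be recorded
   for N.  */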
3991
3992 static bool
3993 gimple_fold_builtin_strlen (gimple_stmt_iterator *gsi)
3994 {
3995 gimple *stmt = gsi_stmt (*gsi);
3996 tree arg = gimple_call_arg (stmt, 0);
3997
3998 wide_int minlen;
3999 wide_int maxlen;
4000
4001 c_strlen_data lendata = { };
4002 if (get_range_strlen (arg, &lendata, /* eltsize = */ 1)
4003 && !lendata.decl
4004 && lendata.minlen && TREE_CODE (lendata.minlen) == INTEGER_CST
4005 && lendata.maxlen && TREE_CODE (lendata.maxlen) == INTEGER_CST)
4006 {
4007 /* The range of lengths refers to either a single constant
4008 string or to the longest and shortest constant string
4009 referenced by the argument of the strlen() call, or to
4010 the strings that can possibly be stored in the arrays
4011 the argument refers to. */
4012 minlen = wi::to_wide (lendata.minlen);
4013 maxlen = wi::to_wide (lendata.maxlen);
4014 }
4015 else
4016 {
4017 unsigned prec = TYPE_PRECISION (sizetype);
4018
4019 minlen = wi::shwi (0, prec);
4020 maxlen = wi::to_wide (max_object_size (), prec) - 2;
4021 }
4022
4023 if (minlen == maxlen)
4024 {
4025 /* Fold the strlen call to a constant. */
4026 tree type = TREE_TYPE (lendata.minlen);
4027 tree len = force_gimple_operand_gsi (gsi,
4028 wide_int_to_tree (type, minlen),
4029 true, NULL, true, GSI_SAME_STMT);
4030 replace_call_with_value (gsi, len);
4031 return true;
4032 }
4033
4034 /* Set the strlen() range to [0, MAXLEN]. */
4035 if (tree lhs = gimple_call_lhs (stmt))
4036 set_strlen_range (lhs, minlen, maxlen);
4037
4038 return false;
4039 }
4040
4041 /* Fold a call to __builtin_acc_on_device. */
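/* On the host compiler, this is a sketch of the resulting test in
   source form (ARG0 is the function's argument):

     result = (arg0 == GOMP_DEVICE_HOST) | (arg0 == GOMP_DEVICE_NONE);

   An accelerator compiler instead compares against GOMP_DEVICE_NOT_HOST
   and its own ACCEL_COMPILER_acc_device value.  */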
4042
4043 static bool
4044 gimple_fold_builtin_acc_on_device (gimple_stmt_iterator *gsi, tree arg0)
4045 {
4046 /* Defer folding until we know which compiler we're in. */
4047 if (symtab->state != EXPANSION)
4048 return false;
4049
4050 unsigned val_host = GOMP_DEVICE_HOST;
4051 unsigned val_dev = GOMP_DEVICE_NONE;
4052
4053 #ifdef ACCEL_COMPILER
4054 val_host = GOMP_DEVICE_NOT_HOST;
4055 val_dev = ACCEL_COMPILER_acc_device;
4056 #endif
4057
4058 location_t loc = gimple_location (gsi_stmt (*gsi));
4059
4060 tree host_eq = make_ssa_name (boolean_type_node);
4061 gimple *host_ass = gimple_build_assign
4062 (host_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_host));
4063 gimple_set_location (host_ass, loc);
4064 gsi_insert_before (gsi, host_ass, GSI_SAME_STMT);
4065
4066 tree dev_eq = make_ssa_name (boolean_type_node);
4067 gimple *dev_ass = gimple_build_assign
4068 (dev_eq, EQ_EXPR, arg0, build_int_cst (TREE_TYPE (arg0), val_dev));
4069 gimple_set_location (dev_ass, loc);
4070 gsi_insert_before (gsi, dev_ass, GSI_SAME_STMT);
4071
4072 tree result = make_ssa_name (boolean_type_node);
4073 gimple *result_ass = gimple_build_assign
4074 (result, BIT_IOR_EXPR, host_eq, dev_eq);
4075 gimple_set_location (result_ass, loc);
4076 gsi_insert_before (gsi, result_ass, GSI_SAME_STMT);
4077
4078 replace_call_with_value (gsi, result);
4079
4080 return true;
4081 }
4082
4083 /* Fold realloc (0, n) -> malloc (n). */
4084
4085 static bool
4086 gimple_fold_builtin_realloc (gimple_stmt_iterator *gsi)
4087 {
4088 gimple *stmt = gsi_stmt (*gsi);
4089 tree arg = gimple_call_arg (stmt, 0);
4090 tree size = gimple_call_arg (stmt, 1);
4091
4092 if (operand_equal_p (arg, null_pointer_node, 0))
4093 {
4094 tree fn_malloc = builtin_decl_implicit (BUILT_IN_MALLOC);
4095 if (fn_malloc)
4096 {
4097 gcall *repl = gimple_build_call (fn_malloc, 1, size);
4098 replace_call_with_call_and_fold (gsi, repl);
4099 return true;
4100 }
4101 }
4102 return false;
4103 }
4104
4105 /* Number of bytes into which any type other than an aggregate or
4106    vector type should fit. */
4107 static constexpr size_t clear_padding_unit
4108 = MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT;
4109 /* Buffer size on which __builtin_clear_padding folding code works. */
4110 static const size_t clear_padding_buf_size = 32 * clear_padding_unit;
4111
4112 /* Data passed through __builtin_clear_padding folding. */
4113 struct clear_padding_struct {
4114 location_t loc;
4115 /* 0 during __builtin_clear_padding folding, nonzero during
4116 clear_type_padding_in_mask. In that case, instead of clearing the
4117 non-padding bits in union_ptr array clear the padding bits in there. */
4118 bool clear_in_mask;
4119 tree base;
4120 tree alias_type;
4121 gimple_stmt_iterator *gsi;
4122 /* Alignment of buf->base + 0. */
4123 unsigned align;
4124 /* Offset from buf->base. Should be always a multiple of UNITS_PER_WORD. */
4125 HOST_WIDE_INT off;
4126 /* Number of padding bytes before buf->off that don't have the
4127    padding-clearing code emitted yet. */
4128 HOST_WIDE_INT padding_bytes;
4129 /* The size of the whole object. Never emit code to touch
4130 buf->base + buf->sz or following bytes. */
4131 HOST_WIDE_INT sz;
4132 /* Number of bytes recorded in buf->buf. */
4133 size_t size;
4134 /* When inside a union, instead of emitting code we AND the bits into
4135    the union_ptr array. */
4136 unsigned char *union_ptr;
4137 /* Set bits mean padding bits that need to be cleared by the builtin. */
4138 unsigned char buf[clear_padding_buf_size + clear_padding_unit];
4139 };
4140
4141 /* Emit code to clear the padding requested in BUF->buf: set bits
4142    in there stand for padding that should be cleared. FULL is true
4143    if everything from the buffer should be flushed, otherwise
4144    it can leave up to 2 * clear_padding_unit bytes for further
4145    processing. */
4146
4147 static void
4148 clear_padding_flush (clear_padding_struct *buf, bool full)
4149 {
4150 gcc_assert ((clear_padding_unit % UNITS_PER_WORD) == 0);
4151 if (!full && buf->size < 2 * clear_padding_unit)
4152 return;
4153 gcc_assert ((buf->off % UNITS_PER_WORD) == 0);
4154 size_t end = buf->size;
4155 if (!full)
4156 end = ((end - clear_padding_unit - 1) / clear_padding_unit
4157 * clear_padding_unit);
4158 size_t padding_bytes = buf->padding_bytes;
4159 if (buf->union_ptr)
4160 {
4161 if (buf->clear_in_mask)
4162 {
4163 /* During clear_type_padding_in_mask, clear the padding
4164 bits set in buf->buf in the buf->union_ptr mask. */
4165 for (size_t i = 0; i < end; i++)
4166 {
4167 if (buf->buf[i] == (unsigned char) ~0)
4168 padding_bytes++;
4169 else
4170 {
4171 memset (&buf->union_ptr[buf->off + i - padding_bytes],
4172 0, padding_bytes);
4173 padding_bytes = 0;
4174 buf->union_ptr[buf->off + i] &= ~buf->buf[i];
4175 }
4176 }
4177 if (full)
4178 {
4179 memset (&buf->union_ptr[buf->off + end - padding_bytes],
4180 0, padding_bytes);
4181 buf->off = 0;
4182 buf->size = 0;
4183 buf->padding_bytes = 0;
4184 }
4185 else
4186 {
4187 memmove (buf->buf, buf->buf + end, buf->size - end);
4188 buf->off += end;
4189 buf->size -= end;
4190 buf->padding_bytes = padding_bytes;
4191 }
4192 return;
4193 }
4194 /* Inside of a union, instead of emitting any code,
4195    clear all bits in the union_ptr buffer that are clear
4196    in buf. Whole padding bytes don't clear anything. */
4197 for (size_t i = 0; i < end; i++)
4198 {
4199 if (buf->buf[i] == (unsigned char) ~0)
4200 padding_bytes++;
4201 else
4202 {
4203 padding_bytes = 0;
4204 buf->union_ptr[buf->off + i] &= buf->buf[i];
4205 }
4206 }
4207 if (full)
4208 {
4209 buf->off = 0;
4210 buf->size = 0;
4211 buf->padding_bytes = 0;
4212 }
4213 else
4214 {
4215 memmove (buf->buf, buf->buf + end, buf->size - end);
4216 buf->off += end;
4217 buf->size -= end;
4218 buf->padding_bytes = padding_bytes;
4219 }
4220 return;
4221 }
4222 size_t wordsize = UNITS_PER_WORD;
4223 for (size_t i = 0; i < end; i += wordsize)
4224 {
4225 size_t nonzero_first = wordsize;
4226 size_t nonzero_last = 0;
4227 size_t zero_first = wordsize;
4228 size_t zero_last = 0;
4229 bool all_ones = true, bytes_only = true;
4230 if ((unsigned HOST_WIDE_INT) (buf->off + i + wordsize)
4231 > (unsigned HOST_WIDE_INT) buf->sz)
4232 {
4233 gcc_assert (wordsize > 1);
4234 wordsize /= 2;
4235 i -= wordsize;
4236 continue;
4237 }
4238 for (size_t j = i; j < i + wordsize && j < end; j++)
4239 {
4240 if (buf->buf[j])
4241 {
4242 if (nonzero_first == wordsize)
4243 {
4244 nonzero_first = j - i;
4245 nonzero_last = j - i;
4246 }
4247 if (nonzero_last != j - i)
4248 all_ones = false;
4249 nonzero_last = j + 1 - i;
4250 }
4251 else
4252 {
4253 if (zero_first == wordsize)
4254 zero_first = j - i;
4255 zero_last = j + 1 - i;
4256 }
4257 if (buf->buf[j] != 0 && buf->buf[j] != (unsigned char) ~0)
4258 {
4259 all_ones = false;
4260 bytes_only = false;
4261 }
4262 }
4263 size_t padding_end = i;
4264 if (padding_bytes)
4265 {
4266 if (nonzero_first == 0
4267 && nonzero_last == wordsize
4268 && all_ones)
4269 {
4270 /* All bits are padding and we had some padding
4271 before too. Just extend it. */
4272 padding_bytes += wordsize;
4273 continue;
4274 }
4275 if (all_ones && nonzero_first == 0)
4276 {
4277 padding_bytes += nonzero_last;
4278 padding_end += nonzero_last;
4279 nonzero_first = wordsize;
4280 nonzero_last = 0;
4281 }
4282 else if (bytes_only && nonzero_first == 0)
4283 {
4284 gcc_assert (zero_first && zero_first != wordsize);
4285 padding_bytes += zero_first;
4286 padding_end += zero_first;
4287 }
4288 tree atype, src;
4289 if (padding_bytes == 1)
4290 {
4291 atype = char_type_node;
4292 src = build_zero_cst (char_type_node);
4293 }
4294 else
4295 {
4296 atype = build_array_type_nelts (char_type_node, padding_bytes);
4297 src = build_constructor (atype, NULL);
4298 }
4299 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4300 build_int_cst (buf->alias_type,
4301 buf->off + padding_end
4302 - padding_bytes));
4303 gimple *g = gimple_build_assign (dst, src);
4304 gimple_set_location (g, buf->loc);
4305 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4306 padding_bytes = 0;
4307 buf->padding_bytes = 0;
4308 }
4309 if (nonzero_first == wordsize)
4310 /* All bits in a word are 0, there are no padding bits. */
4311 continue;
4312 if (all_ones && nonzero_last == wordsize)
4313 {
4314 /* All bits between nonzero_first and end of word are padding
4315 bits, start counting padding_bytes. */
4316 padding_bytes = nonzero_last - nonzero_first;
4317 continue;
4318 }
4319 if (bytes_only)
4320 {
4321 /* If bitfields aren't involved in this word, prefer storing
4322 individual bytes or groups of them over performing a RMW
4323 operation on the whole word. */
4324 gcc_assert (i + zero_last <= end);
4325 for (size_t j = padding_end; j < i + zero_last; j++)
4326 {
4327 if (buf->buf[j])
4328 {
4329 size_t k;
4330 for (k = j; k < i + zero_last; k++)
4331 if (buf->buf[k] == 0)
4332 break;
4333 HOST_WIDE_INT off = buf->off + j;
4334 tree atype, src;
4335 if (k - j == 1)
4336 {
4337 atype = char_type_node;
4338 src = build_zero_cst (char_type_node);
4339 }
4340 else
4341 {
4342 atype = build_array_type_nelts (char_type_node, k - j);
4343 src = build_constructor (atype, NULL);
4344 }
4345 tree dst = build2_loc (buf->loc, MEM_REF, atype,
4346 buf->base,
4347 build_int_cst (buf->alias_type, off));
4348 gimple *g = gimple_build_assign (dst, src);
4349 gimple_set_location (g, buf->loc);
4350 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4351 j = k;
4352 }
4353 }
4354 if (nonzero_last == wordsize)
4355 padding_bytes = nonzero_last - zero_last;
4356 continue;
4357 }
4358 for (size_t eltsz = 1; eltsz <= wordsize; eltsz <<= 1)
4359 {
4360 if (nonzero_last - nonzero_first <= eltsz
4361 && ((nonzero_first & ~(eltsz - 1))
4362 == ((nonzero_last - 1) & ~(eltsz - 1))))
4363 {
4364 tree type;
4365 if (eltsz == 1)
4366 type = char_type_node;
4367 else
4368 type = lang_hooks.types.type_for_size (eltsz * BITS_PER_UNIT,
4369 0);
4370 size_t start = nonzero_first & ~(eltsz - 1);
4371 HOST_WIDE_INT off = buf->off + i + start;
4372 tree atype = type;
4373 if (eltsz > 1 && buf->align < TYPE_ALIGN (type))
4374 atype = build_aligned_type (type, buf->align);
4375 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4376 build_int_cst (buf->alias_type, off));
4377 tree src;
4378 gimple *g;
4379 if (all_ones
4380 && nonzero_first == start
4381 && nonzero_last == start + eltsz)
4382 src = build_zero_cst (type);
4383 else
4384 {
4385 src = make_ssa_name (type);
4386 tree tmp_dst = unshare_expr (dst);
4387 /* The folding introduces a read from tmp_dst; we should
4388    prevent the uninitialized-warning analysis from issuing a
4389    warning for such a fake read. To suppress the warning only
4390    for this expr, first set the location of tmp_dst to
4391    UNKNOWN_LOCATION; suppress_warning will then call
4392    set_no_warning_bit to set the no_warning flag only for
4393    tmp_dst. */
4394 SET_EXPR_LOCATION (tmp_dst, UNKNOWN_LOCATION);
4395 suppress_warning (tmp_dst, OPT_Wuninitialized);
4396 g = gimple_build_assign (src, tmp_dst);
4397 gimple_set_location (g, buf->loc);
4398 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4399 tree mask = native_interpret_expr (type,
4400 buf->buf + i + start,
4401 eltsz);
4402 gcc_assert (mask && TREE_CODE (mask) == INTEGER_CST);
4403 mask = fold_build1 (BIT_NOT_EXPR, type, mask);
4404 tree src_masked = make_ssa_name (type);
4405 g = gimple_build_assign (src_masked, BIT_AND_EXPR,
4406 src, mask);
4407 gimple_set_location (g, buf->loc);
4408 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4409 src = src_masked;
4410 }
4411 g = gimple_build_assign (dst, src);
4412 gimple_set_location (g, buf->loc);
4413 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4414 break;
4415 }
4416 }
4417 }
4418 if (full)
4419 {
4420 if (padding_bytes)
4421 {
4422 tree atype, src;
4423 if (padding_bytes == 1)
4424 {
4425 atype = char_type_node;
4426 src = build_zero_cst (char_type_node);
4427 }
4428 else
4429 {
4430 atype = build_array_type_nelts (char_type_node, padding_bytes);
4431 src = build_constructor (atype, NULL);
4432 }
4433 tree dst = build2_loc (buf->loc, MEM_REF, atype, buf->base,
4434 build_int_cst (buf->alias_type,
4435 buf->off + end
4436 - padding_bytes));
4437 gimple *g = gimple_build_assign (dst, src);
4438 gimple_set_location (g, buf->loc);
4439 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4440 }
4441 size_t end_rem = end % UNITS_PER_WORD;
4442 buf->off += end - end_rem;
4443 buf->size = end_rem;
4444 memset (buf->buf, 0, buf->size);
4445 buf->padding_bytes = 0;
4446 }
4447 else
4448 {
4449 memmove (buf->buf, buf->buf + end, buf->size - end);
4450 buf->off += end;
4451 buf->size -= end;
4452 buf->padding_bytes = padding_bytes;
4453 }
4454 }
4455
4456 /* Append PADDING_BYTES padding bytes. */
4457
4458 static void
4459 clear_padding_add_padding (clear_padding_struct *buf,
4460 HOST_WIDE_INT padding_bytes)
4461 {
4462 if (padding_bytes == 0)
4463 return;
4464 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4465 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4466 clear_padding_flush (buf, false);
4467 if ((unsigned HOST_WIDE_INT) padding_bytes + buf->size
4468 > (unsigned HOST_WIDE_INT) clear_padding_buf_size)
4469 {
4470 memset (buf->buf + buf->size, ~0, clear_padding_buf_size - buf->size);
4471 padding_bytes -= clear_padding_buf_size - buf->size;
4472 buf->size = clear_padding_buf_size;
4473 clear_padding_flush (buf, false);
4474 gcc_assert (buf->padding_bytes);
4475 /* At this point buf->buf[0] through buf->buf[buf->size - 1]
4476    are guaranteed to be all ones. */
4477 padding_bytes += buf->size;
4478 buf->size = padding_bytes % UNITS_PER_WORD;
4479 memset (buf->buf, ~0, buf->size);
4480 buf->off += padding_bytes - buf->size;
4481 buf->padding_bytes += padding_bytes - buf->size;
4482 }
4483 else
4484 {
4485 memset (buf->buf + buf->size, ~0, padding_bytes);
4486 buf->size += padding_bytes;
4487 }
4488 }
4489
4490 static void clear_padding_type (clear_padding_struct *, tree,
4491 HOST_WIDE_INT, bool);
4492
4493 /* Clear padding bits of union type TYPE. */
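/* A bit of a union counts as padding only if it is padding in every
   member, so the per-member masks are ANDed together. For example
   (hypothetical),

     union U { char c; int i; };

   has bytes 1-3 as padding for the C member but not for the I member,
   so nothing is cleared for the union as a whole.  */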
4494
4495 static void
4496 clear_padding_union (clear_padding_struct *buf, tree type,
4497 HOST_WIDE_INT sz, bool for_auto_init)
4498 {
4499 clear_padding_struct *union_buf;
4500 HOST_WIDE_INT start_off = 0, next_off = 0;
4501 size_t start_size = 0;
4502 if (buf->union_ptr)
4503 {
4504 start_off = buf->off + buf->size;
4505 next_off = start_off + sz;
4506 start_size = start_off % UNITS_PER_WORD;
4507 start_off -= start_size;
4508 clear_padding_flush (buf, true);
4509 union_buf = buf;
4510 }
4511 else
4512 {
4513 if (sz + buf->size > clear_padding_buf_size)
4514 clear_padding_flush (buf, false);
4515 union_buf = XALLOCA (clear_padding_struct);
4516 union_buf->loc = buf->loc;
4517 union_buf->clear_in_mask = buf->clear_in_mask;
4518 union_buf->base = NULL_TREE;
4519 union_buf->alias_type = NULL_TREE;
4520 union_buf->gsi = NULL;
4521 union_buf->align = 0;
4522 union_buf->off = 0;
4523 union_buf->padding_bytes = 0;
4524 union_buf->sz = sz;
4525 union_buf->size = 0;
4526 if (sz + buf->size <= clear_padding_buf_size)
4527 union_buf->union_ptr = buf->buf + buf->size;
4528 else
4529 union_buf->union_ptr = XNEWVEC (unsigned char, sz);
4530 memset (union_buf->union_ptr, ~0, sz);
4531 }
4532
4533 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4534 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4535 {
4536 if (DECL_SIZE_UNIT (field) == NULL_TREE)
4537 {
4538 if (TREE_TYPE (field) == error_mark_node)
4539 continue;
4540 gcc_assert (TREE_CODE (TREE_TYPE (field)) == ARRAY_TYPE
4541 && !COMPLETE_TYPE_P (TREE_TYPE (field)));
4542 if (!buf->clear_in_mask && !for_auto_init)
4543 error_at (buf->loc, "flexible array member %qD does not have "
4544 "well defined padding bits for %qs",
4545 field, "__builtin_clear_padding");
4546 continue;
4547 }
4548 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4549 gcc_assert (union_buf->size == 0);
4550 union_buf->off = start_off;
4551 union_buf->size = start_size;
4552 memset (union_buf->buf, ~0, start_size);
4553 clear_padding_type (union_buf, TREE_TYPE (field), fldsz, for_auto_init);
4554 clear_padding_add_padding (union_buf, sz - fldsz);
4555 clear_padding_flush (union_buf, true);
4556 }
4557
4558 if (buf == union_buf)
4559 {
4560 buf->off = next_off;
4561 buf->size = next_off % UNITS_PER_WORD;
4562 buf->off -= buf->size;
4563 memset (buf->buf, ~0, buf->size);
4564 }
4565 else if (sz + buf->size <= clear_padding_buf_size)
4566 buf->size += sz;
4567 else
4568 {
4569 unsigned char *union_ptr = union_buf->union_ptr;
4570 while (sz)
4571 {
4572 clear_padding_flush (buf, false);
4573 HOST_WIDE_INT this_sz
4574 = MIN ((unsigned HOST_WIDE_INT) sz,
4575 clear_padding_buf_size - buf->size);
4576 memcpy (buf->buf + buf->size, union_ptr, this_sz);
4577 buf->size += this_sz;
4578 union_ptr += this_sz;
4579 sz -= this_sz;
4580 }
4581 XDELETE (union_buf->union_ptr);
4582 }
4583 }
4584
4585 /* The only known floating point formats with padding bits are the
4586 IEEE extended ones. */
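/* For example, the x86 long double carries 80 significant bits but is
   stored in 12 or 16 bytes, leaving the trailing bytes as padding; the
   check below matches the binary formats whose sign bit sits at bit 79
   or bit 95.  */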
4587
4588 static bool
4589 clear_padding_real_needs_padding_p (tree type)
4590 {
4591 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
4592 return (fmt->b == 2
4593 && fmt->signbit_ro == fmt->signbit_rw
4594 && (fmt->signbit_ro == 79 || fmt->signbit_ro == 95));
4595 }
4596
4597 /* Return true if TYPE might contain any padding bits. */
4598
4599 bool
4600 clear_padding_type_may_have_padding_p (tree type)
4601 {
4602 switch (TREE_CODE (type))
4603 {
4604 case RECORD_TYPE:
4605 case UNION_TYPE:
4606 return true;
4607 case ARRAY_TYPE:
4608 case COMPLEX_TYPE:
4609 case VECTOR_TYPE:
4610 return clear_padding_type_may_have_padding_p (TREE_TYPE (type));
4611 case REAL_TYPE:
4612 return clear_padding_real_needs_padding_p (type);
4613 default:
4614 return false;
4615 }
4616 }
4617
4618 /* Emit a runtime loop:
4619 for (; buf.base != end; buf.base += sz)
4620 __builtin_clear_padding (buf.base); */
4621
4622 static void
4623 clear_padding_emit_loop (clear_padding_struct *buf, tree type,
4624 tree end, bool for_auto_init)
4625 {
4626 tree l1 = create_artificial_label (buf->loc);
4627 tree l2 = create_artificial_label (buf->loc);
4628 tree l3 = create_artificial_label (buf->loc);
4629 gimple *g = gimple_build_goto (l2);
4630 gimple_set_location (g, buf->loc);
4631 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4632 g = gimple_build_label (l1);
4633 gimple_set_location (g, buf->loc);
4634 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4635 clear_padding_type (buf, type, buf->sz, for_auto_init);
4636 clear_padding_flush (buf, true);
4637 g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR, buf->base,
4638 size_int (buf->sz));
4639 gimple_set_location (g, buf->loc);
4640 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4641 g = gimple_build_label (l2);
4642 gimple_set_location (g, buf->loc);
4643 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4644 g = gimple_build_cond (NE_EXPR, buf->base, end, l1, l3);
4645 gimple_set_location (g, buf->loc);
4646 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4647 g = gimple_build_label (l3);
4648 gimple_set_location (g, buf->loc);
4649 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4650 }
4651
4652 /* Clear padding bits for TYPE. Called recursively from
4653    gimple_fold_builtin_clear_padding. If FOR_AUTO_INIT is true,
4654    the __builtin_clear_padding was not written by the end user;
4655    instead, it was inserted by the compiler to initialize the
4656    padding of an automatic variable. In that case we should not
4657    emit error messages for flexible array members, which would
4658    only confuse the end user. */
4659
4660 static void
4661 clear_padding_type (clear_padding_struct *buf, tree type,
4662 HOST_WIDE_INT sz, bool for_auto_init)
4663 {
4664 switch (TREE_CODE (type))
4665 {
4666 case RECORD_TYPE:
4667 HOST_WIDE_INT cur_pos;
4668 cur_pos = 0;
4669 for (tree field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
4670 if (TREE_CODE (field) == FIELD_DECL && !DECL_PADDING_P (field))
4671 {
4672 tree ftype = TREE_TYPE (field);
4673 if (DECL_BIT_FIELD (field))
4674 {
4675 HOST_WIDE_INT fldsz = TYPE_PRECISION (ftype);
4676 if (fldsz == 0)
4677 continue;
4678 HOST_WIDE_INT pos = int_byte_position (field);
4679 if (pos >= sz)
4680 continue;
4681 HOST_WIDE_INT bpos
4682 = tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field));
4683 bpos %= BITS_PER_UNIT;
4684 HOST_WIDE_INT end
4685 = ROUND_UP (bpos + fldsz, BITS_PER_UNIT) / BITS_PER_UNIT;
4686 if (pos + end > cur_pos)
4687 {
4688 clear_padding_add_padding (buf, pos + end - cur_pos);
4689 cur_pos = pos + end;
4690 }
4691 gcc_assert (cur_pos > pos
4692 && ((unsigned HOST_WIDE_INT) buf->size
4693 >= (unsigned HOST_WIDE_INT) cur_pos - pos));
4694 unsigned char *p = buf->buf + buf->size - (cur_pos - pos);
4695 if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
4696 sorry_at (buf->loc, "PDP11 bit-field handling unsupported"
4697 " in %qs", "__builtin_clear_padding");
4698 else if (BYTES_BIG_ENDIAN)
4699 {
4700 /* Big endian. */
4701 if (bpos + fldsz <= BITS_PER_UNIT)
4702 *p &= ~(((1 << fldsz) - 1)
4703 << (BITS_PER_UNIT - bpos - fldsz));
4704 else
4705 {
4706 if (bpos)
4707 {
4708 *p &= ~(((1U << BITS_PER_UNIT) - 1) >> bpos);
4709 p++;
4710 fldsz -= BITS_PER_UNIT - bpos;
4711 }
4712 memset (p, 0, fldsz / BITS_PER_UNIT);
4713 p += fldsz / BITS_PER_UNIT;
4714 fldsz %= BITS_PER_UNIT;
4715 if (fldsz)
4716 *p &= ((1U << BITS_PER_UNIT) - 1) >> fldsz;
4717 }
4718 }
4719 else
4720 {
4721 /* Little endian. */
4722 if (bpos + fldsz <= BITS_PER_UNIT)
4723 *p &= ~(((1 << fldsz) - 1) << bpos);
4724 else
4725 {
4726 if (bpos)
4727 {
4728 *p &= ~(((1 << BITS_PER_UNIT) - 1) << bpos);
4729 p++;
4730 fldsz -= BITS_PER_UNIT - bpos;
4731 }
4732 memset (p, 0, fldsz / BITS_PER_UNIT);
4733 p += fldsz / BITS_PER_UNIT;
4734 fldsz %= BITS_PER_UNIT;
4735 if (fldsz)
4736 *p &= ~((1 << fldsz) - 1);
4737 }
4738 }
4739 }
4740 else if (DECL_SIZE_UNIT (field) == NULL_TREE)
4741 {
4742 if (ftype == error_mark_node)
4743 continue;
4744 gcc_assert (TREE_CODE (ftype) == ARRAY_TYPE
4745 && !COMPLETE_TYPE_P (ftype));
4746 if (!buf->clear_in_mask && !for_auto_init)
4747 error_at (buf->loc, "flexible array member %qD does not "
4748 "have well defined padding bits for %qs",
4749 field, "__builtin_clear_padding");
4750 }
4751 else if (is_empty_type (ftype))
4752 continue;
4753 else
4754 {
4755 HOST_WIDE_INT pos = int_byte_position (field);
4756 if (pos >= sz)
4757 continue;
4758 HOST_WIDE_INT fldsz = tree_to_shwi (DECL_SIZE_UNIT (field));
4759 gcc_assert (pos >= 0 && fldsz >= 0 && pos >= cur_pos);
4760 clear_padding_add_padding (buf, pos - cur_pos);
4761 cur_pos = pos;
4762 if (tree asbase = lang_hooks.types.classtype_as_base (field))
4763 ftype = asbase;
4764 clear_padding_type (buf, ftype, fldsz, for_auto_init);
4765 cur_pos += fldsz;
4766 }
4767 }
4768 gcc_assert (sz >= cur_pos);
4769 clear_padding_add_padding (buf, sz - cur_pos);
4770 break;
4771 case ARRAY_TYPE:
4772 HOST_WIDE_INT nelts, fldsz;
4773 fldsz = int_size_in_bytes (TREE_TYPE (type));
4774 if (fldsz == 0)
4775 break;
4776 nelts = sz / fldsz;
4777 if (nelts > 1
4778 && sz > 8 * UNITS_PER_WORD
4779 && buf->union_ptr == NULL
4780 && clear_padding_type_may_have_padding_p (TREE_TYPE (type)))
4781 {
4782 /* For a sufficiently large array of more than one element,
4783    emit a runtime loop to keep the code size manageable. */
4784 tree base = buf->base;
4785 unsigned int prev_align = buf->align;
4786 HOST_WIDE_INT off = buf->off + buf->size;
4787 HOST_WIDE_INT prev_sz = buf->sz;
4788 clear_padding_flush (buf, true);
4789 tree elttype = TREE_TYPE (type);
4790 buf->base = create_tmp_var (build_pointer_type (elttype));
4791 tree end = make_ssa_name (TREE_TYPE (buf->base));
4792 gimple *g = gimple_build_assign (buf->base, POINTER_PLUS_EXPR,
4793 base, size_int (off));
4794 gimple_set_location (g, buf->loc);
4795 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4796 g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf->base,
4797 size_int (sz));
4798 gimple_set_location (g, buf->loc);
4799 gsi_insert_before (buf->gsi, g, GSI_SAME_STMT);
4800 buf->sz = fldsz;
4801 buf->align = TYPE_ALIGN (elttype);
4802 buf->off = 0;
4803 buf->size = 0;
4804 clear_padding_emit_loop (buf, elttype, end, for_auto_init);
4805 buf->base = base;
4806 buf->sz = prev_sz;
4807 buf->align = prev_align;
4808 buf->size = off % UNITS_PER_WORD;
4809 buf->off = off - buf->size;
4810 memset (buf->buf, 0, buf->size);
4811 break;
4812 }
4813 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4814 clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4815 break;
4816 case UNION_TYPE:
4817 clear_padding_union (buf, type, sz, for_auto_init);
4818 break;
4819 case REAL_TYPE:
4820 gcc_assert ((size_t) sz <= clear_padding_unit);
4821 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4822 clear_padding_flush (buf, false);
4823 if (clear_padding_real_needs_padding_p (type))
4824 {
4825 /* Use native_interpret_real + native_encode_expr to figure out
4826 which bits are padding. */
4827 memset (buf->buf + buf->size, ~0, sz);
4828 tree cst = native_interpret_real (type, buf->buf + buf->size, sz);
4829 gcc_assert (cst && TREE_CODE (cst) == REAL_CST);
4830 int len = native_encode_expr (cst, buf->buf + buf->size, sz);
4831 gcc_assert (len > 0 && (size_t) len == (size_t) sz);
4832 for (size_t i = 0; i < (size_t) sz; i++)
4833 buf->buf[buf->size + i] ^= ~0;
4834 }
4835 else
4836 memset (buf->buf + buf->size, 0, sz);
4837 buf->size += sz;
4838 break;
4839 case COMPLEX_TYPE:
4840 fldsz = int_size_in_bytes (TREE_TYPE (type));
4841 clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4842 clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4843 break;
4844 case VECTOR_TYPE:
4845 nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
4846 fldsz = int_size_in_bytes (TREE_TYPE (type));
4847 for (HOST_WIDE_INT i = 0; i < nelts; i++)
4848 clear_padding_type (buf, TREE_TYPE (type), fldsz, for_auto_init);
4849 break;
4850 case NULLPTR_TYPE:
4851 gcc_assert ((size_t) sz <= clear_padding_unit);
4852 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4853 clear_padding_flush (buf, false);
4854 memset (buf->buf + buf->size, ~0, sz);
4855 buf->size += sz;
4856 break;
4857 default:
4858 gcc_assert ((size_t) sz <= clear_padding_unit);
4859 if ((unsigned HOST_WIDE_INT) sz + buf->size > clear_padding_buf_size)
4860 clear_padding_flush (buf, false);
4861 memset (buf->buf + buf->size, 0, sz);
4862 buf->size += sz;
4863 break;
4864 }
4865 }
4866
4867 /* Clear padding bits of TYPE in MASK. */
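/* For example (hypothetical), if MASK is 8 bytes of 0xff and TYPE is

     struct S { char c; int i; };

   then bytes 1-3 of MASK end up zeroed, so callers can mask away the
   padding bits of an object representation of TYPE.  */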
4868
4869 void
4870 clear_type_padding_in_mask (tree type, unsigned char *mask)
4871 {
4872 clear_padding_struct buf;
4873 buf.loc = UNKNOWN_LOCATION;
4874 buf.clear_in_mask = true;
4875 buf.base = NULL_TREE;
4876 buf.alias_type = NULL_TREE;
4877 buf.gsi = NULL;
4878 buf.align = 0;
4879 buf.off = 0;
4880 buf.padding_bytes = 0;
4881 buf.sz = int_size_in_bytes (type);
4882 buf.size = 0;
4883 buf.union_ptr = mask;
4884 clear_padding_type (&buf, type, buf.sz, false);
4885 clear_padding_flush (&buf, true);
4886 }
4887
4888 /* Fold __builtin_clear_padding builtin. */
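/* For example (hypothetical user code),

     struct S { char c; int i; } s;
     __builtin_clear_padding (&s);

   folds into a store zeroing bytes 1-3 of S while leaving the values
   of s.c and s.i untouched.  */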
4889
4890 static bool
4891 gimple_fold_builtin_clear_padding (gimple_stmt_iterator *gsi)
4892 {
4893 gimple *stmt = gsi_stmt (*gsi);
4894 gcc_assert (gimple_call_num_args (stmt) == 2);
4895 tree ptr = gimple_call_arg (stmt, 0);
4896 tree typearg = gimple_call_arg (stmt, 1);
4897 /* The value of the 2nd argument of __builtin_clear_padding is used to
4898    distinguish whether this call was made by the user or inserted by the
4899    compiler for automatic variable initialization. */
4900 bool for_auto_init = (bool) TREE_INT_CST_LOW (typearg);
4901 tree type = TREE_TYPE (TREE_TYPE (typearg));
4902 location_t loc = gimple_location (stmt);
4903 clear_padding_struct buf;
4904 gimple_stmt_iterator gsiprev = *gsi;
4905 /* This should be folded during the lower pass. */
4906 gcc_assert (!gimple_in_ssa_p (cfun) && cfun->cfg == NULL);
4907 gcc_assert (COMPLETE_TYPE_P (type));
4908 gsi_prev (&gsiprev);
4909
4910 buf.loc = loc;
4911 buf.clear_in_mask = false;
4912 buf.base = ptr;
4913 buf.alias_type = NULL_TREE;
4914 buf.gsi = gsi;
4915 buf.align = get_pointer_alignment (ptr);
4916 unsigned int talign = min_align_of_type (type) * BITS_PER_UNIT;
4917 buf.align = MAX (buf.align, talign);
4918 buf.off = 0;
4919 buf.padding_bytes = 0;
4920 buf.size = 0;
4921 buf.sz = int_size_in_bytes (type);
4922 buf.union_ptr = NULL;
4923 if (buf.sz < 0 && int_size_in_bytes (strip_array_types (type)) < 0)
4924 sorry_at (loc, "%s not supported for variable length aggregates",
4925 "__builtin_clear_padding");
4926 /* The implementation currently assumes 8-bit host and target
4927    chars, which is the case for all currently supported targets
4928    and hosts, and is required e.g. for the native_{encode,interpret}* APIs. */
4929 else if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
4930 sorry_at (loc, "%s not supported on this target",
4931 "__builtin_clear_padding");
4932 else if (!clear_padding_type_may_have_padding_p (type))
4933 ;
4934 else if (TREE_CODE (type) == ARRAY_TYPE && buf.sz < 0)
4935 {
4936 tree sz = TYPE_SIZE_UNIT (type);
4937 tree elttype = type;
4938 /* Only supports C/C++ VLAs and flattens all the VLA levels. */
4939 while (TREE_CODE (elttype) == ARRAY_TYPE
4940 && int_size_in_bytes (elttype) < 0)
4941 elttype = TREE_TYPE (elttype);
4942 HOST_WIDE_INT eltsz = int_size_in_bytes (elttype);
4943 gcc_assert (eltsz >= 0);
4944 if (eltsz)
4945 {
4946 buf.base = create_tmp_var (build_pointer_type (elttype));
4947 tree end = make_ssa_name (TREE_TYPE (buf.base));
4948 gimple *g = gimple_build_assign (buf.base, ptr);
4949 gimple_set_location (g, loc);
4950 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4951 g = gimple_build_assign (end, POINTER_PLUS_EXPR, buf.base, sz);
4952 gimple_set_location (g, loc);
4953 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4954 buf.sz = eltsz;
4955 buf.align = TYPE_ALIGN (elttype);
4956 buf.alias_type = build_pointer_type (elttype);
4957 clear_padding_emit_loop (&buf, elttype, end, for_auto_init);
4958 }
4959 }
4960 else
4961 {
4962 if (!is_gimple_mem_ref_addr (buf.base))
4963 {
4964 buf.base = make_ssa_name (TREE_TYPE (ptr));
4965 gimple *g = gimple_build_assign (buf.base, ptr);
4966 gimple_set_location (g, loc);
4967 gsi_insert_before (gsi, g, GSI_SAME_STMT);
4968 }
4969 buf.alias_type = build_pointer_type (type);
4970 clear_padding_type (&buf, type, buf.sz, for_auto_init);
4971 clear_padding_flush (&buf, true);
4972 }
4973
4974 gimple_stmt_iterator gsiprev2 = *gsi;
4975 gsi_prev (&gsiprev2);
4976 if (gsi_stmt (gsiprev) == gsi_stmt (gsiprev2))
4977 gsi_replace (gsi, gimple_build_nop (), true);
4978 else
4979 {
4980 gsi_remove (gsi, true);
4981 *gsi = gsiprev2;
4982 }
4983 return true;
4984 }
4985
4986 /* Fold the non-target builtin at *GSI and return whether any simplification
4987 was made. */
4988
4989 static bool
4990 gimple_fold_builtin (gimple_stmt_iterator *gsi)
4991 {
4992 gcall *stmt = as_a <gcall *>(gsi_stmt (*gsi));
4993 tree callee = gimple_call_fndecl (stmt);
4994
4995 /* Give up for always_inline inline builtins until they are
4996 inlined. */
4997 if (avoid_folding_inline_builtin (callee))
4998 return false;
4999
5000 unsigned n = gimple_call_num_args (stmt);
5001 enum built_in_function fcode = DECL_FUNCTION_CODE (callee);
5002 switch (fcode)
5003 {
5004 case BUILT_IN_BCMP:
5005 return gimple_fold_builtin_bcmp (gsi);
5006 case BUILT_IN_BCOPY:
5007 return gimple_fold_builtin_bcopy (gsi);
5008 case BUILT_IN_BZERO:
5009 return gimple_fold_builtin_bzero (gsi);
5010
5011 case BUILT_IN_MEMSET:
5012 return gimple_fold_builtin_memset (gsi,
5013 gimple_call_arg (stmt, 1),
5014 gimple_call_arg (stmt, 2));
5015 case BUILT_IN_MEMCPY:
5016 case BUILT_IN_MEMPCPY:
5017 case BUILT_IN_MEMMOVE:
5018 return gimple_fold_builtin_memory_op (gsi, gimple_call_arg (stmt, 0),
5019 gimple_call_arg (stmt, 1), fcode);
5020 case BUILT_IN_SPRINTF_CHK:
5021 case BUILT_IN_VSPRINTF_CHK:
5022 return gimple_fold_builtin_sprintf_chk (gsi, fcode);
5023 case BUILT_IN_STRCAT_CHK:
5024 return gimple_fold_builtin_strcat_chk (gsi);
5025 case BUILT_IN_STRNCAT_CHK:
5026 return gimple_fold_builtin_strncat_chk (gsi);
5027 case BUILT_IN_STRLEN:
5028 return gimple_fold_builtin_strlen (gsi);
5029 case BUILT_IN_STRCPY:
5030 return gimple_fold_builtin_strcpy (gsi,
5031 gimple_call_arg (stmt, 0),
5032 gimple_call_arg (stmt, 1));
5033 case BUILT_IN_STRNCPY:
5034 return gimple_fold_builtin_strncpy (gsi,
5035 gimple_call_arg (stmt, 0),
5036 gimple_call_arg (stmt, 1),
5037 gimple_call_arg (stmt, 2));
5038 case BUILT_IN_STRCAT:
5039 return gimple_fold_builtin_strcat (gsi, gimple_call_arg (stmt, 0),
5040 gimple_call_arg (stmt, 1));
5041 case BUILT_IN_STRNCAT:
5042 return gimple_fold_builtin_strncat (gsi);
5043 case BUILT_IN_INDEX:
5044 case BUILT_IN_STRCHR:
5045 return gimple_fold_builtin_strchr (gsi, false);
5046 case BUILT_IN_RINDEX:
5047 case BUILT_IN_STRRCHR:
5048 return gimple_fold_builtin_strchr (gsi, true);
5049 case BUILT_IN_STRSTR:
5050 return gimple_fold_builtin_strstr (gsi);
5051 case BUILT_IN_STRCMP:
5052 case BUILT_IN_STRCMP_EQ:
5053 case BUILT_IN_STRCASECMP:
5054 case BUILT_IN_STRNCMP:
5055 case BUILT_IN_STRNCMP_EQ:
5056 case BUILT_IN_STRNCASECMP:
5057 return gimple_fold_builtin_string_compare (gsi);
5058 case BUILT_IN_MEMCHR:
5059 return gimple_fold_builtin_memchr (gsi);
5060 case BUILT_IN_FPUTS:
5061 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
5062 gimple_call_arg (stmt, 1), false);
5063 case BUILT_IN_FPUTS_UNLOCKED:
5064 return gimple_fold_builtin_fputs (gsi, gimple_call_arg (stmt, 0),
5065 gimple_call_arg (stmt, 1), true);
5066 case BUILT_IN_MEMCPY_CHK:
5067 case BUILT_IN_MEMPCPY_CHK:
5068 case BUILT_IN_MEMMOVE_CHK:
5069 case BUILT_IN_MEMSET_CHK:
5070 return gimple_fold_builtin_memory_chk (gsi,
5071 gimple_call_arg (stmt, 0),
5072 gimple_call_arg (stmt, 1),
5073 gimple_call_arg (stmt, 2),
5074 gimple_call_arg (stmt, 3),
5075 fcode);
5076 case BUILT_IN_STPCPY:
5077 return gimple_fold_builtin_stpcpy (gsi);
5078 case BUILT_IN_STRCPY_CHK:
5079 case BUILT_IN_STPCPY_CHK:
5080 return gimple_fold_builtin_stxcpy_chk (gsi,
5081 gimple_call_arg (stmt, 0),
5082 gimple_call_arg (stmt, 1),
5083 gimple_call_arg (stmt, 2),
5084 fcode);
5085 case BUILT_IN_STRNCPY_CHK:
5086 case BUILT_IN_STPNCPY_CHK:
5087 return gimple_fold_builtin_stxncpy_chk (gsi,
5088 gimple_call_arg (stmt, 0),
5089 gimple_call_arg (stmt, 1),
5090 gimple_call_arg (stmt, 2),
5091 gimple_call_arg (stmt, 3),
5092 fcode);
5093 case BUILT_IN_SNPRINTF_CHK:
5094 case BUILT_IN_VSNPRINTF_CHK:
5095 return gimple_fold_builtin_snprintf_chk (gsi, fcode);
5096
5097 case BUILT_IN_FPRINTF:
5098 case BUILT_IN_FPRINTF_UNLOCKED:
5099 case BUILT_IN_VFPRINTF:
5100 if (n == 2 || n == 3)
5101 return gimple_fold_builtin_fprintf (gsi,
5102 gimple_call_arg (stmt, 0),
5103 gimple_call_arg (stmt, 1),
5104 n == 3
5105 ? gimple_call_arg (stmt, 2)
5106 : NULL_TREE,
5107 fcode);
5108 break;
5109 case BUILT_IN_FPRINTF_CHK:
5110 case BUILT_IN_VFPRINTF_CHK:
5111 if (n == 3 || n == 4)
5112 return gimple_fold_builtin_fprintf (gsi,
5113 gimple_call_arg (stmt, 0),
5114 gimple_call_arg (stmt, 2),
5115 n == 4
5116 ? gimple_call_arg (stmt, 3)
5117 : NULL_TREE,
5118 fcode);
5119 break;
5120 case BUILT_IN_PRINTF:
5121 case BUILT_IN_PRINTF_UNLOCKED:
5122 case BUILT_IN_VPRINTF:
5123 if (n == 1 || n == 2)
5124 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 0),
5125 n == 2
5126 ? gimple_call_arg (stmt, 1)
5127 : NULL_TREE, fcode);
5128 break;
5129 case BUILT_IN_PRINTF_CHK:
5130 case BUILT_IN_VPRINTF_CHK:
5131 if (n == 2 || n == 3)
5132 return gimple_fold_builtin_printf (gsi, gimple_call_arg (stmt, 1),
5133 n == 3
5134 ? gimple_call_arg (stmt, 2)
5135 : NULL_TREE, fcode);
5136 break;
5137 case BUILT_IN_ACC_ON_DEVICE:
5138 return gimple_fold_builtin_acc_on_device (gsi,
5139 gimple_call_arg (stmt, 0));
5140 case BUILT_IN_REALLOC:
5141 return gimple_fold_builtin_realloc (gsi);
5142
5143 case BUILT_IN_CLEAR_PADDING:
5144 return gimple_fold_builtin_clear_padding (gsi);
5145
5146 default:;
5147 }
5148
5149 /* Try the generic builtin folder. */
5150 bool ignore = (gimple_call_lhs (stmt) == NULL);
5151 tree result = fold_call_stmt (stmt, ignore);
5152 if (result)
5153 {
5154 if (ignore)
5155 STRIP_NOPS (result);
5156 else
5157 result = fold_convert (gimple_call_return_type (stmt), result);
5158 gimplify_and_update_call_from_tree (gsi, result);
5159 return true;
5160 }
5161
5162 return false;
5163 }
5164
5165 /* Transform IFN_GOACC_DIM_SIZE and IFN_GOACC_DIM_POS internal
5166 function calls to constants, where possible. */
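/* For example, assuming an OpenACC offload region compiled with a fixed
   vector_length of 32, IFN_GOACC_DIM_SIZE for the vector axis folds to
   the constant 32, and IFN_GOACC_DIM_POS folds to 0 for any axis whose
   size is 1.  */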
5167
5168 static tree
5169 fold_internal_goacc_dim (const gimple *call)
5170 {
5171 int axis = oacc_get_ifn_dim_arg (call);
5172 int size = oacc_get_fn_dim_size (current_function_decl, axis);
5173 tree result = NULL_TREE;
5174 tree type = TREE_TYPE (gimple_call_lhs (call));
5175
5176 switch (gimple_call_internal_fn (call))
5177 {
5178 case IFN_GOACC_DIM_POS:
5179 /* If the size is 1, we know the answer. */
5180 if (size == 1)
5181 result = build_int_cst (type, 0);
5182 break;
5183 case IFN_GOACC_DIM_SIZE:
5184 /* If the size is not dynamic, we know the answer. */
5185 if (size)
5186 result = build_int_cst (type, size);
5187 break;
5188 default:
5189 break;
5190 }
5191
5192 return result;
5193 }
5194
5195 /* Return true if STMT is an __atomic_compare_exchange_N call which is
5196    suitable for conversion into ATOMIC_COMPARE_EXCHANGE when the second
5197    argument is &VAR, where VAR is only addressable because of such calls. */
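/* The point is to let VAR stay in a register. For hypothetical user
   code such as

     bool ok = __atomic_compare_exchange_n (p, &e, d, false,
                                            __ATOMIC_SEQ_CST,
                                            __ATOMIC_SEQ_CST);

   E no longer needs to live in memory once the call is converted,
   because the updated value comes back as the real part of the complex
   result (see the transformation sketched below).  */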
5198
5199 bool
5200 optimize_atomic_compare_exchange_p (gimple *stmt)
5201 {
5202 if (gimple_call_num_args (stmt) != 6
5203 || !flag_inline_atomics
5204 || !optimize
5205 || sanitize_flags_p (SANITIZE_THREAD | SANITIZE_ADDRESS)
5206 || !gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
5207 || !gimple_vdef (stmt)
5208 || !gimple_vuse (stmt))
5209 return false;
5210
5211 tree fndecl = gimple_call_fndecl (stmt);
5212 switch (DECL_FUNCTION_CODE (fndecl))
5213 {
5214 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
5215 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
5216 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
5217 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
5218 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
5219 break;
5220 default:
5221 return false;
5222 }
5223
5224 tree expected = gimple_call_arg (stmt, 1);
5225 if (TREE_CODE (expected) != ADDR_EXPR
5226 || !SSA_VAR_P (TREE_OPERAND (expected, 0)))
5227 return false;
5228
5229 tree etype = TREE_TYPE (TREE_OPERAND (expected, 0));
5230 if (!is_gimple_reg_type (etype)
5231 || !auto_var_in_fn_p (TREE_OPERAND (expected, 0), current_function_decl)
5232 || TREE_THIS_VOLATILE (etype)
5233 || VECTOR_TYPE_P (etype)
5234 || TREE_CODE (etype) == COMPLEX_TYPE
5235 /* Don't optimize floating point expected vars; VIEW_CONVERT_EXPRs
5236    might not preserve all the bits. See PR71716. */
5237 || SCALAR_FLOAT_TYPE_P (etype)
5238 || maybe_ne (TYPE_PRECISION (etype),
5239 GET_MODE_BITSIZE (TYPE_MODE (etype))))
5240 return false;
5241
5242 tree weak = gimple_call_arg (stmt, 3);
5243 if (!integer_zerop (weak) && !integer_onep (weak))
5244 return false;
5245
5246 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5247 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5248 machine_mode mode = TYPE_MODE (itype);
5249
5250 if (direct_optab_handler (atomic_compare_and_swap_optab, mode)
5251 == CODE_FOR_nothing
5252 && optab_handler (sync_compare_and_swap_optab, mode) == CODE_FOR_nothing)
5253 return false;
5254
5255 if (maybe_ne (int_size_in_bytes (etype), GET_MODE_SIZE (mode)))
5256 return false;
5257
5258 return true;
5259 }
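/* Illustrative sketch (not from the GCC sources) of a source-level call
   shape the predicate above accepts: the second argument is the address
   of a local scalar that is only addressable because of the call itself.

     _Bool
     f (int *p, int d)
     {
       int e = 0;	// auto var; &e escapes only into the builtin
       return __atomic_compare_exchange_n (p, &e, d, 0,
					   __ATOMIC_SEQ_CST,
					   __ATOMIC_RELAXED);
     }

   Whether the fold actually fires also depends on the target providing
   a compare-and-swap optab for the access mode.  */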
5260
5261 /* Fold
5262 r = __atomic_compare_exchange_N (p, &e, d, w, s, f);
5263 into
5264 _Complex uintN_t t = ATOMIC_COMPARE_EXCHANGE (p, e, d, w * 256 + N, s, f);
5265 i = IMAGPART_EXPR <t>;
5266 r = (_Bool) i;
5267 e = REALPART_EXPR <t>; */
5268
5269 void
5270 fold_builtin_atomic_compare_exchange (gimple_stmt_iterator *gsi)
5271 {
5272 gimple *stmt = gsi_stmt (*gsi);
5273 tree fndecl = gimple_call_fndecl (stmt);
5274 tree parmt = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
5275 tree itype = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (parmt)));
5276 tree ctype = build_complex_type (itype);
5277 tree expected = TREE_OPERAND (gimple_call_arg (stmt, 1), 0);
5278 bool throws = false;
5279 edge e = NULL;
5280 gimple *g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5281 expected);
5282 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5283 gimple_stmt_iterator gsiret = gsi_for_stmt (g);
5284 if (!useless_type_conversion_p (itype, TREE_TYPE (expected)))
5285 {
5286 g = gimple_build_assign (make_ssa_name (itype), VIEW_CONVERT_EXPR,
5287 build1 (VIEW_CONVERT_EXPR, itype,
5288 gimple_assign_lhs (g)));
5289 gsi_insert_before (gsi, g, GSI_SAME_STMT);
5290 }
5291 int flag = (integer_onep (gimple_call_arg (stmt, 3)) ? 256 : 0)
5292 + int_size_in_bytes (itype);
5293 g = gimple_build_call_internal (IFN_ATOMIC_COMPARE_EXCHANGE, 6,
5294 gimple_call_arg (stmt, 0),
5295 gimple_assign_lhs (g),
5296 gimple_call_arg (stmt, 2),
5297 build_int_cst (integer_type_node, flag),
5298 gimple_call_arg (stmt, 4),
5299 gimple_call_arg (stmt, 5));
5300 tree lhs = make_ssa_name (ctype);
5301 gimple_call_set_lhs (g, lhs);
5302 gimple_move_vops (g, stmt);
5303 tree oldlhs = gimple_call_lhs (stmt);
5304 if (stmt_can_throw_internal (cfun, stmt))
5305 {
5306 throws = true;
5307 e = find_fallthru_edge (gsi_bb (*gsi)->succs);
5308 }
5309 gimple_call_set_nothrow (as_a <gcall *> (g),
5310 gimple_call_nothrow_p (as_a <gcall *> (stmt)));
5311 gimple_call_set_lhs (stmt, NULL_TREE);
5312 gsi_replace (gsi, g, true);
5313 if (oldlhs)
5314 {
5315 g = gimple_build_assign (make_ssa_name (itype), IMAGPART_EXPR,
5316 build1 (IMAGPART_EXPR, itype, lhs));
5317 if (throws)
5318 {
5319 gsi_insert_on_edge_immediate (e, g);
5320 *gsi = gsi_for_stmt (g);
5321 }
5322 else
5323 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5324 g = gimple_build_assign (oldlhs, NOP_EXPR, gimple_assign_lhs (g));
5325 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5326 }
5327 g = gimple_build_assign (make_ssa_name (itype), REALPART_EXPR,
5328 build1 (REALPART_EXPR, itype, lhs));
5329 if (throws && oldlhs == NULL_TREE)
5330 {
5331 gsi_insert_on_edge_immediate (e, g);
5332 *gsi = gsi_for_stmt (g);
5333 }
5334 else
5335 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5336 if (!useless_type_conversion_p (TREE_TYPE (expected), itype))
5337 {
5338 g = gimple_build_assign (make_ssa_name (TREE_TYPE (expected)),
5339 VIEW_CONVERT_EXPR,
5340 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (expected),
5341 gimple_assign_lhs (g)));
5342 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5343 }
5344 g = gimple_build_assign (expected, SSA_NAME, gimple_assign_lhs (g));
5345 gsi_insert_after (gsi, g, GSI_NEW_STMT);
5346 *gsi = gsiret;
5347 }
5348
5349 /* Return true if ARG0 CODE ARG1, computed in infinite signed precision,
5350 doesn't fit into TYPE. The overflow test is performed regardless of
5351 -fwrapv, and even for unsigned types. */
5352
5353 bool
5354 arith_overflowed_p (enum tree_code code, const_tree type,
5355 const_tree arg0, const_tree arg1)
5356 {
5357 widest2_int warg0 = widest2_int_cst (arg0);
5358 widest2_int warg1 = widest2_int_cst (arg1);
5359 widest2_int wres;
5360 switch (code)
5361 {
5362 case PLUS_EXPR: wres = wi::add (warg0, warg1); break;
5363 case MINUS_EXPR: wres = wi::sub (warg0, warg1); break;
5364 case MULT_EXPR: wres = wi::mul (warg0, warg1); break;
5365 default: gcc_unreachable ();
5366 }
5367 signop sign = TYPE_SIGN (type);
5368 if (sign == UNSIGNED && wi::neg_p (wres))
5369 return true;
5370 return wi::min_precision (wres, sign) > TYPE_PRECISION (type);
5371 }
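/* Illustrative example (not from the GCC sources): for
   arith_overflowed_p (PLUS_EXPR, <signed char>, 100, 100) the
   infinite-precision sum is 200, which needs 9 bits as a signed value
   against the 8 bits of the type, so the function returns true.  This
   is the same test __builtin_add_overflow reports at run time:

     signed char r;
     _Bool ovf = __builtin_add_overflow (100, 100, &r);   // ovf == 1  */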
5372
5373 /* If IFN_MASK_LOAD/STORE call CALL is unconditional, return a MEM_REF
5374 for the memory it references, otherwise return null. VECTYPE is the
5375 type of the memory vector. */
5376
5377 static tree
5378 gimple_fold_mask_load_store_mem_ref (gcall *call, tree vectype)
5379 {
5380 tree ptr = gimple_call_arg (call, 0);
5381 tree alias_align = gimple_call_arg (call, 1);
5382 tree mask = gimple_call_arg (call, 2);
5383 if (!tree_fits_uhwi_p (alias_align) || !integer_all_onesp (mask))
5384 return NULL_TREE;
5385
5386 unsigned HOST_WIDE_INT align = tree_to_uhwi (alias_align);
5387 if (TYPE_ALIGN (vectype) != align)
5388 vectype = build_aligned_type (vectype, align);
5389 tree offset = build_zero_cst (TREE_TYPE (alias_align));
5390 return fold_build2 (MEM_REF, vectype, ptr, offset);
5391 }
5392
5393 /* Try to fold IFN_MASK_LOAD call CALL. Return true on success. */
5394
5395 static bool
5396 gimple_fold_mask_load (gimple_stmt_iterator *gsi, gcall *call)
5397 {
5398 tree lhs = gimple_call_lhs (call);
5399 if (!lhs)
5400 return false;
5401
5402 if (tree rhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (lhs)))
5403 {
5404 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5405 gimple_set_location (new_stmt, gimple_location (call));
5406 gimple_move_vops (new_stmt, call);
5407 gsi_replace (gsi, new_stmt, false);
5408 return true;
5409 }
5410 return false;
5411 }
5412
5413 /* Try to fold IFN_MASK_STORE call CALL. Return true on success. */
5414
5415 static bool
5416 gimple_fold_mask_store (gimple_stmt_iterator *gsi, gcall *call)
5417 {
5418 tree rhs = gimple_call_arg (call, 3);
5419 if (tree lhs = gimple_fold_mask_load_store_mem_ref (call, TREE_TYPE (rhs)))
5420 {
5421 gassign *new_stmt = gimple_build_assign (lhs, rhs);
5422 gimple_set_location (new_stmt, gimple_location (call));
5423 gimple_move_vops (new_stmt, call);
5424 gsi_replace (gsi, new_stmt, false);
5425 return true;
5426 }
5427 return false;
5428 }
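/* Illustrative sketch (not from the GCC sources): when the mask is
   known to be all ones, e.g. after loop versioning, the two folders
   above rewrite

     lhs = .MASK_LOAD (ptr_1, 32B, { -1, -1, -1, -1 });

   into a plain vector load with the recorded alignment

     lhs = MEM <vector(4) int> [(int *)ptr_1];

   and the analogous .MASK_STORE into a plain vector store.  The
   element type and mask width are assumptions for the example.  */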
5429
5430 /* Attempt to fold a call statement referenced by the statement iterator GSI.
5431 The statement may be replaced by another statement, e.g., if the call
5432 simplifies to a constant value. Return true if any changes were made.
5433 It is assumed that the operands have been previously folded. */
5434
5435 static bool
5436 gimple_fold_call (gimple_stmt_iterator *gsi, bool inplace)
5437 {
5438 gcall *stmt = as_a <gcall *> (gsi_stmt (*gsi));
5439 tree callee;
5440 bool changed = false;
5441
5442 /* Check for virtual calls that became direct calls. */
5443 callee = gimple_call_fn (stmt);
5444 if (callee && TREE_CODE (callee) == OBJ_TYPE_REF)
5445 {
5446 if (gimple_call_addr_fndecl (OBJ_TYPE_REF_EXPR (callee)) != NULL_TREE)
5447 {
5448 if (dump_file && virtual_method_call_p (callee)
5449 && !possible_polymorphic_call_target_p
5450 (callee, stmt, cgraph_node::get (gimple_call_addr_fndecl
5451 (OBJ_TYPE_REF_EXPR (callee)))))
5452 {
5453 fprintf (dump_file,
5454 "Type inheritance inconsistent devirtualization of ");
5455 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
5456 fprintf (dump_file, " to ");
5457 print_generic_expr (dump_file, callee, TDF_SLIM);
5458 fprintf (dump_file, "\n");
5459 }
5460
5461 gimple_call_set_fn (stmt, OBJ_TYPE_REF_EXPR (callee));
5462 changed = true;
5463 }
5464 else if (flag_devirtualize && !inplace && virtual_method_call_p (callee))
5465 {
5466 bool final;
5467 vec <cgraph_node *>targets
5468 = possible_polymorphic_call_targets (callee, stmt, &final);
5469 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5470 {
5471 tree lhs = gimple_call_lhs (stmt);
5472 if (dump_enabled_p ())
5473 {
5474 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5475 "folding virtual function call to %s\n",
5476 targets.length () == 1
5477 ? targets[0]->name ()
5478 : "__builtin_unreachable");
5479 }
5480 if (targets.length () == 1)
5481 {
5482 tree fndecl = targets[0]->decl;
5483 gimple_call_set_fndecl (stmt, fndecl);
5484 changed = true;
5485 /* If changing the call to __cxa_pure_virtual
5486 or similar noreturn function, adjust gimple_call_fntype
5487 too. */
5488 if (gimple_call_noreturn_p (stmt)
5489 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fndecl)))
5490 && TYPE_ARG_TYPES (TREE_TYPE (fndecl))
5491 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fndecl)))
5492 == void_type_node))
5493 gimple_call_set_fntype (stmt, TREE_TYPE (fndecl));
5494 /* If the call becomes noreturn, remove the lhs. */
5495 if (lhs
5496 && gimple_call_noreturn_p (stmt)
5497 && (VOID_TYPE_P (TREE_TYPE (gimple_call_fntype (stmt)))
5498 || should_remove_lhs_p (lhs)))
5499 {
5500 if (TREE_CODE (lhs) == SSA_NAME)
5501 {
5502 tree var = create_tmp_var (TREE_TYPE (lhs));
5503 tree def = get_or_create_ssa_default_def (cfun, var);
5504 gimple *new_stmt = gimple_build_assign (lhs, def);
5505 gsi_insert_before (gsi, new_stmt, GSI_SAME_STMT);
5506 }
5507 gimple_call_set_lhs (stmt, NULL_TREE);
5508 }
5509 maybe_remove_unused_call_args (cfun, stmt);
5510 }
5511 else
5512 {
5513 location_t loc = gimple_location (stmt);
5514 gimple *new_stmt = gimple_build_builtin_unreachable (loc);
5515 /* If the call had an SSA name as lhs, morph that into
5516 an uninitialized value. */
5517 if (lhs && TREE_CODE (lhs) == SSA_NAME)
5518 {
5519 tree var = create_tmp_var (TREE_TYPE (lhs));
5520 SET_SSA_NAME_VAR_OR_IDENTIFIER (lhs, var);
5521 SSA_NAME_DEF_STMT (lhs) = gimple_build_nop ();
5522 set_ssa_default_def (cfun, var, lhs);
5523 }
5524 gimple_move_vops (new_stmt, stmt);
5525 gsi_replace (gsi, new_stmt, false);
5526 return true;
5527 }
5528 }
5529 }
5530 }
5531
5532 /* Check for indirect calls that became direct calls, and then
5533 no longer require a static chain. */
5534 if (gimple_call_chain (stmt))
5535 {
5536 tree fn = gimple_call_fndecl (stmt);
5537 if (fn && !DECL_STATIC_CHAIN (fn))
5538 {
5539 gimple_call_set_chain (stmt, NULL);
5540 changed = true;
5541 }
5542 }
5543
5544 if (inplace)
5545 return changed;
5546
5547 /* Check for builtins that CCP can handle using information not
5548 available in the generic fold routines. */
5549 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
5550 {
5551 if (gimple_fold_builtin (gsi))
5552 changed = true;
5553 }
5554 else if (gimple_call_builtin_p (stmt, BUILT_IN_MD))
5555 {
5556 changed |= targetm.gimple_fold_builtin (gsi);
5557 }
5558 else if (gimple_call_internal_p (stmt))
5559 {
5560 enum tree_code subcode = ERROR_MARK;
5561 tree result = NULL_TREE;
5562 bool cplx_result = false;
5563 tree overflow = NULL_TREE;
5564 switch (gimple_call_internal_fn (stmt))
5565 {
5566 case IFN_BUILTIN_EXPECT:
5567 result = fold_builtin_expect (gimple_location (stmt),
5568 gimple_call_arg (stmt, 0),
5569 gimple_call_arg (stmt, 1),
5570 gimple_call_arg (stmt, 2),
5571 NULL_TREE);
5572 break;
5573 case IFN_UBSAN_OBJECT_SIZE:
5574 {
5575 tree offset = gimple_call_arg (stmt, 1);
5576 tree objsize = gimple_call_arg (stmt, 2);
5577 if (integer_all_onesp (objsize)
5578 || (TREE_CODE (offset) == INTEGER_CST
5579 && TREE_CODE (objsize) == INTEGER_CST
5580 && tree_int_cst_le (offset, objsize)))
5581 {
5582 replace_call_with_value (gsi, NULL_TREE);
5583 return true;
5584 }
5585 }
5586 break;
5587 case IFN_UBSAN_PTR:
5588 if (integer_zerop (gimple_call_arg (stmt, 1)))
5589 {
5590 replace_call_with_value (gsi, NULL_TREE);
5591 return true;
5592 }
5593 break;
5594 case IFN_UBSAN_BOUNDS:
5595 {
5596 tree index = gimple_call_arg (stmt, 1);
5597 tree bound = gimple_call_arg (stmt, 2);
5598 if (TREE_CODE (index) == INTEGER_CST
5599 && TREE_CODE (bound) == INTEGER_CST)
5600 {
5601 index = fold_convert (TREE_TYPE (bound), index);
5602 if (TREE_CODE (index) == INTEGER_CST
5603 && tree_int_cst_le (index, bound))
5604 {
5605 replace_call_with_value (gsi, NULL_TREE);
5606 return true;
5607 }
5608 }
5609 }
5610 break;
5611 case IFN_GOACC_DIM_SIZE:
5612 case IFN_GOACC_DIM_POS:
5613 result = fold_internal_goacc_dim (stmt);
5614 break;
5615 case IFN_UBSAN_CHECK_ADD:
5616 subcode = PLUS_EXPR;
5617 break;
5618 case IFN_UBSAN_CHECK_SUB:
5619 subcode = MINUS_EXPR;
5620 break;
5621 case IFN_UBSAN_CHECK_MUL:
5622 subcode = MULT_EXPR;
5623 break;
5624 case IFN_ADD_OVERFLOW:
5625 subcode = PLUS_EXPR;
5626 cplx_result = true;
5627 break;
5628 case IFN_SUB_OVERFLOW:
5629 subcode = MINUS_EXPR;
5630 cplx_result = true;
5631 break;
5632 case IFN_MUL_OVERFLOW:
5633 subcode = MULT_EXPR;
5634 cplx_result = true;
5635 break;
5636 case IFN_MASK_LOAD:
5637 changed |= gimple_fold_mask_load (gsi, stmt);
5638 break;
5639 case IFN_MASK_STORE:
5640 changed |= gimple_fold_mask_store (gsi, stmt);
5641 break;
5642 default:
5643 break;
5644 }
5645 if (subcode != ERROR_MARK)
5646 {
5647 tree arg0 = gimple_call_arg (stmt, 0);
5648 tree arg1 = gimple_call_arg (stmt, 1);
5649 tree type = TREE_TYPE (arg0);
5650 if (cplx_result)
5651 {
5652 tree lhs = gimple_call_lhs (stmt);
5653 if (lhs == NULL_TREE)
5654 type = NULL_TREE;
5655 else
5656 type = TREE_TYPE (TREE_TYPE (lhs));
5657 }
5658 if (type == NULL_TREE)
5659 ;
5660 /* x = y + 0; x = y - 0; x = y * 0; */
5661 else if (integer_zerop (arg1))
5662 result = subcode == MULT_EXPR ? integer_zero_node : arg0;
5663 /* x = 0 + y; x = 0 * y; */
5664 else if (subcode != MINUS_EXPR && integer_zerop (arg0))
5665 result = subcode == MULT_EXPR ? integer_zero_node : arg1;
5666 /* x = y - y; */
5667 else if (subcode == MINUS_EXPR && operand_equal_p (arg0, arg1, 0))
5668 result = integer_zero_node;
5669 /* x = y * 1; x = 1 * y; */
5670 else if (subcode == MULT_EXPR && integer_onep (arg1))
5671 result = arg0;
5672 else if (subcode == MULT_EXPR && integer_onep (arg0))
5673 result = arg1;
5674 else if (TREE_CODE (arg0) == INTEGER_CST
5675 && TREE_CODE (arg1) == INTEGER_CST)
5676 {
5677 if (cplx_result)
5678 result = int_const_binop (subcode, fold_convert (type, arg0),
5679 fold_convert (type, arg1));
5680 else
5681 result = int_const_binop (subcode, arg0, arg1);
5682 if (result && arith_overflowed_p (subcode, type, arg0, arg1))
5683 {
5684 if (cplx_result)
5685 overflow = build_one_cst (type);
5686 else
5687 result = NULL_TREE;
5688 }
5689 }
5690 if (result)
5691 {
5692 if (result == integer_zero_node)
5693 result = build_zero_cst (type);
5694 else if (cplx_result && TREE_TYPE (result) != type)
5695 {
5696 if (TREE_CODE (result) == INTEGER_CST)
5697 {
5698 if (arith_overflowed_p (PLUS_EXPR, type, result,
5699 integer_zero_node))
5700 overflow = build_one_cst (type);
5701 }
5702 else if ((!TYPE_UNSIGNED (TREE_TYPE (result))
5703 && TYPE_UNSIGNED (type))
5704 || (TYPE_PRECISION (type)
5705 < (TYPE_PRECISION (TREE_TYPE (result))
5706 + (TYPE_UNSIGNED (TREE_TYPE (result))
5707 && !TYPE_UNSIGNED (type)))))
5708 result = NULL_TREE;
5709 if (result)
5710 result = fold_convert (type, result);
5711 }
5712 }
5713 }
5714
5715 if (result)
5716 {
5717 if (TREE_CODE (result) == INTEGER_CST && TREE_OVERFLOW (result))
5718 result = drop_tree_overflow (result);
5719 if (cplx_result)
5720 {
5721 if (overflow == NULL_TREE)
5722 overflow = build_zero_cst (TREE_TYPE (result));
5723 tree ctype = build_complex_type (TREE_TYPE (result));
5724 if (TREE_CODE (result) == INTEGER_CST
5725 && TREE_CODE (overflow) == INTEGER_CST)
5726 result = build_complex (ctype, result, overflow);
5727 else
5728 result = build2_loc (gimple_location (stmt), COMPLEX_EXPR,
5729 ctype, result, overflow);
5730 }
5731 gimplify_and_update_call_from_tree (gsi, result);
5732 changed = true;
5733 }
5734 }
5735
5736 return changed;
5737 }
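/* Illustrative example (not from the GCC sources) of the internal-fn
   arithmetic handled above: with both operands constant,

     _1 = .ADD_OVERFLOW (100, 100);	// lhs assumed _Complex signed char

   folds to a complex constant whose real part is the wrapped sum and
   whose imaginary part (the overflow flag) is 1, while a trivial case

     _2 = .MUL_OVERFLOW (x_3, 1);

   folds to COMPLEX_EXPR <x_3, 0>.  The types are assumptions for the
   example.  */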
5738
5739
5740 /* Return true if NAME has a use on STMT. */
5741
5742 static bool
5743 has_use_on_stmt (tree name, gimple *stmt)
5744 {
5745 imm_use_iterator iter;
5746 use_operand_p use_p;
5747 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
5748 if (USE_STMT (use_p) == stmt)
5749 return true;
5750 return false;
5751 }
5752
5753 /* Worker for fold_stmt_1 dispatch to pattern based folding with
5754 gimple_simplify.
5755
5756 Replaces *GSI with the simplification result in RES_OP
5757 and the associated statements in *SEQ. Does the replacement
5758 according to INPLACE and returns true if the operation succeeded. */
5759
5760 static bool
5761 replace_stmt_with_simplification (gimple_stmt_iterator *gsi,
5762 gimple_match_op *res_op,
5763 gimple_seq *seq, bool inplace)
5764 {
5765 gimple *stmt = gsi_stmt (*gsi);
5766 tree *ops = res_op->ops;
5767 unsigned int num_ops = res_op->num_ops;
5768
5769 /* Play safe and do not allow abnormals to be mentioned in
5770 newly created statements. See also maybe_push_res_to_seq.
5771 As an exception allow such uses if there was a use of the
5772 same SSA name on the old stmt. */
5773 for (unsigned int i = 0; i < num_ops; ++i)
5774 if (TREE_CODE (ops[i]) == SSA_NAME
5775 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ops[i])
5776 && !has_use_on_stmt (ops[i], stmt))
5777 return false;
5778
5779 if (num_ops > 0 && COMPARISON_CLASS_P (ops[0]))
5780 for (unsigned int i = 0; i < 2; ++i)
5781 if (TREE_CODE (TREE_OPERAND (ops[0], i)) == SSA_NAME
5782 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (ops[0], i))
5783 && !has_use_on_stmt (TREE_OPERAND (ops[0], i), stmt))
5784 return false;
5785
5786 /* Don't insert new statements when INPLACE is true, even if we could
5787 reuse STMT for the final statement. */
5788 if (inplace && !gimple_seq_empty_p (*seq))
5789 return false;
5790
5791 if (gcond *cond_stmt = dyn_cast <gcond *> (stmt))
5792 {
5793 gcc_assert (res_op->code.is_tree_code ());
5794 auto code = tree_code (res_op->code);
5795 if (TREE_CODE_CLASS (code) == tcc_comparison
5796 /* GIMPLE_CONDs condition may not throw. */
5797 && (!flag_exceptions
5798 || !cfun->can_throw_non_call_exceptions
5799 || !operation_could_trap_p (code,
5800 FLOAT_TYPE_P (TREE_TYPE (ops[0])),
5801 false, NULL_TREE)))
5802 gimple_cond_set_condition (cond_stmt, code, ops[0], ops[1]);
5803 else if (code == SSA_NAME)
5804 gimple_cond_set_condition (cond_stmt, NE_EXPR, ops[0],
5805 build_zero_cst (TREE_TYPE (ops[0])));
5806 else if (code == INTEGER_CST)
5807 {
5808 if (integer_zerop (ops[0]))
5809 gimple_cond_make_false (cond_stmt);
5810 else
5811 gimple_cond_make_true (cond_stmt);
5812 }
5813 else if (!inplace)
5814 {
5815 tree res = maybe_push_res_to_seq (res_op, seq);
5816 if (!res)
5817 return false;
5818 gimple_cond_set_condition (cond_stmt, NE_EXPR, res,
5819 build_zero_cst (TREE_TYPE (res)));
5820 }
5821 else
5822 return false;
5823 if (dump_file && (dump_flags & TDF_DETAILS))
5824 {
5825 fprintf (dump_file, "gimple_simplified to ");
5826 if (!gimple_seq_empty_p (*seq))
5827 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5828 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5829 0, TDF_SLIM);
5830 }
5831 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5832 return true;
5833 }
5834 else if (is_gimple_assign (stmt)
5835 && res_op->code.is_tree_code ())
5836 {
5837 auto code = tree_code (res_op->code);
5838 if (!inplace
5839 || gimple_num_ops (stmt) > get_gimple_rhs_num_ops (code))
5840 {
5841 maybe_build_generic_op (res_op);
5842 gimple_assign_set_rhs_with_ops (gsi, code,
5843 res_op->op_or_null (0),
5844 res_op->op_or_null (1),
5845 res_op->op_or_null (2));
5846 if (dump_file && (dump_flags & TDF_DETAILS))
5847 {
5848 fprintf (dump_file, "gimple_simplified to ");
5849 if (!gimple_seq_empty_p (*seq))
5850 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5851 print_gimple_stmt (dump_file, gsi_stmt (*gsi),
5852 0, TDF_SLIM);
5853 }
5854 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5855 return true;
5856 }
5857 }
5858 else if (res_op->code.is_fn_code ()
5859 && gimple_call_combined_fn (stmt) == combined_fn (res_op->code))
5860 {
5861 gcc_assert (num_ops == gimple_call_num_args (stmt));
5862 for (unsigned int i = 0; i < num_ops; ++i)
5863 gimple_call_set_arg (stmt, i, ops[i]);
5864 if (dump_file && (dump_flags & TDF_DETAILS))
5865 {
5866 fprintf (dump_file, "gimple_simplified to ");
5867 if (!gimple_seq_empty_p (*seq))
5868 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5869 print_gimple_stmt (dump_file, gsi_stmt (*gsi), 0, TDF_SLIM);
5870 }
5871 gsi_insert_seq_before (gsi, *seq, GSI_SAME_STMT);
5872 return true;
5873 }
5874 else if (!inplace)
5875 {
5876 if (gimple_has_lhs (stmt))
5877 {
5878 tree lhs = gimple_get_lhs (stmt);
5879 if (!maybe_push_res_to_seq (res_op, seq, lhs))
5880 return false;
5881 if (dump_file && (dump_flags & TDF_DETAILS))
5882 {
5883 fprintf (dump_file, "gimple_simplified to ");
5884 print_gimple_seq (dump_file, *seq, 0, TDF_SLIM);
5885 }
5886 gsi_replace_with_seq_vops (gsi, *seq);
5887 return true;
5888 }
5889 else
5890 gcc_unreachable ();
5891 }
5892
5893 return false;
5894 }
5895
5896 /* Canonicalize a MEM_REF's invariant address operand after propagation. */
5897
5898 static bool
5899 maybe_canonicalize_mem_ref_addr (tree *t, bool is_debug = false)
5900 {
5901 bool res = false;
5902 tree *orig_t = t;
5903
5904 if (TREE_CODE (*t) == ADDR_EXPR)
5905 t = &TREE_OPERAND (*t, 0);
5906
5907 /* The C and C++ frontends use an ARRAY_REF for indexing with their
5908 generic vector extension. The actual vector referenced is
5909 view-converted to an array type for this purpose. If the index
5910 is constant the canonical representation in the middle-end is a
5911 BIT_FIELD_REF so re-write the former to the latter here. */
5912 if (TREE_CODE (*t) == ARRAY_REF
5913 && TREE_CODE (TREE_OPERAND (*t, 0)) == VIEW_CONVERT_EXPR
5914 && TREE_CODE (TREE_OPERAND (*t, 1)) == INTEGER_CST
5915 && VECTOR_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))))
5916 {
5917 tree vtype = TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0));
5918 if (VECTOR_TYPE_P (vtype))
5919 {
5920 tree low = array_ref_low_bound (*t);
5921 if (TREE_CODE (low) == INTEGER_CST)
5922 {
5923 if (tree_int_cst_le (low, TREE_OPERAND (*t, 1)))
5924 {
5925 widest_int idx = wi::sub (wi::to_widest (TREE_OPERAND (*t, 1)),
5926 wi::to_widest (low));
5927 idx = wi::mul (idx, wi::to_widest
5928 (TYPE_SIZE (TREE_TYPE (*t))));
5929 widest_int ext
5930 = wi::add (idx, wi::to_widest (TYPE_SIZE (TREE_TYPE (*t))));
5931 if (wi::les_p (ext, wi::to_widest (TYPE_SIZE (vtype))))
5932 {
5933 *t = build3_loc (EXPR_LOCATION (*t), BIT_FIELD_REF,
5934 TREE_TYPE (*t),
5935 TREE_OPERAND (TREE_OPERAND (*t, 0), 0),
5936 TYPE_SIZE (TREE_TYPE (*t)),
5937 wide_int_to_tree (bitsizetype, idx));
5938 res = true;
5939 }
5940 }
5941 }
5942 }
5943 }
5944
5945 while (handled_component_p (*t))
5946 t = &TREE_OPERAND (*t, 0);
5947
5948 /* Canonicalize MEM [&foo.bar, 0], which appears after propagation
5949 of an invariant address into an SSA-name MEM_REF address. */
5950 if (TREE_CODE (*t) == MEM_REF
5951 || TREE_CODE (*t) == TARGET_MEM_REF)
5952 {
5953 tree addr = TREE_OPERAND (*t, 0);
5954 if (TREE_CODE (addr) == ADDR_EXPR
5955 && (TREE_CODE (TREE_OPERAND (addr, 0)) == MEM_REF
5956 || handled_component_p (TREE_OPERAND (addr, 0))))
5957 {
5958 tree base;
5959 poly_int64 coffset;
5960 base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
5961 &coffset);
5962 if (!base)
5963 {
5964 if (is_debug)
5965 return false;
5966 gcc_unreachable ();
5967 }
5968
5969 TREE_OPERAND (*t, 0) = build_fold_addr_expr (base);
5970 TREE_OPERAND (*t, 1) = int_const_binop (PLUS_EXPR,
5971 TREE_OPERAND (*t, 1),
5972 size_int (coffset));
5973 res = true;
5974 }
5975 gcc_checking_assert (TREE_CODE (TREE_OPERAND (*t, 0)) == DEBUG_EXPR_DECL
5976 || is_gimple_mem_ref_addr (TREE_OPERAND (*t, 0)));
5977 }
5978
5979 /* Canonicalize back MEM_REFs to plain reference trees if the object
5980 accessed is a decl that has the same access semantics as the MEM_REF. */
5981 if (TREE_CODE (*t) == MEM_REF
5982 && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
5983 && integer_zerop (TREE_OPERAND (*t, 1))
5984 && MR_DEPENDENCE_CLIQUE (*t) == 0)
5985 {
5986 tree decl = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
5987 tree alias_type = TREE_TYPE (TREE_OPERAND (*t, 1));
5988 if (/* Same volatile qualification. */
5989 TREE_THIS_VOLATILE (*t) == TREE_THIS_VOLATILE (decl)
5990 /* Same TBAA behavior with -fstrict-aliasing. */
5991 && !TYPE_REF_CAN_ALIAS_ALL (alias_type)
5992 && (TYPE_MAIN_VARIANT (TREE_TYPE (decl))
5993 == TYPE_MAIN_VARIANT (TREE_TYPE (alias_type)))
5994 /* Same alignment. */
5995 && TYPE_ALIGN (TREE_TYPE (decl)) == TYPE_ALIGN (TREE_TYPE (*t))
5996 /* We have to watch out here not to drop a required conversion
5997 from the rhs to the lhs if *t appears on the lhs or vice-versa
5998 if it appears on the rhs. Thus require strict type
5999 compatibility. */
6000 && types_compatible_p (TREE_TYPE (*t), TREE_TYPE (decl)))
6001 {
6002 *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
6003 res = true;
6004 }
6005 }
6006
6007 else if (TREE_CODE (*orig_t) == ADDR_EXPR
6008 && TREE_CODE (*t) == MEM_REF
6009 && TREE_CODE (TREE_OPERAND (*t, 0)) == INTEGER_CST)
6010 {
6011 tree base;
6012 poly_int64 coffset;
6013 base = get_addr_base_and_unit_offset (TREE_OPERAND (*orig_t, 0),
6014 &coffset);
6015 if (base)
6016 {
6017 gcc_assert (TREE_CODE (base) == MEM_REF);
6018 poly_int64 moffset;
6019 if (mem_ref_offset (base).to_shwi (&moffset))
6020 {
6021 coffset += moffset;
6022 if (wi::to_poly_wide (TREE_OPERAND (base, 0)).to_shwi (&moffset))
6023 {
6024 coffset += moffset;
6025 *orig_t = build_int_cst (TREE_TYPE (*orig_t), coffset);
6026 return true;
6027 }
6028 }
6029 }
6030 }
6031
6032 /* Canonicalize TARGET_MEM_REF, in particular with respect to
6033 indexes that have become constant. */
6034 else if (TREE_CODE (*t) == TARGET_MEM_REF)
6035 {
6036 tree tem = maybe_fold_tmr (*t);
6037 if (tem)
6038 {
6039 *t = tem;
6040 if (TREE_CODE (*orig_t) == ADDR_EXPR)
6041 recompute_tree_invariant_for_addr_expr (*orig_t);
6042 res = true;
6043 }
6044 }
6045
6046 return res;
6047 }
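/* Illustrative sketch (not from the GCC sources): after propagation
   replaces an SSA pointer by an invariant address, a reference such as

     MEM[&a.b, 4]		// &a.b propagated into the address slot

   is rewritten above to MEM[&a, <offset of b> + 4], and a degenerate

     MEM[&d, 0]

   whose type, alignment and qualifiers match the decl is turned back
   into the plain reference "d".  */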
6048
6049 /* Worker for both fold_stmt and fold_stmt_inplace. The INPLACE argument
6050 distinguishes the two cases. */
6051
6052 static bool
6053 fold_stmt_1 (gimple_stmt_iterator *gsi, bool inplace, tree (*valueize) (tree))
6054 {
6055 bool changed = false;
6056 gimple *stmt = gsi_stmt (*gsi);
6057 bool nowarning = warning_suppressed_p (stmt, OPT_Wstrict_overflow);
6058 unsigned i;
6059 fold_defer_overflow_warnings ();
6060
6061 /* First do required canonicalization of [TARGET_]MEM_REF addresses
6062 after propagation.
6063 ??? This shouldn't be done in generic folding but in the
6064 propagation helpers which also know whether an address was
6065 propagated.
6066 Also canonicalize operand order. */
6067 switch (gimple_code (stmt))
6068 {
6069 case GIMPLE_ASSIGN:
6070 if (gimple_assign_rhs_class (stmt) == GIMPLE_SINGLE_RHS)
6071 {
6072 tree *rhs = gimple_assign_rhs1_ptr (stmt);
6073 if ((REFERENCE_CLASS_P (*rhs)
6074 || TREE_CODE (*rhs) == ADDR_EXPR)
6075 && maybe_canonicalize_mem_ref_addr (rhs))
6076 changed = true;
6077 tree *lhs = gimple_assign_lhs_ptr (stmt);
6078 if (REFERENCE_CLASS_P (*lhs)
6079 && maybe_canonicalize_mem_ref_addr (lhs))
6080 changed = true;
6081 /* Canonicalize &MEM[ssa_n, CST] to ssa_n p+ CST.
6082 This cannot be done in maybe_canonicalize_mem_ref_addr
6083 as the resulting gimple then has two operands rather than
6084 one, and for the same reason it cannot be done inplace. */
6087 if (!inplace && TREE_CODE (*rhs) == ADDR_EXPR)
6088 {
6089 tree inner = TREE_OPERAND (*rhs, 0);
6090 if (TREE_CODE (inner) == MEM_REF
6091 && TREE_CODE (TREE_OPERAND (inner, 0)) == SSA_NAME
6092 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
6093 {
6094 tree ptr = TREE_OPERAND (inner, 0);
6095 tree addon = TREE_OPERAND (inner, 1);
6096 addon = fold_convert (sizetype, addon);
6097 gimple_assign_set_rhs_with_ops (gsi, POINTER_PLUS_EXPR,
6098 ptr, addon);
6099 changed = true;
6100 stmt = gsi_stmt (*gsi);
6101 }
6102 }
6103 }
6104 else
6105 {
6106 /* Canonicalize operand order. */
6107 enum tree_code code = gimple_assign_rhs_code (stmt);
6108 if (TREE_CODE_CLASS (code) == tcc_comparison
6109 || commutative_tree_code (code)
6110 || commutative_ternary_tree_code (code))
6111 {
6112 tree rhs1 = gimple_assign_rhs1 (stmt);
6113 tree rhs2 = gimple_assign_rhs2 (stmt);
6114 if (tree_swap_operands_p (rhs1, rhs2))
6115 {
6116 gimple_assign_set_rhs1 (stmt, rhs2);
6117 gimple_assign_set_rhs2 (stmt, rhs1);
6118 if (TREE_CODE_CLASS (code) == tcc_comparison)
6119 gimple_assign_set_rhs_code (stmt,
6120 swap_tree_comparison (code));
6121 changed = true;
6122 }
6123 }
6124 }
6125 break;
6126 case GIMPLE_CALL:
6127 {
6128 gcall *call = as_a<gcall *> (stmt);
6129 for (i = 0; i < gimple_call_num_args (call); ++i)
6130 {
6131 tree *arg = gimple_call_arg_ptr (call, i);
6132 if (REFERENCE_CLASS_P (*arg)
6133 && maybe_canonicalize_mem_ref_addr (arg))
6134 changed = true;
6135 }
6136 tree *lhs = gimple_call_lhs_ptr (call);
6137 if (*lhs
6138 && REFERENCE_CLASS_P (*lhs)
6139 && maybe_canonicalize_mem_ref_addr (lhs))
6140 changed = true;
6141 if (*lhs)
6142 {
6143 combined_fn cfn = gimple_call_combined_fn (call);
6144 internal_fn ifn = associated_internal_fn (cfn, TREE_TYPE (*lhs));
6145 int opno = first_commutative_argument (ifn);
6146 if (opno >= 0)
6147 {
6148 tree arg1 = gimple_call_arg (call, opno);
6149 tree arg2 = gimple_call_arg (call, opno + 1);
6150 if (tree_swap_operands_p (arg1, arg2))
6151 {
6152 gimple_call_set_arg (call, opno, arg2);
6153 gimple_call_set_arg (call, opno + 1, arg1);
6154 changed = true;
6155 }
6156 }
6157 }
6158 break;
6159 }
6160 case GIMPLE_ASM:
6161 {
6162 gasm *asm_stmt = as_a <gasm *> (stmt);
6163 for (i = 0; i < gimple_asm_noutputs (asm_stmt); ++i)
6164 {
6165 tree link = gimple_asm_output_op (asm_stmt, i);
6166 tree op = TREE_VALUE (link);
6167 if (REFERENCE_CLASS_P (op)
6168 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6169 changed = true;
6170 }
6171 for (i = 0; i < gimple_asm_ninputs (asm_stmt); ++i)
6172 {
6173 tree link = gimple_asm_input_op (asm_stmt, i);
6174 tree op = TREE_VALUE (link);
6175 if ((REFERENCE_CLASS_P (op)
6176 || TREE_CODE (op) == ADDR_EXPR)
6177 && maybe_canonicalize_mem_ref_addr (&TREE_VALUE (link)))
6178 changed = true;
6179 }
6180 }
6181 break;
6182 case GIMPLE_DEBUG:
6183 if (gimple_debug_bind_p (stmt))
6184 {
6185 tree *val = gimple_debug_bind_get_value_ptr (stmt);
6186 if (*val
6187 && (REFERENCE_CLASS_P (*val)
6188 || TREE_CODE (*val) == ADDR_EXPR)
6189 && maybe_canonicalize_mem_ref_addr (val, true))
6190 changed = true;
6191 }
6192 break;
6193 case GIMPLE_COND:
6194 {
6195 /* Canonicalize operand order. */
6196 tree lhs = gimple_cond_lhs (stmt);
6197 tree rhs = gimple_cond_rhs (stmt);
6198 if (tree_swap_operands_p (lhs, rhs))
6199 {
6200 gcond *gc = as_a <gcond *> (stmt);
6201 gimple_cond_set_lhs (gc, rhs);
6202 gimple_cond_set_rhs (gc, lhs);
6203 gimple_cond_set_code (gc,
6204 swap_tree_comparison (gimple_cond_code (gc)));
6205 changed = true;
6206 }
6207 }
6208 default:;
6209 }
6210
6211 /* Dispatch to pattern-based folding. */
6212 if (!inplace
6213 || is_gimple_assign (stmt)
6214 || gimple_code (stmt) == GIMPLE_COND)
6215 {
6216 gimple_seq seq = NULL;
6217 gimple_match_op res_op;
6218 if (gimple_simplify (stmt, &res_op, inplace ? NULL : &seq,
6219 valueize, valueize))
6220 {
6221 if (replace_stmt_with_simplification (gsi, &res_op, &seq, inplace))
6222 changed = true;
6223 else
6224 gimple_seq_discard (seq);
6225 }
6226 }
6227
6228 stmt = gsi_stmt (*gsi);
6229
6230 /* Fold the main computation performed by the statement. */
6231 switch (gimple_code (stmt))
6232 {
6233 case GIMPLE_ASSIGN:
6234 {
6235 /* Try to canonicalize for boolean-typed X the comparisons
6236 X == 0, X == 1, X != 0, and X != 1. */
6237 if (gimple_assign_rhs_code (stmt) == EQ_EXPR
6238 || gimple_assign_rhs_code (stmt) == NE_EXPR)
6239 {
6240 tree lhs = gimple_assign_lhs (stmt);
6241 tree op1 = gimple_assign_rhs1 (stmt);
6242 tree op2 = gimple_assign_rhs2 (stmt);
6243 tree type = TREE_TYPE (op1);
6244
6245 /* Check whether the comparison operands are of the same boolean
6246 type as the result type.
6247 Check that the second operand is an integer constant with value
6248 one or zero. */
6249 if (TREE_CODE (op2) == INTEGER_CST
6250 && (integer_zerop (op2) || integer_onep (op2))
6251 && useless_type_conversion_p (TREE_TYPE (lhs), type))
6252 {
6253 enum tree_code cmp_code = gimple_assign_rhs_code (stmt);
6254 bool is_logical_not = false;
6255
6256 /* X == 0 and X != 1 is a logical-not of X;
6257 X == 1 and X != 0 is X. */
6258 if ((cmp_code == EQ_EXPR && integer_zerop (op2))
6259 || (cmp_code == NE_EXPR && integer_onep (op2)))
6260 is_logical_not = true;
6261
6262 if (is_logical_not == false)
6263 gimple_assign_set_rhs_with_ops (gsi, TREE_CODE (op1), op1);
6264 /* Only for X of one-bit precision is the transformation
6265 !X -> ~X valid. */
6266 else if (TYPE_PRECISION (type) == 1)
6267 gimple_assign_set_rhs_with_ops (gsi, BIT_NOT_EXPR, op1);
6268 /* Otherwise we use !X -> X ^ 1. */
6269 else
6270 gimple_assign_set_rhs_with_ops (gsi, BIT_XOR_EXPR, op1,
6271 build_int_cst (type, 1));
6272 changed = true;
6273 break;
6274 }
6275 }
6276
6277 unsigned old_num_ops = gimple_num_ops (stmt);
6278 tree lhs = gimple_assign_lhs (stmt);
6279 tree new_rhs = fold_gimple_assign (gsi);
6280 if (new_rhs
6281 && !useless_type_conversion_p (TREE_TYPE (lhs),
6282 TREE_TYPE (new_rhs)))
6283 new_rhs = fold_convert (TREE_TYPE (lhs), new_rhs);
6284 if (new_rhs
6285 && (!inplace
6286 || get_gimple_rhs_num_ops (TREE_CODE (new_rhs)) < old_num_ops))
6287 {
6288 gimple_assign_set_rhs_from_tree (gsi, new_rhs);
6289 changed = true;
6290 }
6291 break;
6292 }
6293
6294 case GIMPLE_CALL:
6295 changed |= gimple_fold_call (gsi, inplace);
6296 break;
6297
6298 case GIMPLE_DEBUG:
6299 if (gimple_debug_bind_p (stmt))
6300 {
6301 tree val = gimple_debug_bind_get_value (stmt);
6302 if (val && REFERENCE_CLASS_P (val))
6303 {
6304 tree tem = maybe_fold_reference (val);
6305 if (tem)
6306 {
6307 gimple_debug_bind_set_value (stmt, tem);
6308 changed = true;
6309 }
6310 }
6311 }
6312 break;
6313
6314 case GIMPLE_RETURN:
6315 {
6316 greturn *ret_stmt = as_a<greturn *> (stmt);
6317 tree ret = gimple_return_retval (ret_stmt);
6318
6319 if (ret && TREE_CODE (ret) == SSA_NAME && valueize)
6320 {
6321 tree val = valueize (ret);
6322 if (val && val != ret
6323 && may_propagate_copy (ret, val))
6324 {
6325 gimple_return_set_retval (ret_stmt, val);
6326 changed = true;
6327 }
6328 }
6329 }
6330 break;
6331
6332 default:;
6333 }
6334
6335 stmt = gsi_stmt (*gsi);
6336
6337 fold_undefer_overflow_warnings (changed && !nowarning, stmt, 0);
6338 return changed;
6339 }
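/* Illustrative sketch (not from the GCC sources) of the boolean
   canonicalization performed in the GIMPLE_ASSIGN case above, for
   boolean-typed x_1 with a matching result type:

     y_2 = x_1 != 0;	-> y_2 = x_1;
     y_2 = x_1 == 0;	-> y_2 = ~x_1;		(one-bit precision)
     y_2 = x_1 == 0;	-> y_2 = x_1 ^ 1;	(wider boolean types)  */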
6340
6341 /* Valueization callback that ends up not following SSA edges. */
6342
6343 tree
6344 no_follow_ssa_edges (tree)
6345 {
6346 return NULL_TREE;
6347 }
6348
6349 /* Valueization callback that ends up following single-use SSA edges only. */
6350
6351 tree
6352 follow_single_use_edges (tree val)
6353 {
6354 if (TREE_CODE (val) == SSA_NAME
6355 && !has_single_use (val))
6356 return NULL_TREE;
6357 return val;
6358 }
6359
6360 /* Valueization callback that follows all SSA edges. */
6361
6362 tree
6363 follow_all_ssa_edges (tree val)
6364 {
6365 return val;
6366 }
6367
6368 /* Fold the statement pointed to by GSI. In some cases, this function may
6369 replace the whole statement with a new one. Returns true iff folding
6370 makes any changes.
6371 The statement pointed to by GSI should be in valid gimple form but may
6372 be in unfolded state as resulting from for example constant propagation
6373 which can produce *&x = 0. */
6374
6375 bool
6376 fold_stmt (gimple_stmt_iterator *gsi)
6377 {
6378 return fold_stmt_1 (gsi, false, no_follow_ssa_edges);
6379 }
6380
6381 bool
6382 fold_stmt (gimple_stmt_iterator *gsi, tree (*valueize) (tree))
6383 {
6384 return fold_stmt_1 (gsi, false, valueize);
6385 }
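/* Illustrative sketch (hypothetical, not part of this file): a
   propagation pass with a constant lattice can plug its lookup in as
   the valueization callback, so folding sees lattice values instead of
   the raw SSA definitions:

     static tree
     ccp_like_valueize (tree name)	// hypothetical helper
     {
       tree cst = lookup_lattice_constant (name);  // hypothetical lookup
       return cst ? cst : name;
     }
     ...
     changed |= fold_stmt (&gsi, ccp_like_valueize);  */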
6386
6387 /* Perform the minimal folding on statement *GSI. Only operations like
6388 *&x created by constant propagation are handled. The statement cannot
6389 be replaced with a new one. Return true if the statement was
6390 changed, false otherwise.
6391 The statement *GSI should be in valid gimple form but may
6392 be in unfolded state as resulting from for example constant propagation
6393 which can produce *&x = 0. */
6394
6395 bool
6396 fold_stmt_inplace (gimple_stmt_iterator *gsi)
6397 {
6398 gimple *stmt = gsi_stmt (*gsi);
6399 bool changed = fold_stmt_1 (gsi, true, no_follow_ssa_edges);
6400 gcc_assert (gsi_stmt (*gsi) == stmt);
6401 return changed;
6402 }
6403
6404 /* Canonicalize and possibly invert the boolean EXPR; return NULL_TREE
6405 if EXPR is null or we don't know how.
6406 If non-null, the result always has boolean type. */
6407
6408 static tree
6409 canonicalize_bool (tree expr, bool invert)
6410 {
6411 if (!expr)
6412 return NULL_TREE;
6413 else if (invert)
6414 {
6415 if (integer_nonzerop (expr))
6416 return boolean_false_node;
6417 else if (integer_zerop (expr))
6418 return boolean_true_node;
6419 else if (TREE_CODE (expr) == SSA_NAME)
6420 return fold_build2 (EQ_EXPR, boolean_type_node, expr,
6421 build_int_cst (TREE_TYPE (expr), 0));
6422 else if (COMPARISON_CLASS_P (expr))
6423 return fold_build2 (invert_tree_comparison (TREE_CODE (expr), false),
6424 boolean_type_node,
6425 TREE_OPERAND (expr, 0),
6426 TREE_OPERAND (expr, 1));
6427 else
6428 return NULL_TREE;
6429 }
6430 else
6431 {
6432 if (TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6433 return expr;
6434 if (integer_nonzerop (expr))
6435 return boolean_true_node;
6436 else if (integer_zerop (expr))
6437 return boolean_false_node;
6438 else if (TREE_CODE (expr) == SSA_NAME)
6439 return fold_build2 (NE_EXPR, boolean_type_node, expr,
6440 build_int_cst (TREE_TYPE (expr), 0));
6441 else if (COMPARISON_CLASS_P (expr))
6442 return fold_build2 (TREE_CODE (expr),
6443 boolean_type_node,
6444 TREE_OPERAND (expr, 0),
6445 TREE_OPERAND (expr, 1));
6446 else
6447 return NULL_TREE;
6448 }
6449 }
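/* Illustrative examples (not from the GCC sources):

     canonicalize_bool (a_1 < b_2, false)	=> a_1 < b_2 (boolean-typed)
     canonicalize_bool (a_1 < b_2, true)	=> a_1 >= b_2
     canonicalize_bool (name_3, true)		=> name_3 == 0
     canonicalize_bool (7, false)		=> boolean_true_node  */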
6450
6451 /* Check to see if a boolean expression EXPR is logically equivalent to the
6452 comparison (OP1 CODE OP2). Check for various identities involving
6453 SSA_NAMEs. */
6454
6455 static bool
6456 same_bool_comparison_p (const_tree expr, enum tree_code code,
6457 const_tree op1, const_tree op2)
6458 {
6459 gimple *s;
6460
6461 /* The obvious case. */
6462 if (TREE_CODE (expr) == code
6463 && operand_equal_p (TREE_OPERAND (expr, 0), op1, 0)
6464 && operand_equal_p (TREE_OPERAND (expr, 1), op2, 0))
6465 return true;
6466
6467 /* Check for comparing (name, name != 0) and the case where expr
6468 is an SSA_NAME with a definition matching the comparison. */
6469 if (TREE_CODE (expr) == SSA_NAME
6470 && TREE_CODE (TREE_TYPE (expr)) == BOOLEAN_TYPE)
6471 {
6472 if (operand_equal_p (expr, op1, 0))
6473 return ((code == NE_EXPR && integer_zerop (op2))
6474 || (code == EQ_EXPR && integer_nonzerop (op2)));
6475 s = SSA_NAME_DEF_STMT (expr);
6476 if (is_gimple_assign (s)
6477 && gimple_assign_rhs_code (s) == code
6478 && operand_equal_p (gimple_assign_rhs1 (s), op1, 0)
6479 && operand_equal_p (gimple_assign_rhs2 (s), op2, 0))
6480 return true;
6481 }
6482
6483 /* If op1 is of the form (name != 0) or (name == 0), and the definition
6484 of name is a comparison, recurse. */
6485 if (TREE_CODE (op1) == SSA_NAME
6486 && TREE_CODE (TREE_TYPE (op1)) == BOOLEAN_TYPE)
6487 {
6488 s = SSA_NAME_DEF_STMT (op1);
6489 if (is_gimple_assign (s)
6490 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison)
6491 {
6492 enum tree_code c = gimple_assign_rhs_code (s);
6493 if ((c == NE_EXPR && integer_zerop (op2))
6494 || (c == EQ_EXPR && integer_nonzerop (op2)))
6495 return same_bool_comparison_p (expr, c,
6496 gimple_assign_rhs1 (s),
6497 gimple_assign_rhs2 (s));
6498 if ((c == EQ_EXPR && integer_zerop (op2))
6499 || (c == NE_EXPR && integer_nonzerop (op2)))
6500 return same_bool_comparison_p (expr,
6501 invert_tree_comparison (c, false),
6502 gimple_assign_rhs1 (s),
6503 gimple_assign_rhs2 (s));
6504 }
6505 }
6506 return false;
6507 }
6508
6509 /* Check to see if two boolean expressions OP1 and OP2 are logically
6510 equivalent. */
6511
6512 static bool
6513 same_bool_result_p (const_tree op1, const_tree op2)
6514 {
6515 /* Simple cases first. */
6516 if (operand_equal_p (op1, op2, 0))
6517 return true;
6518
6519 /* Check the cases where at least one of the operands is a comparison.
6520 These are a bit smarter than operand_equal_p in that they apply some
6521 identities on SSA_NAMEs. */
6522 if (COMPARISON_CLASS_P (op2)
6523 && same_bool_comparison_p (op1, TREE_CODE (op2),
6524 TREE_OPERAND (op2, 0),
6525 TREE_OPERAND (op2, 1)))
6526 return true;
6527 if (COMPARISON_CLASS_P (op1)
6528 && same_bool_comparison_p (op2, TREE_CODE (op1),
6529 TREE_OPERAND (op1, 0),
6530 TREE_OPERAND (op1, 1)))
6531 return true;
6532
6533 /* Default case. */
6534 return false;
6535 }
6536
6537 /* Forward declarations for some mutually recursive functions. */
6538
6539 static tree
6540 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6541 enum tree_code code2, tree op2a, tree op2b, basic_block);
6542 static tree
6543 and_var_with_comparison (tree type, tree var, bool invert,
6544 enum tree_code code2, tree op2a, tree op2b,
6545 basic_block);
6546 static tree
6547 and_var_with_comparison_1 (tree type, gimple *stmt,
6548 enum tree_code code2, tree op2a, tree op2b,
6549 basic_block);
6550 static tree
6551 or_comparisons_1 (tree, enum tree_code code1, tree op1a, tree op1b,
6552 enum tree_code code2, tree op2a, tree op2b,
6553 basic_block);
6554 static tree
6555 or_var_with_comparison (tree, tree var, bool invert,
6556 enum tree_code code2, tree op2a, tree op2b,
6557 basic_block);
6558 static tree
6559 or_var_with_comparison_1 (tree, gimple *stmt,
6560 enum tree_code code2, tree op2a, tree op2b,
6561 basic_block);
6562
6563 /* Helper function for and_comparisons_1: try to simplify the AND of the
6564 ssa variable VAR with the comparison specified by (OP2A CODE2 OP2B).
6565 If INVERT is true, invert the value of VAR before doing the AND.
6566 Return NULL_TREE if we can't simplify this to a single expression. */
6567
6568 static tree
6569 and_var_with_comparison (tree type, tree var, bool invert,
6570 enum tree_code code2, tree op2a, tree op2b,
6571 basic_block outer_cond_bb)
6572 {
6573 tree t;
6574 gimple *stmt = SSA_NAME_DEF_STMT (var);
6575
6576 /* We can only deal with variables whose definitions are assignments. */
6577 if (!is_gimple_assign (stmt))
6578 return NULL_TREE;
6579
6580 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
6581 !var AND (op2a code2 op2b) => !(var OR !(op2a code2 op2b))
6582 Then we only have to consider the simpler non-inverted cases. */
6583 if (invert)
6584 t = or_var_with_comparison_1 (type, stmt,
6585 invert_tree_comparison (code2, false),
6586 op2a, op2b, outer_cond_bb);
6587 else
6588 t = and_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
6589 outer_cond_bb);
6590 return canonicalize_bool (t, invert);
6591 }
6592
6593 /* Try to simplify the AND of the ssa variable defined by the assignment
6594 STMT with the comparison specified by (OP2A CODE2 OP2B).
6595 Return NULL_TREE if we can't simplify this to a single expression. */
6596
6597 static tree
6598 and_var_with_comparison_1 (tree type, gimple *stmt,
6599 enum tree_code code2, tree op2a, tree op2b,
6600 basic_block outer_cond_bb)
6601 {
6602 tree var = gimple_assign_lhs (stmt);
6603 tree true_test_var = NULL_TREE;
6604 tree false_test_var = NULL_TREE;
6605 enum tree_code innercode = gimple_assign_rhs_code (stmt);
6606
6607 /* Check for identities like (var AND (var == 0)) => false. */
6608 if (TREE_CODE (op2a) == SSA_NAME
6609 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
6610 {
6611 if ((code2 == NE_EXPR && integer_zerop (op2b))
6612 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
6613 {
6614 true_test_var = op2a;
6615 if (var == true_test_var)
6616 return var;
6617 }
6618 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
6619 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
6620 {
6621 false_test_var = op2a;
6622 if (var == false_test_var)
6623 return boolean_false_node;
6624 }
6625 }
6626
6627 /* If the definition is a comparison, recurse on it. */
6628 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
6629 {
6630 tree t = and_comparisons_1 (type, innercode,
6631 gimple_assign_rhs1 (stmt),
6632 gimple_assign_rhs2 (stmt),
6633 code2,
6634 op2a,
6635 op2b, outer_cond_bb);
6636 if (t)
6637 return t;
6638 }
6639
6640 /* If the definition is an AND or OR expression, we may be able to
6641 simplify by reassociating. */
6642 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
6643 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
6644 {
6645 tree inner1 = gimple_assign_rhs1 (stmt);
6646 tree inner2 = gimple_assign_rhs2 (stmt);
6647 gimple *s;
6648 tree t;
6649 tree partial = NULL_TREE;
6650 bool is_and = (innercode == BIT_AND_EXPR);
6651
6652 /* Check for boolean identities that don't require recursive examination
6653 of inner1/inner2:
6654 inner1 AND (inner1 AND inner2) => inner1 AND inner2 => var
6655 inner1 AND (inner1 OR inner2) => inner1
6656 !inner1 AND (inner1 AND inner2) => false
6657 !inner1 AND (inner1 OR inner2) => !inner1 AND inner2
6658 Likewise for similar cases involving inner2. */
6659 if (inner1 == true_test_var)
6660 return (is_and ? var : inner1);
6661 else if (inner2 == true_test_var)
6662 return (is_and ? var : inner2);
6663 else if (inner1 == false_test_var)
6664 return (is_and
6665 ? boolean_false_node
6666 : and_var_with_comparison (type, inner2, false, code2, op2a,
6667 op2b, outer_cond_bb));
6668 else if (inner2 == false_test_var)
6669 return (is_and
6670 ? boolean_false_node
6671 : and_var_with_comparison (type, inner1, false, code2, op2a,
6672 op2b, outer_cond_bb));
6673
6674 /* Next, redistribute/reassociate the AND across the inner tests.
6675 Compute the first partial result, (inner1 AND (op2a code op2b)) */
6676 if (TREE_CODE (inner1) == SSA_NAME
6677 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
6678 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6679 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6680 gimple_assign_rhs1 (s),
6681 gimple_assign_rhs2 (s),
6682 code2, op2a, op2b,
6683 outer_cond_bb)))
6684 {
6685 /* Handle the AND case, where we are reassociating:
6686 (inner1 AND inner2) AND (op2a code2 op2b)
6687 => (t AND inner2)
6688 If the partial result t is a constant, we win. Otherwise
6689 continue on to try reassociating with the other inner test. */
6690 if (is_and)
6691 {
6692 if (integer_onep (t))
6693 return inner2;
6694 else if (integer_zerop (t))
6695 return boolean_false_node;
6696 }
6697
6698 /* Handle the OR case, where we are redistributing:
6699 (inner1 OR inner2) AND (op2a code2 op2b)
6700 => (t OR (inner2 AND (op2a code2 op2b))) */
6701 else if (integer_onep (t))
6702 return boolean_true_node;
6703
6704 /* Save partial result for later. */
6705 partial = t;
6706 }
6707
6708 /* Compute the second partial result, (inner2 AND (op2a code op2b)) */
6709 if (TREE_CODE (inner2) == SSA_NAME
6710 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
6711 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
6712 && (t = maybe_fold_and_comparisons (type, gimple_assign_rhs_code (s),
6713 gimple_assign_rhs1 (s),
6714 gimple_assign_rhs2 (s),
6715 code2, op2a, op2b,
6716 outer_cond_bb)))
6717 {
6718 /* Handle the AND case, where we are reassociating:
6719 (inner1 AND inner2) AND (op2a code2 op2b)
6720 => (inner1 AND t) */
6721 if (is_and)
6722 {
6723 if (integer_onep (t))
6724 return inner1;
6725 else if (integer_zerop (t))
6726 return boolean_false_node;
6727 /* If both are the same, we can apply the identity
6728 (x AND x) == x. */
6729 else if (partial && same_bool_result_p (t, partial))
6730 return t;
6731 }
6732
6733 /* Handle the OR case, where we are redistributing:
6734 (inner1 OR inner2) AND (op2a code2 op2b)
6735 => (t OR (inner1 AND (op2a code2 op2b)))
6736 => (t OR partial) */
6737 else
6738 {
6739 if (integer_onep (t))
6740 return boolean_true_node;
6741 else if (partial)
6742 {
6743 /* We already got a simplification for the other
6744 operand to the redistributed OR expression. The
6745 interesting case is when at least one is false.
6746 Or, if both are the same, we can apply the identity
6747 (x OR x) == x. */
6748 if (integer_zerop (partial))
6749 return t;
6750 else if (integer_zerop (t))
6751 return partial;
6752 else if (same_bool_result_p (t, partial))
6753 return t;
6754 }
6755 }
6756 }
6757 }
6758 return NULL_TREE;
6759 }
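/* Illustrative examples (not from the GCC sources) for the identities
   checked above, with boolean b_1:

     b_1 AND (b_1 == 0)  => false	(var is the false-test)
     b_1 AND (b_1 != 0)  => b_1		(var is the true-test)

   and for the reassociation path, with b_1 = (x_2 < 3) & c_4:

     b_1 AND (x_2 >= 3)  => false

   since the partial result (x_2 < 3) AND (x_2 >= 3) is already false.  */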
6760
6761 /* Try to simplify the AND of two comparisons defined by
6762 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6763 If this can be done without constructing an intermediate value,
6764 return the resulting tree; otherwise NULL_TREE is returned.
6765 This function is deliberately asymmetric as it recurses on SSA_DEFs
6766 in the first comparison but not the second. */
6767
6768 static tree
6769 and_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
6770 enum tree_code code2, tree op2a, tree op2b,
6771 basic_block outer_cond_bb)
6772 {
6773 tree truth_type = truth_type_for (TREE_TYPE (op1a));
6774
6775 /* First check for ((x CODE1 y) AND (x CODE2 y)). */
6776 if (operand_equal_p (op1a, op2a, 0)
6777 && operand_equal_p (op1b, op2b, 0))
6778 {
6779 /* Result will be either NULL_TREE, or a combined comparison. */
6780 tree t = combine_comparisons (UNKNOWN_LOCATION,
6781 TRUTH_ANDIF_EXPR, code1, code2,
6782 truth_type, op1a, op1b);
6783 if (t)
6784 return t;
6785 }
6786
6787 /* Likewise the swapped case of the above. */
6788 if (operand_equal_p (op1a, op2b, 0)
6789 && operand_equal_p (op1b, op2a, 0))
6790 {
6791 /* Result will be either NULL_TREE, or a combined comparison. */
6792 tree t = combine_comparisons (UNKNOWN_LOCATION,
6793 TRUTH_ANDIF_EXPR, code1,
6794 swap_tree_comparison (code2),
6795 truth_type, op1a, op1b);
6796 if (t)
6797 return t;
6798 }
6799
6800 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
6801 NAME's definition is a truth value. See if there are any simplifications
6802 that can be done against the NAME's definition. */
6803 if (TREE_CODE (op1a) == SSA_NAME
6804 && (code1 == NE_EXPR || code1 == EQ_EXPR)
6805 && (integer_zerop (op1b) || integer_onep (op1b)))
6806 {
6807 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
6808 || (code1 == NE_EXPR && integer_onep (op1b)));
6809 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
6810 switch (gimple_code (stmt))
6811 {
6812 case GIMPLE_ASSIGN:
6813 /* Try to simplify by copy-propagating the definition. */
6814 return and_var_with_comparison (type, op1a, invert, code2, op2a,
6815 op2b, outer_cond_bb);
6816
6817 case GIMPLE_PHI:
6818 /* If every argument to the PHI produces the same result when
6819 ANDed with the second comparison, we win.
6820 Do not do this unless the type is bool since we need a bool
6821 result here anyway. */
6822 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
6823 {
6824 tree result = NULL_TREE;
6825 unsigned i;
6826 for (i = 0; i < gimple_phi_num_args (stmt); i++)
6827 {
6828 tree arg = gimple_phi_arg_def (stmt, i);
6829
6830 /* If this PHI has itself as an argument, ignore it.
6831 If all the other args produce the same result,
6832 we're still OK. */
6833 if (arg == gimple_phi_result (stmt))
6834 continue;
6835 else if (TREE_CODE (arg) == INTEGER_CST)
6836 {
6837 if (invert ? integer_nonzerop (arg) : integer_zerop (arg))
6838 {
6839 if (!result)
6840 result = boolean_false_node;
6841 else if (!integer_zerop (result))
6842 return NULL_TREE;
6843 }
6844 else if (!result)
6845 result = fold_build2 (code2, boolean_type_node,
6846 op2a, op2b);
6847 else if (!same_bool_comparison_p (result,
6848 code2, op2a, op2b))
6849 return NULL_TREE;
6850 }
6851 else if (TREE_CODE (arg) == SSA_NAME
6852 && !SSA_NAME_IS_DEFAULT_DEF (arg))
6853 {
6854 tree temp;
6855 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
6856 /* In simple cases we can look through PHI nodes,
6857 but we have to be careful with loops.
6858 See PR49073. */
6859 if (! dom_info_available_p (CDI_DOMINATORS)
6860 || gimple_bb (def_stmt) == gimple_bb (stmt)
6861 || dominated_by_p (CDI_DOMINATORS,
6862 gimple_bb (def_stmt),
6863 gimple_bb (stmt)))
6864 return NULL_TREE;
6865 temp = and_var_with_comparison (type, arg, invert, code2,
6866 op2a, op2b,
6867 outer_cond_bb);
6868 if (!temp)
6869 return NULL_TREE;
6870 else if (!result)
6871 result = temp;
6872 else if (!same_bool_result_p (result, temp))
6873 return NULL_TREE;
6874 }
6875 else
6876 return NULL_TREE;
6877 }
6878 return result;
6879 }
6880
6881 default:
6882 break;
6883 }
6884 }
6885 return NULL_TREE;
6886 }
6887
6888 static basic_block fosa_bb;
6889 static tree
6890 follow_outer_ssa_edges (tree val)
6891 {
6892 if (TREE_CODE (val) == SSA_NAME
6893 && !SSA_NAME_IS_DEFAULT_DEF (val))
6894 {
6895 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (val));
6896 if (!def_bb
6897 || def_bb == fosa_bb
6898 || (dom_info_available_p (CDI_DOMINATORS)
6899 && (def_bb == fosa_bb
6900 || dominated_by_p (CDI_DOMINATORS, fosa_bb, def_bb))))
6901 return val;
6902 return NULL_TREE;
6903 }
6904 return val;
6905 }
6906
6907 /* Helper function for maybe_fold_and_comparisons and maybe_fold_or_comparisons:
6908 try to simplify the AND/OR (selected by CODE) of the two comparisons
6909 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B) via match.pd. Return NULL_TREE
6910 if we can't simplify this to a single expression. As we are going to
6911 lower the cost of building SSA names / gimple stmts significantly, we
6912 need to allocate them on the stack. This will cause the code to be a bit ugly. */
6913
6914 static tree
6915 maybe_fold_comparisons_from_match_pd (tree type, enum tree_code code,
6916 enum tree_code code1,
6917 tree op1a, tree op1b,
6918 enum tree_code code2, tree op2a,
6919 tree op2b,
6920 basic_block outer_cond_bb)
6921 {
6922 /* Allocate gimple stmt1 on the stack. */
6923 gassign *stmt1
6924 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6925 gimple_init (stmt1, GIMPLE_ASSIGN, 3);
6926 gimple_assign_set_rhs_code (stmt1, code1);
6927 gimple_assign_set_rhs1 (stmt1, op1a);
6928 gimple_assign_set_rhs2 (stmt1, op1b);
6929 gimple_set_bb (stmt1, NULL);
6930
6931 /* Allocate gimple stmt2 on the stack. */
6932 gassign *stmt2
6933 = (gassign *) XALLOCAVEC (char, gimple_size (GIMPLE_ASSIGN, 3));
6934 gimple_init (stmt2, GIMPLE_ASSIGN, 3);
6935 gimple_assign_set_rhs_code (stmt2, code2);
6936 gimple_assign_set_rhs1 (stmt2, op2a);
6937 gimple_assign_set_rhs2 (stmt2, op2b);
6938 gimple_set_bb (stmt2, NULL);
6939
6940 /* Allocate SSA names(lhs1) on the stack. */
6941 tree lhs1 = (tree)XALLOCA (tree_ssa_name);
6942 memset (lhs1, 0, sizeof (tree_ssa_name));
6943 TREE_SET_CODE (lhs1, SSA_NAME);
6944 TREE_TYPE (lhs1) = type;
6945 init_ssa_name_imm_use (lhs1);
6946
6947 /* Allocate SSA names(lhs2) on the stack. */
6948 tree lhs2 = (tree)XALLOCA (tree_ssa_name);
6949 memset (lhs2, 0, sizeof (tree_ssa_name));
6950 TREE_SET_CODE (lhs2, SSA_NAME);
6951 TREE_TYPE (lhs2) = type;
6952 init_ssa_name_imm_use (lhs2);
6953
6954 gimple_assign_set_lhs (stmt1, lhs1);
6955 gimple_assign_set_lhs (stmt2, lhs2);
6956
6957 gimple_match_op op (gimple_match_cond::UNCOND, code,
6958 type, gimple_assign_lhs (stmt1),
6959 gimple_assign_lhs (stmt2));
6960 fosa_bb = outer_cond_bb;
6961 if (op.resimplify (NULL, (!outer_cond_bb
6962 ? follow_all_ssa_edges : follow_outer_ssa_edges)))
6963 {
6964 if (gimple_simplified_result_is_gimple_val (&op))
6965 {
6966 tree res = op.ops[0];
6967 if (res == lhs1)
6968 return build2 (code1, type, op1a, op1b);
6969 else if (res == lhs2)
6970 return build2 (code2, type, op2a, op2b);
6971 else
6972 return res;
6973 }
6974 else if (op.code.is_tree_code ()
6975 && TREE_CODE_CLASS ((tree_code)op.code) == tcc_comparison)
6976 {
6977 tree op0 = op.ops[0];
6978 tree op1 = op.ops[1];
6979 if (op0 == lhs1 || op0 == lhs2 || op1 == lhs1 || op1 == lhs2)
6980 return NULL_TREE; /* not simple */
6981
6982 return build2 ((enum tree_code)op.code, op.type, op0, op1);
6983 }
6984 }
6985
6986 return NULL_TREE;
6987 }
6988
6989 /* Try to simplify the AND of two comparisons, specified by
6990 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
6991 If this can be simplified to a single expression (without requiring
6992 introducing more SSA variables to hold intermediate values),
6993 return the resulting tree. Otherwise return NULL_TREE.
6994 If the result expression is non-null, it has boolean type. */
6995
6996 tree
6997 maybe_fold_and_comparisons (tree type,
6998 enum tree_code code1, tree op1a, tree op1b,
6999 enum tree_code code2, tree op2a, tree op2b,
7000 basic_block outer_cond_bb)
7001 {
7002 if (tree t = and_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
7003 outer_cond_bb))
7004 return t;
7005
7006 if (tree t = and_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
7007 outer_cond_bb))
7008 return t;
7009
7010 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_AND_EXPR, code1,
7011 op1a, op1b, code2, op2a,
7012 op2b, outer_cond_bb))
7013 return t;
7014
7015 return NULL_TREE;
7016 }
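/* Editor's sketch (hypothetical caller, not from this file): ANDing two
   comparisons over the same operands,

     tree t = maybe_fold_and_comparisons (boolean_type_node,
                                          LT_EXPR, a, b,
                                          LE_EXPR, a, b, NULL);

   and_comparisons_1 merges the two tests via combine_comparisons, so
   for integral A and B the expected result is the single comparison
   a < b; NULL_TREE would mean no simplification was found.  */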
7017
7018 /* Helper function for or_comparisons_1: try to simplify the OR of the
7019 SSA variable VAR with the comparison specified by (OP2A CODE2 OP2B).
7020 If INVERT is true, invert the value of VAR before doing the OR.
7021 Return NULL_TREE if we can't simplify this to a single expression. */
7022
7023 static tree
7024 or_var_with_comparison (tree type, tree var, bool invert,
7025 enum tree_code code2, tree op2a, tree op2b,
7026 basic_block outer_cond_bb)
7027 {
7028 tree t;
7029 gimple *stmt = SSA_NAME_DEF_STMT (var);
7030
7031 /* We can only deal with variables whose definitions are assignments. */
7032 if (!is_gimple_assign (stmt))
7033 return NULL_TREE;
7034
7035 /* If we have an inverted comparison, apply DeMorgan's law and rewrite
7036 !var OR (op2a code2 op2b) => !(var AND !(op2a code2 op2b))
7037 Then we only have to consider the simpler non-inverted cases. */
7038 if (invert)
7039 t = and_var_with_comparison_1 (type, stmt,
7040 invert_tree_comparison (code2, false),
7041 op2a, op2b, outer_cond_bb);
7042 else
7043 t = or_var_with_comparison_1 (type, stmt, code2, op2a, op2b,
7044 outer_cond_bb);
7045 return canonicalize_bool (t, invert);
7046 }
7047
7048 /* Try to simplify the OR of the SSA variable defined by the assignment
7049 STMT with the comparison specified by (OP2A CODE2 OP2B).
7050 Return NULL_TREE if we can't simplify this to a single expression. */
7051
7052 static tree
7053 or_var_with_comparison_1 (tree type, gimple *stmt,
7054 enum tree_code code2, tree op2a, tree op2b,
7055 basic_block outer_cond_bb)
7056 {
7057 tree var = gimple_assign_lhs (stmt);
7058 tree true_test_var = NULL_TREE;
7059 tree false_test_var = NULL_TREE;
7060 enum tree_code innercode = gimple_assign_rhs_code (stmt);
7061
7062 /* Check for identities like (var OR (var != 0)) => var and (var OR (var == 0)) => true. */
7063 if (TREE_CODE (op2a) == SSA_NAME
7064 && TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE)
7065 {
7066 if ((code2 == NE_EXPR && integer_zerop (op2b))
7067 || (code2 == EQ_EXPR && integer_nonzerop (op2b)))
7068 {
7069 true_test_var = op2a;
7070 if (var == true_test_var)
7071 return var;
7072 }
7073 else if ((code2 == EQ_EXPR && integer_zerop (op2b))
7074 || (code2 == NE_EXPR && integer_nonzerop (op2b)))
7075 {
7076 false_test_var = op2a;
7077 if (var == false_test_var)
7078 return boolean_true_node;
7079 }
7080 }
7081
7082 /* If the definition is a comparison, recurse on it. */
7083 if (TREE_CODE_CLASS (innercode) == tcc_comparison)
7084 {
7085 tree t = or_comparisons_1 (type, innercode,
7086 gimple_assign_rhs1 (stmt),
7087 gimple_assign_rhs2 (stmt),
7088 code2, op2a, op2b, outer_cond_bb);
7089 if (t)
7090 return t;
7091 }
7092
7093 /* If the definition is an AND or OR expression, we may be able to
7094 simplify by reassociating. */
7095 if (TREE_CODE (TREE_TYPE (var)) == BOOLEAN_TYPE
7096 && (innercode == BIT_AND_EXPR || innercode == BIT_IOR_EXPR))
7097 {
7098 tree inner1 = gimple_assign_rhs1 (stmt);
7099 tree inner2 = gimple_assign_rhs2 (stmt);
7100 gimple *s;
7101 tree t;
7102 tree partial = NULL_TREE;
7103 bool is_or = (innercode == BIT_IOR_EXPR);
7104
7105 /* Check for boolean identities that don't require recursive examination
7106 of inner1/inner2:
7107 inner1 OR (inner1 OR inner2) => inner1 OR inner2 => var
7108 inner1 OR (inner1 AND inner2) => inner1
7109 !inner1 OR (inner1 OR inner2) => true
7110 !inner1 OR (inner1 AND inner2) => !inner1 OR inner2
7111 */
7112 if (inner1 == true_test_var)
7113 return (is_or ? var : inner1);
7114 else if (inner2 == true_test_var)
7115 return (is_or ? var : inner2);
7116 else if (inner1 == false_test_var)
7117 return (is_or
7118 ? boolean_true_node
7119 : or_var_with_comparison (type, inner2, false, code2, op2a,
7120 op2b, outer_cond_bb));
7121 else if (inner2 == false_test_var)
7122 return (is_or
7123 ? boolean_true_node
7124 : or_var_with_comparison (type, inner1, false, code2, op2a,
7125 op2b, outer_cond_bb));
7126
7127 /* Next, redistribute/reassociate the OR across the inner tests.
7128 Compute the first partial result, (inner1 OR (op2a code2 op2b)). */
7129 if (TREE_CODE (inner1) == SSA_NAME
7130 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner1))
7131 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
7132 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
7133 gimple_assign_rhs1 (s),
7134 gimple_assign_rhs2 (s),
7135 code2, op2a, op2b,
7136 outer_cond_bb)))
7137 {
7138 /* Handle the OR case, where we are reassociating:
7139 (inner1 OR inner2) OR (op2a code2 op2b)
7140 => (t OR inner2)
7141 If the partial result t is a constant, we win. Otherwise
7142 continue on to try reassociating with the other inner test. */
7143 if (is_or)
7144 {
7145 if (integer_onep (t))
7146 return boolean_true_node;
7147 else if (integer_zerop (t))
7148 return inner2;
7149 }
7150
7151 /* Handle the AND case, where we are redistributing:
7152 (inner1 AND inner2) OR (op2a code2 op2b)
7153 => (t AND (inner2 OR (op2a code2 op2b))) */
7154 else if (integer_zerop (t))
7155 return boolean_false_node;
7156
7157 /* Save partial result for later. */
7158 partial = t;
7159 }
7160
7161 /* Compute the second partial result, (inner2 OR (op2a code2 op2b)). */
7162 if (TREE_CODE (inner2) == SSA_NAME
7163 && is_gimple_assign (s = SSA_NAME_DEF_STMT (inner2))
7164 && TREE_CODE_CLASS (gimple_assign_rhs_code (s)) == tcc_comparison
7165 && (t = maybe_fold_or_comparisons (type, gimple_assign_rhs_code (s),
7166 gimple_assign_rhs1 (s),
7167 gimple_assign_rhs2 (s),
7168 code2, op2a, op2b,
7169 outer_cond_bb)))
7170 {
7171 /* Handle the OR case, where we are reassociating:
7172 (inner1 OR inner2) OR (op2a code2 op2b)
7173 => (inner1 OR t)
7174 => (t OR partial) */
7175 if (is_or)
7176 {
7177 if (integer_zerop (t))
7178 return inner1;
7179 else if (integer_onep (t))
7180 return boolean_true_node;
7181 /* If both are the same, we can apply the identity
7182 (x OR x) == x. */
7183 else if (partial && same_bool_result_p (t, partial))
7184 return t;
7185 }
7186
7187 /* Handle the AND case, where we are redistributing:
7188 (inner1 AND inner2) OR (op2a code2 op2b)
7189 => (t AND (inner1 OR (op2a code2 op2b)))
7190 => (t AND partial) */
7191 else
7192 {
7193 if (integer_zerop (t))
7194 return boolean_false_node;
7195 else if (partial)
7196 {
7197 /* We already got a simplification for the other
7198 operand to the redistributed AND expression. The
7199 interesting case is when at least one is true.
7200 Or, if both are the same, we can apply the identity
7201 (x AND x) == x. */
7202 if (integer_onep (partial))
7203 return t;
7204 else if (integer_onep (t))
7205 return partial;
7206 else if (same_bool_result_p (t, partial))
7207 return t;
7208 }
7209 }
7210 }
7211 }
7212 return NULL_TREE;
7213 }
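/* Editor's illustration (assumed integer GIMPLE, not from this file):
   the OR-reassociation above at work.  Given

     inner1_1 = x_4 < y_5;
     var_2 = inner1_1 | inner2_3;

   ORing var_2 with (x_4 >= y_5) computes the first partial result
   (x_4 < y_5) | (x_4 >= y_5), which combine_comparisons folds to 1,
   so boolean_true_node is returned regardless of inner2_3.  */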
7214
7215 /* Try to simplify the OR of two comparisons defined by
7216 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7217 If this can be done without constructing an intermediate value,
7218 return the resulting tree; otherwise NULL_TREE is returned.
7219 This function is deliberately asymmetric as it recurses on SSA_DEFs
7220 in the first comparison but not the second. */
7221
7222 static tree
7223 or_comparisons_1 (tree type, enum tree_code code1, tree op1a, tree op1b,
7224 enum tree_code code2, tree op2a, tree op2b,
7225 basic_block outer_cond_bb)
7226 {
7227 tree truth_type = truth_type_for (TREE_TYPE (op1a));
7228
7229 /* First check for ((x CODE1 y) OR (x CODE2 y)). */
7230 if (operand_equal_p (op1a, op2a, 0)
7231 && operand_equal_p (op1b, op2b, 0))
7232 {
7233 /* Result will be either NULL_TREE, or a combined comparison. */
7234 tree t = combine_comparisons (UNKNOWN_LOCATION,
7235 TRUTH_ORIF_EXPR, code1, code2,
7236 truth_type, op1a, op1b);
7237 if (t)
7238 return t;
7239 }
7240
7241 /* Likewise the swapped case of the above. */
7242 if (operand_equal_p (op1a, op2b, 0)
7243 && operand_equal_p (op1b, op2a, 0))
7244 {
7245 /* Result will be either NULL_TREE, or a combined comparison. */
7246 tree t = combine_comparisons (UNKNOWN_LOCATION,
7247 TRUTH_ORIF_EXPR, code1,
7248 swap_tree_comparison (code2),
7249 truth_type, op1a, op1b);
7250 if (t)
7251 return t;
7252 }
7253
7254 /* Perhaps the first comparison is (NAME != 0) or (NAME == 1) where
7255 NAME's definition is a truth value. See if there are any simplifications
7256 that can be done against the NAME's definition. */
7257 if (TREE_CODE (op1a) == SSA_NAME
7258 && (code1 == NE_EXPR || code1 == EQ_EXPR)
7259 && (integer_zerop (op1b) || integer_onep (op1b)))
7260 {
7261 bool invert = ((code1 == EQ_EXPR && integer_zerop (op1b))
7262 || (code1 == NE_EXPR && integer_onep (op1b)));
7263 gimple *stmt = SSA_NAME_DEF_STMT (op1a);
7264 switch (gimple_code (stmt))
7265 {
7266 case GIMPLE_ASSIGN:
7267 /* Try to simplify by copy-propagating the definition. */
7268 return or_var_with_comparison (type, op1a, invert, code2, op2a,
7269 op2b, outer_cond_bb);
7270
7271 case GIMPLE_PHI:
7272 /* If every argument to the PHI produces the same result when
7273 ORed with the second comparison, we win.
7274 Do not do this unless the type is bool since we need a bool
7275 result here anyway. */
7276 if (TREE_CODE (TREE_TYPE (op1a)) == BOOLEAN_TYPE)
7277 {
7278 tree result = NULL_TREE;
7279 unsigned i;
7280 for (i = 0; i < gimple_phi_num_args (stmt); i++)
7281 {
7282 tree arg = gimple_phi_arg_def (stmt, i);
7283
7284 /* If this PHI has itself as an argument, ignore it.
7285 If all the other args produce the same result,
7286 we're still OK. */
7287 if (arg == gimple_phi_result (stmt))
7288 continue;
7289 else if (TREE_CODE (arg) == INTEGER_CST)
7290 {
7291 if (invert ? integer_zerop (arg) : integer_nonzerop (arg))
7292 {
7293 if (!result)
7294 result = boolean_true_node;
7295 else if (!integer_onep (result))
7296 return NULL_TREE;
7297 }
7298 else if (!result)
7299 result = fold_build2 (code2, boolean_type_node,
7300 op2a, op2b);
7301 else if (!same_bool_comparison_p (result,
7302 code2, op2a, op2b))
7303 return NULL_TREE;
7304 }
7305 else if (TREE_CODE (arg) == SSA_NAME
7306 && !SSA_NAME_IS_DEFAULT_DEF (arg))
7307 {
7308 tree temp;
7309 gimple *def_stmt = SSA_NAME_DEF_STMT (arg);
7310 /* In simple cases we can look through PHI nodes,
7311 but we have to be careful with loops.
7312 See PR49073. */
7313 if (! dom_info_available_p (CDI_DOMINATORS)
7314 || gimple_bb (def_stmt) == gimple_bb (stmt)
7315 || dominated_by_p (CDI_DOMINATORS,
7316 gimple_bb (def_stmt),
7317 gimple_bb (stmt)))
7318 return NULL_TREE;
7319 temp = or_var_with_comparison (type, arg, invert, code2,
7320 op2a, op2b, outer_cond_bb);
7321 if (!temp)
7322 return NULL_TREE;
7323 else if (!result)
7324 result = temp;
7325 else if (!same_bool_result_p (result, temp))
7326 return NULL_TREE;
7327 }
7328 else
7329 return NULL_TREE;
7330 }
7331 return result;
7332 }
7333
7334 default:
7335 break;
7336 }
7337 }
7338 return NULL_TREE;
7339 }
7340
7341 /* Try to simplify the OR of two comparisons, specified by
7342 (OP1A CODE1 OP1B) and (OP2A CODE2 OP2B), respectively.
7343 If this can be simplified to a single expression (without requiring
7344 introducing more SSA variables to hold intermediate values),
7345 return the resulting tree. Otherwise return NULL_TREE.
7346 If the result expression is non-null, it has boolean type. */
7347
7348 tree
7349 maybe_fold_or_comparisons (tree type,
7350 enum tree_code code1, tree op1a, tree op1b,
7351 enum tree_code code2, tree op2a, tree op2b,
7352 basic_block outer_cond_bb)
7353 {
7354 if (tree t = or_comparisons_1 (type, code1, op1a, op1b, code2, op2a, op2b,
7355 outer_cond_bb))
7356 return t;
7357
7358 if (tree t = or_comparisons_1 (type, code2, op2a, op2b, code1, op1a, op1b,
7359 outer_cond_bb))
7360 return t;
7361
7362 if (tree t = maybe_fold_comparisons_from_match_pd (type, BIT_IOR_EXPR, code1,
7363 op1a, op1b, code2, op2a,
7364 op2b, outer_cond_bb))
7365 return t;
7366
7367 return NULL_TREE;
7368 }
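/* Editor's sketch (hypothetical caller, not from this file):

     tree t = maybe_fold_or_comparisons (boolean_type_node,
                                         LT_EXPR, x, y,
                                         EQ_EXPR, x, y, NULL);

   or_comparisons_1 recognizes the shared operands and combine_comparisons
   merges LT and EQ under TRUTH_ORIF_EXPR, so for integral X and Y the
   expected result is the single comparison x <= y.  */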
7369
7370 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7371
7372 Either NULL_TREE, a simplified but non-constant expression, or a
7373 constant is returned.
7374
7375 ??? This should go into a gimple-fold-inline.h file to be eventually
7376 privatized with the single valueize function used in the various TUs
7377 to avoid the indirect function call overhead. */
7378
7379 tree
7380 gimple_fold_stmt_to_constant_1 (gimple *stmt, tree (*valueize) (tree),
7381 tree (*gvalueize) (tree))
7382 {
7383 gimple_match_op res_op;
7384 /* ??? The SSA propagators do not correctly deal with following SSA use-def
7385 edges if there are intermediate VARYING defs. For this reason
7386 do not follow SSA edges here even though SCCVN can technically
7387 just deal fine with that. */
7388 if (gimple_simplify (stmt, &res_op, NULL, gvalueize, valueize))
7389 {
7390 tree res = NULL_TREE;
7391 if (gimple_simplified_result_is_gimple_val (&res_op))
7392 res = res_op.ops[0];
7393 else if (mprts_hook)
7394 res = mprts_hook (&res_op);
7395 if (res)
7396 {
7397 if (dump_file && dump_flags & TDF_DETAILS)
7398 {
7399 fprintf (dump_file, "Match-and-simplified ");
7400 print_gimple_expr (dump_file, stmt, 0, TDF_SLIM);
7401 fprintf (dump_file, " to ");
7402 print_generic_expr (dump_file, res);
7403 fprintf (dump_file, "\n");
7404 }
7405 return res;
7406 }
7407 }
7408
7409 location_t loc = gimple_location (stmt);
7410 switch (gimple_code (stmt))
7411 {
7412 case GIMPLE_ASSIGN:
7413 {
7414 enum tree_code subcode = gimple_assign_rhs_code (stmt);
7415
7416 switch (get_gimple_rhs_class (subcode))
7417 {
7418 case GIMPLE_SINGLE_RHS:
7419 {
7420 tree rhs = gimple_assign_rhs1 (stmt);
7421 enum tree_code_class kind = TREE_CODE_CLASS (subcode);
7422
7423 if (TREE_CODE (rhs) == SSA_NAME)
7424 {
7425 /* If the RHS is an SSA_NAME, return its known constant value,
7426 if any. */
7427 return (*valueize) (rhs);
7428 }
7429 /* Handle propagating invariant addresses into address
7430 operations. */
7431 else if (TREE_CODE (rhs) == ADDR_EXPR
7432 && !is_gimple_min_invariant (rhs))
7433 {
7434 poly_int64 offset = 0;
7435 tree base;
7436 base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (rhs, 0),
7437 &offset,
7438 valueize);
7439 if (base
7440 && (CONSTANT_CLASS_P (base)
7441 || decl_address_invariant_p (base)))
7442 return build_invariant_address (TREE_TYPE (rhs),
7443 base, offset);
7444 }
7445 else if (TREE_CODE (rhs) == CONSTRUCTOR
7446 && TREE_CODE (TREE_TYPE (rhs)) == VECTOR_TYPE
7447 && known_eq (CONSTRUCTOR_NELTS (rhs),
7448 TYPE_VECTOR_SUBPARTS (TREE_TYPE (rhs))))
7449 {
7450 unsigned i, nelts;
7451 tree val;
7452
7453 nelts = CONSTRUCTOR_NELTS (rhs);
7454 tree_vector_builder vec (TREE_TYPE (rhs), nelts, 1);
7455 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (rhs), i, val)
7456 {
7457 val = (*valueize) (val);
7458 if (TREE_CODE (val) == INTEGER_CST
7459 || TREE_CODE (val) == REAL_CST
7460 || TREE_CODE (val) == FIXED_CST)
7461 vec.quick_push (val);
7462 else
7463 return NULL_TREE;
7464 }
7465
7466 return vec.build ();
7467 }
7468 if (subcode == OBJ_TYPE_REF)
7469 {
7470 tree val = (*valueize) (OBJ_TYPE_REF_EXPR (rhs));
7471 /* If callee is constant, we can fold away the wrapper. */
7472 if (is_gimple_min_invariant (val))
7473 return val;
7474 }
7475
7476 if (kind == tcc_reference)
7477 {
7478 if ((TREE_CODE (rhs) == VIEW_CONVERT_EXPR
7479 || TREE_CODE (rhs) == REALPART_EXPR
7480 || TREE_CODE (rhs) == IMAGPART_EXPR)
7481 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7482 {
7483 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7484 return fold_unary_loc (EXPR_LOCATION (rhs),
7485 TREE_CODE (rhs),
7486 TREE_TYPE (rhs), val);
7487 }
7488 else if (TREE_CODE (rhs) == BIT_FIELD_REF
7489 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7490 {
7491 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7492 return fold_ternary_loc (EXPR_LOCATION (rhs),
7493 TREE_CODE (rhs),
7494 TREE_TYPE (rhs), val,
7495 TREE_OPERAND (rhs, 1),
7496 TREE_OPERAND (rhs, 2));
7497 }
7498 else if (TREE_CODE (rhs) == MEM_REF
7499 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
7500 {
7501 tree val = (*valueize) (TREE_OPERAND (rhs, 0));
7502 if (TREE_CODE (val) == ADDR_EXPR
7503 && is_gimple_min_invariant (val))
7504 {
7505 tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
7506 unshare_expr (val),
7507 TREE_OPERAND (rhs, 1));
7508 if (tem)
7509 rhs = tem;
7510 }
7511 }
7512 return fold_const_aggregate_ref_1 (rhs, valueize);
7513 }
7514 else if (kind == tcc_declaration)
7515 return get_symbol_constant_value (rhs);
7516 return rhs;
7517 }
7518
7519 case GIMPLE_UNARY_RHS:
7520 return NULL_TREE;
7521
7522 case GIMPLE_BINARY_RHS:
7523 /* Translate &x + CST into an invariant form suitable for
7524 further propagation. */
7525 if (subcode == POINTER_PLUS_EXPR)
7526 {
7527 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7528 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7529 if (TREE_CODE (op0) == ADDR_EXPR
7530 && TREE_CODE (op1) == INTEGER_CST)
7531 {
7532 tree off = fold_convert (ptr_type_node, op1);
7533 return build1_loc
7534 (loc, ADDR_EXPR, TREE_TYPE (op0),
7535 fold_build2 (MEM_REF,
7536 TREE_TYPE (TREE_TYPE (op0)),
7537 unshare_expr (op0), off));
7538 }
7539 }
7540 /* Canonicalize bool != 0 and bool == 0 appearing after
7541 valueization. While gimple_simplify handles this
7542 it can get confused by the ~X == 1 -> X == 0 transform
7543 which we can't reduce to an SSA name or a constant
7544 (and we have no way to tell gimple_simplify to not
7545 consider those transforms in the first place). */
7546 else if (subcode == EQ_EXPR
7547 || subcode == NE_EXPR)
7548 {
7549 tree lhs = gimple_assign_lhs (stmt);
7550 tree op0 = gimple_assign_rhs1 (stmt);
7551 if (useless_type_conversion_p (TREE_TYPE (lhs),
7552 TREE_TYPE (op0)))
7553 {
7554 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7555 op0 = (*valueize) (op0);
7556 if (TREE_CODE (op0) == INTEGER_CST)
7557 std::swap (op0, op1);
7558 if (TREE_CODE (op1) == INTEGER_CST
7559 && ((subcode == NE_EXPR && integer_zerop (op1))
7560 || (subcode == EQ_EXPR && integer_onep (op1))))
7561 return op0;
7562 }
7563 }
7564 return NULL_TREE;
7565
7566 case GIMPLE_TERNARY_RHS:
7567 {
7568 /* Handle ternary operators that can appear in GIMPLE form. */
7569 tree op0 = (*valueize) (gimple_assign_rhs1 (stmt));
7570 tree op1 = (*valueize) (gimple_assign_rhs2 (stmt));
7571 tree op2 = (*valueize) (gimple_assign_rhs3 (stmt));
7572 return fold_ternary_loc (loc, subcode,
7573 TREE_TYPE (gimple_assign_lhs (stmt)),
7574 op0, op1, op2);
7575 }
7576
7577 default:
7578 gcc_unreachable ();
7579 }
7580 }
7581
7582 case GIMPLE_CALL:
7583 {
7584 tree fn;
7585 gcall *call_stmt = as_a <gcall *> (stmt);
7586
7587 if (gimple_call_internal_p (stmt))
7588 {
7589 enum tree_code subcode = ERROR_MARK;
7590 switch (gimple_call_internal_fn (stmt))
7591 {
7592 case IFN_UBSAN_CHECK_ADD:
7593 subcode = PLUS_EXPR;
7594 break;
7595 case IFN_UBSAN_CHECK_SUB:
7596 subcode = MINUS_EXPR;
7597 break;
7598 case IFN_UBSAN_CHECK_MUL:
7599 subcode = MULT_EXPR;
7600 break;
7601 case IFN_BUILTIN_EXPECT:
7602 {
7603 tree arg0 = gimple_call_arg (stmt, 0);
7604 tree op0 = (*valueize) (arg0);
7605 if (TREE_CODE (op0) == INTEGER_CST)
7606 return op0;
7607 return NULL_TREE;
7608 }
7609 default:
7610 return NULL_TREE;
7611 }
7612 tree arg0 = gimple_call_arg (stmt, 0);
7613 tree arg1 = gimple_call_arg (stmt, 1);
7614 tree op0 = (*valueize) (arg0);
7615 tree op1 = (*valueize) (arg1);
7616
7617 if (TREE_CODE (op0) != INTEGER_CST
7618 || TREE_CODE (op1) != INTEGER_CST)
7619 {
7620 switch (subcode)
7621 {
7622 case MULT_EXPR:
7623 /* x * 0 = 0 * x = 0 without overflow. */
7624 if (integer_zerop (op0) || integer_zerop (op1))
7625 return build_zero_cst (TREE_TYPE (arg0));
7626 break;
7627 case MINUS_EXPR:
7628 /* y - y = 0 without overflow. */
7629 if (operand_equal_p (op0, op1, 0))
7630 return build_zero_cst (TREE_TYPE (arg0));
7631 break;
7632 default:
7633 break;
7634 }
7635 }
7636 tree res
7637 = fold_binary_loc (loc, subcode, TREE_TYPE (arg0), op0, op1);
7638 if (res
7639 && TREE_CODE (res) == INTEGER_CST
7640 && !TREE_OVERFLOW (res))
7641 return res;
7642 return NULL_TREE;
7643 }
7644
7645 fn = (*valueize) (gimple_call_fn (stmt));
7646 if (TREE_CODE (fn) == ADDR_EXPR
7647 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
7648 && fndecl_built_in_p (TREE_OPERAND (fn, 0))
7649 && gimple_builtin_call_types_compatible_p (stmt,
7650 TREE_OPERAND (fn, 0)))
7651 {
7652 tree *args = XALLOCAVEC (tree, gimple_call_num_args (stmt));
7653 tree retval;
7654 unsigned i;
7655 for (i = 0; i < gimple_call_num_args (stmt); ++i)
7656 args[i] = (*valueize) (gimple_call_arg (stmt, i));
7657 retval = fold_builtin_call_array (loc,
7658 gimple_call_return_type (call_stmt),
7659 fn, gimple_call_num_args (stmt), args);
7660 if (retval)
7661 {
7662 /* fold_call_expr wraps the result inside a NOP_EXPR. */
7663 STRIP_NOPS (retval);
7664 retval = fold_convert (gimple_call_return_type (call_stmt),
7665 retval);
7666 }
7667 return retval;
7668 }
7669 return NULL_TREE;
7670 }
7671
7672 default:
7673 return NULL_TREE;
7674 }
7675 }
7676
7677 /* Fold STMT to a constant using VALUEIZE to valueize SSA names.
7678 Returns NULL_TREE if folding to a constant is not possible, otherwise
7679 returns a constant according to is_gimple_min_invariant. */
7680
7681 tree
7682 gimple_fold_stmt_to_constant (gimple *stmt, tree (*valueize) (tree))
7683 {
7684 tree res = gimple_fold_stmt_to_constant_1 (stmt, valueize);
7685 if (res && is_gimple_min_invariant (res))
7686 return res;
7687 return NULL_TREE;
7688 }
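/* Editor's sketch (hypothetical propagator, not from this file): a
   typical VALUEIZE callback maps an SSA name to its known lattice
   constant and otherwise returns the name itself:

     static tree
     my_valueize (tree name)
     {
       // lookup_lattice_constant is a hypothetical helper.
       if (tree cst = lookup_lattice_constant (name))
         return cst;
       return name;
     }

     tree cst = gimple_fold_stmt_to_constant (stmt, my_valueize);

   CST is then either NULL_TREE or a constant satisfying
   is_gimple_min_invariant.  */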
7689
7690
7691 /* The following set of functions is supposed to fold references using
7692 their constant initializers. */
7693
7694 /* See if we can find the constructor defining the value of BASE.
7695 When we know the constructor at a constant offset (such as when
7696 BASE is array[40] and we know the constructor of ARRAY), then
7697 BIT_OFFSET is adjusted accordingly.
7698
7699 As a special case, return error_mark_node when constructor
7700 is not explicitly available, but it is known to be zero
7701 such as 'static const int a;'. */
7702 static tree
7703 get_base_constructor (tree base, poly_int64_pod *bit_offset,
7704 tree (*valueize)(tree))
7705 {
7706 poly_int64 bit_offset2, size, max_size;
7707 bool reverse;
7708
7709 if (TREE_CODE (base) == MEM_REF)
7710 {
7711 poly_offset_int boff = *bit_offset + mem_ref_offset (base) * BITS_PER_UNIT;
7712 if (!boff.to_shwi (bit_offset))
7713 return NULL_TREE;
7714
7715 if (valueize
7716 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
7717 base = valueize (TREE_OPERAND (base, 0));
7718 if (!base || TREE_CODE (base) != ADDR_EXPR)
7719 return NULL_TREE;
7720 base = TREE_OPERAND (base, 0);
7721 }
7722 else if (valueize
7723 && TREE_CODE (base) == SSA_NAME)
7724 base = valueize (base);
7725
7726 /* Get a CONSTRUCTOR. If BASE is a VAR_DECL, get its
7727 DECL_INITIAL. If BASE is a nested reference into another
7728 ARRAY_REF or COMPONENT_REF, make a recursive call to resolve
7729 the inner reference. */
7730 switch (TREE_CODE (base))
7731 {
7732 case VAR_DECL:
7733 case CONST_DECL:
7734 {
7735 tree init = ctor_for_folding (base);
7736
7737 /* Our semantics are the exact opposite of ctor_for_folding's:
7738 NULL means unknown, while error_mark_node means zero. */
7739 if (init == error_mark_node)
7740 return NULL_TREE;
7741 if (!init)
7742 return error_mark_node;
7743 return init;
7744 }
7745
7746 case VIEW_CONVERT_EXPR:
7747 return get_base_constructor (TREE_OPERAND (base, 0),
7748 bit_offset, valueize);
7749
7750 case ARRAY_REF:
7751 case COMPONENT_REF:
7752 base = get_ref_base_and_extent (base, &bit_offset2, &size, &max_size,
7753 &reverse);
7754 if (!known_size_p (max_size) || maybe_ne (size, max_size))
7755 return NULL_TREE;
7756 *bit_offset += bit_offset2;
7757 return get_base_constructor (base, bit_offset, valueize);
7758
7759 case CONSTRUCTOR:
7760 return base;
7761
7762 default:
7763 if (CONSTANT_CLASS_P (base))
7764 return base;
7765
7766 return NULL_TREE;
7767 }
7768 }
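/* Editor's illustration (assumed C input, not from this file): for

     static const int a[4] = { 1, 2, 3, 4 };

   a load through MEM_REF [&a, 8] reaches get_base_constructor, whose
   MEM_REF case adds 8 * BITS_PER_UNIT to *BIT_OFFSET and returns the
   CONSTRUCTOR { 1, 2, 3, 4 } via ctor_for_folding; an uninitialized
   'static const int b;' would instead yield error_mark_node, meaning
   a known zero.  */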
7769
7770 /* CTOR is CONSTRUCTOR of an array type. Fold a reference of SIZE bits
7771 to the memory at bit OFFSET. When non-null, TYPE is the expected
7772 type of the reference; otherwise the type of the referenced element
7773 is used instead. When SIZE is zero, attempt to fold a reference to
7774 the entire element which OFFSET refers to. Increment *SUBOFF by
7775 the bit offset of the accessed element. */
7776
7777 static tree
7778 fold_array_ctor_reference (tree type, tree ctor,
7779 unsigned HOST_WIDE_INT offset,
7780 unsigned HOST_WIDE_INT size,
7781 tree from_decl,
7782 unsigned HOST_WIDE_INT *suboff)
7783 {
7784 offset_int low_bound;
7785 offset_int elt_size;
7786 offset_int access_index;
7787 tree domain_type = NULL_TREE;
7788 HOST_WIDE_INT inner_offset;
7789
7790 /* Compute low bound and elt size. */
7791 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
7792 domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
7793 if (domain_type && TYPE_MIN_VALUE (domain_type))
7794 {
7795 /* Static constructors for variably sized objects make no sense. */
7796 if (TREE_CODE (TYPE_MIN_VALUE (domain_type)) != INTEGER_CST)
7797 return NULL_TREE;
7798 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
7799 }
7800 else
7801 low_bound = 0;
7802 /* Static constructors for variably sized objects make no sense. */
7803 if (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor)))) != INTEGER_CST)
7804 return NULL_TREE;
7805 elt_size = wi::to_offset (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ctor))));
7806
7807 /* When TYPE is non-null, verify that it specifies a constant-sized
7808 access of a multiple of the array element size. Avoid division
7809 by zero below when ELT_SIZE is zero, such as with the result of
7810 an initializer for a zero-length array or an empty struct. */
7811 if (elt_size == 0
7812 || (type
7813 && (!TYPE_SIZE_UNIT (type)
7814 || TREE_CODE (TYPE_SIZE_UNIT (type)) != INTEGER_CST)))
7815 return NULL_TREE;
7816
7817 /* Compute the array index we look for. */
7818 access_index = wi::udiv_trunc (offset_int (offset / BITS_PER_UNIT),
7819 elt_size);
7820 access_index += low_bound;
7821
7822 /* And offset within the access. */
7823 inner_offset = offset % (elt_size.to_uhwi () * BITS_PER_UNIT);
7824
7825 unsigned HOST_WIDE_INT elt_sz = elt_size.to_uhwi ();
7826 if (size > elt_sz * BITS_PER_UNIT)
7827 {
7828 /* native_encode_expr constraints. */
7829 if (size > MAX_BITSIZE_MODE_ANY_MODE
7830 || size % BITS_PER_UNIT != 0
7831 || inner_offset % BITS_PER_UNIT != 0
7832 || elt_sz > MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT)
7833 return NULL_TREE;
7834
7835 unsigned ctor_idx;
7836 tree val = get_array_ctor_element_at_index (ctor, access_index,
7837 &ctor_idx);
7838 if (!val && ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7839 return build_zero_cst (type);
7840
7841 /* native-encode adjacent ctor elements. */
7842 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
7843 unsigned bufoff = 0;
7844 offset_int index = 0;
7845 offset_int max_index = access_index;
7846 constructor_elt *elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7847 if (!val)
7848 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7849 else if (!CONSTANT_CLASS_P (val))
7850 return NULL_TREE;
7851 if (!elt->index)
7852 ;
7853 else if (TREE_CODE (elt->index) == RANGE_EXPR)
7854 {
7855 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7856 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7857 }
7858 else
7859 index = max_index = wi::to_offset (elt->index);
7860 index = wi::umax (index, access_index);
7861 do
7862 {
7863 if (bufoff + elt_sz > sizeof (buf))
7864 elt_sz = sizeof (buf) - bufoff;
7865 int len = native_encode_expr (val, buf + bufoff, elt_sz,
7866 inner_offset / BITS_PER_UNIT);
7867 if (len != (int) elt_sz - inner_offset / BITS_PER_UNIT)
7868 return NULL_TREE;
7869 inner_offset = 0;
7870 bufoff += len;
7871
7872 access_index += 1;
7873 if (wi::cmpu (access_index, index) == 0)
7874 val = elt->value;
7875 else if (wi::cmpu (access_index, max_index) > 0)
7876 {
7877 ctor_idx++;
7878 if (ctor_idx >= CONSTRUCTOR_NELTS (ctor))
7879 {
7880 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7881 ++max_index;
7882 }
7883 else
7884 {
7885 elt = CONSTRUCTOR_ELT (ctor, ctor_idx);
7886 index = 0;
7887 max_index = access_index;
7888 if (!elt->index)
7889 ;
7890 else if (TREE_CODE (elt->index) == RANGE_EXPR)
7891 {
7892 index = wi::to_offset (TREE_OPERAND (elt->index, 0));
7893 max_index = wi::to_offset (TREE_OPERAND (elt->index, 1));
7894 }
7895 else
7896 index = max_index = wi::to_offset (elt->index);
7897 index = wi::umax (index, access_index);
7898 if (wi::cmpu (access_index, index) == 0)
7899 val = elt->value;
7900 else
7901 val = build_zero_cst (TREE_TYPE (TREE_TYPE (ctor)));
7902 }
7903 }
7904 }
7905 while (bufoff < size / BITS_PER_UNIT);
7906 *suboff += size;
7907 return native_interpret_expr (type, buf, size / BITS_PER_UNIT);
7908 }
7909
7910 if (tree val = get_array_ctor_element_at_index (ctor, access_index))
7911 {
7912 if (!size && TREE_CODE (val) != CONSTRUCTOR)
7913 {
7914 /* For the final reference to the entire accessed element
7915 (SIZE is zero), reset INNER_OFFSET, disregard TYPE (which
7916 may be null) in favor of the type of the element, and set
7917 SIZE to the size of the accessed element. */
7918 inner_offset = 0;
7919 type = TREE_TYPE (val);
7920 size = elt_sz * BITS_PER_UNIT;
7921 }
7922 else if (size && access_index < CONSTRUCTOR_NELTS (ctor) - 1
7923 && TREE_CODE (val) == CONSTRUCTOR
7924 && (elt_sz * BITS_PER_UNIT - inner_offset) < size)
7925 /* If this isn't the last element in the CTOR, is itself a CTOR,
7926 and does not cover the whole object we are requesting, give up,
7927 since we're not set up for combining from multiple CTORs. */
7928 return NULL_TREE;
7929
7930 *suboff += access_index.to_uhwi () * elt_sz * BITS_PER_UNIT;
7931 return fold_ctor_reference (type, val, inner_offset, size, from_decl,
7932 suboff);
7933 }
7934
7935 /* Memory not explicitly mentioned in constructor is 0 (or
7936 the reference is out of range). */
7937 return type ? build_zero_cst (type) : NULL_TREE;
7938 }
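/* Editor's worked example (assumed values, not from this file): for a
   constructor of int a[8] (ELT_SIZE 4, LOW_BOUND 0) and a 32-bit read
   at bit OFFSET 96,

     access_index = (96 / BITS_PER_UNIT) / 4 + 0 = 3
     inner_offset = 96 % (4 * BITS_PER_UNIT)    = 0

   so the reference folds to the constructor element for a[3] and
   *SUBOFF is incremented by 3 * 4 * BITS_PER_UNIT = 96.  */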
7939
7940 /* CTOR is CONSTRUCTOR of an aggregate or vector. Fold a reference
7941 of SIZE bits to the memory at bit OFFSET. When non-null, TYPE
7942 is the expected type of the reference; otherwise the type of
7943 the referenced member is used instead. When SIZE is zero,
7944 attempt to fold a reference to the entire member which OFFSET
7945 refers to. Increment *SUBOFF by the bit offset
7946 of the accessed member. */
7947
7948 static tree
7949 fold_nonarray_ctor_reference (tree type, tree ctor,
7950 unsigned HOST_WIDE_INT offset,
7951 unsigned HOST_WIDE_INT size,
7952 tree from_decl,
7953 unsigned HOST_WIDE_INT *suboff)
7954 {
7955 unsigned HOST_WIDE_INT cnt;
7956 tree cfield, cval;
7957
7958 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield,
7959 cval)
7960 {
7961 tree byte_offset = DECL_FIELD_OFFSET (cfield);
7962 tree field_offset = DECL_FIELD_BIT_OFFSET (cfield);
7963 tree field_size = DECL_SIZE (cfield);
7964
7965 if (!field_size)
7966 {
7967 /* Determine the size of the flexible array member from
7968 the size of the initializer provided for it. */
7969 field_size = TYPE_SIZE (TREE_TYPE (cval));
7970 }
7971
7972 /* Variable-sized objects in static constructors make no sense,
7973 but field_size can be NULL for flexible array members. */
7974 gcc_assert (TREE_CODE (field_offset) == INTEGER_CST
7975 && TREE_CODE (byte_offset) == INTEGER_CST
7976 && (field_size != NULL_TREE
7977 ? TREE_CODE (field_size) == INTEGER_CST
7978 : TREE_CODE (TREE_TYPE (cfield)) == ARRAY_TYPE));
7979
7980 /* Compute bit offset of the field. */
7981 offset_int bitoffset
7982 = (wi::to_offset (field_offset)
7983 + (wi::to_offset (byte_offset) << LOG2_BITS_PER_UNIT));
7984 /* Compute bit offset where the field ends. */
7985 offset_int bitoffset_end;
7986 if (field_size != NULL_TREE)
7987 bitoffset_end = bitoffset + wi::to_offset (field_size);
7988 else
7989 bitoffset_end = 0;
7990
7991 /* Compute the bit offset of the end of the desired access.
7992 As a special case, if the size of the desired access is
7993 zero, assume the access is to the entire field (and let
7994 the caller make any necessary adjustments by storing
7995 the actual bounds of the field in FIELDBOUNDS). */
7996 offset_int access_end = offset_int (offset);
7997 if (size)
7998 access_end += size;
7999 else
8000 access_end = bitoffset_end;
8001
8002 /* Is there any overlap between the desired access at
8003 [OFFSET, OFFSET+SIZE) and the offset of the field within
8004 the object at [BITOFFSET, BITOFFSET_END)? */
8005 if (wi::cmps (access_end, bitoffset) > 0
8006 && (field_size == NULL_TREE
8007 || wi::lts_p (offset, bitoffset_end)))
8008 {
8009 *suboff += bitoffset.to_uhwi ();
8010
8011 if (!size && TREE_CODE (cval) != CONSTRUCTOR)
8012 {
8013 /* For the final reference to the entire accessed member
8014 (SIZE is zero), reset OFFSET, disregard TYPE (which may
8015 be null) in favor of the type of the member, and set
8016 SIZE to the size of the accessed member. */
8017 offset = bitoffset.to_uhwi ();
8018 type = TREE_TYPE (cval);
8019 size = (bitoffset_end - bitoffset).to_uhwi ();
8020 }
8021
8022 /* We do have overlap. Now see if the field is large enough
8023 to cover the access. Give up for accesses that extend
8024 beyond the end of the object or that span multiple fields. */
8025 if (wi::cmps (access_end, bitoffset_end) > 0)
8026 return NULL_TREE;
8027 if (offset < bitoffset)
8028 return NULL_TREE;
8029
8030 offset_int inner_offset = offset_int (offset) - bitoffset;
8031 return fold_ctor_reference (type, cval,
8032 inner_offset.to_uhwi (), size,
8033 from_decl, suboff);
8034 }
8035 }
8036
8037 if (!type)
8038 return NULL_TREE;
8039
8040 return build_zero_cst (type);
8041 }
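/* Editor's worked example (assumed layout, not from this file): for

     struct S { int i; short s; };

   with S.s at bit offset 32 and DECL_SIZE 16, a 16-bit read at bit
   OFFSET 32 has ACCESS_END 48; the field spans [32, 48), the overlap
   test succeeds, INNER_OFFSET becomes 32 - 32 = 0, and the function
   recurses via fold_ctor_reference into the initializer value of S.s.  */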
8042
8043 /* CTOR is a value initializing memory. Fold a reference of TYPE and
8044 bit size POLY_SIZE to the memory at bit POLY_OFFSET. When POLY_SIZE
8045 is zero, attempt to fold a reference to the entire subobject
8046 which POLY_OFFSET refers to. This is used when folding accesses to
8047 string members of aggregates. When non-null, set *SUBOFF to
8048 the bit offset of the accessed subobject. */
8049
8050 tree
8051 fold_ctor_reference (tree type, tree ctor, const poly_uint64 &poly_offset,
8052 const poly_uint64 &poly_size, tree from_decl,
8053 unsigned HOST_WIDE_INT *suboff /* = NULL */)
8054 {
8055 tree ret;
8056
8057 /* We found the field with an exact match. */
8058 if (type
8059 && useless_type_conversion_p (type, TREE_TYPE (ctor))
8060 && known_eq (poly_offset, 0U))
8061 return canonicalize_constructor_val (unshare_expr (ctor), from_decl);
8062
8063 /* The remaining optimizations need a constant size and offset. */
8064 unsigned HOST_WIDE_INT size, offset;
8065 if (!poly_size.is_constant (&size) || !poly_offset.is_constant (&offset))
8066 return NULL_TREE;
8067
8068 /* We are at the end of the walk; see if we can view-convert the
8069 result. */
8070 if (!AGGREGATE_TYPE_P (TREE_TYPE (ctor)) && !offset
8071 /* VIEW_CONVERT_EXPR is defined only for matching sizes. */
8072 && !compare_tree_int (TYPE_SIZE (type), size)
8073 && !compare_tree_int (TYPE_SIZE (TREE_TYPE (ctor)), size))
8074 {
8075 ret = canonicalize_constructor_val (unshare_expr (ctor), from_decl);
8076 if (ret)
8077 {
8078 ret = fold_unary (VIEW_CONVERT_EXPR, type, ret);
8079 if (ret)
8080 STRIP_USELESS_TYPE_CONVERSION (ret);
8081 }
8082 return ret;
8083 }
8084 /* For constants and byte-aligned/sized reads try to go through
8085 native_encode/interpret. */
8086 if (CONSTANT_CLASS_P (ctor)
8087 && BITS_PER_UNIT == 8
8088 && offset % BITS_PER_UNIT == 0
8089 && offset / BITS_PER_UNIT <= INT_MAX
8090 && size % BITS_PER_UNIT == 0
8091 && size <= MAX_BITSIZE_MODE_ANY_MODE
8092 && can_native_interpret_type_p (type))
8093 {
8094 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8095 int len = native_encode_expr (ctor, buf, size / BITS_PER_UNIT,
8096 offset / BITS_PER_UNIT);
8097 if (len > 0)
8098 return native_interpret_expr (type, buf, len);
8099 }
8100 if (TREE_CODE (ctor) == CONSTRUCTOR)
8101 {
8102 unsigned HOST_WIDE_INT dummy = 0;
8103 if (!suboff)
8104 suboff = &dummy;
8105
8106 tree ret;
8107 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
8108 || TREE_CODE (TREE_TYPE (ctor)) == VECTOR_TYPE)
8109 ret = fold_array_ctor_reference (type, ctor, offset, size,
8110 from_decl, suboff);
8111 else
8112 ret = fold_nonarray_ctor_reference (type, ctor, offset, size,
8113 from_decl, suboff);
8114
8115 /* Fall back to native_encode_initializer. Needs to be done
8116 only in the outermost fold_ctor_reference call (because it itself
8117 recurses into CONSTRUCTORs) and doesn't update suboff. */
8118 if (ret == NULL_TREE
8119 && suboff == &dummy
8120 && BITS_PER_UNIT == 8
8121 && offset % BITS_PER_UNIT == 0
8122 && offset / BITS_PER_UNIT <= INT_MAX
8123 && size % BITS_PER_UNIT == 0
8124 && size <= MAX_BITSIZE_MODE_ANY_MODE
8125 && can_native_interpret_type_p (type))
8126 {
8127 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
8128 int len = native_encode_initializer (ctor, buf, size / BITS_PER_UNIT,
8129 offset / BITS_PER_UNIT);
8130 if (len > 0)
8131 return native_interpret_expr (type, buf, len);
8132 }
8133
8134 return ret;
8135 }
8136
8137 return NULL_TREE;
8138 }
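/* Editor's sketch (hypothetical values, not from this file): the
   native_encode path above folds byte-aligned type-punning reads from
   constants; reading the low 32 bits of a double REAL_CST as an int is
   in effect

     unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
     int len = native_encode_expr (ctor, buf, 4, 0);  // 4 bytes at offset 0
     if (len > 0)
       return native_interpret_expr (integer_type_node, buf, len);

   matching the CONSTANT_CLASS_P branch with OFFSET 0 and SIZE 32.  */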
8139
8140 /* Return the tree representing the element referenced by T if T is an
8141 ARRAY_REF or COMPONENT_REF into constant aggregates valueizing SSA
8142 names using VALUEIZE. Return NULL_TREE otherwise. */
8143
8144 tree
8145 fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
8146 {
8147 tree ctor, idx, base;
8148 poly_int64 offset, size, max_size;
8149 tree tem;
8150 bool reverse;
8151
8152 if (TREE_THIS_VOLATILE (t))
8153 return NULL_TREE;
8154
8155 if (DECL_P (t))
8156 return get_symbol_constant_value (t);
8157
8158 tem = fold_read_from_constant_string (t);
8159 if (tem)
8160 return tem;
8161
8162 switch (TREE_CODE (t))
8163 {
8164 case ARRAY_REF:
8165 case ARRAY_RANGE_REF:
8166 /* Constant indexes are handled well by get_base_constructor.
8167 Only special case variable offsets.
8168 FIXME: This code can't handle nested references with variable indexes
8169 (they will be handled only by iteration of ccp). Perhaps we can bring
8170 get_ref_base_and_extent here and make it use a valueize callback. */
8171 if (TREE_CODE (TREE_OPERAND (t, 1)) == SSA_NAME
8172 && valueize
8173 && (idx = (*valueize) (TREE_OPERAND (t, 1)))
8174 && poly_int_tree_p (idx))
8175 {
8176 tree low_bound, unit_size;
8177
8178 /* If the resulting bit-offset is constant, track it. */
8179 if ((low_bound = array_ref_low_bound (t),
8180 poly_int_tree_p (low_bound))
8181 && (unit_size = array_ref_element_size (t),
8182 tree_fits_uhwi_p (unit_size)))
8183 {
8184 poly_offset_int woffset
8185 = wi::sext (wi::to_poly_offset (idx)
8186 - wi::to_poly_offset (low_bound),
8187 TYPE_PRECISION (sizetype));
8188 woffset *= tree_to_uhwi (unit_size);
8189 woffset *= BITS_PER_UNIT;
8190 if (woffset.to_shwi (&offset))
8191 {
8192 base = TREE_OPERAND (t, 0);
8193 ctor = get_base_constructor (base, &offset, valueize);
8194 /* Empty constructor. Always fold to 0. */
8195 if (ctor == error_mark_node)
8196 return build_zero_cst (TREE_TYPE (t));
8197 /* Out of bound array access. Value is undefined,
8198 but don't fold. */
8199 if (maybe_lt (offset, 0))
8200 return NULL_TREE;
8201 /* We cannot determine ctor. */
8202 if (!ctor)
8203 return NULL_TREE;
8204 return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
8205 tree_to_uhwi (unit_size)
8206 * BITS_PER_UNIT,
8207 base);
8208 }
8209 }
8210 }
8211 /* Fallthru. */
8212
8213 case COMPONENT_REF:
8214 case BIT_FIELD_REF:
8215 case TARGET_MEM_REF:
8216 case MEM_REF:
8217 base = get_ref_base_and_extent (t, &offset, &size, &max_size, &reverse);
8218 ctor = get_base_constructor (base, &offset, valueize);
8219
8220 /* Empty constructor. Always fold to 0. */
8221 if (ctor == error_mark_node)
8222 return build_zero_cst (TREE_TYPE (t));
8223 /* We do not know precise address. */
8224 if (!known_size_p (max_size) || maybe_ne (max_size, size))
8225 return NULL_TREE;
8226 /* We cannot determine ctor. */
8227 if (!ctor)
8228 return NULL_TREE;
8229
8230 /* Out of bound array access. Value is undefined, but don't fold. */
8231 if (maybe_lt (offset, 0))
8232 return NULL_TREE;
8233
8234 tem = fold_ctor_reference (TREE_TYPE (t), ctor, offset, size, base);
8235 if (tem)
8236 return tem;
8237
8238 /* For bit field reads try to read the representative and
8239 adjust. */
8240 if (TREE_CODE (t) == COMPONENT_REF
8241 && DECL_BIT_FIELD (TREE_OPERAND (t, 1))
8242 && DECL_BIT_FIELD_REPRESENTATIVE (TREE_OPERAND (t, 1)))
8243 {
8244 HOST_WIDE_INT csize, coffset;
8245 tree field = TREE_OPERAND (t, 1);
8246 tree repr = DECL_BIT_FIELD_REPRESENTATIVE (field);
8247 if (INTEGRAL_TYPE_P (TREE_TYPE (repr))
8248 && size.is_constant (&csize)
8249 && offset.is_constant (&coffset)
8250 && (coffset % BITS_PER_UNIT != 0
8251 || csize % BITS_PER_UNIT != 0)
8252 && !reverse
8253 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN)
8254 {
8255 poly_int64 bitoffset;
8256 poly_uint64 field_offset, repr_offset;
8257 if (poly_int_tree_p (DECL_FIELD_OFFSET (field), &field_offset)
8258 && poly_int_tree_p (DECL_FIELD_OFFSET (repr), &repr_offset))
8259 bitoffset = (field_offset - repr_offset) * BITS_PER_UNIT;
8260 else
8261 bitoffset = 0;
8262 bitoffset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
8263 - tree_to_uhwi (DECL_FIELD_BIT_OFFSET (repr)));
8264 HOST_WIDE_INT bitoff;
8265 int diff = (TYPE_PRECISION (TREE_TYPE (repr))
8266 - TYPE_PRECISION (TREE_TYPE (field)));
8267 if (bitoffset.is_constant (&bitoff)
8268 && bitoff >= 0
8269 && bitoff <= diff)
8270 {
8271 offset -= bitoff;
8272 size = tree_to_uhwi (DECL_SIZE (repr));
8273
8274 tem = fold_ctor_reference (TREE_TYPE (repr), ctor, offset,
8275 size, base);
8276 if (tem && TREE_CODE (tem) == INTEGER_CST)
8277 {
8278 if (!BYTES_BIG_ENDIAN)
8279 tem = wide_int_to_tree (TREE_TYPE (field),
8280 wi::lrshift (wi::to_wide (tem),
8281 bitoff));
8282 else
8283 tem = wide_int_to_tree (TREE_TYPE (field),
8284 wi::lrshift (wi::to_wide (tem),
8285 diff - bitoff));
8286 return tem;
8287 }
8288 }
8289 }
8290 }
8291 break;
8292
8293 case REALPART_EXPR:
8294 case IMAGPART_EXPR:
8295 {
8296 tree c = fold_const_aggregate_ref_1 (TREE_OPERAND (t, 0), valueize);
8297 if (c && TREE_CODE (c) == COMPLEX_CST)
8298 return fold_build1_loc (EXPR_LOCATION (t),
8299 TREE_CODE (t), TREE_TYPE (t), c);
8300 break;
8301 }
8302
8303 default:
8304 break;
8305 }
8306
8307 return NULL_TREE;
8308 }
8309
8310 tree
8311 fold_const_aggregate_ref (tree t)
8312 {
8313 return fold_const_aggregate_ref_1 (t, NULL);
8314 }
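/* Editor's sketch (hypothetical caller, not from this file): given

     static const struct { int x, y; } p = { 3, 4 };

   and T being the COMPONENT_REF p.y, fold_const_aggregate_ref (T) is
   expected to return the INTEGER_CST 4.  Without a VALUEIZE callback,
   only references with constant indices can be looked through.  */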
8315
8316 /* Look up the virtual method with index TOKEN in the virtual table V
8317 at OFFSET.
8318 If CAN_REFER is non-NULL, set it to false if the method
8319 is not referable or if the virtual table is ill-formed (such as one
8320 rewritten by a non-C++-produced symbol), and just return NULL in that case. */
8321
8322 tree
8323 gimple_get_virt_method_for_vtable (HOST_WIDE_INT token,
8324 tree v,
8325 unsigned HOST_WIDE_INT offset,
8326 bool *can_refer)
8327 {
8328 tree vtable = v, init, fn;
8329 unsigned HOST_WIDE_INT size;
8330 unsigned HOST_WIDE_INT elt_size, access_index;
8331 tree domain_type;
8332
8333 if (can_refer)
8334 *can_refer = true;
8335
8336 /* First of all, double-check that we have a virtual table. */
8337 if (!VAR_P (v) || !DECL_VIRTUAL_P (v))
8338 {
8339 /* Pass down that we lost track of the target. */
8340 if (can_refer)
8341 *can_refer = false;
8342 return NULL_TREE;
8343 }
8344
8345 init = ctor_for_folding (v);
8346
8347 /* Virtual tables should always be created with constructors,
8348 and we should always assume that they are available for
8349 folding. At the moment we do not stream them in all cases,
8350 but it should never happen that the ctor seems unreachable. */
8351 gcc_assert (init);
8352 if (init == error_mark_node)
8353 {
8354 /* Pass down that we lost track of the target. */
8355 if (can_refer)
8356 *can_refer = false;
8357 return NULL_TREE;
8358 }
8359 gcc_checking_assert (TREE_CODE (TREE_TYPE (v)) == ARRAY_TYPE);
8360 size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (v))));
8361 offset *= BITS_PER_UNIT;
8362 offset += token * size;
8363
8364 /* Look up the value in the constructor, which is assumed to be an array.
8365 This is equivalent to
8366 fn = fold_ctor_reference (TREE_TYPE (TREE_TYPE (v)), init,
8367 offset, size, NULL);
8368 but in constant time. We expect that the frontend produced a simple
8369 array without indexed initializers. */
8370
8371 gcc_checking_assert (TREE_CODE (TREE_TYPE (init)) == ARRAY_TYPE);
8372 domain_type = TYPE_DOMAIN (TREE_TYPE (init));
8373 gcc_checking_assert (integer_zerop (TYPE_MIN_VALUE (domain_type)));
8374 elt_size = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (init))));
8375
8376 access_index = offset / BITS_PER_UNIT / elt_size;
8377 gcc_checking_assert (offset % (elt_size * BITS_PER_UNIT) == 0);
8378
8379 /* The C++ FE can now produce indexed fields, and we check if the indexes
8380 match. */
8381 if (access_index < CONSTRUCTOR_NELTS (init))
8382 {
8383 fn = CONSTRUCTOR_ELT (init, access_index)->value;
8384 tree idx = CONSTRUCTOR_ELT (init, access_index)->index;
8385 gcc_checking_assert (!idx || tree_to_uhwi (idx) == access_index);
8386 STRIP_NOPS (fn);
8387 }
8388 else
8389 fn = NULL;
8390
8391 /* For a type-inconsistent program we may end up looking up a virtual method
8392 in a virtual table that does not contain TOKEN entries. We may overrun
8393 the virtual table and pick up a constant or RTTI info pointer.
8394 In any case the call is undefined. */
8395 if (!fn
8396 || (TREE_CODE (fn) != ADDR_EXPR && TREE_CODE (fn) != FDESC_EXPR)
8397 || TREE_CODE (TREE_OPERAND (fn, 0)) != FUNCTION_DECL)
8398 fn = builtin_decl_unreachable ();
8399 else
8400 {
8401 fn = TREE_OPERAND (fn, 0);
8402
8403 /* When the cgraph node is missing and the function is not public, we cannot
8404 devirtualize. This can happen in WHOPR when the actual method
8405 ends up in another partition, because we found the devirtualization
8406 possibility too late. */
8407 if (!can_refer_decl_in_current_unit_p (fn, vtable))
8408 {
8409 if (can_refer)
8410 {
8411 *can_refer = false;
8412 return fn;
8413 }
8414 return NULL_TREE;
8415 }
8416 }
8417
8418 /* Make sure we create a cgraph node for functions we'll reference.
8419 They can be non-existent if the reference comes from an entry
8420 of an external vtable for example. */
8421 cgraph_node::get_create (fn);
8422
8423 return fn;
8424 }
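/* Editor's worked example (assumed 64-bit target, not from this file):
   with 8-byte vtable slots (SIZE = 64 bits, ELT_SIZE = 8), TOKEN 2 and
   a vptr OFFSET of 16 bytes,

     offset       = 16 * BITS_PER_UNIT + 2 * 64 = 256
     access_index = 256 / BITS_PER_UNIT / 8     = 4

   so the candidate target is CONSTRUCTOR_ELT (init, 4)->value, the
   slot two entries past the address the vptr points to.  */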
8425
8426 /* Return a declaration of a function which an OBJ_TYPE_REF references. TOKEN
8427 is integer form of OBJ_TYPE_REF_TOKEN of the reference expression.
8428 KNOWN_BINFO carries the binfo describing the true type of
8429 OBJ_TYPE_REF_OBJECT(REF).
8430 If CAN_REFER is non-NULL, set it to false if the method
8431 is not referable or if the virtual table is ill-formed (such as one
8432 rewritten by a non-C++-produced symbol), and just return NULL in that case. */
8433
8434 tree
8435 gimple_get_virt_method_for_binfo (HOST_WIDE_INT token, tree known_binfo,
8436 bool *can_refer)
8437 {
8438 unsigned HOST_WIDE_INT offset;
8439 tree v;
8440
8441 v = BINFO_VTABLE (known_binfo);
8442 /* If there is no virtual method table, leave the OBJ_TYPE_REF alone. */
8443 if (!v)
8444 return NULL_TREE;
8445
8446 if (!vtable_pointer_value_to_vtable (v, &v, &offset))
8447 {
8448 if (can_refer)
8449 *can_refer = false;
8450 return NULL_TREE;
8451 }
8452 return gimple_get_virt_method_for_vtable (token, v, offset, can_refer);
8453 }
8454
8455 /* Given a pointer value T, return a simplified version of an
8456 indirection through T, or NULL_TREE if no simplification is
8457 possible. Note that the resulting type may be different from
8458 the type pointed to in the sense that it is still compatible
8459 from the langhooks point of view. */
8460
8461 tree
8462 gimple_fold_indirect_ref (tree t)
8463 {
8464 tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
8465 tree sub = t;
8466 tree subtype;
8467
8468 STRIP_NOPS (sub);
8469 subtype = TREE_TYPE (sub);
8470 if (!POINTER_TYPE_P (subtype)
8471 || TYPE_REF_CAN_ALIAS_ALL (ptype))
8472 return NULL_TREE;
8473
8474 if (TREE_CODE (sub) == ADDR_EXPR)
8475 {
8476 tree op = TREE_OPERAND (sub, 0);
8477 tree optype = TREE_TYPE (op);
8478 /* *&p => p */
8479 if (useless_type_conversion_p (type, optype))
8480 return op;
8481
8482 /* *(foo *)&fooarray => fooarray[0] */
8483 if (TREE_CODE (optype) == ARRAY_TYPE
8484 && TREE_CODE (TYPE_SIZE (TREE_TYPE (optype))) == INTEGER_CST
8485 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8486 {
8487 tree type_domain = TYPE_DOMAIN (optype);
8488 tree min_val = size_zero_node;
8489 if (type_domain && TYPE_MIN_VALUE (type_domain))
8490 min_val = TYPE_MIN_VALUE (type_domain);
8491 if (TREE_CODE (min_val) == INTEGER_CST)
8492 return build4 (ARRAY_REF, type, op, min_val, NULL_TREE, NULL_TREE);
8493 }
8494 /* *(foo *)&complexfoo => __real__ complexfoo */
8495 else if (TREE_CODE (optype) == COMPLEX_TYPE
8496 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8497 return fold_build1 (REALPART_EXPR, type, op);
8498 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
8499 else if (TREE_CODE (optype) == VECTOR_TYPE
8500 && useless_type_conversion_p (type, TREE_TYPE (optype)))
8501 {
8502 tree part_width = TYPE_SIZE (type);
8503 tree index = bitsize_int (0);
8504 return fold_build3 (BIT_FIELD_REF, type, op, part_width, index);
8505 }
8506 }
8507
8508 /* *(p + CST) -> ... */
8509 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
8510 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
8511 {
8512 tree addr = TREE_OPERAND (sub, 0);
8513 tree off = TREE_OPERAND (sub, 1);
8514 tree addrtype;
8515
8516 STRIP_NOPS (addr);
8517 addrtype = TREE_TYPE (addr);
8518
8519 /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
8520 if (TREE_CODE (addr) == ADDR_EXPR
8521 && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
8522 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype)))
8523 && tree_fits_uhwi_p (off))
8524 {
8525 unsigned HOST_WIDE_INT offset = tree_to_uhwi (off);
8526 tree part_width = TYPE_SIZE (type);
8527 unsigned HOST_WIDE_INT part_widthi
8528 = tree_to_shwi (part_width) / BITS_PER_UNIT;
8529 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
8530 tree index = bitsize_int (indexi);
8531 if (known_lt (offset / part_widthi,
8532 TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype))))
8533 return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
8534 part_width, index);
8535 }
8536
8537 /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
8538 if (TREE_CODE (addr) == ADDR_EXPR
8539 && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
8540 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
8541 {
8542 tree size = TYPE_SIZE_UNIT (type);
8543 if (tree_int_cst_equal (size, off))
8544 return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
8545 }
8546
8547 /* *(p + CST) -> MEM_REF <p, CST>. */
8548 if (TREE_CODE (addr) != ADDR_EXPR
8549 || DECL_P (TREE_OPERAND (addr, 0)))
8550 return fold_build2 (MEM_REF, type,
8551 addr,
8552 wide_int_to_tree (ptype, wi::to_wide (off)));
8553 }
8554
8555 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
8556 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
8557 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_TYPE (subtype)))) == INTEGER_CST
8558 && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (subtype))))
8559 {
8560 tree type_domain;
8561 tree min_val = size_zero_node;
8562 tree osub = sub;
8563 sub = gimple_fold_indirect_ref (sub);
8564 if (! sub)
8565 sub = build1 (INDIRECT_REF, TREE_TYPE (subtype), osub);
8566 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
8567 if (type_domain && TYPE_MIN_VALUE (type_domain))
8568 min_val = TYPE_MIN_VALUE (type_domain);
8569 if (TREE_CODE (min_val) == INTEGER_CST)
8570 return build4 (ARRAY_REF, type, sub, min_val, NULL_TREE, NULL_TREE);
8571 }
8572
8573 return NULL_TREE;
8574 }
8575
8576 /* Return true if CODE is an operation that when operating on signed
8577 integer types involves undefined behavior on overflow and the
8578 operation can be expressed with unsigned arithmetic. */
8579
8580 bool
8581 arith_code_with_undefined_signed_overflow (tree_code code)
8582 {
8583 switch (code)
8584 {
8585 case ABS_EXPR:
8586 case PLUS_EXPR:
8587 case MINUS_EXPR:
8588 case MULT_EXPR:
8589 case NEGATE_EXPR:
8590 case POINTER_PLUS_EXPR:
8591 return true;
8592 default:
8593 return false;
8594 }
8595 }
8596
8597 /* Rewrite STMT, an assignment with a signed integer or pointer arithmetic
8598 operation that can be transformed to unsigned arithmetic by converting
8599 its operand, carrying out the operation in the corresponding unsigned
8600 type and converting the result back to the original type.
8601
8602 If IN_PLACE is true, adjust the stmt in place and return NULL.
8603 Otherwise returns a sequence of statements that replace STMT and also
8604 contain a modified form of STMT itself. */
8605
8606 gimple_seq
8607 rewrite_to_defined_overflow (gimple *stmt, bool in_place /* = false */)
8608 {
8609 if (dump_file && (dump_flags & TDF_DETAILS))
8610 {
8611 fprintf (dump_file, "rewriting stmt with undefined signed "
8612 "overflow ");
8613 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
8614 }
8615
8616 tree lhs = gimple_assign_lhs (stmt);
8617 tree type = unsigned_type_for (TREE_TYPE (lhs));
8618 gimple_seq stmts = NULL;
8619 if (gimple_assign_rhs_code (stmt) == ABS_EXPR)
8620 gimple_assign_set_rhs_code (stmt, ABSU_EXPR);
8621 else
8622 for (unsigned i = 1; i < gimple_num_ops (stmt); ++i)
8623 {
8624 tree op = gimple_op (stmt, i);
8625 op = gimple_convert (&stmts, type, op);
8626 gimple_set_op (stmt, i, op);
8627 }
8628 gimple_assign_set_lhs (stmt, make_ssa_name (type, stmt));
8629 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
8630 gimple_assign_set_rhs_code (stmt, PLUS_EXPR);
8631 gimple_set_modified (stmt, true);
8632 if (in_place)
8633 {
8634 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
8635 if (stmts)
8636 gsi_insert_seq_before (&gsi, stmts, GSI_SAME_STMT);
8637 stmts = NULL;
8638 }
8639 else
8640 gimple_seq_add_stmt (&stmts, stmt);
8641 gimple *cvt = gimple_build_assign (lhs, NOP_EXPR, gimple_assign_lhs (stmt));
8642 if (in_place)
8643 {
8644 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
8645 gsi_insert_after (&gsi, cvt, GSI_SAME_STMT);
8646 update_stmt (stmt);
8647 }
8648 else
8649 gimple_seq_add_stmt (&stmts, cvt);
8650
8651 return stmts;
8652 }
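/* Editor's illustration (assumed GIMPLE, not from this file): for a
   signed addition

     x_1 = a_2 + b_3;

   the rewriting above produces

     _4 = (unsigned int) a_2;
     _5 = (unsigned int) b_3;
     _6 = _4 + _5;
     x_1 = (int) _6;

   so the arithmetic is carried out in the wrapping unsigned type and
   only the final conversion reuses the original LHS.  */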
8653
8654
8655 /* The valueization hook we use for the gimple_build API simplification.
8656 This makes us match fold_buildN behavior by only combining with
8657 statements in the sequence(s) we are currently building. */
8658
8659 static tree
8660 gimple_build_valueize (tree op)
8661 {
8662 if (gimple_bb (SSA_NAME_DEF_STMT (op)) == NULL)
8663 return op;
8664 return NULL_TREE;
8665 }
8666
8667 /* Helper for gimple_build to perform the final insertion of stmts on SEQ. */
8668
8669 static inline void
8670 gimple_build_insert_seq (gimple_stmt_iterator *gsi,
8671 bool before, gsi_iterator_update update,
8672 gimple_seq seq)
8673 {
8674 if (before)
8675 {
8676 if (gsi->bb)
8677 gsi_insert_seq_before (gsi, seq, update);
8678 else
8679 gsi_insert_seq_before_without_update (gsi, seq, update);
8680 }
8681 else
8682 {
8683 if (gsi->bb)
8684 gsi_insert_seq_after (gsi, seq, update);
8685 else
8686 gsi_insert_seq_after_without_update (gsi, seq, update);
8687 }
8688 }
8689
8690 /* Build the expression CODE OP0 of type TYPE with location LOC,
8691 simplifying it first if possible. Returns the built
8692 expression value and inserts statements possibly defining it
8693 before GSI if BEFORE is true, or after GSI otherwise, advancing
8694 the iterator accordingly.
8695 If GSI refers to a basic block, simplifying is allowed to look
8696 at all SSA defs; when it does not, it is restricted to
8697 SSA defs that are not associated with a basic block yet,
8698 indicating they belong to the sequence currently being built. */
8699
8700 tree
8701 gimple_build (gimple_stmt_iterator *gsi,
8702 bool before, gsi_iterator_update update,
8703 location_t loc, enum tree_code code, tree type, tree op0)
8704 {
8705 gimple_seq seq = NULL;
8706 tree res
8707 = gimple_simplify (code, type, op0, &seq,
8708 gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
8709 if (!res)
8710 {
8711 res = create_tmp_reg_or_ssa_name (type);
8712 gimple *stmt;
8713 if (code == REALPART_EXPR
8714 || code == IMAGPART_EXPR
8715 || code == VIEW_CONVERT_EXPR)
8716 stmt = gimple_build_assign (res, code, build1 (code, type, op0));
8717 else
8718 stmt = gimple_build_assign (res, code, op0);
8719 gimple_set_location (stmt, loc);
8720 gimple_seq_add_stmt_without_update (&seq, stmt);
8721 }
8722 gimple_build_insert_seq (gsi, before, update, seq);
8723 return res;
8724 }
8725
8726 /* Build the expression OP0 CODE OP1 of type TYPE with location LOC,
8727 simplifying it first if possible. Returns the built
8728 expression value inserting any new statements at GSI honoring BEFORE
8729 and UPDATE. */
8730
8731 tree
8732 gimple_build (gimple_stmt_iterator *gsi,
8733 bool before, gsi_iterator_update update,
8734 location_t loc, enum tree_code code, tree type,
8735 tree op0, tree op1)
8736 {
8737 gimple_seq seq = NULL;
8738 tree res
8739 = gimple_simplify (code, type, op0, op1, &seq,
8740 gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
8741 if (!res)
8742 {
8743 res = create_tmp_reg_or_ssa_name (type);
8744 gimple *stmt = gimple_build_assign (res, code, op0, op1);
8745 gimple_set_location (stmt, loc);
8746 gimple_seq_add_stmt_without_update (&seq, stmt);
8747 }
8748 gimple_build_insert_seq (gsi, before, update, seq);
8749 return res;
8750 }
8751
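/* Illustrative sketch, not part of the original file: chaining two of the
   binary overloads above to emit (A + B) & MASK.  Each intermediate
   result feeds the next call, and gimple_simplify gets a chance to fold
   every step (e.g. when MASK is all-ones).  The function name is
   hypothetical; A, B and MASK are assumed to share an integral type.  */

static tree ATTRIBUTE_UNUSED
example_build_masked_sum (gimple_stmt_iterator *gsi, location_t loc,
			  tree a, tree b, tree mask)
{
  tree type = TREE_TYPE (a);
  tree sum = gimple_build (gsi, /*before=*/true, GSI_SAME_STMT,
			   loc, PLUS_EXPR, type, a, b);
  return gimple_build (gsi, /*before=*/true, GSI_SAME_STMT,
		       loc, BIT_AND_EXPR, type, sum, mask);
}
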
8752 /* Build the expression (CODE OP0 OP1 OP2) of type TYPE with location LOC,
8753 simplifying it first if possible. Returns the built
8754 expression value inserting any new statements at GSI honoring BEFORE
8755 and UPDATE. */
8756
8757 tree
8758 gimple_build (gimple_stmt_iterator *gsi,
8759 bool before, gsi_iterator_update update,
8760 location_t loc, enum tree_code code, tree type,
8761 tree op0, tree op1, tree op2)
8762 {
8764 gimple_seq seq = NULL;
8765 tree res
8766 = gimple_simplify (code, type, op0, op1, op2, &seq,
8767 gsi->bb ? follow_all_ssa_edges : gimple_build_valueize);
8768 if (!res)
8769 {
8770 res = create_tmp_reg_or_ssa_name (type);
8771 gimple *stmt;
8772 if (code == BIT_FIELD_REF)
8773 stmt = gimple_build_assign (res, code,
8774 build3 (code, type, op0, op1, op2));
8775 else
8776 stmt = gimple_build_assign (res, code, op0, op1, op2);
8777 gimple_set_location (stmt, loc);
8778 gimple_seq_add_stmt_without_update (&seq, stmt);
8779 }
8780 gimple_build_insert_seq (gsi, before, update, seq);
8781 return res;
8782 }
8783
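/* Illustrative sketch, not part of the original file: the ternary
   overload above can emit a COND_EXPR selecting between two values.
   COND is assumed to be a boolean GIMPLE value, VAL_TRUE and VAL_FALSE
   to share a type; the function name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_build_select (gimple_stmt_iterator *gsi, location_t loc,
		      tree cond, tree val_true, tree val_false)
{
  return gimple_build (gsi, /*before=*/true, GSI_SAME_STMT,
		       loc, COND_EXPR, TREE_TYPE (val_true),
		       cond, val_true, val_false);
}
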
8784 /* Build the call FN () with a result of type TYPE (or no result if TYPE is
8785 void) with a location LOC. Returns the built expression value (or NULL_TREE
8786 if TYPE is void) inserting any new statements at GSI honoring BEFORE
8787 and UPDATE. */
8788
8789 tree
8790 gimple_build (gimple_stmt_iterator *gsi,
8791 bool before, gsi_iterator_update update,
8792 location_t loc, combined_fn fn, tree type)
8793 {
8794 tree res = NULL_TREE;
8795 gimple_seq seq = NULL;
8796 gcall *stmt;
8797 if (internal_fn_p (fn))
8798 stmt = gimple_build_call_internal (as_internal_fn (fn), 0);
8799 else
8800 {
8801 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8802 stmt = gimple_build_call (decl, 0);
8803 }
8804 if (!VOID_TYPE_P (type))
8805 {
8806 res = create_tmp_reg_or_ssa_name (type);
8807 gimple_call_set_lhs (stmt, res);
8808 }
8809 gimple_set_location (stmt, loc);
8810 gimple_seq_add_stmt_without_update (&seq, stmt);
8811 gimple_build_insert_seq (gsi, before, update, seq);
8812 return res;
8813 }
8814
8815 /* Build the call FN (ARG0) with a result of type TYPE
8816 (or no result if TYPE is void) with location LOC,
8817 simplifying it first if possible. Returns the built
8818 expression value (or NULL_TREE if TYPE is void) inserting any new
8819 statements at GSI honoring BEFORE and UPDATE. */
8820
8821 tree
8822 gimple_build (gimple_stmt_iterator *gsi,
8823 bool before, gsi_iterator_update update,
8824 location_t loc, combined_fn fn,
8825 tree type, tree arg0)
8826 {
8827 gimple_seq seq = NULL;
8828 tree res = gimple_simplify (fn, type, arg0, &seq, gimple_build_valueize);
8829 if (!res)
8830 {
8831 gcall *stmt;
8832 if (internal_fn_p (fn))
8833 stmt = gimple_build_call_internal (as_internal_fn (fn), 1, arg0);
8834 else
8835 {
8836 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8837 stmt = gimple_build_call (decl, 1, arg0);
8838 }
8839 if (!VOID_TYPE_P (type))
8840 {
8841 res = create_tmp_reg_or_ssa_name (type);
8842 gimple_call_set_lhs (stmt, res);
8843 }
8844 gimple_set_location (stmt, loc);
8845 gimple_seq_add_stmt_without_update (&seq, stmt);
8846 }
8847 gimple_build_insert_seq (gsi, before, update, seq);
8848 return res;
8849 }
8850
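/* Illustrative sketch, not part of the original file: building a call
   through a combined_fn.  With CFN_BUILT_IN_SQRT the overload above
   folds the call when ARG is a suitable constant and otherwise emits a
   call to the sqrt builtin; this assumes ARG has type double and that
   the builtin declaration is implicitly available.  The function name
   is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_build_sqrt (gimple_stmt_iterator *gsi, location_t loc, tree arg)
{
  return gimple_build (gsi, /*before=*/true, GSI_SAME_STMT,
		       loc, CFN_BUILT_IN_SQRT, TREE_TYPE (arg), arg);
}
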
8851 /* Build the call FN (ARG0, ARG1) with a result of type TYPE
8852 (or no result if TYPE is void) with location LOC,
8853 simplifying it first if possible. Returns the built
8854 expression value (or NULL_TREE if TYPE is void) inserting any new
8855 statements at GSI honoring BEFORE and UPDATE. */
8856
8857 tree
8858 gimple_build (gimple_stmt_iterator *gsi,
8859 bool before, gsi_iterator_update update,
8860 location_t loc, combined_fn fn,
8861 tree type, tree arg0, tree arg1)
8862 {
8863 gimple_seq seq = NULL;
8864 tree res = gimple_simplify (fn, type, arg0, arg1, &seq,
8865 gimple_build_valueize);
8866 if (!res)
8867 {
8868 gcall *stmt;
8869 if (internal_fn_p (fn))
8870 stmt = gimple_build_call_internal (as_internal_fn (fn), 2, arg0, arg1);
8871 else
8872 {
8873 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8874 stmt = gimple_build_call (decl, 2, arg0, arg1);
8875 }
8876 if (!VOID_TYPE_P (type))
8877 {
8878 res = create_tmp_reg_or_ssa_name (type);
8879 gimple_call_set_lhs (stmt, res);
8880 }
8881 gimple_set_location (stmt, loc);
8882 gimple_seq_add_stmt_without_update (&seq, stmt);
8883 }
8884 gimple_build_insert_seq (gsi, before, update, seq);
8885 return res;
8886 }
8887
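/* Illustrative sketch, not part of the original file: the two-argument
   call overload, here with CFN_BUILT_IN_COPYSIGN.  gimple_simplify may
   fold the call first (match.pd simplifies copysign with a constant
   second argument, for instance); otherwise a call is emitted.  X and Y
   are assumed to have type double; the function name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_build_copysign (gimple_stmt_iterator *gsi, location_t loc,
			tree x, tree y)
{
  return gimple_build (gsi, /*before=*/true, GSI_SAME_STMT,
		       loc, CFN_BUILT_IN_COPYSIGN, TREE_TYPE (x), x, y);
}
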
8888 /* Build the call FN (ARG0, ARG1, ARG2) with a result of type TYPE
8889 (or no result if TYPE is void) with location LOC,
8890 simplifying it first if possible. Returns the built
8891 expression value (or NULL_TREE if TYPE is void) inserting any new
8892 statements at GSI honoring BEFORE and UPDATE. */
8893
8894 tree
8895 gimple_build (gimple_stmt_iterator *gsi,
8896 bool before, gsi_iterator_update update,
8897 location_t loc, combined_fn fn,
8898 tree type, tree arg0, tree arg1, tree arg2)
8899 {
8900 gimple_seq seq = NULL;
8901 tree res = gimple_simplify (fn, type, arg0, arg1, arg2,
8902 &seq, gimple_build_valueize);
8903 if (!res)
8904 {
8905 gcall *stmt;
8906 if (internal_fn_p (fn))
8907 stmt = gimple_build_call_internal (as_internal_fn (fn),
8908 3, arg0, arg1, arg2);
8909 else
8910 {
8911 tree decl = builtin_decl_implicit (as_builtin_fn (fn));
8912 stmt = gimple_build_call (decl, 3, arg0, arg1, arg2);
8913 }
8914 if (!VOID_TYPE_P (type))
8915 {
8916 res = create_tmp_reg_or_ssa_name (type);
8917 gimple_call_set_lhs (stmt, res);
8918 }
8919 gimple_set_location (stmt, loc);
8920 gimple_seq_add_stmt_without_update (&seq, stmt);
8921 }
8922 gimple_build_insert_seq (gsi, before, update, seq);
8923 return res;
8924 }
8925
8926 /* Build CODE (OP0) with a result of type TYPE (or no result if TYPE is
8927 void) with location LOC, simplifying it first if possible. Returns the
8928 built expression value (or NULL_TREE if TYPE is void) inserting any new
8929 statements at GSI honoring BEFORE and UPDATE. */
8930
8931 tree
8932 gimple_build (gimple_stmt_iterator *gsi,
8933 bool before, gsi_iterator_update update,
8934 location_t loc, code_helper code, tree type, tree op0)
8935 {
8936 if (code.is_tree_code ())
8937 return gimple_build (gsi, before, update, loc, tree_code (code), type, op0);
8938 return gimple_build (gsi, before, update, loc, combined_fn (code), type, op0);
8939 }
8940
8941 /* Build CODE (OP0, OP1) with a result of type TYPE (or no result if TYPE is
8942 void) with location LOC, simplifying it first if possible. Returns the
8943 built expression value (or NULL_TREE if TYPE is void) inserting any new
8944 statements at GSI honoring BEFORE and UPDATE. */
8945
8946 tree
8947 gimple_build (gimple_stmt_iterator *gsi,
8948 bool before, gsi_iterator_update update,
8949 location_t loc, code_helper code, tree type, tree op0, tree op1)
8950 {
8951 if (code.is_tree_code ())
8952 return gimple_build (gsi, before, update,
8953 loc, tree_code (code), type, op0, op1);
8954 return gimple_build (gsi, before, update,
8955 loc, combined_fn (code), type, op0, op1);
8956 }
8957
8958 /* Build CODE (OP0, OP1, OP2) with a result of type TYPE (or no result if TYPE
8959 is void) with location LOC, simplifying it first if possible. Returns the
8960 built expression value (or NULL_TREE if TYPE is void) inserting any new
8961 statements at GSI honoring BEFORE and UPDATE. */
8962
8963 tree
8964 gimple_build (gimple_stmt_iterator *gsi,
8965 bool before, gsi_iterator_update update,
8966 location_t loc, code_helper code,
8967 tree type, tree op0, tree op1, tree op2)
8968 {
8969 if (code.is_tree_code ())
8970 return gimple_build (gsi, before, update,
8971 loc, tree_code (code), type, op0, op1, op2);
8972 return gimple_build (gsi, before, update,
8973 loc, combined_fn (code), type, op0, op1, op2);
8974 }
8975
8976 /* Build the conversion (TYPE) OP with a result of type TYPE
8977 with location LOC if such conversion is necessary in GIMPLE,
8978 simplifying it first.
8979 Returns the built expression inserting any new statements
8980 at GSI honoring BEFORE and UPDATE. */
8981
8982 tree
8983 gimple_convert (gimple_stmt_iterator *gsi,
8984 bool before, gsi_iterator_update update,
8985 location_t loc, tree type, tree op)
8986 {
8987 if (useless_type_conversion_p (type, TREE_TYPE (op)))
8988 return op;
8989 return gimple_build (gsi, before, update, loc, NOP_EXPR, type, op);
8990 }
8991
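/* Illustrative sketch, not part of the original file: converting a value
   to its unsigned counterpart.  When the conversion is useless in
   GIMPLE, OP is returned unchanged and nothing is emitted.  The function
   name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_to_unsigned (gimple_stmt_iterator *gsi, location_t loc, tree op)
{
  return gimple_convert (gsi, /*before=*/true, GSI_SAME_STMT,
			 loc, unsigned_type_for (TREE_TYPE (op)), op);
}
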
8992 /* Build the conversion (ptrofftype) OP with a result of a type
8993 compatible with ptrofftype with location LOC if such conversion
8994 is necessary in GIMPLE, simplifying it first.
8995 Returns the built expression value inserting any new statements
8996 at GSI honoring BEFORE and UPDATE. */
8997
8998 tree
8999 gimple_convert_to_ptrofftype (gimple_stmt_iterator *gsi,
9000 bool before, gsi_iterator_update update,
9001 location_t loc, tree op)
9002 {
9003 if (ptrofftype_p (TREE_TYPE (op)))
9004 return op;
9005 return gimple_convert (gsi, before, update, loc, sizetype, op);
9006 }
9007
9008 /* Build a vector of type TYPE in which each element has the value OP.
9009 Return a gimple value for the result, inserting any new statements
9010 at GSI honoring BEFORE and UPDATE. */
9011
9012 tree
9013 gimple_build_vector_from_val (gimple_stmt_iterator *gsi,
9014 bool before, gsi_iterator_update update,
9015 location_t loc, tree type, tree op)
9016 {
9017 if (!TYPE_VECTOR_SUBPARTS (type).is_constant ()
9018 && !CONSTANT_CLASS_P (op))
9019 return gimple_build (gsi, before, update,
9020 loc, VEC_DUPLICATE_EXPR, type, op);
9021
9022 tree res, vec = build_vector_from_val (type, op);
9023 if (is_gimple_val (vec))
9024 return vec;
9025 if (gimple_in_ssa_p (cfun))
9026 res = make_ssa_name (type);
9027 else
9028 res = create_tmp_reg (type);
9029 gimple_seq seq = NULL;
9030 gimple *stmt = gimple_build_assign (res, vec);
9031 gimple_set_location (stmt, loc);
9032 gimple_seq_add_stmt_without_update (&seq, stmt);
9033 gimple_build_insert_seq (gsi, before, update, seq);
9034 return res;
9035 }
9036
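/* Illustrative sketch, not part of the original file: splatting a scalar
   across a vector type.  Depending on the inputs this yields a
   VECTOR_CST (constant ELT), a VEC_DUPLICATE_EXPR statement
   (variable-length VECTYPE with variable ELT) or a CONSTRUCTOR
   assignment.  The function name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_splat (gimple_stmt_iterator *gsi, location_t loc,
	       tree vectype, tree elt)
{
  return gimple_build_vector_from_val (gsi, /*before=*/true,
				       GSI_SAME_STMT, loc, vectype, elt);
}
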
9037 /* Build a vector from BUILDER, handling the case in which some elements
9038 are non-constant. Return a gimple value for the result, inserting
9039 any new instructions to GSI honoring BEFORE and UPDATE.
9040
9041 BUILDER must not have a stepped encoding on entry. This is because
9042 the function is not geared up to handle the arithmetic that would
9043 be needed in the variable case, and any code building a vector that
9044 is known to be constant should use BUILDER->build () directly. */
9045
9046 tree
9047 gimple_build_vector (gimple_stmt_iterator *gsi,
9048 bool before, gsi_iterator_update update,
9049 location_t loc, tree_vector_builder *builder)
9050 {
9051 gcc_assert (builder->nelts_per_pattern () <= 2);
9052 unsigned int encoded_nelts = builder->encoded_nelts ();
9053 for (unsigned int i = 0; i < encoded_nelts; ++i)
9054 if (!CONSTANT_CLASS_P ((*builder)[i]))
9055 {
9056 gimple_seq seq = NULL;
9057 tree type = builder->type ();
9058 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type).to_constant ();
9059 vec<constructor_elt, va_gc> *v;
9060 vec_alloc (v, nelts);
9061 for (i = 0; i < nelts; ++i)
9062 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, builder->elt (i));
9063
9064 tree res;
9065 if (gimple_in_ssa_p (cfun))
9066 res = make_ssa_name (type);
9067 else
9068 res = create_tmp_reg (type);
9069 gimple *stmt = gimple_build_assign (res, build_constructor (type, v));
9070 gimple_set_location (stmt, loc);
9071 gimple_seq_add_stmt_without_update (&seq, stmt);
9072 gimple_build_insert_seq (gsi, before, update, seq);
9073 return res;
9074 }
9075 return builder->build ();
9076 }
9077
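/* Illustrative sketch, not part of the original file: using
   gimple_build_vector with a two-pattern encoding to materialize
   { A, B, A, B, ... }.  If both elements are constant the vector is
   built directly; otherwise a CONSTRUCTOR assignment is emitted, which
   requires VECTYPE to have a constant number of elements.  The function
   name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_build_pair_vector (gimple_stmt_iterator *gsi, location_t loc,
			   tree vectype, tree a, tree b)
{
  /* Two interleaved patterns of one element each.  */
  tree_vector_builder builder (vectype, 2, 1);
  builder.quick_push (a);
  builder.quick_push (b);
  return gimple_build_vector (gsi, /*before=*/true, GSI_SAME_STMT,
			      loc, &builder);
}
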
9078 /* Emit gimple statements at GSI (honoring BEFORE and UPDATE) that round
9079 the value given in OLD_SIZE up to a multiple of ALIGN (a power of two).
9080
9081 Return the tree node representing this size; it is of TREE_TYPE TYPE. */
9082
9083 tree
9084 gimple_build_round_up (gimple_stmt_iterator *gsi,
9085 bool before, gsi_iterator_update update,
9086 location_t loc, tree type,
9087 tree old_size, unsigned HOST_WIDE_INT align)
9088 {
9089 unsigned HOST_WIDE_INT tg_mask = align - 1;
9090 /* tree new_size = (old_size + tg_mask) & ~tg_mask; */
9091 gcc_assert (INTEGRAL_TYPE_P (type));
9092 tree tree_mask = build_int_cst (type, tg_mask);
9093 tree oversize = gimple_build (gsi, before, update,
9094 loc, PLUS_EXPR, type, old_size, tree_mask);
9095
9096 tree mask = build_int_cst (type, -align);
9097 return gimple_build (gsi, before, update,
9098 loc, BIT_AND_EXPR, type, oversize, mask);
9099 }
9100
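/* Illustrative sketch, not part of the original file: rounding a size up
   to a 16-byte boundary, e.g. 23 becomes (23 + 15) & -16 == 32 while 32
   stays 32.  The function name is hypothetical.  */

static tree ATTRIBUTE_UNUSED
example_round_up_16 (gimple_stmt_iterator *gsi, location_t loc, tree size)
{
  return gimple_build_round_up (gsi, /*before=*/true, GSI_SAME_STMT,
				loc, TREE_TYPE (size), size, 16);
}
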
9101 /* Return true if the result of assignment STMT is known to be non-negative.
9102 If the return value is based on the assumption that signed overflow is
9103 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
9104 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
9105
9106 static bool
9107 gimple_assign_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
9108 int depth)
9109 {
9110 enum tree_code code = gimple_assign_rhs_code (stmt);
9111 tree type = TREE_TYPE (gimple_assign_lhs (stmt));
9112 switch (get_gimple_rhs_class (code))
9113 {
9114 case GIMPLE_UNARY_RHS:
9115 return tree_unary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
9116 type,
9117 gimple_assign_rhs1 (stmt),
9118 strict_overflow_p, depth);
9119 case GIMPLE_BINARY_RHS:
9120 return tree_binary_nonnegative_warnv_p (gimple_assign_rhs_code (stmt),
9121 type,
9122 gimple_assign_rhs1 (stmt),
9123 gimple_assign_rhs2 (stmt),
9124 strict_overflow_p, depth);
9125 case GIMPLE_TERNARY_RHS:
9126 return false;
9127 case GIMPLE_SINGLE_RHS:
9128 return tree_single_nonnegative_warnv_p (gimple_assign_rhs1 (stmt),
9129 strict_overflow_p, depth);
9130 case GIMPLE_INVALID_RHS:
9131 break;
9132 }
9133 gcc_unreachable ();
9134 }
9135
9136 /* Return true if the return value of call STMT is known to be non-negative.
9137 If the return value is based on the assumption that signed overflow is
9138 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
9139 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
9140
9141 static bool
9142 gimple_call_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
9143 int depth)
9144 {
9145 tree arg0 = (gimple_call_num_args (stmt) > 0
9146 ? gimple_call_arg (stmt, 0) : NULL_TREE);
9147 tree arg1 = (gimple_call_num_args (stmt) > 1
9148 ? gimple_call_arg (stmt, 1) : NULL_TREE);
9149 tree lhs = gimple_call_lhs (stmt);
9150 return (lhs
9151 && tree_call_nonnegative_warnv_p (TREE_TYPE (lhs),
9152 gimple_call_combined_fn (stmt),
9153 arg0, arg1,
9154 strict_overflow_p, depth));
9155 }
9156
9157 /* Return true if the result of PHI node STMT is known to be non-negative.
9158 If the return value is based on the assumption that signed overflow is
9159 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
9160 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
9161
9162 static bool
9163 gimple_phi_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
9164 int depth)
9165 {
9166 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
9167 {
9168 tree arg = gimple_phi_arg_def (stmt, i);
9169 if (!tree_single_nonnegative_warnv_p (arg, strict_overflow_p, depth + 1))
9170 return false;
9171 }
9172 return true;
9173 }
9174
9175 /* Return true if STMT is known to compute a non-negative value.
9176 If the return value is based on the assumption that signed overflow is
9177 undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't change
9178 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
9179
9180 bool
9181 gimple_stmt_nonnegative_warnv_p (gimple *stmt, bool *strict_overflow_p,
9182 int depth)
9183 {
9184 switch (gimple_code (stmt))
9185 {
9186 case GIMPLE_ASSIGN:
9187 return gimple_assign_nonnegative_warnv_p (stmt, strict_overflow_p,
9188 depth);
9189 case GIMPLE_CALL:
9190 return gimple_call_nonnegative_warnv_p (stmt, strict_overflow_p,
9191 depth);
9192 case GIMPLE_PHI:
9193 return gimple_phi_nonnegative_warnv_p (stmt, strict_overflow_p,
9194 depth);
9195 default:
9196 return false;
9197 }
9198 }
9199
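/* Illustrative sketch, not part of the original file: a typical query of
   the predicate above.  STRICT_OVERFLOW is set when a positive answer
   relies on signed overflow being undefined, in which case a
   transformation based on the result may need -Wstrict-overflow
   handling.  The function name is hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_stmt_nonnegative_p (gimple *stmt)
{
  bool strict_overflow = false;
  /* Depth 0 starts a fresh, bounded recursive query.  */
  return gimple_stmt_nonnegative_warnv_p (stmt, &strict_overflow, 0);
}
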
9200 /* Return true if the floating-point value computed by assignment STMT
9201 is known to have an integer value. We also allow +Inf, -Inf and NaN
9202 to be considered integer values. Return false for signaling NaN.
9203
9204 DEPTH is the current nesting depth of the query. */
9205
9206 static bool
9207 gimple_assign_integer_valued_real_p (gimple *stmt, int depth)
9208 {
9209 enum tree_code code = gimple_assign_rhs_code (stmt);
9210 switch (get_gimple_rhs_class (code))
9211 {
9212 case GIMPLE_UNARY_RHS:
9213 return integer_valued_real_unary_p (gimple_assign_rhs_code (stmt),
9214 gimple_assign_rhs1 (stmt), depth);
9215 case GIMPLE_BINARY_RHS:
9216 return integer_valued_real_binary_p (gimple_assign_rhs_code (stmt),
9217 gimple_assign_rhs1 (stmt),
9218 gimple_assign_rhs2 (stmt), depth);
9219 case GIMPLE_TERNARY_RHS:
9220 return false;
9221 case GIMPLE_SINGLE_RHS:
9222 return integer_valued_real_single_p (gimple_assign_rhs1 (stmt), depth);
9223 case GIMPLE_INVALID_RHS:
9224 break;
9225 }
9226 gcc_unreachable ();
9227 }
9228
9229 /* Return true if the floating-point value computed by call STMT is known
9230 to have an integer value. We also allow +Inf, -Inf and NaN to be
9231 considered integer values. Return false for signaling NaN.
9232
9233 DEPTH is the current nesting depth of the query. */
9234
9235 static bool
9236 gimple_call_integer_valued_real_p (gimple *stmt, int depth)
9237 {
9238 tree arg0 = (gimple_call_num_args (stmt) > 0
9239 ? gimple_call_arg (stmt, 0)
9240 : NULL_TREE);
9241 tree arg1 = (gimple_call_num_args (stmt) > 1
9242 ? gimple_call_arg (stmt, 1)
9243 : NULL_TREE);
9244 return integer_valued_real_call_p (gimple_call_combined_fn (stmt),
9245 arg0, arg1, depth);
9246 }
9247
9248 /* Return true if the floating-point result of phi STMT is known to have
9249 an integer value. We also allow +Inf, -Inf and NaN to be considered
9250 integer values. Return false for signaling NaN.
9251
9252 DEPTH is the current nesting depth of the query. */
9253
9254 static bool
9255 gimple_phi_integer_valued_real_p (gimple *stmt, int depth)
9256 {
9257 for (unsigned i = 0; i < gimple_phi_num_args (stmt); ++i)
9258 {
9259 tree arg = gimple_phi_arg_def (stmt, i);
9260 if (!integer_valued_real_single_p (arg, depth + 1))
9261 return false;
9262 }
9263 return true;
9264 }
9265
9266 /* Return true if the floating-point value computed by STMT is known
9267 to have an integer value. We also allow +Inf, -Inf and NaN to be
9268 considered integer values. Return false for signaling NaN.
9269
9270 DEPTH is the current nesting depth of the query. */
9271
9272 bool
9273 gimple_stmt_integer_valued_real_p (gimple *stmt, int depth)
9274 {
9275 switch (gimple_code (stmt))
9276 {
9277 case GIMPLE_ASSIGN:
9278 return gimple_assign_integer_valued_real_p (stmt, depth);
9279 case GIMPLE_CALL:
9280 return gimple_call_integer_valued_real_p (stmt, depth);
9281 case GIMPLE_PHI:
9282 return gimple_phi_integer_valued_real_p (stmt, depth);
9283 default:
9284 return false;
9285 }
9286 }
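
/* Illustrative sketch, not part of the original file: the predicate
   above could let a caller treat a trunc () as redundant when its
   argument is already known to compute an integral value (recall that
   +Inf, -Inf and quiet NaN count as integral here).  The function name
   is hypothetical.  */

static bool ATTRIBUTE_UNUSED
example_trunc_is_redundant_p (tree arg)
{
  if (TREE_CODE (arg) != SSA_NAME)
    return false;
  return gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (arg), 0);
}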