/* Scalar Replacement of Aggregates (SRA) converts some structure
   references into scalar references, exposing them to the scalar
   optimizers.
   Copyright (C) 2008-2015 Free Software Foundation, Inc.
   Contributed by Martin Jambor <mjambor@suse.cz>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file implements Scalar Replacement of Aggregates (SRA).  SRA is run
   twice, once in the early stages of compilation (early SRA) and once in the
   late stages (late SRA).  The aim of both is to turn references to scalar
   parts of aggregates into uses of independent scalar variables.

   The two passes are nearly identical; the only difference is that early SRA
   does not scalarize unions which are used as the result in a GIMPLE_RETURN
   statement because together with inlining this can lead to weird type
   conversions.

   Both passes operate in four stages:

   1. The declarations that have properties which make them candidates for
      scalarization are identified in function find_var_candidates().  The
      candidates are stored in candidate_bitmap.

   2. The function body is scanned.  In the process, declarations which are
      used in a manner that prevents their scalarization are removed from the
      candidate bitmap.  More importantly, for every access into an aggregate,
      an access structure (struct access) is created by create_access() and
      stored in a vector associated with the aggregate.  Among other
      information, the aggregate declaration, the offset and size of the access
      and its type are stored in the structure.

      On a related note, assign_link structures are created for every assign
      statement between candidate aggregates and attached to the related
      accesses.

   3. The vectors of accesses are analyzed.  They are first sorted according to
      their offset and size and then scanned for partially overlapping accesses
      (i.e. those which overlap but one is not entirely within another).  Such
      an access disqualifies the whole aggregate from being scalarized.

      If there is no such inhibiting overlap, a representative access structure
      is chosen for every unique combination of offset and size.  Afterwards,
      the pass builds a set of trees from these structures, in which children
      of an access are within their parent (in terms of offset and size).

      Then accesses are propagated whenever possible (i.e. in cases when it
      does not create a partially overlapping access) across assign_links from
      the right hand side to the left hand side.

      Then the set of trees for each declaration is traversed again and those
      accesses which should be replaced by a scalar are identified.

   4. The function is traversed again, and for every reference into an
      aggregate that has some component which is about to be scalarized,
      statements are amended and new statements are created as necessary.
      Finally, if a parameter got scalarized, the scalar replacements are
      initialized with values from respective parameter aggregates.  */
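/* As an illustration (an added example, not part of the original comment),
   a function such as

     struct pair { int a; int b; };

     int
     f (struct pair p)
     {
       p.a += p.b;
       return p.a;
     }

   can have its references to p.a and p.b turned into uses of two independent
   scalar replacements, after which the aggregate p itself may become dead
   and subsequent scalar optimizers can propagate and fold the values.  */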
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "alloc-pool.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "dominance.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "stor-layout.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "stringpool.h"
#include "tree-ssanames.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "tree-pass.h"
#include "plugin-api.h"
#include "symbol-summary.h"
#include "ipa-prop.h"
#include "tree-inline.h"
#include "gimple-pretty-print.h"
#include "ipa-inline.h"
#include "ipa-utils.h"
#include "builtins.h"
/* Enumeration of all aggregate reductions we can do.  */
enum sra_mode { SRA_MODE_EARLY_IPA,   /* early call regularization */
                SRA_MODE_EARLY_INTRA, /* early intraprocedural SRA */
                SRA_MODE_INTRA };     /* late intraprocedural SRA */

/* Global variable describing which aggregate reduction we are performing at
   the moment.  */
static enum sra_mode sra_mode;
/* ACCESS represents each access to an aggregate variable (as a whole or a
   part).  It can also represent a group of accesses that refer to exactly the
   same fragment of an aggregate (i.e. those that have exactly the same offset
   and size).  Such representatives for a single aggregate, once determined,
   are linked in a linked list and have the group fields set.

   Moreover, when doing intraprocedural SRA, a tree is built from those
   representatives (by the means of first_child and next_sibling pointers), in
   which all items in a subtree are "within" the root, i.e. their offset is
   greater or equal to offset of the root and offset+size is smaller or equal
   to offset+size of the root.  Children of an access are sorted by offset.

   Note that accesses to parts of vector and complex number types are always
   represented by an access to the whole complex number or a vector.  It is a
   duty of the modifying functions to replace them appropriately.  */
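/* For example (an illustrative sketch added here; the exact numbers assume
   32-bit int and no padding), given

     struct inner { int x; int y; };
     struct outer { struct inner i; int z; };

   the representatives for a fully used variable of type struct outer could
   form a tree whose root covers bits <offset 0, size 96>, with children
   <0, 64> (member i, itself having children <0, 32> and <32, 32>) and
   <64, 32> (member z).  */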
struct access
{
  /* Values returned by `get_ref_base_and_extent' for each component reference
     If EXPR isn't a component reference just set `BASE = EXPR', `OFFSET = 0',
     `SIZE = TREE_SIZE (TREE_TYPE (expr))'.  */
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
  tree base;

  /* Expression.  It is context dependent so do not use it to create new
     expressions to access the original aggregate.  See PR 42154 for a
     testcase.  */
  tree expr;
  /* Type.  */
  tree type;

  /* The statement this access belongs to.  */
  gimple stmt;

  /* Next group representative for this aggregate.  */
  struct access *next_grp;

  /* Pointer to the group representative.  Pointer to itself if the struct is
     the representative.  */
  struct access *group_representative;

  /* If this access has any children (in terms of the definition above), this
     points to the first one.  */
  struct access *first_child;

  /* In intraprocedural SRA, pointer to the next sibling in the access tree as
     described above.  In IPA-SRA this is a pointer to the next access
     belonging to the same group (having the same representative).  */
  struct access *next_sibling;

  /* Pointers to the first and last element in the linked list of assign
     links.  */
  struct assign_link *first_link, *last_link;

  /* Pointer to the next access in the work queue.  */
  struct access *next_queued;

  /* Replacement variable for this access "region."  Never to be accessed
     directly, always only by the means of get_access_replacement() and only
     when grp_to_be_replaced flag is set.  */
  tree replacement_decl;

  /* Is this particular access write access?  */
  unsigned write : 1;

  /* Is this access an access to a non-addressable field?  */
  unsigned non_addressable : 1;

  /* Is this access currently in the work queue?  */
  unsigned grp_queued : 1;

  /* Does this group contain a write access?  This flag is propagated down the
     access tree.  */
  unsigned grp_write : 1;

  /* Does this group contain a read access?  This flag is propagated down the
     access tree.  */
  unsigned grp_read : 1;

  /* Does this group contain a read access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_read : 1;

  /* Does this group contain a write access that comes from an assignment
     statement?  This flag is propagated down the access tree.  */
  unsigned grp_assignment_write : 1;

  /* Does this group contain a read access through a scalar type?  This flag is
     not propagated in the access tree in any direction.  */
  unsigned grp_scalar_read : 1;

  /* Does this group contain a write access through a scalar type?  This flag
     is not propagated in the access tree in any direction.  */
  unsigned grp_scalar_write : 1;

  /* Is this access an artificial one created to scalarize some record
     entirely?  */
  unsigned grp_total_scalarization : 1;

  /* Other passes of the analysis use this bit to make function
     analyze_access_subtree create scalar replacements for this group if
     possible.  */
  unsigned grp_hint : 1;

  /* Is the subtree rooted in this access fully covered by scalar
     replacements?  */
  unsigned grp_covered : 1;

  /* If set to true, this access and all below it in an access tree must not be
     scalarized.  */
  unsigned grp_unscalarizable_region : 1;

  /* Whether data have been written to parts of the aggregate covered by this
     access which is not to be scalarized.  This flag is propagated up in the
     access tree.  */
  unsigned grp_unscalarized_data : 1;

  /* Does this access and/or group contain a write access through a
     BIT_FIELD_REF?  */
  unsigned grp_partial_lhs : 1;

  /* Set when a scalar replacement should be created for this variable.  */
  unsigned grp_to_be_replaced : 1;

  /* Set when we want a replacement for the sole purpose of having it in
     generated debug statements.  */
  unsigned grp_to_be_debug_replaced : 1;

  /* Should TREE_NO_WARNING of a replacement be set?  */
  unsigned grp_no_warning : 1;

  /* Is it possible that the group refers to data which might be (directly or
     otherwise) modified?  */
  unsigned grp_maybe_modified : 1;

  /* Set when this is a representative of a pointer to scalar (i.e. by
     reference) parameter which we consider for turning into a plain scalar
     (i.e. a by value parameter).  */
  unsigned grp_scalar_ptr : 1;

  /* Set when we discover that this pointer is not safe to dereference in the
     caller.  */
  unsigned grp_not_necessarilly_dereferenced : 1;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((access *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<access> pool;
};

typedef struct access *access_p;


/* Alloc pool for allocating access structures.  */
pool_allocator<struct access> access::pool ("SRA accesses", 16);
/* A structure linking lhs and rhs accesses from an aggregate assignment.  They
   are used to propagate subaccesses from rhs to lhs as long as they don't
   conflict with what is already there.  */
struct assign_link
{
  struct access *lacc, *racc;
  struct assign_link *next;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((assign_link *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<assign_link> pool;
};

/* Alloc pool for allocating assign link structures.  */
pool_allocator<assign_link> assign_link::pool ("SRA links", 16);
/* Base (tree) -> Vector (vec<access_p> *) map.  */
static hash_map<tree, auto_vec<access_p> > *base_access_vec;
/* Candidate hash table helpers.  */

struct uid_decl_hasher : nofree_ptr_hash <tree_node>
{
  static inline hashval_t hash (const tree_node *);
  static inline bool equal (const tree_node *, const tree_node *);
};

/* Hash a tree in a uid_decl_map.  */

inline hashval_t
uid_decl_hasher::hash (const tree_node *item)
{
  return item->decl_minimal.uid;
}

/* Return true if the DECL_UID in both trees are equal.  */

inline bool
uid_decl_hasher::equal (const tree_node *a, const tree_node *b)
{
  return (a->decl_minimal.uid == b->decl_minimal.uid);
}
/* Set of candidates.  */
static bitmap candidate_bitmap;
static hash_table<uid_decl_hasher> *candidates;

/* For a candidate UID return the candidates decl.  */

static inline tree
candidate (unsigned uid)
{
  tree_node t;
  t.decl_minimal.uid = uid;
  return candidates->find_with_hash (&t, static_cast <hashval_t> (uid));
}
/* Bitmap of candidates which we should try to entirely scalarize away and
   those which cannot be (because they are and need to be used as a whole).  */
static bitmap should_scalarize_away_bitmap, cannot_scalarize_away_bitmap;

/* Obstack for creation of fancy names.  */
static struct obstack name_obstack;

/* Head of a linked list of accesses that need to have their subaccesses
   propagated to their assignment counterparts.  */
static struct access *work_queue_head;

/* Number of parameters of the analyzed function when doing early ipa SRA.  */
static int func_param_count;

/* scan_function sets the following to true if it encounters a call to
   __builtin_apply_args.  */
static bool encountered_apply_args;

/* Set by scan_function when it finds a recursive call.  */
static bool encountered_recursive_call;

/* Set by scan_function when it finds a recursive call with less actual
   arguments than formal parameters.  */
static bool encountered_unchangable_recursive_call;

/* This is a table in which for each basic block and parameter there is a
   distance (offset + size) in that parameter which is dereferenced and
   accessed in that BB.  */
static HOST_WIDE_INT *bb_dereferences;
/* Bitmap of BBs that can cause the function to "stop" progressing by
   returning, throwing externally, looping infinitely or calling a function
   which might abort etc.  */
static bitmap final_bbs;

/* Representative of no accesses at all.  */
static struct access no_accesses_representant;
/* Predicate to test the special value.  */

static inline bool
no_accesses_p (struct access *access)
{
  return access == &no_accesses_representant;
}
/* Dump contents of ACCESS to file F in a human friendly way.  If GRP is true,
   representative fields are dumped, otherwise those which only describe the
   individual access are.  */
static struct
{
  /* Number of processed aggregates is readily available in
     analyze_all_variable_accesses and so is not stored here.  */

  /* Number of created scalar replacements.  */
  int replacements;

  /* Number of times sra_modify_expr or sra_modify_assign themselves changed an
     expression.  */
  int exprs;

  /* Number of statements created by generate_subtree_copies.  */
  int subtree_copies;

  /* Number of statements created by load_assign_lhs_subreplacements.  */
  int subreplacements;

  /* Number of times sra_modify_assign has deleted a statement.  */
  int deleted;

  /* Number of times sra_modify_assign has to deal with subaccesses of LHS and
     RHS separately due to type conversions or nonexistent matching
     references.  */
  int separate_lhs_rhs_handling;

  /* Number of parameters that were removed because they were unused.  */
  int deleted_unused_parameters;

  /* Number of scalars passed as parameters by reference that have been
     converted to be passed by value.  */
  int scalar_by_ref_to_by_val;

  /* Number of aggregate parameters that were replaced by one or more of their
     components.  */
  int aggregate_params_reduced;

  /* Number of components created when splitting aggregate parameters.  */
  int param_reductions_created;
} sra_stats;
static void
dump_access (FILE *f, struct access *access, bool grp)
{
  fprintf (f, "access { ");
  fprintf (f, "base = (%d)'", DECL_UID (access->base));
  print_generic_expr (f, access->base, 0);
  fprintf (f, "', offset = " HOST_WIDE_INT_PRINT_DEC, access->offset);
  fprintf (f, ", size = " HOST_WIDE_INT_PRINT_DEC, access->size);
  fprintf (f, ", expr = ");
  print_generic_expr (f, access->expr, 0);
  fprintf (f, ", type = ");
  print_generic_expr (f, access->type, 0);
  if (grp)
    fprintf (f, ", grp_read = %d, grp_write = %d, grp_assignment_read = %d, "
             "grp_assignment_write = %d, grp_scalar_read = %d, "
             "grp_scalar_write = %d, grp_total_scalarization = %d, "
             "grp_hint = %d, grp_covered = %d, "
             "grp_unscalarizable_region = %d, grp_unscalarized_data = %d, "
             "grp_partial_lhs = %d, grp_to_be_replaced = %d, "
             "grp_to_be_debug_replaced = %d, grp_maybe_modified = %d, "
             "grp_not_necessarilly_dereferenced = %d\n",
             access->grp_read, access->grp_write, access->grp_assignment_read,
             access->grp_assignment_write, access->grp_scalar_read,
             access->grp_scalar_write, access->grp_total_scalarization,
             access->grp_hint, access->grp_covered,
             access->grp_unscalarizable_region, access->grp_unscalarized_data,
             access->grp_partial_lhs, access->grp_to_be_replaced,
             access->grp_to_be_debug_replaced, access->grp_maybe_modified,
             access->grp_not_necessarilly_dereferenced);
  else
    fprintf (f, ", write = %d, grp_total_scalarization = %d, "
             "grp_partial_lhs = %d\n",
             access->write, access->grp_total_scalarization,
             access->grp_partial_lhs);
}
/* Dump a subtree rooted in ACCESS to file F, indent by LEVEL.  */

static void
dump_access_tree_1 (FILE *f, struct access *access, int level)
{
  do
    {
      int i;

      for (i = 0; i < level; i++)
        fputs ("* ", dump_file);

      dump_access (f, access, true);

      if (access->first_child)
        dump_access_tree_1 (f, access->first_child, level + 1);

      access = access->next_sibling;
    }
  while (access);
}
/* Dump all access trees for a variable, given the pointer to the first root in
   ACCESS.  */

static void
dump_access_tree (FILE *f, struct access *access)
{
  for (; access; access = access->next_grp)
    dump_access_tree_1 (f, access, 0);
}
/* Return true iff ACC is non-NULL and has subaccesses.  */

static inline bool
access_has_children_p (struct access *acc)
{
  return acc && acc->first_child;
}

/* Return true iff ACC is (partly) covered by at least one replacement.  */

static bool
access_has_replacements_p (struct access *acc)
{
  struct access *child;
  if (acc->grp_to_be_replaced)
    return true;
  for (child = acc->first_child; child; child = child->next_sibling)
    if (access_has_replacements_p (child))
      return true;
  return false;
}
/* Return a vector of pointers to accesses for the variable given in BASE or
   NULL if there is none.  */

static vec<access_p> *
get_base_access_vector (tree base)
{
  return base_access_vec->get (base);
}
/* Find an access with required OFFSET and SIZE in a subtree of accesses rooted
   in ACCESS.  Return NULL if it cannot be found.  */

static struct access *
find_access_in_subtree (struct access *access, HOST_WIDE_INT offset,
                        HOST_WIDE_INT size)
{
  while (access && (access->offset != offset || access->size != size))
    {
      struct access *child = access->first_child;

      while (child && (child->offset + child->size <= offset))
        child = child->next_sibling;
      access = child;
    }

  return access;
}
/* Return the first group representative for DECL or NULL if none exists.  */

static struct access *
get_first_repr_for_decl (tree base)
{
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (base);
  if (!access_vec)
    return NULL;

  return (*access_vec)[0];
}
/* Find an access representative for the variable BASE and given OFFSET and
   SIZE.  Requires that access trees have already been built.  Return NULL if
   it cannot be found.  */

static struct access *
get_var_base_offset_size_access (tree base, HOST_WIDE_INT offset,
                                 HOST_WIDE_INT size)
{
  struct access *access;

  access = get_first_repr_for_decl (base);
  while (access && (access->offset + access->size <= offset))
    access = access->next_grp;
  if (!access)
    return NULL;

  return find_access_in_subtree (access, offset, size);
}
/* Add LINK to the linked list of assign links of RACC.  */

static void
add_link_to_rhs (struct access *racc, struct assign_link *link)
{
  gcc_assert (link->racc == racc);

  if (!racc->first_link)
    {
      gcc_assert (!racc->last_link);
      racc->first_link = link;
    }
  else
    racc->last_link->next = link;

  racc->last_link = link;
  link->next = NULL;
}
/* Move all link structures in their linked list in OLD_RACC to the linked list
   in NEW_RACC.  */
static void
relink_to_new_repr (struct access *new_racc, struct access *old_racc)
{
  if (!old_racc->first_link)
    {
      gcc_assert (!old_racc->last_link);
      return;
    }

  if (new_racc->first_link)
    {
      gcc_assert (!new_racc->last_link->next);
      gcc_assert (!old_racc->last_link || !old_racc->last_link->next);

      new_racc->last_link->next = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  else
    {
      gcc_assert (!new_racc->last_link);

      new_racc->first_link = old_racc->first_link;
      new_racc->last_link = old_racc->last_link;
    }
  old_racc->first_link = old_racc->last_link = NULL;
}
/* Add ACCESS to the work queue (which is actually a stack).  */

static void
add_access_to_work_queue (struct access *access)
{
  if (!access->grp_queued)
    {
      gcc_assert (!access->next_queued);
      access->next_queued = work_queue_head;
      access->grp_queued = 1;
      work_queue_head = access;
    }
}
/* Pop an access from the work queue, and return it, assuming there is one.  */

static struct access *
pop_access_from_work_queue (void)
{
  struct access *access = work_queue_head;

  work_queue_head = access->next_queued;
  access->next_queued = NULL;
  access->grp_queued = 0;
  return access;
}
/* Allocate necessary structures.  */

static void
sra_initialize (void)
{
  candidate_bitmap = BITMAP_ALLOC (NULL);
  candidates = new hash_table<uid_decl_hasher>
    (vec_safe_length (cfun->local_decls) / 2);
  should_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  cannot_scalarize_away_bitmap = BITMAP_ALLOC (NULL);
  gcc_obstack_init (&name_obstack);
  base_access_vec = new hash_map<tree, auto_vec<access_p> >;
  memset (&sra_stats, 0, sizeof (sra_stats));
  encountered_apply_args = false;
  encountered_recursive_call = false;
  encountered_unchangable_recursive_call = false;
}
/* Deallocate all general structures.  */

static void
sra_deinitialize (void)
{
  BITMAP_FREE (candidate_bitmap);
  delete candidates;
  candidates = NULL;
  BITMAP_FREE (should_scalarize_away_bitmap);
  BITMAP_FREE (cannot_scalarize_away_bitmap);
  access::pool.release ();
  assign_link::pool.release ();
  obstack_free (&name_obstack, NULL);

  delete base_access_vec;
}
/* Remove DECL from candidates for SRA and write REASON to the dump file if
   there is one.  */
static void
disqualify_candidate (tree decl, const char *reason)
{
  if (bitmap_clear_bit (candidate_bitmap, DECL_UID (decl)))
    candidates->remove_elt_with_hash (decl, DECL_UID (decl));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "! Disqualifying ");
      print_generic_expr (dump_file, decl, 0);
      fprintf (dump_file, " - %s\n", reason);
    }
}
/* Return true iff the type contains a field or an element which does not allow
   scalarization.  */

static bool
type_internals_preclude_sra_p (tree type, const char **msg)
{
  tree fld;
  tree et;

  switch (TREE_CODE (type))
    {
    case RECORD_TYPE:
    case UNION_TYPE:
    case QUAL_UNION_TYPE:
      for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
        if (TREE_CODE (fld) == FIELD_DECL)
          {
            tree ft = TREE_TYPE (fld);

            if (TREE_THIS_VOLATILE (fld))
              {
                *msg = "volatile structure field";
                return true;
              }
            if (!DECL_FIELD_OFFSET (fld))
              {
                *msg = "no structure field offset";
                return true;
              }
            if (!DECL_SIZE (fld))
              {
                *msg = "zero structure field size";
                return true;
              }
            if (!tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
              {
                *msg = "structure field offset not fixed";
                return true;
              }
            if (!tree_fits_uhwi_p (DECL_SIZE (fld)))
              {
                *msg = "structure field size not fixed";
                return true;
              }
            if (!tree_fits_shwi_p (bit_position (fld)))
              {
                *msg = "structure field size too big";
                return true;
              }
            if (AGGREGATE_TYPE_P (ft)
                && int_bit_position (fld) % BITS_PER_UNIT != 0)
              {
                *msg = "structure field is bit field";
                return true;
              }

            if (AGGREGATE_TYPE_P (ft) && type_internals_preclude_sra_p (ft, msg))
              return true;
          }

      return false;

    case ARRAY_TYPE:
      et = TREE_TYPE (type);

      if (TYPE_VOLATILE (et))
        {
          *msg = "element type is volatile";
          return true;
        }

      if (AGGREGATE_TYPE_P (et) && type_internals_preclude_sra_p (et, msg))
        return true;

      return false;

    default:
      return false;
    }
}
/* If T is an SSA_NAME, return NULL if it is not a default def or return its
   base variable if it is.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (t))
        return SSA_NAME_VAR (t);
      else
        return NULL_TREE;
    }
  return t;
}
/* Mark a dereference of BASE of distance DIST in a basic block that STMT
   belongs to, unless the BB has already been marked as a potentially
   final one.  */

static void
mark_parm_dereference (tree base, HOST_WIDE_INT dist, gimple stmt)
{
  basic_block bb = gimple_bb (stmt);
  int idx, parm_index = 0;
  tree parm;

  if (bitmap_bit_p (final_bbs, bb->index))
    return;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm && parm != base;
       parm = DECL_CHAIN (parm))
    parm_index++;

  gcc_assert (parm_index < func_param_count);

  idx = bb->index * func_param_count + parm_index;
  if (bb_dereferences[idx] < dist)
    bb_dereferences[idx] = dist;
}
/* Allocate an access structure for BASE, OFFSET and SIZE, clear it, fill in
   the three fields.  Also add it to the vector of accesses corresponding to
   the base.  Finally, return the new access.  */

static struct access *
create_access_1 (tree base, HOST_WIDE_INT offset, HOST_WIDE_INT size)
{
  struct access *access = new struct access ();

  memset (access, 0, sizeof (struct access));
  access->base = base;
  access->offset = offset;
  access->size = size;

  base_access_vec->get_or_insert (base).safe_push (access);

  return access;
}
/* Create and insert access for EXPR. Return created access, or NULL if it is
   not possible.  */

static struct access *
create_access (tree expr, gimple stmt, bool write)
{
  struct access *access;
  HOST_WIDE_INT offset, size, max_size;
  tree base = expr;
  bool ptr, unscalarizable_region = false;

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);

  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (base) == MEM_REF)
    {
      base = get_ssa_base_param (TREE_OPERAND (base, 0));
      if (!base)
        return NULL;
      ptr = true;
    }
  else
    ptr = false;

  if (!DECL_P (base) || !bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  if (sra_mode == SRA_MODE_EARLY_IPA)
    {
      if (size < 0 || size != max_size)
        {
          disqualify_candidate (base, "Encountered a variable sized access.");
          return NULL;
        }
      if (TREE_CODE (expr) == COMPONENT_REF
          && DECL_BIT_FIELD (TREE_OPERAND (expr, 1)))
        {
          disqualify_candidate (base, "Encountered a bit-field access.");
          return NULL;
        }
      gcc_checking_assert ((offset % BITS_PER_UNIT) == 0);

      if (ptr)
        mark_parm_dereference (base, offset + size, stmt);
    }
  else
    {
      if (size != max_size)
        {
          size = max_size;
          unscalarizable_region = true;
        }
      if (size < 0)
        {
          disqualify_candidate (base, "Encountered an unconstrained access.");
          return NULL;
        }
    }

  access = create_access_1 (base, offset, size);
  access->expr = expr;
  access->type = TREE_TYPE (expr);
  access->write = write;
  access->grp_unscalarizable_region = unscalarizable_region;
  access->stmt = stmt;

  if (TREE_CODE (expr) == COMPONENT_REF
      && DECL_NONADDRESSABLE_P (TREE_OPERAND (expr, 1)))
    access->non_addressable = 1;

  return access;
}
/* Return true iff TYPE is a RECORD_TYPE with fields that are either of gimple
   register types or (recursively) records with only these two kinds of fields.
   It also returns false if any of these records contains a bit-field.  */

static bool
type_consists_of_records_p (tree type)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        tree ft = TREE_TYPE (fld);

        if (DECL_BIT_FIELD (fld))
          return false;

        if (!is_gimple_reg_type (ft)
            && !type_consists_of_records_p (ft))
          return false;
      }

  return true;
}
/* Create total_scalarization accesses for all scalar type fields in DECL that
   must be of a RECORD_TYPE conforming to type_consists_of_records_p.  BASE
   must be the top-most VAR_DECL representing the variable, OFFSET must be the
   offset of DECL within BASE.  REF must be the memory reference expression for
   the given decl.  */

static void
completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset,
                             tree ref)
{
  tree fld, decl_type = TREE_TYPE (decl);

  for (fld = TYPE_FIELDS (decl_type); fld; fld = DECL_CHAIN (fld))
    if (TREE_CODE (fld) == FIELD_DECL)
      {
        HOST_WIDE_INT pos = offset + int_bit_position (fld);
        tree ft = TREE_TYPE (fld);
        tree nref = build3 (COMPONENT_REF, TREE_TYPE (fld), ref, fld,
                            NULL_TREE);

        if (is_gimple_reg_type (ft))
          {
            struct access *access;
            HOST_WIDE_INT size;

            size = tree_to_uhwi (DECL_SIZE (fld));
            access = create_access_1 (base, pos, size);
            access->expr = nref;
            access->type = ft;
            access->grp_total_scalarization = 1;
            /* Accesses for intraprocedural SRA can have their stmt NULL.  */
          }
        else
          completely_scalarize_record (base, fld, pos, nref);
      }
}
/* Create total_scalarization accesses for all scalar type fields in VAR and
   for VAR as a whole.  VAR must be of a RECORD_TYPE conforming to
   type_consists_of_records_p.  */

static void
completely_scalarize_var (tree var)
{
  HOST_WIDE_INT size = tree_to_uhwi (DECL_SIZE (var));
  struct access *access;

  access = create_access_1 (var, 0, size);
  access->expr = var;
  access->type = TREE_TYPE (var);
  access->grp_total_scalarization = 1;

  completely_scalarize_record (var, var, 0, var);
}
/* Return true if REF has a VIEW_CONVERT_EXPR somewhere in it.  */

static bool
contains_view_convert_expr_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR)
        return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
/* Search the given tree for a declaration by skipping handled components and
   exclude it from the candidates.  */

static void
disqualify_base_of_expr (tree t, const char *reason)
{
  t = get_base_address (t);
  if (sra_mode == SRA_MODE_EARLY_IPA
      && TREE_CODE (t) == MEM_REF)
    t = get_ssa_base_param (TREE_OPERAND (t, 0));

  if (t && DECL_P (t))
    disqualify_candidate (t, reason);
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return the created access or NULL if none is
   created.  */

static struct access *
build_access_from_expr_1 (tree expr, gimple stmt, bool write)
{
  struct access *ret = NULL;
  bool partial_ref;

  if (TREE_CODE (expr) == BIT_FIELD_REF
      || TREE_CODE (expr) == IMAGPART_EXPR
      || TREE_CODE (expr) == REALPART_EXPR)
    {
      expr = TREE_OPERAND (expr, 0);
      partial_ref = true;
    }
  else
    partial_ref = false;

  /* We need to dive through V_C_Es in order to get the size of its parameter
     and not the result type.  Ada produces such statements.  We are also
     capable of handling the topmost V_C_E but not any of those buried in other
     handled components.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  if (contains_view_convert_expr_p (expr))
    {
      disqualify_base_of_expr (expr, "V_C_E under a different handled "
                               "component.");
      return NULL;
    }
  if (TREE_THIS_VOLATILE (expr))
    {
      disqualify_base_of_expr (expr, "part of a volatile reference.");
      return NULL;
    }

  switch (TREE_CODE (expr))
    {
    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
          && sra_mode != SRA_MODE_EARLY_IPA)
        return NULL;
      /* fall through */
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      ret = create_access (expr, stmt, write);
      break;

    default:
      break;
    }

  if (write && partial_ref && ret)
    ret->grp_partial_lhs = 1;

  return ret;
}
/* Scan expression EXPR and create access structures for all accesses to
   candidates for scalarization.  Return true if any access has been inserted.
   STMT must be the statement from which the expression is taken, WRITE must be
   true if the expression is a store and false otherwise.  */

static bool
build_access_from_expr (tree expr, gimple stmt, bool write)
{
  struct access *access;

  access = build_access_from_expr_1 (expr, stmt, write);
  if (access)
    {
      /* This means the aggregate is accessed as a whole in a way other than an
         assign statement and thus cannot be removed even if we had a scalar
         replacement for everything.  */
      if (cannot_scalarize_away_bitmap)
        bitmap_set_bit (cannot_scalarize_away_bitmap, DECL_UID (access->base));
      return true;
    }
  return false;
}
/* Return the single non-EH successor edge of BB or NULL if there is none or
   more than one.  */

static edge
single_non_eh_succ (basic_block bb)
{
  edge e, res = NULL;
  edge_iterator ei;

  FOR_EACH_EDGE (e, ei, bb->succs)
    if (!(e->flags & EDGE_EH))
      {
        if (res)
          return NULL;
        res = e;
      }

  return res;
}
/* Disqualify LHS and RHS for scalarization if STMT has to terminate its BB and
   there is no alternative spot where to put statements SRA might need to
   generate after it.  The spot we are looking for is an edge leading to a
   single non-EH successor, if it exists and is indeed single.  RHS may be
   NULL, in that case ignore it.  */

static bool
disqualify_if_bad_bb_terminating_stmt (gimple stmt, tree lhs, tree rhs)
{
  if ((sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && stmt_ends_bb_p (stmt))
    {
      if (single_non_eh_succ (gimple_bb (stmt)))
        return false;

      disqualify_base_of_expr (lhs, "LHS of a throwing stmt.");
      if (rhs)
        disqualify_base_of_expr (rhs, "RHS of a throwing stmt.");
      return true;
    }
  return false;
}
/* Scan expressions occurring in STMT, create access structures for all accesses
   to candidates for scalarization and remove those candidates which occur in
   statements or expressions that prevent them from being split apart.  Return
   true if any access has been inserted.  */

static bool
build_accesses_from_assign (gimple stmt)
{
  tree lhs, rhs;
  struct access *lacc, *racc;

  if (!gimple_assign_single_p (stmt)
      /* Scope clobbers don't influence scalarization.  */
      || gimple_clobber_p (stmt))
    return false;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (disqualify_if_bad_bb_terminating_stmt (stmt, lhs, rhs))
    return false;

  racc = build_access_from_expr_1 (rhs, stmt, false);
  lacc = build_access_from_expr_1 (lhs, stmt, true);

  if (lacc)
    lacc->grp_assignment_write = 1;

  if (racc)
    {
      racc->grp_assignment_read = 1;
      if (should_scalarize_away_bitmap && !gimple_has_volatile_ops (stmt)
          && !is_gimple_reg_type (racc->type))
        bitmap_set_bit (should_scalarize_away_bitmap, DECL_UID (racc->base));
    }

  if (lacc && racc
      && (sra_mode == SRA_MODE_EARLY_INTRA || sra_mode == SRA_MODE_INTRA)
      && !lacc->grp_unscalarizable_region
      && !racc->grp_unscalarizable_region
      && AGGREGATE_TYPE_P (TREE_TYPE (lhs))
      && lacc->size == racc->size
      && useless_type_conversion_p (lacc->type, racc->type))
    {
      struct assign_link *link;

      link = new assign_link;
      memset (link, 0, sizeof (struct assign_link));

      link->lacc = lacc;
      link->racc = racc;

      add_link_to_rhs (racc, link);
    }

  return lacc || racc;
}
/* Callback of walk_stmt_load_store_addr_ops visit_addr used to determine
   GIMPLE_ASM operands with memory constraints which cannot be scalarized.  */

static bool
asm_visit_addr (gimple, tree op, tree, void *)
{
  op = get_base_address (op);
  if (op
      && DECL_P (op))
    disqualify_candidate (op, "Non-scalarizable GIMPLE_ASM operand.");

  return false;
}
/* Return true iff callsite CALL has at least as many actual arguments as there
   are formal parameters of the function currently processed by IPA-SRA and
   that their types match.  */

static bool
callsite_arguments_match_p (gimple call)
{
  tree parm;
  int i;

  if (gimple_call_num_args (call) < (unsigned) func_param_count)
    return false;

  for (parm = DECL_ARGUMENTS (current_function_decl), i = 0;
       parm;
       parm = DECL_CHAIN (parm), i++)
    {
      tree arg = gimple_call_arg (call, i);
      if (!useless_type_conversion_p (TREE_TYPE (parm), TREE_TYPE (arg)))
        return false;
    }
  return true;
}
/* Scan function and look for interesting expressions and create access
   structures for them.  Return true iff any access is created.  */

static bool
scan_function (void)
{
  basic_block bb;
  bool ret = false;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gimple stmt = gsi_stmt (gsi);
          tree t;
          unsigned i;

          if (final_bbs && stmt_can_throw_external (stmt))
            bitmap_set_bit (final_bbs, bb->index);
          switch (gimple_code (stmt))
            {
            case GIMPLE_RETURN:
              t = gimple_return_retval (as_a <greturn *> (stmt));
              if (t != NULL_TREE)
                ret |= build_access_from_expr (t, stmt, false);
              if (final_bbs)
                bitmap_set_bit (final_bbs, bb->index);
              break;

            case GIMPLE_ASSIGN:
              ret |= build_accesses_from_assign (stmt);
              break;

            case GIMPLE_CALL:
              for (i = 0; i < gimple_call_num_args (stmt); i++)
                ret |= build_access_from_expr (gimple_call_arg (stmt, i),
                                               stmt, false);

              if (sra_mode == SRA_MODE_EARLY_IPA)
                {
                  tree dest = gimple_call_fndecl (stmt);
                  int flags = gimple_call_flags (stmt);

                  if (dest)
                    {
                      if (DECL_BUILT_IN_CLASS (dest) == BUILT_IN_NORMAL
                          && DECL_FUNCTION_CODE (dest) == BUILT_IN_APPLY_ARGS)
                        encountered_apply_args = true;
                      if (recursive_call_p (current_function_decl, dest))
                        {
                          encountered_recursive_call = true;
                          if (!callsite_arguments_match_p (stmt))
                            encountered_unchangable_recursive_call = true;
                        }
                    }

                  if (final_bbs
                      && (flags & (ECF_CONST | ECF_PURE)) == 0)
                    bitmap_set_bit (final_bbs, bb->index);
                }

              t = gimple_call_lhs (stmt);
              if (t && !disqualify_if_bad_bb_terminating_stmt (stmt, t, NULL))
                ret |= build_access_from_expr (t, stmt, true);
              break;

            case GIMPLE_ASM:
              {
                gasm *asm_stmt = as_a <gasm *> (stmt);
                walk_stmt_load_store_addr_ops (asm_stmt, NULL, NULL, NULL,
                                               asm_visit_addr);
                if (final_bbs)
                  bitmap_set_bit (final_bbs, bb->index);

                for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
                  {
                    t = TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
                    ret |= build_access_from_expr (t, asm_stmt, false);
                  }
                for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
                  {
                    t = TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
                    ret |= build_access_from_expr (t, asm_stmt, true);
                  }
              }
              break;

            default:
              break;
            }
        }
    }

  return ret;
}
/* Helper of QSORT function. There are pointers to accesses in the array.  An
   access is considered smaller than another if it has smaller offset or if the
   offsets are the same but its size is bigger.  */

static int
compare_access_positions (const void *a, const void *b)
{
  const access_p *fp1 = (const access_p *) a;
  const access_p *fp2 = (const access_p *) b;
  const access_p f1 = *fp1;
  const access_p f2 = *fp2;

  if (f1->offset != f2->offset)
    return f1->offset < f2->offset ? -1 : 1;

  if (f1->size == f2->size)
    {
      if (f1->type == f2->type)
        return 0;
      /* Put any non-aggregate type before any aggregate type.  */
      else if (!is_gimple_reg_type (f1->type)
               && is_gimple_reg_type (f2->type))
        return 1;
      else if (is_gimple_reg_type (f1->type)
               && !is_gimple_reg_type (f2->type))
        return -1;
      /* Put any complex or vector type before any other scalar type.  */
      else if (TREE_CODE (f1->type) != COMPLEX_TYPE
               && TREE_CODE (f1->type) != VECTOR_TYPE
               && (TREE_CODE (f2->type) == COMPLEX_TYPE
                   || TREE_CODE (f2->type) == VECTOR_TYPE))
        return 1;
      else if ((TREE_CODE (f1->type) == COMPLEX_TYPE
                || TREE_CODE (f1->type) == VECTOR_TYPE)
               && TREE_CODE (f2->type) != COMPLEX_TYPE
               && TREE_CODE (f2->type) != VECTOR_TYPE)
        return -1;
      /* Put the integral type with the bigger precision first.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && INTEGRAL_TYPE_P (f2->type))
        return TYPE_PRECISION (f2->type) - TYPE_PRECISION (f1->type);
      /* Put any integral type with non-full precision last.  */
      else if (INTEGRAL_TYPE_P (f1->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f1->type))
                   != TYPE_PRECISION (f1->type)))
        return 1;
      else if (INTEGRAL_TYPE_P (f2->type)
               && (TREE_INT_CST_LOW (TYPE_SIZE (f2->type))
                   != TYPE_PRECISION (f2->type)))
        return -1;
      /* Stabilize the sort.  */
      return TYPE_UID (f1->type) - TYPE_UID (f2->type);
    }

  /* We want the bigger accesses first, thus the opposite operator in the next
     line: */
  return f1->size > f2->size ? -1 : 1;
}
/* Append a name of the declaration to the name obstack.  A helper function for
   make_fancy_name.  */

static void
make_fancy_decl_name (tree decl)
{
  char buffer[32];

  tree name = DECL_NAME (decl);
  if (name)
    obstack_grow (&name_obstack, IDENTIFIER_POINTER (name),
                  IDENTIFIER_LENGTH (name));
  else
    {
      sprintf (buffer, "D%u", DECL_UID (decl));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
    }
}
/* Helper for make_fancy_name.  */

static void
make_fancy_name_1 (tree expr)
{
  char buffer[32];
  tree index;

  if (DECL_P (expr))
    {
      make_fancy_decl_name (expr);
      return;
    }

  switch (TREE_CODE (expr))
    {
    case COMPONENT_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      make_fancy_decl_name (TREE_OPERAND (expr, 1));
      break;

    case ARRAY_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      obstack_1grow (&name_obstack, '$');
      /* Arrays with only one element may not have a constant as their
         index.  */
      index = TREE_OPERAND (expr, 1);
      if (TREE_CODE (index) != INTEGER_CST)
        break;
      sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
      obstack_grow (&name_obstack, buffer, strlen (buffer));
      break;

    case ADDR_EXPR:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      break;

    case MEM_REF:
      make_fancy_name_1 (TREE_OPERAND (expr, 0));
      if (!integer_zerop (TREE_OPERAND (expr, 1)))
        {
          obstack_1grow (&name_obstack, '$');
          sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
                   TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
          obstack_grow (&name_obstack, buffer, strlen (buffer));
        }
      break;

    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      gcc_unreachable ();       /* we treat these as scalars.  */
      break;
    default:
      break;
    }
}
/* Create a human readable name for replacement variable of ACCESS.  */

static char *
make_fancy_name (tree expr)
{
  make_fancy_name_1 (expr);
  obstack_1grow (&name_obstack, '\0');
  return XOBFINISH (&name_obstack, char *);
}
/* Construct a MEM_REF that would reference a part of aggregate BASE of type
   EXP_TYPE at the given OFFSET.  If BASE is something for which
   get_addr_base_and_unit_offset returns NULL, gsi must be non-NULL and is used
   to insert new statements either before or below the current one as specified
   by INSERT_AFTER.  This function is not capable of handling bitfields.

   BASE must be either a declaration or a memory reference that has correct
   alignment information embedded in it (e.g. a pre-existing one in SRA).  */

static tree
build_ref_for_offset (location_t loc, tree base, HOST_WIDE_INT offset,
                      tree exp_type, gimple_stmt_iterator *gsi,
                      bool insert_after)
{
  tree prev_base = base;
  tree off;
  tree mem_ref;
  HOST_WIDE_INT base_offset;
  unsigned HOST_WIDE_INT misalign;
  unsigned int align;

  gcc_checking_assert (offset % BITS_PER_UNIT == 0);
  get_object_alignment_1 (base, &align, &misalign);
  base = get_addr_base_and_unit_offset (base, &base_offset);

  /* get_addr_base_and_unit_offset returns NULL for references with a variable
     offset such as array[var_index].  */
  if (!base)
    {
      gassign *stmt;
      tree tmp, addr;

      gcc_checking_assert (gsi);
      tmp = make_ssa_name (build_pointer_type (TREE_TYPE (prev_base)));
      addr = build_fold_addr_expr (unshare_expr (prev_base));
      STRIP_USELESS_TYPE_CONVERSION (addr);
      stmt = gimple_build_assign (tmp, addr);
      gimple_set_location (stmt, loc);
      if (insert_after)
        gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
        gsi_insert_before (gsi, stmt, GSI_SAME_STMT);

      off = build_int_cst (reference_alias_ptr_type (prev_base),
                           offset / BITS_PER_UNIT);
      base = tmp;
    }
  else if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
                           base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
                           base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  misalign = (misalign + offset) & (align - 1);
  if (misalign != 0)
    align = (misalign & -misalign);
  if (align != TYPE_ALIGN (exp_type))
    exp_type = build_aligned_type (exp_type, align);

  mem_ref = fold_build2_loc (loc, MEM_REF, exp_type, base, off);
  if (TREE_THIS_VOLATILE (prev_base))
    TREE_THIS_VOLATILE (mem_ref) = 1;
  if (TREE_SIDE_EFFECTS (prev_base))
    TREE_SIDE_EFFECTS (mem_ref) = 1;
  return mem_ref;
}
/* Construct a memory reference to a part of an aggregate BASE at the given
   OFFSET and of the same type as MODEL.  In case this is a reference to a
   bit-field, the function will replicate the last component_ref of model's
   expr to access it.  GSI and INSERT_AFTER have the same meaning as in
   build_ref_for_offset.  */

static tree
build_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
                     struct access *model, gimple_stmt_iterator *gsi,
                     bool insert_after)
{
  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    {
      /* This access represents a bit-field.  */
      tree t, exp_type, fld = TREE_OPERAND (model->expr, 1);

      offset -= int_bit_position (fld);
      exp_type = TREE_TYPE (TREE_OPERAND (model->expr, 0));
      t = build_ref_for_offset (loc, base, offset, exp_type, gsi, insert_after);
      return fold_build3_loc (loc, COMPONENT_REF, TREE_TYPE (fld), t, fld,
                              NULL_TREE);
    }
  else
    return build_ref_for_offset (loc, base, offset, model->type,
                                 gsi, insert_after);
}
/* Attempt to build a memory reference that we could put into a gimple
   debug_bind statement.  Similar to build_ref_for_model but punts if it has to
   create statements and returns NULL instead.  This function also ignores
   alignment issues and so its results should never end up in non-debug
   statements.  */

static tree
build_debug_ref_for_model (location_t loc, tree base, HOST_WIDE_INT offset,
                           struct access *model)
{
  HOST_WIDE_INT base_offset;
  tree off;

  if (TREE_CODE (model->expr) == COMPONENT_REF
      && DECL_BIT_FIELD (TREE_OPERAND (model->expr, 1)))
    return NULL_TREE;

  base = get_addr_base_and_unit_offset (base, &base_offset);
  if (!base)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      off = build_int_cst (TREE_TYPE (TREE_OPERAND (base, 1)),
                           base_offset + offset / BITS_PER_UNIT);
      off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1), off);
      base = unshare_expr (TREE_OPERAND (base, 0));
    }
  else
    {
      off = build_int_cst (reference_alias_ptr_type (base),
                           base_offset + offset / BITS_PER_UNIT);
      base = build_fold_addr_expr (unshare_expr (base));
    }

  return fold_build2_loc (loc, MEM_REF, model->type, base, off);
}
/* Construct a memory reference consisting of component_refs and array_refs to
   a part of an aggregate *RES (which is of type TYPE).  The requested part
   should have type EXP_TYPE at the given OFFSET.  This function might not
   succeed, it returns true when it does and only then *RES points to something
   meaningful.  This function should be used only to build expressions that we
   might need to present to user (e.g. in warnings).  In all other situations,
   build_ref_for_model or build_ref_for_offset should be used instead.  */

static bool
build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
                                    tree exp_type)
{
  while (1)
    {
      tree fld;
      tree tr_size, index, minidx;
      HOST_WIDE_INT el_size;

      if (offset == 0 && exp_type
          && types_compatible_p (exp_type, type))
        return true;

      switch (TREE_CODE (type))
        {
        case UNION_TYPE:
        case QUAL_UNION_TYPE:
        case RECORD_TYPE:
          for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
            {
              HOST_WIDE_INT pos, size;
              tree tr_pos, expr, *expr_ptr;

              if (TREE_CODE (fld) != FIELD_DECL)
                continue;

              tr_pos = bit_position (fld);
              if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
                continue;
              pos = tree_to_uhwi (tr_pos);
              gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
              tr_size = DECL_SIZE (fld);
              if (!tr_size || !tree_fits_uhwi_p (tr_size))
                continue;
              size = tree_to_uhwi (tr_size);
              if (size == 0)
                {
                  if (pos != offset)
                    continue;
                }
              else if (pos > offset || (pos + size) <= offset)
                continue;

              expr = build3 (COMPONENT_REF, TREE_TYPE (fld), *res, fld,
                             NULL_TREE);
              expr_ptr = &expr;
              if (build_user_friendly_ref_for_offset (expr_ptr, TREE_TYPE (fld),
                                                      offset - pos, exp_type))
                {
                  *res = expr;
                  return true;
                }
            }
          return false;

        case ARRAY_TYPE:
          tr_size = TYPE_SIZE (TREE_TYPE (type));
          if (!tr_size || !tree_fits_uhwi_p (tr_size))
            return false;
          el_size = tree_to_uhwi (tr_size);

          minidx = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
          if (TREE_CODE (minidx) != INTEGER_CST || el_size == 0)
            return false;
          index = build_int_cst (TYPE_DOMAIN (type), offset / el_size);
          if (!integer_zerop (minidx))
            index = int_const_binop (PLUS_EXPR, index, minidx);
          *res = build4 (ARRAY_REF, TREE_TYPE (type), *res, index,
                         NULL_TREE, NULL_TREE);
          offset = offset % el_size;
          type = TREE_TYPE (type);
          break;

        default:
          if (offset != 0)
            return false;

          if (exp_type)
            return false;
          else
            return true;
        }
    }
}
1784 is_va_list_type (tree type
)
1786 return TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (va_list_type_node
);
/* Print message to dump file why a variable was rejected. */

static void
reject (tree var, const char *msg)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Rejected (%d): %s: ", DECL_UID (var), msg);
      print_generic_expr (dump_file, var, 0);
      fprintf (dump_file, "\n");
    }
}
1805 maybe_add_sra_candidate (tree var
)
1807 tree type
= TREE_TYPE (var
);
1811 if (!AGGREGATE_TYPE_P (type
))
1813 reject (var
, "not aggregate");
1816 if (needs_to_live_in_memory (var
))
1818 reject (var
, "needs to live in memory");
1821 if (TREE_THIS_VOLATILE (var
))
1823 reject (var
, "is volatile");
1826 if (!COMPLETE_TYPE_P (type
))
1828 reject (var
, "has incomplete type");
1831 if (!tree_fits_uhwi_p (TYPE_SIZE (type
)))
1833 reject (var
, "type size not fixed");
1836 if (tree_to_uhwi (TYPE_SIZE (type
)) == 0)
1838 reject (var
, "type size is zero");
1841 if (type_internals_preclude_sra_p (type
, &msg
))
1846 if (/* Fix for PR 41089. tree-stdarg.c needs to have va_lists intact but
1847 we also want to schedule it rather late. Thus we ignore it in
1849 (sra_mode
== SRA_MODE_EARLY_INTRA
1850 && is_va_list_type (type
)))
1852 reject (var
, "is va_list");
1856 bitmap_set_bit (candidate_bitmap
, DECL_UID (var
));
1857 slot
= candidates
->find_slot_with_hash (var
, DECL_UID (var
), INSERT
);
1860 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1862 fprintf (dump_file
, "Candidate (%d): ", DECL_UID (var
));
1863 print_generic_expr (dump_file
, var
, 0);
1864 fprintf (dump_file
, "\n");
/* The very first phase of intraprocedural SRA.  It marks in candidate_bitmap
   those with type which is suitable for scalarization.  */

static bool
find_var_candidates (void)
{
  tree var, parm;
  unsigned int i;
  bool ret = false;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    ret |= maybe_add_sra_candidate (parm);

  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      if (TREE_CODE (var) != VAR_DECL)
        continue;

      ret |= maybe_add_sra_candidate (var);
    }

  return ret;
}
/* Sort all accesses for the given variable, check for partial overlaps and
   return NULL if there are any.  If there are none, pick a representative for
   each combination of offset and size and create a linked list out of them.
   Return the pointer to the first representative and make sure it is the first
   one in the vector of accesses.  */

static struct access *
sort_and_splice_var_accesses (tree var)
{
  int i, j, access_count;
  struct access *res, **prev_acc_ptr = &res;
  vec<access_p> *access_vec;
  bool first = true;
  HOST_WIDE_INT low = -1, high = 0;

  access_vec = get_base_access_vector (var);
  if (!access_vec)
    return NULL;
  access_count = access_vec->length ();

  /* Sort by <OFFSET, SIZE>.  */
  access_vec->qsort (compare_access_positions);

  i = 0;
  while (i < access_count)
    {
      struct access *access = (*access_vec)[i];
      bool grp_write = access->write;
      bool grp_read = !access->write;
      bool grp_scalar_write = access->write
        && is_gimple_reg_type (access->type);
      bool grp_scalar_read = !access->write
        && is_gimple_reg_type (access->type);
      bool grp_assignment_read = access->grp_assignment_read;
      bool grp_assignment_write = access->grp_assignment_write;
      bool multiple_scalar_reads = false;
      bool total_scalarization = access->grp_total_scalarization;
      bool grp_partial_lhs = access->grp_partial_lhs;
      bool first_scalar = is_gimple_reg_type (access->type);
      bool unscalarizable_region = access->grp_unscalarizable_region;

      if (first || access->offset >= high)
        {
          first = false;
          low = access->offset;
          high = access->offset + access->size;
        }
      else if (access->offset > low && access->offset + access->size > high)
        return NULL;
      else
        gcc_assert (access->offset >= low
                    && access->offset + access->size <= high);

      j = i + 1;
      while (j < access_count)
        {
          struct access *ac2 = (*access_vec)[j];
          if (ac2->offset != access->offset || ac2->size != access->size)
            break;
          if (ac2->write)
            {
              grp_write = true;
              grp_scalar_write = (grp_scalar_write
                                  || is_gimple_reg_type (ac2->type));
            }
          else
            {
              grp_read = true;
              if (is_gimple_reg_type (ac2->type))
                {
                  if (grp_scalar_read)
                    multiple_scalar_reads = true;
                  else
                    grp_scalar_read = true;
                }
            }
          grp_assignment_read |= ac2->grp_assignment_read;
          grp_assignment_write |= ac2->grp_assignment_write;
          grp_partial_lhs |= ac2->grp_partial_lhs;
          unscalarizable_region |= ac2->grp_unscalarizable_region;
          total_scalarization |= ac2->grp_total_scalarization;
          relink_to_new_repr (access, ac2);

          /* If there are both aggregate-type and scalar-type accesses with
             this combination of size and offset, the comparison function
             should have put the scalars first.  */
          gcc_assert (first_scalar || !is_gimple_reg_type (ac2->type));
          ac2->group_representative = access;
          j++;
        }

      i = j;
      access->group_representative = access;
      access->grp_write = grp_write;
      access->grp_read = grp_read;
      access->grp_scalar_read = grp_scalar_read;
      access->grp_scalar_write = grp_scalar_write;
      access->grp_assignment_read = grp_assignment_read;
      access->grp_assignment_write = grp_assignment_write;
      access->grp_hint = multiple_scalar_reads || total_scalarization;
      access->grp_total_scalarization = total_scalarization;
      access->grp_partial_lhs = grp_partial_lhs;
      access->grp_unscalarizable_region = unscalarizable_region;
      if (access->first_link)
        add_access_to_work_queue (access);

      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
    }

  gcc_assert (res == (*access_vec)[0]);
  return res;
}
/* Create a variable for the given ACCESS which determines the type, name and a
   few other properties.  Return the variable declaration and store it also to
   ACCESS->replacement.  */

static tree
create_access_replacement (struct access *access)
{
  tree repl;

  if (access->grp_to_be_debug_replaced)
    {
      repl = create_tmp_var_raw (access->type);
      DECL_CONTEXT (repl) = current_function_decl;
    }
  else
    /* Drop any special alignment on the type if it's not on the main
       variant.  This avoids issues with weirdo ABIs like AAPCS.  */
    repl = create_tmp_var (build_qualified_type
                             (TYPE_MAIN_VARIANT (access->type),
                              TYPE_QUALS (access->type)), "SR");
  if (TREE_CODE (access->type) == COMPLEX_TYPE
      || TREE_CODE (access->type) == VECTOR_TYPE)
    {
      if (!access->grp_partial_lhs)
        DECL_GIMPLE_REG_P (repl) = 1;
    }
  else if (access->grp_partial_lhs
           && is_gimple_reg_type (access->type))
    TREE_ADDRESSABLE (repl) = 1;

  DECL_SOURCE_LOCATION (repl) = DECL_SOURCE_LOCATION (access->base);
  DECL_ARTIFICIAL (repl) = 1;
  DECL_IGNORED_P (repl) = DECL_IGNORED_P (access->base);

  if (DECL_NAME (access->base)
      && !DECL_IGNORED_P (access->base)
      && !DECL_ARTIFICIAL (access->base))
    {
      char *pretty_name = make_fancy_name (access->expr);
      tree debug_expr = unshare_expr_without_location (access->expr), d;
      bool fail = false;

      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      /* Get rid of any SSA_NAMEs embedded in debug_expr,
         as DECL_DEBUG_EXPR isn't considered when looking for still
         used SSA_NAMEs and thus they could be freed.  All debug info
         generation cares is whether something is constant or variable
         and that get_ref_base_and_extent works properly on the
         expression.  It cannot handle accesses at a non-constant offset
         though, so just give up in those cases.  */
      for (d = debug_expr;
           !fail && (handled_component_p (d) || TREE_CODE (d) == MEM_REF);
           d = TREE_OPERAND (d, 0))
        switch (TREE_CODE (d))
          {
          case ARRAY_REF:
          case ARRAY_RANGE_REF:
            if (TREE_OPERAND (d, 1)
                && TREE_CODE (TREE_OPERAND (d, 1)) != INTEGER_CST)
              fail = true;
            if (TREE_OPERAND (d, 3)
                && TREE_CODE (TREE_OPERAND (d, 3)) != INTEGER_CST)
              fail = true;
            break;
          case COMPONENT_REF:
            if (TREE_OPERAND (d, 2)
                && TREE_CODE (TREE_OPERAND (d, 2)) != INTEGER_CST)
              fail = true;
            break;
          case MEM_REF:
            if (TREE_CODE (TREE_OPERAND (d, 0)) != ADDR_EXPR)
              fail = true;
            else
              d = TREE_OPERAND (d, 0);
            break;
          default:
            break;
          }
      if (!fail)
        {
          SET_DECL_DEBUG_EXPR (repl, debug_expr);
          DECL_HAS_DEBUG_EXPR_P (repl) = 1;
        }
      if (access->grp_no_warning)
        TREE_NO_WARNING (repl) = 1;
      else
        TREE_NO_WARNING (repl) = TREE_NO_WARNING (access->base);
    }
  else
    TREE_NO_WARNING (repl) = 1;

  if (dump_file)
    {
      if (access->grp_to_be_debug_replaced)
        {
          fprintf (dump_file, "Created a debug-only replacement for ");
          print_generic_expr (dump_file, access->base, 0);
          fprintf (dump_file, " offset: %u, size: %u\n",
                   (unsigned) access->offset, (unsigned) access->size);
        }
      else
        {
          fprintf (dump_file, "Created a replacement for ");
          print_generic_expr (dump_file, access->base, 0);
          fprintf (dump_file, " offset: %u, size: %u: ",
                   (unsigned) access->offset, (unsigned) access->size);
          print_generic_expr (dump_file, repl, 0);
          fprintf (dump_file, "\n");
        }
    }
  sra_stats.replacements++;

  return repl;
}
/* Return ACCESS scalar replacement, create it if it does not exist yet.  */

static inline tree
get_access_replacement (struct access *access)
{
  gcc_checking_assert (access->replacement_decl);
  return access->replacement_decl;
}
/* Build a subtree of accesses rooted in *ACCESS, and move the pointer in the
   linked list along the way.  Stop when *ACCESS is NULL or the access pointed
   to it is not "within" the root.  Return false iff some accesses partially
   overlap.  */

static bool
build_access_subtree (struct access **access)
{
  struct access *root = *access, *last_child = NULL;
  HOST_WIDE_INT limit = root->offset + root->size;

  *access = (*access)->next_grp;
  while (*access && (*access)->offset + (*access)->size <= limit)
    {
      if (!last_child)
        root->first_child = *access;
      else
        last_child->next_sibling = *access;
      last_child = *access;

      if (!build_access_subtree (access))
        return false;
    }

  if (*access && (*access)->offset < limit)
    return false;

  return true;
}
/* Build a tree of access representatives, ACCESS is the pointer to the first
   one, others are linked in a list by the next_grp field.  Return false iff
   some accesses partially overlap.  */

static bool
build_access_trees (struct access *access)
{
  while (access)
    {
      struct access *root = access;

      if (!build_access_subtree (&access))
	return false;
      root->next_grp = access;
    }
  return true;
}
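
/* A schematic example of the trees built here (illustrative only): for

     struct in { int a; int b; };
     struct out { struct in i; int c; } o;

   with accesses to o.i, o.i.a, o.i.b and o.c, the representatives form

     o.i     (offset  0, size 64)
       o.i.a (offset  0, size 32)   <- first_child of o.i
       o.i.b (offset 32, size 32)   <- next_sibling of o.i.a
     o.c     (offset 64, size 32)   <- next_grp of o.i

   Offsets and sizes assume a 32-bit int and are only illustrative.  */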
/* Return true if expr contains some ARRAY_REFs into a variable bounded
   array.  */

static bool
expr_with_var_bounded_array_refs_p (tree expr)
{
  while (handled_component_p (expr))
    {
      if (TREE_CODE (expr) == ARRAY_REF
	  && !tree_fits_shwi_p (array_ref_low_bound (expr)))
	return true;
      expr = TREE_OPERAND (expr, 0);
    }
  return false;
}
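
/* Illustration (not from the sources): a C array reference always has the
   constant low bound 0, but front ends such as Ada or Fortran can produce
   arrays whose low bound is only known at run time.  For a reference like
   a[i] where array_ref_low_bound yields an SSA_NAME instead of an
   INTEGER_CST, the predicate above returns true and scalarization of the
   enclosing aggregate access is suppressed.  */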
/* Analyze the subtree of accesses rooted in ROOT, scheduling replacements when
   both seeming beneficial and when ALLOW_REPLACEMENTS allows it.  Also set all
   sorts of access flags appropriately along the way, notably always set
   grp_read and grp_assign_read according to MARK_READ and grp_write when
   MARK_WRITE is true.

   Creating a replacement for a scalar access is considered beneficial if its
   grp_hint is set (this means we are either attempting total scalarization or
   there is more than one direct read access) or according to the following
   table:

   Access written to through a scalar type (once or more times)
   |
   |	Written to in an assignment statement
   |	|
   |	|	Access read as scalar _once_
   |	|	|
   |	|	|	Read in an assignment statement
   |	|	|	|
   |	|	|	|	Scalarize	Comment
-----------------------------------------------------------------------------
   0	0	0	0			No access for the scalar
   0	0	0	1			No access for the scalar
   0	0	1	0	No		Single read - won't help
   0	0	1	1	No		The same case
   0	1	0	0			No access for the scalar
   0	1	0	1			No access for the scalar
   0	1	1	0	Yes		s = *g; return s.i;
   0	1	1	1	Yes		The same case as above
   1	0	0	0	No		Won't help
   1	0	0	1	Yes		s.i = 1; *g = s;
   1	0	1	0	Yes		s.i = 5; g = s.i;
   1	0	1	1	Yes		The same case as above
   1	1	0	0	No		Won't help.
   1	1	0	1	Yes		s.i = 1; *g = s;
   1	1	1	0	Yes		s = *g; return s.i;
   1	1	1	1	Yes		Any of the above yeses  */
static bool
analyze_access_subtree (struct access *root, struct access *parent,
			bool allow_replacements)
{
  struct access *child;
  HOST_WIDE_INT limit = root->offset + root->size;
  HOST_WIDE_INT covered_to = root->offset;
  bool scalar = is_gimple_reg_type (root->type);
  bool hole = false, sth_created = false;

  if (parent)
    {
      if (parent->grp_read)
	root->grp_read = 1;
      if (parent->grp_assignment_read)
	root->grp_assignment_read = 1;
      if (parent->grp_write)
	root->grp_write = 1;
      if (parent->grp_assignment_write)
	root->grp_assignment_write = 1;
      if (parent->grp_total_scalarization)
	root->grp_total_scalarization = 1;
    }

  if (root->grp_unscalarizable_region)
    allow_replacements = false;

  if (allow_replacements && expr_with_var_bounded_array_refs_p (root->expr))
    allow_replacements = false;

  for (child = root->first_child; child; child = child->next_sibling)
    {
      hole |= covered_to < child->offset;
      sth_created |= analyze_access_subtree (child, root,
					     allow_replacements && !scalar);

      root->grp_unscalarized_data |= child->grp_unscalarized_data;
      root->grp_total_scalarization &= child->grp_total_scalarization;
      if (child->grp_covered)
	covered_to += child->size;
      else
	hole = true;
    }

  if (allow_replacements && scalar && !root->first_child
      && (root->grp_hint
	  || ((root->grp_scalar_read || root->grp_assignment_read)
	      && (root->grp_scalar_write || root->grp_assignment_write))))
    {
      /* Always create access replacements that cover the whole access.
	 For integral types this means the precision has to match.
	 Avoid assumptions based on the integral type kind, too.  */
      if (INTEGRAL_TYPE_P (root->type)
	  && (TREE_CODE (root->type) != INTEGER_TYPE
	      || TYPE_PRECISION (root->type) != root->size)
	  /* But leave bitfield accesses alone.  */
	  && (TREE_CODE (root->expr) != COMPONENT_REF
	      || !DECL_BIT_FIELD (TREE_OPERAND (root->expr, 1))))
	{
	  tree rt = root->type;
	  gcc_assert ((root->offset % BITS_PER_UNIT) == 0
		      && (root->size % BITS_PER_UNIT) == 0);
	  root->type = build_nonstandard_integer_type (root->size,
						       TYPE_UNSIGNED (rt));
	  root->expr = build_ref_for_offset (UNKNOWN_LOCATION,
					     root->base, root->offset,
					     root->type, NULL, false);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Changing the type of a replacement for ");
	      print_generic_expr (dump_file, root->base, 0);
	      fprintf (dump_file, " offset: %u, size: %u ",
		       (unsigned) root->offset, (unsigned) root->size);
	      fprintf (dump_file, " to an integer.\n");
	    }
	}

      root->grp_to_be_replaced = 1;
      root->replacement_decl = create_access_replacement (root);
      sth_created = true;
      hole = false;
    }
  else
    {
      if (allow_replacements
	  && scalar && !root->first_child
	  && (root->grp_scalar_write || root->grp_assignment_write)
	  && !bitmap_bit_p (cannot_scalarize_away_bitmap,
			    DECL_UID (root->base)))
	{
	  gcc_checking_assert (!root->grp_scalar_read
			       && !root->grp_assignment_read);
	  sth_created = true;
	  if (MAY_HAVE_DEBUG_STMTS)
	    {
	      root->grp_to_be_debug_replaced = 1;
	      root->replacement_decl = create_access_replacement (root);
	    }
	}

      if (covered_to < limit)
	hole = true;
      if (scalar)
	root->grp_total_scalarization = 0;
    }

  if (!hole || root->grp_total_scalarization)
    root->grp_covered = 1;
  else if (root->grp_write || TREE_CODE (root->base) == PARM_DECL)
    root->grp_unscalarized_data = 1; /* not covered and written to */
  return sth_created;
}
/* Analyze all access trees linked by next_grp by the means of
   analyze_access_subtree.  */

static bool
analyze_access_trees (struct access *access)
{
  bool ret = false;

  while (access)
    {
      if (analyze_access_subtree (access, NULL, true))
	ret = true;
      access = access->next_grp;
    }

  return ret;
}
/* Return true iff a potential new child of LACC at offset NORM_OFFSET and
   with size SIZE would conflict with an already existing one.  If exactly
   such a child already exists in LACC, store a pointer to it in
   EXACT_MATCH.  */

static bool
child_would_conflict_in_lacc (struct access *lacc, HOST_WIDE_INT norm_offset,
			      HOST_WIDE_INT size, struct access **exact_match)
{
  struct access *child;

  for (child = lacc->first_child; child; child = child->next_sibling)
    {
      if (child->offset == norm_offset && child->size == size)
	{
	  *exact_match = child;
	  return true;
	}

      if (child->offset < norm_offset + size
	  && child->offset + child->size > norm_offset)
	return true;
    }

  return false;
}
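
/* For illustration (numbers made up): with an existing child at offset 0
   and size 64, a candidate at offset 0 with size 64 is an exact match and
   is stored in *EXACT_MATCH; a candidate at offset 32 with size 64
   overlaps it only partially and therefore conflicts; and a candidate at
   offset 64 or higher does not conflict with it at all.  */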
/* Create a new child access of PARENT, with all properties just like MODEL
   except for its offset and with its grp_write false and grp_read true.
   Return the new access or NULL if it cannot be created.  Note that this
   access is created long after all splicing and sorting, it's not located in
   any access vector and is automatically a representative of its group.  */

static struct access *
create_artificial_child_access (struct access *parent, struct access *model,
				HOST_WIDE_INT new_offset)
{
  struct access **child;
  tree expr = parent->base;

  gcc_assert (!model->grp_unscalarizable_region);

  struct access *access = new struct access ();
  memset (access, 0, sizeof (struct access));
  if (!build_user_friendly_ref_for_offset (&expr, TREE_TYPE (expr), new_offset,
					   model->type))
    {
      access->grp_no_warning = true;
      expr = build_ref_for_model (EXPR_LOCATION (parent->base), parent->base,
				  new_offset, model, NULL, false);
    }

  access->base = parent->base;
  access->expr = expr;
  access->offset = new_offset;
  access->size = model->size;
  access->type = model->type;
  access->grp_write = true;
  access->grp_read = false;

  child = &parent->first_child;
  while (*child && (*child)->offset < new_offset)
    child = &(*child)->next_sibling;

  access->next_sibling = *child;
  *child = access;

  return access;
}
/* Propagate all subaccesses of RACC across an assignment link to LACC. Return
   true if any new subaccess was created.  Additionally, if RACC is a scalar
   access but LACC is not, change the type of the latter, if possible.  */

static bool
propagate_subaccesses_across_link (struct access *lacc, struct access *racc)
{
  struct access *rchild;
  HOST_WIDE_INT norm_delta = lacc->offset - racc->offset;
  bool ret = false;

  if (is_gimple_reg_type (lacc->type)
      || lacc->grp_unscalarizable_region
      || racc->grp_unscalarizable_region)
    return false;

  if (is_gimple_reg_type (racc->type))
    {
      if (!lacc->first_child && !racc->first_child)
	{
	  tree t = lacc->base;

	  lacc->type = racc->type;
	  if (build_user_friendly_ref_for_offset (&t, TREE_TYPE (t),
						  lacc->offset, racc->type))
	    lacc->expr = t;
	  else
	    {
	      lacc->expr = build_ref_for_model (EXPR_LOCATION (lacc->base),
						lacc->base, lacc->offset,
						racc, NULL, false);
	      lacc->grp_no_warning = true;
	    }
	}
      return false;
    }

  for (rchild = racc->first_child; rchild; rchild = rchild->next_sibling)
    {
      struct access *new_acc = NULL;
      HOST_WIDE_INT norm_offset = rchild->offset + norm_delta;

      if (rchild->grp_unscalarizable_region)
	continue;

      if (child_would_conflict_in_lacc (lacc, norm_offset, rchild->size,
					&new_acc))
	{
	  if (new_acc)
	    {
	      rchild->grp_hint = 1;
	      new_acc->grp_hint |= new_acc->grp_read;
	      if (rchild->first_child)
		ret |= propagate_subaccesses_across_link (new_acc, rchild);
	    }
	  continue;
	}

      rchild->grp_hint = 1;
      new_acc = create_artificial_child_access (lacc, rchild, norm_offset);
      if (new_acc)
	{
	  ret = true;
	  if (racc->first_child)
	    propagate_subaccesses_across_link (new_acc, rchild);
	}
    }

  return ret;
}
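
/* A schematic example of this propagation (illustrative only): given

     struct S { int i; int j; } a, b;

     a.i = 1;     // creates a subaccess of a at offset 0
     b = a;       // assign link with RACC covering a, LACC covering b
     ... = b.i;

   the child of a's access tree at offset 0 is propagated across the link,
   creating an artificial child of b's tree at the same offset so that b.i
   can later be loaded from a scalar replacement.  */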
/* Propagate all subaccesses across assignment links.  */

static void
propagate_all_subaccesses (void)
{
  while (work_queue_head)
    {
      struct access *racc = pop_access_from_work_queue ();
      struct assign_link *link;

      gcc_assert (racc->first_link);

      for (link = racc->first_link; link; link = link->next)
	{
	  struct access *lacc = link->lacc;

	  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (lacc->base)))
	    continue;
	  lacc = lacc->group_representative;
	  if (propagate_subaccesses_across_link (lacc, racc)
	      && lacc->first_link)
	    add_access_to_work_queue (lacc);
	}
    }
}
/* Go through all accesses collected throughout the (intraprocedural) analysis
   stage, exclude overlapping ones, identify representatives and build trees
   out of them, making decisions about scalarization on the way.  Return true
   iff there are any to-be-scalarized variables after this stage.  */

static bool
analyze_all_variable_accesses (void)
{
  int res = 0;
  bitmap tmp = BITMAP_ALLOC (NULL);
  bitmap_iterator bi;
  unsigned i;
  unsigned max_scalarization_size
    = (optimize_function_for_size_p (cfun)
       ? PARAM_VALUE (PARAM_SRA_MAX_SCALARIZATION_SIZE_SIZE)
       : PARAM_VALUE (PARAM_SRA_MAX_SCALARIZATION_SIZE_SPEED))
      * BITS_PER_UNIT;

  EXECUTE_IF_SET_IN_BITMAP (candidate_bitmap, 0, i, bi)
    if (bitmap_bit_p (should_scalarize_away_bitmap, i)
	&& !bitmap_bit_p (cannot_scalarize_away_bitmap, i))
      {
	tree var = candidate (i);

	if (TREE_CODE (var) == VAR_DECL
	    && type_consists_of_records_p (TREE_TYPE (var)))
	  {
	    if (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (var)))
		<= max_scalarization_size)
	      {
		completely_scalarize_var (var);
		if (dump_file && (dump_flags & TDF_DETAILS))
		  {
		    fprintf (dump_file, "Will attempt to totally scalarize ");
		    print_generic_expr (dump_file, var, 0);
		    fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
		  }
	      }
	    else if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "Too big to totally scalarize: ");
		print_generic_expr (dump_file, var, 0);
		fprintf (dump_file, " (UID: %u)\n", DECL_UID (var));
	      }
	  }
      }

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = candidate (i);
      struct access *access;

      access = sort_and_splice_var_accesses (var);
      if (!access || !build_access_trees (access))
	disqualify_candidate (var,
			      "No or inhibitingly overlapping accesses.");
    }

  propagate_all_subaccesses ();

  bitmap_copy (tmp, candidate_bitmap);
  EXECUTE_IF_SET_IN_BITMAP (tmp, 0, i, bi)
    {
      tree var = candidate (i);
      struct access *access = get_first_repr_for_decl (var);

      if (analyze_access_trees (access))
	{
	  res++;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "\nAccess trees for ");
	      print_generic_expr (dump_file, var, 0);
	      fprintf (dump_file, " (UID: %u): \n", DECL_UID (var));
	      dump_access_tree (dump_file, access);
	      fprintf (dump_file, "\n");
	    }
	}
      else
	disqualify_candidate (var, "No scalar replacements to be created.");
    }

  BITMAP_FREE (tmp);

  if (res)
    {
      statistics_counter_event (cfun, "Scalarized aggregates", res);
      return true;
    }
  else
    return false;
}
/* Generate statements copying scalar replacements of accesses within a subtree
   into or out of AGG.  ACCESS, all its children, siblings and their children
   are to be processed.  AGG is an aggregate type expression (can be a
   declaration but does not have to be, it can for example also be a mem_ref or
   a series of handled components).  TOP_OFFSET is the offset of the processed
   subtree which has to be subtracted from offsets of individual accesses to
   get corresponding offsets for AGG.  If CHUNK_SIZE is non-zero, copy only
   replacements in the interval <start_offset, start_offset + chunk_size>,
   otherwise copy all.  GSI is a statement iterator used to place the new
   statements.  WRITE should be true when the statements should write from AGG
   to the replacement and false if vice versa.  If INSERT_AFTER is true, new
   statements will be added after the current statement in GSI, they will be
   added before the statement otherwise.  */

static void
generate_subtree_copies (struct access *access, tree agg,
			 HOST_WIDE_INT top_offset,
			 HOST_WIDE_INT start_offset, HOST_WIDE_INT chunk_size,
			 gimple_stmt_iterator *gsi, bool write,
			 bool insert_after, location_t loc)
{
  do
    {
      if (chunk_size && access->offset >= start_offset + chunk_size)
	return;

      if (access->grp_to_be_replaced
	  && (chunk_size == 0
	      || access->offset + access->size > start_offset))
	{
	  tree expr, repl = get_access_replacement (access);
	  gassign *stmt;

	  expr = build_ref_for_model (loc, agg, access->offset - top_offset,
				      access, gsi, insert_after);

	  if (write)
	    {
	      if (access->grp_partial_lhs)
		expr = force_gimple_operand_gsi (gsi, expr, true, NULL_TREE,
						 !insert_after,
						 insert_after ? GSI_NEW_STMT
						 : GSI_SAME_STMT);
	      stmt = gimple_build_assign (repl, expr);
	    }
	  else
	    {
	      TREE_NO_WARNING (repl) = 1;
	      if (access->grp_partial_lhs)
		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
						 !insert_after,
						 insert_after ? GSI_NEW_STMT
						 : GSI_SAME_STMT);
	      stmt = gimple_build_assign (expr, repl);
	    }
	  gimple_set_location (stmt, loc);

	  if (insert_after)
	    gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
	  else
	    gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	  update_stmt (stmt);
	  sra_stats.subtree_copies++;
	}
      else if (write
	       && access->grp_to_be_debug_replaced
	       && (chunk_size == 0
		   || access->offset + access->size > start_offset))
	{
	  gdebug *ds;
	  tree drhs = build_debug_ref_for_model (loc, agg,
						 access->offset - top_offset,
						 access);
	  ds = gimple_build_debug_bind (get_access_replacement (access),
					drhs, gsi_stmt (*gsi));
	  if (insert_after)
	    gsi_insert_after (gsi, ds, GSI_NEW_STMT);
	  else
	    gsi_insert_before (gsi, ds, GSI_SAME_STMT);
	}

      if (access->first_child)
	generate_subtree_copies (access->first_child, agg, top_offset,
				 start_offset, chunk_size, gsi,
				 write, insert_after, loc);

      access = access->next_sibling;
    }
  while (access);
}
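
/* Illustration of the statements generated above (names schematic): when
   loading the replacements from AGG (WRITE is true) for an aggregate with
   two scalarized components, the emitted GIMPLE looks roughly like

     SR$i = agg.i;
     SR$f = agg.f;

   and in the opposite direction like

     agg.i = SR$i;
     agg.f = SR$f;

   one assignment per to-be-replaced access in the subtree, placed before
   or after the statement at GSI according to INSERT_AFTER.  */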
/* Assign zero to all scalar replacements in an access subtree.  ACCESS is the
   root of the subtree to be processed.  GSI is the statement iterator used
   for inserting statements which are added after the current statement if
   INSERT_AFTER is true or before it otherwise.  */

static void
init_subtree_with_zero (struct access *access, gimple_stmt_iterator *gsi,
			bool insert_after, location_t loc)
{
  struct access *child;

  if (access->grp_to_be_replaced)
    {
      gassign *stmt;

      stmt = gimple_build_assign (get_access_replacement (access),
				  build_zero_cst (access->type));
      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);
      gimple_set_location (stmt, loc);
    }
  else if (access->grp_to_be_debug_replaced)
    {
      gdebug *ds
	= gimple_build_debug_bind (get_access_replacement (access),
				   build_zero_cst (access->type),
				   gsi_stmt (*gsi));
      if (insert_after)
	gsi_insert_after (gsi, ds, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, ds, GSI_SAME_STMT);
    }

  for (child = access->first_child; child; child = child->next_sibling)
    init_subtree_with_zero (child, gsi, insert_after, loc);
}
/* Clobber all scalar replacements in an access subtree.  ACCESS is the
   root of the subtree to be processed.  GSI is the statement iterator used
   for inserting statements which are added after the current statement if
   INSERT_AFTER is true or before it otherwise.  */

static void
clobber_subtree (struct access *access, gimple_stmt_iterator *gsi,
		 bool insert_after, location_t loc)
{
  struct access *child;

  if (access->grp_to_be_replaced)
    {
      tree rep = get_access_replacement (access);
      tree clobber = build_constructor (access->type, NULL);
      TREE_THIS_VOLATILE (clobber) = 1;
      gimple stmt = gimple_build_assign (rep, clobber);

      if (insert_after)
	gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
      else
	gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
      update_stmt (stmt);
      gimple_set_location (stmt, loc);
    }

  for (child = access->first_child; child; child = child->next_sibling)
    clobber_subtree (child, gsi, insert_after, loc);
}
/* Search for an access representative for the given expression EXPR and
   return it or NULL if it cannot be found.  */

static struct access *
get_access_for_expr (tree expr)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  /* FIXME: This should not be necessary but Ada produces V_C_Es with a type of
     a different size than the size of its argument and we need the latter
     one.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR)
    expr = TREE_OPERAND (expr, 0);

  base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
  if (max_size == -1 || !DECL_P (base))
    return NULL;

  if (!bitmap_bit_p (candidate_bitmap, DECL_UID (base)))
    return NULL;

  return get_var_base_offset_size_access (base, offset, max_size);
}
/* Replace the expression EXPR with a scalar replacement if there is one and
   generate other statements to do type conversion or subtree copying if
   necessary.  GSI is used to place newly created statements, WRITE is true if
   the expression is being written to (it is on a LHS of a statement or output
   in an assembly statement).  */

static bool
sra_modify_expr (tree *expr, gimple_stmt_iterator *gsi, bool write)
{
  location_t loc;
  struct access *access;
  tree type, bfr, orig_expr;

  if (TREE_CODE (*expr) == BIT_FIELD_REF)
    {
      bfr = *expr;
      expr = &TREE_OPERAND (*expr, 0);
    }
  else
    bfr = NULL_TREE;

  if (TREE_CODE (*expr) == REALPART_EXPR || TREE_CODE (*expr) == IMAGPART_EXPR)
    expr = &TREE_OPERAND (*expr, 0);
  access = get_access_for_expr (*expr);
  if (!access)
    return false;
  type = TREE_TYPE (*expr);
  orig_expr = *expr;

  loc = gimple_location (gsi_stmt (*gsi));
  gimple_stmt_iterator alt_gsi = gsi_none ();
  if (write && stmt_ends_bb_p (gsi_stmt (*gsi)))
    {
      alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
      gsi = &alt_gsi;
    }

  if (access->grp_to_be_replaced)
    {
      tree repl = get_access_replacement (access);
      /* If we replace a non-register typed access simply use the original
	 access expression to extract the scalar component afterwards.
	 This happens if scalarizing a function return value or parameter
	 like in gcc.c-torture/execute/20041124-1.c, 20050316-1.c and
	 gcc.c-torture/compile/20011217-1.c.

	 We also want to use this when accessing a complex or vector which can
	 be accessed as a different type too, potentially creating a need for
	 type conversion (see PR42196) and when scalarized unions are involved
	 in assembler statements (see PR42398).  */
      if (!useless_type_conversion_p (type, access->type))
	{
	  tree ref;

	  ref = build_ref_for_model (loc, orig_expr, 0, access, gsi, false);

	  if (write)
	    {
	      gassign *stmt;

	      if (access->grp_partial_lhs)
		ref = force_gimple_operand_gsi (gsi, ref, true, NULL_TREE,
						false, GSI_NEW_STMT);
	      stmt = gimple_build_assign (repl, ref);
	      gimple_set_location (stmt, loc);
	      gsi_insert_after (gsi, stmt, GSI_NEW_STMT);
	    }
	  else
	    {
	      gassign *stmt;

	      if (access->grp_partial_lhs)
		repl = force_gimple_operand_gsi (gsi, repl, true, NULL_TREE,
						 true, GSI_SAME_STMT);
	      stmt = gimple_build_assign (ref, repl);
	      gimple_set_location (stmt, loc);
	      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
	    }
	}
      else
	*expr = repl;
      sra_stats.exprs++;
    }
  else if (write && access->grp_to_be_debug_replaced)
    {
      gdebug *ds = gimple_build_debug_bind (get_access_replacement (access),
					    NULL_TREE,
					    gsi_stmt (*gsi));
      gsi_insert_after (gsi, ds, GSI_NEW_STMT);
    }

  if (access->first_child)
    {
      HOST_WIDE_INT start_offset, chunk_size;
      if (bfr
	  && tree_fits_uhwi_p (TREE_OPERAND (bfr, 1))
	  && tree_fits_uhwi_p (TREE_OPERAND (bfr, 2)))
	{
	  chunk_size = tree_to_uhwi (TREE_OPERAND (bfr, 1));
	  start_offset = access->offset
	    + tree_to_uhwi (TREE_OPERAND (bfr, 2));
	}
      else
	start_offset = chunk_size = 0;

      generate_subtree_copies (access->first_child, orig_expr, access->offset,
			       start_offset, chunk_size, gsi, write, write,
			       loc);
    }
  return true;
}
/* Where scalar replacements of the RHS have been written to when a replacement
   of the LHS of an assignment cannot be directly loaded from a replacement of
   the RHS.  */
enum unscalarized_data_handling { SRA_UDH_NONE,  /* Nothing done so far.  */
				  SRA_UDH_RIGHT, /* Data flushed to the RHS. */
				  SRA_UDH_LEFT }; /* Data flushed to the LHS. */

struct subreplacement_assignment_data
{
  /* Offset of the access representing the lhs of the assignment.  */
  HOST_WIDE_INT left_offset;

  /* LHS and RHS of the original assignment.  */
  tree assignment_lhs, assignment_rhs;

  /* Access representing the rhs of the whole assignment.  */
  struct access *top_racc;

  /* Stmt iterator used for statement insertions after the original assignment.
     It points to the main GSI used to traverse a BB during function body
     modification.  */
  gimple_stmt_iterator *new_gsi;

  /* Stmt iterator used for statement insertions before the original
     assignment.  Keeps on pointing to the original statement.  */
  gimple_stmt_iterator old_gsi;

  /* Location of the assignment.  */
  location_t loc;

  /* Keeps the information whether we have needed to refresh replacements of
     the LHS and from which side of the assignments this takes place.  */
  enum unscalarized_data_handling refreshed;
};
/* Store all replacements in the access tree rooted in TOP_RACC either to their
   base aggregate if there are unscalarized data or directly to LHS of the
   statement that is pointed to by GSI otherwise.  */

static void
handle_unscalarized_data_in_subtree (struct subreplacement_assignment_data *sad)
{
  tree src;
  if (sad->top_racc->grp_unscalarized_data)
    {
      src = sad->assignment_rhs;
      sad->refreshed = SRA_UDH_RIGHT;
    }
  else
    {
      src = sad->assignment_lhs;
      sad->refreshed = SRA_UDH_LEFT;
    }
  generate_subtree_copies (sad->top_racc->first_child, src,
			   sad->top_racc->offset, 0, 0,
			   &sad->old_gsi, false, false, sad->loc);
}
/* Try to generate statements to load all sub-replacements in an access subtree
   formed by children of LACC from scalar replacements in the SAD->top_racc
   subtree.  If that is not possible, refresh the SAD->top_racc base aggregate
   and load the accesses from it.  */

static void
load_assign_lhs_subreplacements (struct access *lacc,
				 struct subreplacement_assignment_data *sad)
{
  for (lacc = lacc->first_child; lacc; lacc = lacc->next_sibling)
    {
      HOST_WIDE_INT offset;
      offset = lacc->offset - sad->left_offset + sad->top_racc->offset;

      if (lacc->grp_to_be_replaced)
	{
	  struct access *racc;
	  gassign *stmt;
	  tree rhs;

	  racc = find_access_in_subtree (sad->top_racc, offset, lacc->size);
	  if (racc && racc->grp_to_be_replaced)
	    {
	      rhs = get_access_replacement (racc);
	      if (!useless_type_conversion_p (lacc->type, racc->type))
		rhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
				       lacc->type, rhs);

	      if (racc->grp_partial_lhs && lacc->grp_partial_lhs)
		rhs = force_gimple_operand_gsi (&sad->old_gsi, rhs, true,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  else
	    {
	      /* No suitable access on the right hand side, need to load from
		 the aggregate.  See if we have to update it first... */
	      if (sad->refreshed == SRA_UDH_NONE)
		handle_unscalarized_data_in_subtree (sad);

	      if (sad->refreshed == SRA_UDH_LEFT)
		rhs = build_ref_for_model (sad->loc, sad->assignment_lhs,
					   lacc->offset - sad->left_offset,
					   lacc, sad->new_gsi, true);
	      else
		rhs = build_ref_for_model (sad->loc, sad->assignment_rhs,
					   lacc->offset - sad->left_offset,
					   lacc, sad->new_gsi, true);
	      if (lacc->grp_partial_lhs)
		rhs = force_gimple_operand_gsi (sad->new_gsi,
						rhs, true, NULL_TREE,
						false, GSI_NEW_STMT);
	    }

	  stmt = gimple_build_assign (get_access_replacement (lacc), rhs);
	  gsi_insert_after (sad->new_gsi, stmt, GSI_NEW_STMT);
	  gimple_set_location (stmt, sad->loc);
	  update_stmt (stmt);
	  sra_stats.subreplacements++;
	}
      else
	{
	  if (sad->refreshed == SRA_UDH_NONE
	      && lacc->grp_read && !lacc->grp_covered)
	    handle_unscalarized_data_in_subtree (sad);

	  if (lacc && lacc->grp_to_be_debug_replaced)
	    {
	      gdebug *ds;
	      tree drhs;
	      struct access *racc = find_access_in_subtree (sad->top_racc,
							    offset,
							    lacc->size);

	      if (racc && racc->grp_to_be_replaced)
		{
		  if (racc->grp_write)
		    drhs = get_access_replacement (racc);
		  else
		    drhs = NULL;
		}
	      else if (sad->refreshed == SRA_UDH_LEFT)
		drhs = build_debug_ref_for_model (sad->loc, lacc->base,
						  lacc->offset, lacc);
	      else if (sad->refreshed == SRA_UDH_RIGHT)
		drhs = build_debug_ref_for_model (sad->loc,
						  sad->top_racc->base,
						  offset, lacc);
	      else
		drhs = NULL_TREE;
	      if (drhs
		  && !useless_type_conversion_p (lacc->type,
						 TREE_TYPE (drhs)))
		drhs = fold_build1_loc (sad->loc, VIEW_CONVERT_EXPR,
					lacc->type, drhs);
	      ds = gimple_build_debug_bind (get_access_replacement (lacc),
					    drhs, gsi_stmt (sad->old_gsi));
	      gsi_insert_after (sad->new_gsi, ds, GSI_NEW_STMT);
	    }
	}

      if (lacc->first_child)
	load_assign_lhs_subreplacements (lacc, sad);
    }
}
/* Result code for SRA assignment modification.  */
enum assignment_mod_result { SRA_AM_NONE,      /* nothing done for the stmt */
			     SRA_AM_MODIFIED,  /* stmt changed but not
						  removed */
			     SRA_AM_REMOVED }; /* stmt eliminated */
/* Modify assignments with a CONSTRUCTOR on their RHS.  STMT is the assignment
   statement and GSI is the statement iterator pointing at it.  Returns the
   same values as sra_modify_assign.  */

static enum assignment_mod_result
sra_modify_constructor_assign (gimple stmt, gimple_stmt_iterator *gsi)
{
  tree lhs = gimple_assign_lhs (stmt);
  struct access *acc = get_access_for_expr (lhs);
  if (!acc)
    return SRA_AM_NONE;
  location_t loc = gimple_location (stmt);

  if (gimple_clobber_p (stmt))
    {
      /* Clobber the replacement variable.  */
      clobber_subtree (acc, gsi, !acc->grp_covered, loc);
      /* Remove clobbers of fully scalarized variables, they are dead.  */
      if (acc->grp_covered)
	{
	  unlink_stmt_vdef (stmt);
	  gsi_remove (gsi, true);
	  release_defs (stmt);
	  return SRA_AM_REMOVED;
	}
      else
	return SRA_AM_MODIFIED;
    }

  if (vec_safe_length (CONSTRUCTOR_ELTS (gimple_assign_rhs1 (stmt))) > 0)
    {
      /* I have never seen this code path trigger but if it can happen the
	 following should handle it gracefully.  */
      if (access_has_children_p (acc))
	generate_subtree_copies (acc->first_child, lhs, acc->offset, 0, 0, gsi,
				 true, true, loc);
      return SRA_AM_MODIFIED;
    }

  if (acc->grp_covered)
    {
      init_subtree_with_zero (acc, gsi, false, loc);
      unlink_stmt_vdef (stmt);
      gsi_remove (gsi, true);
      release_defs (stmt);
      return SRA_AM_REMOVED;
    }
  else
    {
      init_subtree_with_zero (acc, gsi, true, loc);
      return SRA_AM_MODIFIED;
    }
}
/* Create and return a new suitable default definition SSA_NAME for RACC which
   is an access describing an uninitialized part of an aggregate that is being
   loaded.  */

static tree
get_repl_default_def_ssa_name (struct access *racc)
{
  gcc_checking_assert (!racc->grp_to_be_replaced
		       && !racc->grp_to_be_debug_replaced);
  if (!racc->replacement_decl)
    racc->replacement_decl = create_access_replacement (racc);
  return get_or_create_ssa_default_def (cfun, racc->replacement_decl);
}
/* Return true if REF has a VIEW_CONVERT_EXPR or a COMPONENT_REF with a
   bit-field field declaration somewhere in it.  */

static bool
contains_vce_or_bfcref_p (const_tree ref)
{
  while (handled_component_p (ref))
    {
      if (TREE_CODE (ref) == VIEW_CONVERT_EXPR
	  || (TREE_CODE (ref) == COMPONENT_REF
	      && DECL_BIT_FIELD (TREE_OPERAND (ref, 1))))
	return true;
      ref = TREE_OPERAND (ref, 0);
    }

  return false;
}
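
/* For illustration: given

     struct S { int bf : 3; int i; } s;

   the bit-field read s.bf makes this predicate return true, as does a
   GENERIC tree like VIEW_CONVERT_EXPR<int>(x); a plain s.i does not.
   The trees shown are schematic, not compilable C.  */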
/* Examine both sides of the assignment statement pointed to by STMT, replace
   them with a scalar replacement if there is one and generate copying of
   replacements if scalarized aggregates have been used in the assignment.  GSI
   is used to hold generated statements for type conversions and subtree
   copying.  */

static enum assignment_mod_result
sra_modify_assign (gimple stmt, gimple_stmt_iterator *gsi)
{
  struct access *lacc, *racc;
  tree lhs, rhs;
  bool modify_this_stmt = false;
  bool force_gimple_rhs = false;
  location_t loc;
  gimple_stmt_iterator orig_gsi = *gsi;

  if (!gimple_assign_single_p (stmt))
    return SRA_AM_NONE;
  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (rhs) == CONSTRUCTOR)
    return sra_modify_constructor_assign (stmt, gsi);

  if (TREE_CODE (rhs) == REALPART_EXPR || TREE_CODE (lhs) == REALPART_EXPR
      || TREE_CODE (rhs) == IMAGPART_EXPR || TREE_CODE (lhs) == IMAGPART_EXPR
      || TREE_CODE (rhs) == BIT_FIELD_REF || TREE_CODE (lhs) == BIT_FIELD_REF)
    {
      modify_this_stmt = sra_modify_expr (gimple_assign_rhs1_ptr (stmt),
					  gsi, false);
      modify_this_stmt |= sra_modify_expr (gimple_assign_lhs_ptr (stmt),
					   gsi, true);
      return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
    }

  lacc = get_access_for_expr (lhs);
  racc = get_access_for_expr (rhs);
  if (!lacc && !racc)
    return SRA_AM_NONE;

  loc = gimple_location (stmt);
  if (lacc && lacc->grp_to_be_replaced)
    {
      lhs = get_access_replacement (lacc);
      gimple_assign_set_lhs (stmt, lhs);
      modify_this_stmt = true;
      if (lacc->grp_partial_lhs)
	force_gimple_rhs = true;
      sra_stats.exprs++;
    }

  if (racc && racc->grp_to_be_replaced)
    {
      rhs = get_access_replacement (racc);
      modify_this_stmt = true;
      if (racc->grp_partial_lhs)
	force_gimple_rhs = true;
      sra_stats.exprs++;
    }
  else if (racc
	   && !racc->grp_unscalarized_data
	   && TREE_CODE (lhs) == SSA_NAME
	   && !access_has_replacements_p (racc))
    {
      rhs = get_repl_default_def_ssa_name (racc);
      modify_this_stmt = true;
      sra_stats.exprs++;
    }

  if (modify_this_stmt)
    {
      if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	{
	  /* If we can avoid creating a VIEW_CONVERT_EXPR do so.
	     ???  This should move to fold_stmt which we simply should
	     call after building a VIEW_CONVERT_EXPR here.  */
	  if (AGGREGATE_TYPE_P (TREE_TYPE (lhs))
	      && !contains_bitfld_component_ref_p (lhs))
	    {
	      lhs = build_ref_for_model (loc, lhs, 0, racc, gsi, false);
	      gimple_assign_set_lhs (stmt, lhs);
	    }
	  else if (AGGREGATE_TYPE_P (TREE_TYPE (rhs))
		   && !contains_vce_or_bfcref_p (rhs))
	    rhs = build_ref_for_model (loc, rhs, 0, lacc, gsi, false);

	  if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
	    {
	      rhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR, TREE_TYPE (lhs),
				     rhs);
	      if (is_gimple_reg_type (TREE_TYPE (lhs))
		  && TREE_CODE (lhs) != SSA_NAME)
		force_gimple_rhs = true;
	    }
	}
    }

  if (lacc && lacc->grp_to_be_debug_replaced)
    {
      tree dlhs = get_access_replacement (lacc);
      tree drhs = unshare_expr (rhs);
      if (!useless_type_conversion_p (TREE_TYPE (dlhs), TREE_TYPE (drhs)))
	{
	  if (AGGREGATE_TYPE_P (TREE_TYPE (drhs))
	      && !contains_vce_or_bfcref_p (drhs))
	    drhs = build_debug_ref_for_model (loc, drhs, 0, lacc);
	  if (drhs
	      && !useless_type_conversion_p (TREE_TYPE (dlhs),
					     TREE_TYPE (drhs)))
	    drhs = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
				    TREE_TYPE (dlhs), drhs);
	}
      gdebug *ds = gimple_build_debug_bind (dlhs, drhs, stmt);
      gsi_insert_before (gsi, ds, GSI_SAME_STMT);
    }

  /* From this point on, the function deals with assignments in between
     aggregates when at least one has scalar reductions of some of its
     components.  There are three possible scenarios: 1) both the LHS and RHS
     have to-be-scalarized components, 2) only the RHS has or 3) only the LHS
     has.

     In the first case, we would like to load the LHS components from RHS
     components whenever possible.  If that is not possible, we would like to
     read it directly from the RHS (after updating it by storing in it its own
     components).  If there are some necessary unscalarized data in the LHS,
     those will be loaded by the original assignment too.  If neither of these
     cases happen, the original statement can be removed.  Most of this is done
     by load_assign_lhs_subreplacements.

     In the second case, we would like to store all RHS scalarized components
     directly into LHS and if they cover the aggregate completely, remove the
     statement too.  In the third case, we want the LHS components to be loaded
     directly from the RHS (DSE will remove the original statement if it
     becomes redundant).

     This is a bit complex but manageable when types match and when unions do
     not cause confusion in a way that we cannot really load a component of LHS
     from the RHS or vice versa (the access representing this level can have
     subaccesses that are accessible only through a different union field at a
     higher level - different from the one used in the examined expression).

     Therefore, I specially handle a fourth case, happening when there is a
     specific type cast or it is impossible to locate a scalarized subaccess on
     the other side of the expression.  If that happens, I simply "refresh" the
     RHS by storing into it its scalarized components, leave the original
     statement there to do the copying and then load the scalar replacements of
     the LHS.  This is what the first branch does.  */
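
  /* A schematic example of the fourth case (illustrative only): for a
     union-punned copy such as

       union U { struct A a; struct B b; } u, v;

       v.b = u.b;   // but only components reached through the 'a' field
                    // of u and v have scalar replacements

     no scalarized subaccess of the LHS can be located on the RHS side, so
     the branch below first flushes the RHS replacements back into u.b,
     keeps the aggregate copy, and afterwards re-loads the LHS replacements
     from v.b.  */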
  if (modify_this_stmt
      || gimple_has_volatile_ops (stmt)
      || contains_vce_or_bfcref_p (rhs)
      || contains_vce_or_bfcref_p (lhs)
      || stmt_ends_bb_p (stmt))
    {
      if (access_has_children_p (racc))
	generate_subtree_copies (racc->first_child, rhs, racc->offset, 0, 0,
				 gsi, false, false, loc);
      if (access_has_children_p (lacc))
	{
	  gimple_stmt_iterator alt_gsi = gsi_none ();
	  if (stmt_ends_bb_p (stmt))
	    {
	      alt_gsi = gsi_start_edge (single_non_eh_succ (gsi_bb (*gsi)));
	      gsi = &alt_gsi;
	    }
	  generate_subtree_copies (lacc->first_child, lhs, lacc->offset, 0, 0,
				   gsi, true, true, loc);
	}
      sra_stats.separate_lhs_rhs_handling++;

      /* This gimplification must be done after generate_subtree_copies,
	 lest we insert the subtree copies in the middle of the gimplified
	 sequence.  */
      if (force_gimple_rhs)
	rhs = force_gimple_operand_gsi (&orig_gsi, rhs, true, NULL_TREE,
					true, GSI_SAME_STMT);
      if (gimple_assign_rhs1 (stmt) != rhs)
	{
	  modify_this_stmt = true;
	  gimple_assign_set_rhs_from_tree (&orig_gsi, rhs);
	  gcc_assert (stmt == gsi_stmt (orig_gsi));
	}

      return modify_this_stmt ? SRA_AM_MODIFIED : SRA_AM_NONE;
    }
  else
    {
      if (access_has_children_p (lacc)
	  && access_has_children_p (racc)
	  /* When an access represents an unscalarizable region, it usually
	     represents accesses with variable offset and thus must not be used
	     to generate new memory accesses.  */
	  && !lacc->grp_unscalarizable_region
	  && !racc->grp_unscalarizable_region)
	{
	  struct subreplacement_assignment_data sad;

	  sad.left_offset = lacc->offset;
	  sad.assignment_lhs = lhs;
	  sad.assignment_rhs = rhs;
	  sad.top_racc = racc;
	  sad.old_gsi = *gsi;
	  sad.new_gsi = gsi;
	  sad.loc = gimple_location (stmt);
	  sad.refreshed = SRA_UDH_NONE;

	  if (lacc->grp_read && !lacc->grp_covered)
	    handle_unscalarized_data_in_subtree (&sad);

	  load_assign_lhs_subreplacements (lacc, &sad);
	  if (sad.refreshed != SRA_UDH_RIGHT)
	    {
	      gsi_next (gsi);
	      unlink_stmt_vdef (stmt);
	      gsi_remove (&sad.old_gsi, true);
	      release_defs (stmt);
	      sra_stats.deleted++;
	      return SRA_AM_REMOVED;
	    }
	}
      else
	{
	  if (access_has_children_p (racc)
	      && !racc->grp_unscalarized_data)
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "Removing load: ");
		  print_gimple_stmt (dump_file, stmt, 0, 0);
		}
	      generate_subtree_copies (racc->first_child, lhs,
				       racc->offset, 0, 0, gsi,
				       false, false, loc);
	      gcc_assert (stmt == gsi_stmt (*gsi));
	      unlink_stmt_vdef (stmt);
	      gsi_remove (gsi, true);
	      release_defs (stmt);
	      sra_stats.deleted++;
	      return SRA_AM_REMOVED;
	    }
	  /* Restore the aggregate RHS from its components so the
	     prevailing aggregate copy does the right thing.  */
	  if (access_has_children_p (racc))
	    generate_subtree_copies (racc->first_child, rhs, racc->offset,
				     0, 0, gsi, false, false, loc);
	  /* Re-load the components of the aggregate copy destination.
	     But use the RHS aggregate to load from to expose more
	     optimization opportunities.  */
	  if (access_has_children_p (lacc))
	    generate_subtree_copies (lacc->first_child, rhs, lacc->offset,
				     0, 0, gsi, true, true, loc);
	}

      return SRA_AM_NONE;
    }
}
/* Traverse the function body and all modifications as decided in
   analyze_all_variable_accesses.  Return true iff the CFG has been
   changed.  */

static bool
sra_modify_function_body (void)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  enum assignment_mod_result assign_result;
	  bool modified = false, deleted = false;
	  tree *t;
	  unsigned i;

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
	      if (*t != NULL_TREE)
		modified |= sra_modify_expr (t, &gsi, false);
	      break;

	    case GIMPLE_ASSIGN:
	      assign_result = sra_modify_assign (stmt, &gsi);
	      modified |= assign_result == SRA_AM_MODIFIED;
	      deleted = assign_result == SRA_AM_REMOVED;
	      break;

	    case GIMPLE_CALL:
	      /* Operands must be processed before the lhs.  */
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  t = gimple_call_arg_ptr (stmt, i);
		  modified |= sra_modify_expr (t, &gsi, false);
		}

	      if (gimple_call_lhs (stmt))
		{
		  t = gimple_call_lhs_ptr (stmt);
		  modified |= sra_modify_expr (t, &gsi, true);
		}
	      break;

	    case GIMPLE_ASM:
	      {
		gasm *asm_stmt = as_a <gasm *> (stmt);
		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
		  {
		    t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
		    modified |= sra_modify_expr (t, &gsi, false);
		  }
		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
		  {
		    t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
		    modified |= sra_modify_expr (t, &gsi, true);
		  }
	      }
	      break;

	    default:
	      break;
	    }

	  if (modified)
	    {
	      update_stmt (stmt);
	      if (maybe_clean_eh_stmt (stmt)
		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
		cfg_changed = true;
	    }
	  if (!deleted)
	    gsi_next (&gsi);
	}
    }

  gsi_commit_edge_inserts ();
  return cfg_changed;
}
/* Generate statements initializing scalar replacements of parts of function
   parameters.  */

static void
initialize_parameter_reductions (void)
{
  gimple_stmt_iterator gsi;
  gimple_seq seq = NULL;
  tree parm;

  gsi = gsi_start (seq);
  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      vec<access_p> *access_vec;
      struct access *access;

      if (!bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	continue;
      access_vec = get_base_access_vector (parm);
      if (!access_vec)
	continue;

      for (access = (*access_vec)[0];
	   access;
	   access = access->next_grp)
	generate_subtree_copies (access, parm, 0, 0, 0, &gsi, true, true,
				 EXPR_LOCATION (parm));
    }

  seq = gsi_seq (gsi);
  if (seq)
    gsi_insert_seq_on_edge_immediate (single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (cfun)), seq);
}
3565 /* The "main" function of intraprocedural SRA passes. Runs the analysis and if
3566 it reveals there are components of some aggregates to be scalarized, it runs
3567 the required transformations. */
3569 perform_intra_sra (void)
3574 if (!find_var_candidates ())
3577 if (!scan_function ())
3580 if (!analyze_all_variable_accesses ())
3583 if (sra_modify_function_body ())
3584 ret
= TODO_update_ssa
| TODO_cleanup_cfg
;
3586 ret
= TODO_update_ssa
;
3587 initialize_parameter_reductions ();
3589 statistics_counter_event (cfun
, "Scalar replacements created",
3590 sra_stats
.replacements
);
3591 statistics_counter_event (cfun
, "Modified expressions", sra_stats
.exprs
);
3592 statistics_counter_event (cfun
, "Subtree copy stmts",
3593 sra_stats
.subtree_copies
);
3594 statistics_counter_event (cfun
, "Subreplacement stmts",
3595 sra_stats
.subreplacements
);
3596 statistics_counter_event (cfun
, "Deleted stmts", sra_stats
.deleted
);
3597 statistics_counter_event (cfun
, "Separate LHS and RHS handling",
3598 sra_stats
.separate_lhs_rhs_handling
);
3601 sra_deinitialize ();
/* Perform early intraprocedural SRA.  */
static unsigned int
early_intra_sra (void)
{
  sra_mode = SRA_MODE_EARLY_INTRA;
  return perform_intra_sra ();
}

/* Perform "late" intraprocedural SRA.  */
static unsigned int
late_intra_sra (void)
{
  sra_mode = SRA_MODE_INTRA;
  return perform_intra_sra ();
}


static bool
gate_intra_sra (void)
{
  return flag_tree_sra != 0 && dbg_cnt (tree_sra);
}
namespace {

const pass_data pass_data_sra_early =
{
  GIMPLE_PASS, /* type */
  "esra", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SRA, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_sra_early : public gimple_opt_pass
{
public:
  pass_sra_early (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_sra_early, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return gate_intra_sra (); }
  virtual unsigned int execute (function *) { return early_intra_sra (); }

}; // class pass_sra_early

} // anon namespace

gimple_opt_pass *
make_pass_sra_early (gcc::context *ctxt)
{
  return new pass_sra_early (ctxt);
}
namespace {

const pass_data pass_data_sra =
{
  GIMPLE_PASS, /* type */
  "sra", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_SRA, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  TODO_update_address_taken, /* todo_flags_start */
  TODO_update_ssa, /* todo_flags_finish */
};

class pass_sra : public gimple_opt_pass
{
public:
  pass_sra (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_sra, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return gate_intra_sra (); }
  virtual unsigned int execute (function *) { return late_intra_sra (); }

}; // class pass_sra

} // anon namespace

gimple_opt_pass *
make_pass_sra (gcc::context *ctxt)
{
  return new pass_sra (ctxt);
}
/* Return true iff PARM (which must be a parm_decl) is an unused scalar
   parameter.  */

static bool
is_unused_scalar_param (tree parm)
{
  tree name;
  return (is_gimple_reg (parm)
	  && (!(name = ssa_default_def (cfun, parm))
	      || has_zero_uses (name)));
}
/* Scan immediate uses of a default definition SSA name of a parameter PARM and
   examine whether there are any direct or otherwise infeasible ones.  If so,
   return true, otherwise return false.  PARM must be a gimple register with a
   non-NULL default definition.  */

static bool
ptr_parm_has_direct_uses (tree parm)
{
  imm_use_iterator ui;
  gimple stmt;
  tree name = ssa_default_def (cfun, parm);
  bool ret = false;

  FOR_EACH_IMM_USE_STMT (stmt, ui, name)
    {
      int uses_ok = 0;
      use_operand_p use_p;

      if (is_gimple_debug (stmt))
	continue;

      /* Valid uses include dereferences on the lhs and the rhs.  */
      if (gimple_has_lhs (stmt))
	{
	  tree lhs = gimple_get_lhs (stmt);
	  while (handled_component_p (lhs))
	    lhs = TREE_OPERAND (lhs, 0);
	  if (TREE_CODE (lhs) == MEM_REF
	      && TREE_OPERAND (lhs, 0) == name
	      && integer_zerop (TREE_OPERAND (lhs, 1))
	      && types_compatible_p (TREE_TYPE (lhs),
				     TREE_TYPE (TREE_TYPE (name)))
	      && !TREE_THIS_VOLATILE (lhs))
	    uses_ok++;
	}
      if (gimple_assign_single_p (stmt))
	{
	  tree rhs = gimple_assign_rhs1 (stmt);
	  while (handled_component_p (rhs))
	    rhs = TREE_OPERAND (rhs, 0);
	  if (TREE_CODE (rhs) == MEM_REF
	      && TREE_OPERAND (rhs, 0) == name
	      && integer_zerop (TREE_OPERAND (rhs, 1))
	      && types_compatible_p (TREE_TYPE (rhs),
				     TREE_TYPE (TREE_TYPE (name)))
	      && !TREE_THIS_VOLATILE (rhs))
	    uses_ok++;
	}
      else if (is_gimple_call (stmt))
	{
	  unsigned i;
	  for (i = 0; i < gimple_call_num_args (stmt); ++i)
	    {
	      tree arg = gimple_call_arg (stmt, i);
	      while (handled_component_p (arg))
		arg = TREE_OPERAND (arg, 0);
	      if (TREE_CODE (arg) == MEM_REF
		  && TREE_OPERAND (arg, 0) == name
		  && integer_zerop (TREE_OPERAND (arg, 1))
		  && types_compatible_p (TREE_TYPE (arg),
					 TREE_TYPE (TREE_TYPE (name)))
		  && !TREE_THIS_VOLATILE (arg))
		uses_ok++;
	    }
	}

      /* If the number of valid uses does not match the number of
	 uses in this stmt there is an unhandled use.  */
      FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
	--uses_ok;

      if (uses_ok != 0)
	ret = true;

      if (ret)
	BREAK_FROM_IMM_USE_STMT (ui);
    }

  return ret;
}
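
/* Illustration of uses that do or do not block IPA-SRA of a pointer
   parameter (g is an arbitrary external function in this example):

     int g (int *p);

     int f1 (int *p) { return *p; }      // only dereferenced: no direct use
     int f2 (int *p) { return g (p); }   // p itself escapes: direct use

   In f1 every use of p's default definition is a valid dereference, so the
   function above returns false; in f2 the pointer value is passed on as an
   argument, which counts as an unhandled use and makes it return true.  */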
/* Identify candidates for reduction for IPA-SRA based on their type and mark
   them in candidate_bitmap.  Note that these do not necessarily include
   parameters which are unused and thus can be removed.  Return true iff any
   such candidate has been found.  */

static bool
find_param_candidates (void)
{
  tree parm;
  int count = 0;
  bool ret = false;
  const char *msg;

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      tree type = TREE_TYPE (parm);
      tree_node **slot;

      count++;

      if (TREE_THIS_VOLATILE (parm)
	  || TREE_ADDRESSABLE (parm)
	  || (!is_gimple_reg_type (type) && is_va_list_type (type)))
	continue;

      if (is_unused_scalar_param (parm))
	{
	  ret = true;
	  continue;
	}

      if (POINTER_TYPE_P (type))
	{
	  type = TREE_TYPE (type);

	  if (TREE_CODE (type) == FUNCTION_TYPE
	      || TYPE_VOLATILE (type)
	      || (TREE_CODE (type) == ARRAY_TYPE
		  && TYPE_NONALIASED_COMPONENT (type))
	      || !is_gimple_reg (parm)
	      || is_va_list_type (type)
	      || ptr_parm_has_direct_uses (parm))
	    continue;
	}
      else if (!AGGREGATE_TYPE_P (type))
	continue;

      if (!COMPLETE_TYPE_P (type)
	  || !tree_fits_uhwi_p (TYPE_SIZE (type))
	  || tree_to_uhwi (TYPE_SIZE (type)) == 0
	  || (AGGREGATE_TYPE_P (type)
	      && type_internals_preclude_sra_p (type, &msg)))
	continue;

      bitmap_set_bit (candidate_bitmap, DECL_UID (parm));
      slot = candidates->find_slot_with_hash (parm, DECL_UID (parm), INSERT);
      *slot = parm;

      ret = true;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Candidate (%d): ", DECL_UID (parm));
	  print_generic_expr (dump_file, parm, 0);
	  fprintf (dump_file, "\n");
	}
    }

  func_param_count = count;
  return ret;
}
/* Callback of walk_aliased_vdefs, marks the access passed as DATA as
   maybe_modified.  */

static bool
mark_maybe_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
		     void *data)
{
  struct access *repr = (struct access *) data;

  repr->grp_maybe_modified = 1;
  return true;
}
/* Analyze what representatives (in linked lists accessible from
   REPRESENTATIVES) can be modified by side effects of statements in the
   current function.  */

static void
analyze_modified_params (vec<access_p> representatives)
{
  int i;

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr;

      for (repr = representatives[i];
	   repr;
	   repr = repr->next_grp)
	{
	  struct access *access;
	  bitmap visited;
	  ao_ref ar;

	  if (no_accesses_p (repr))
	    continue;
	  if (!POINTER_TYPE_P (TREE_TYPE (repr->base))
	      || repr->grp_maybe_modified)
	    continue;

	  ao_ref_init (&ar, repr->expr);
	  visited = BITMAP_ALLOC (NULL);
	  for (access = repr; access; access = access->next_sibling)
	    {
	      /* All accesses are read ones, otherwise grp_maybe_modified would
		 be trivially set.  */
	      walk_aliased_vdefs (&ar, gimple_vuse (access->stmt),
				  mark_maybe_modified, repr, &visited);
	      if (repr->grp_maybe_modified)
		break;
	    }
	  BITMAP_FREE (visited);
	}
    }
}
/* Propagate distances in bb_dereferences in the opposite direction than the
   control flow edges, in each step storing the maximum of the current value
   and the minimum of all successors.  These steps are repeated until the table
   stabilizes.  Note that BBs which might terminate the function (according to
   the final_bbs bitmap) are never updated in this way.  */
static void
propagate_dereference_distances (void)
{
  basic_block bb;

  auto_vec<basic_block> queue (last_basic_block_for_fn (cfun));
  queue.quick_push (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  FOR_EACH_BB_FN (bb, cfun)
    {
      queue.quick_push (bb);
      bb->aux = bb;
    }

  while (!queue.is_empty ())
    {
      edge_iterator ei;
      edge e;
      bool change = false;
      int i;

      bb = queue.pop ();
      bb->aux = NULL;

      if (bitmap_bit_p (final_bbs, bb->index))
	continue;

      for (i = 0; i < func_param_count; i++)
	{
	  int idx = bb->index * func_param_count + i;
	  bool first = true;
	  HOST_WIDE_INT inh = 0;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      int succ_idx = e->dest->index * func_param_count + i;

	      if (e->src == EXIT_BLOCK_PTR_FOR_FN (cfun))
		continue;

	      if (first)
		{
		  first = false;
		  inh = bb_dereferences [succ_idx];
		}
	      else if (bb_dereferences [succ_idx] < inh)
		inh = bb_dereferences [succ_idx];
	    }

	  if (!first && bb_dereferences[idx] < inh)
	    {
	      bb_dereferences[idx] = inh;
	      change = true;
	    }
	}

      if (change && !bitmap_bit_p (final_bbs, bb->index))
	FOR_EACH_EDGE (e, ei, bb->preds)
	  {
	    if (e->src->aux)
	      continue;

	    e->src->aux = e->src;
	    queue.quick_push (e->src);
	  }
    }
}
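
/* A schematic example of this propagation (made up for illustration): in

     void f (int *p)
     {
       if (cond)
	 may_not_return ();   // this BB lands in final_bbs
       *p = 1;                // dereference distance 32 recorded here
     }

   the distance recorded for the dereferencing block is propagated
   backwards as the minimum over all successors, except that blocks in
   final_bbs keep their values, so the ENTRY block ends up knowing whether
   *p is certainly dereferenced on every path.  The bit size 32 is only
   illustrative.  */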
/* Dump a dereferences TABLE with heading STR to file F.  */

static void
dump_dereferences_table (FILE *f, const char *str, HOST_WIDE_INT *table)
{
  basic_block bb;

  fprintf (dump_file, "%s", str);
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (cfun),
		  EXIT_BLOCK_PTR_FOR_FN (cfun), next_bb)
    {
      fprintf (f, "%4i  %i   ", bb->index, bitmap_bit_p (final_bbs, bb->index));
      if (bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
	{
	  int i;
	  for (i = 0; i < func_param_count; i++)
	    {
	      int idx = bb->index * func_param_count + i;
	      fprintf (f, " %4" HOST_WIDE_INT_PRINT "d", table[idx]);
	    }
	}
      fprintf (f, "\n");
    }
  fprintf (dump_file, "\n");
}
/* Determine what (parts of) parameters passed by reference that are not
   assigned to are not certainly dereferenced in this function and thus the
   dereferencing cannot be safely moved to the caller without potentially
   introducing a segfault.  Mark such REPRESENTATIVES as
   grp_not_necessarilly_dereferenced.

   The maximum dereferenced "distance," i.e. the offset + size of the accessed
   part, is calculated (rather than a simple boolean) for each pointer
   parameter in order to handle cases when only a fraction of the whole
   aggregate is allocated (see testsuite/gcc.c-torture/execute/ipa-sra-2.c for
   an example).

   The maximum dereference distances for each pointer parameter and BB are
   already stored in bb_dereferences.  This routine simply propagates these
   values upwards by propagate_dereference_distances and then compares the
   distances of individual parameters in the ENTRY BB to the equivalent
   distances of each representative of a (fraction of a) parameter.  */

static void
analyze_caller_dereference_legality (vec<access_p> representatives)
{
  int i;

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
			     "Dereference table before propagation:\n",
			     bb_dereferences);

  propagate_dereference_distances ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    dump_dereferences_table (dump_file,
			     "Dereference table after propagation:\n",
			     bb_dereferences);

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = representatives[i];
      int idx = ENTRY_BLOCK_PTR_FOR_FN (cfun)->index * func_param_count + i;

      if (!repr || no_accesses_p (repr))
	continue;

      do
	{
	  if ((repr->offset + repr->size) > bb_dereferences[idx])
	    repr->grp_not_necessarilly_dereferenced = 1;
	  repr = repr->next_grp;
	}
      while (repr);
    }
}
/* Return the representative access for the parameter declaration PARM if it is
   a scalar passed by reference which is not written to and the pointer value
   is not used directly.  Thus, if it is legal to dereference it in the caller
   and we can rule out modifications through aliases, such parameter should be
   turned into one passed by value.  Return NULL otherwise.  */

static struct access *
unmodified_by_ref_scalar_representative (tree parm)
{
  int i, access_count;
  struct access *repr;
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (parm);
  gcc_assert (access_vec);
  repr = (*access_vec)[0];
  if (repr->write)
    return NULL;
  repr->group_representative = repr;

  access_count = access_vec->length ();
  for (i = 1; i < access_count; i++)
    {
      struct access *access = (*access_vec)[i];
      if (access->write)
	return NULL;
      access->group_representative = repr;
      access->next_sibling = repr->next_sibling;
      repr->next_sibling = access;
    }

  repr->grp_read = 1;
  repr->grp_scalar_ptr = 1;
  return repr;
}
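
/* Illustration (hypothetical transformation sketch): a callee like

     int f (const int *p) { return *p + 1; }

   only reads the scalar *p and never uses the pointer value directly, so
   the representative returned above allows IPA-SRA to effectively turn it
   into

     int f (int p_val) { return p_val + 1; }

   with callers passing *p by value; the new parameter name is made up for
   the example.  */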
/* Return true iff this ACCESS precludes IPA-SRA of the parameter it is
   associated with.  REQ_ALIGN is the minimum required alignment.  */

static bool
access_precludes_ipa_sra_p (struct access *access, unsigned int req_align)
{
  unsigned int exp_align;
  /* Avoid issues such as the second simple testcase in PR 42025.  The problem
     is incompatible assign in a call statement (and possibly even in asm
     statements).  This can be relaxed by using a new temporary but only for
     non-TREE_ADDRESSABLE types and is probably not worth the complexity. (In
     intraprocedural SRA we deal with this by keeping the old aggregate around,
     something we cannot do in IPA-SRA.)  */
  if (access->write
      && (is_gimple_call (access->stmt)
	  || gimple_code (access->stmt) == GIMPLE_ASM))
    return true;

  exp_align = get_object_alignment (access->expr);
  if (exp_align < req_align)
    return true;

  return false;
}
/* Sort collected accesses for parameter PARM, identify representatives for
   each accessed region and link them together.  Return NULL if there are
   different but overlapping accesses, return the special ptr value meaning
   there are no accesses for this parameter if that is the case and return the
   first representative otherwise.  Set *RO_GRP if there is a group of accesses
   with only read (i.e. no write) accesses.  */

static struct access *
splice_param_accesses (tree parm, bool *ro_grp)
{
  int i, j, access_count, group_count;
  int agg_size, total_size = 0;
  struct access *access, *res, **prev_acc_ptr = &res;
  vec<access_p> *access_vec;

  access_vec = get_base_access_vector (parm);
  if (!access_vec)
    return &no_accesses_representant;
  access_count = access_vec->length ();

  access_vec->qsort (compare_access_positions);

  i = 0;
  total_size = 0;
  group_count = 0;
  while (i < access_count)
    {
      bool modification;
      tree a1_alias_type;
      access = (*access_vec)[i];
      modification = access->write;
      if (access_precludes_ipa_sra_p (access, TYPE_ALIGN (access->type)))
	return NULL;
      a1_alias_type = reference_alias_ptr_type (access->expr);

      /* Access is about to become group representative unless we find some
	 nasty overlap which would preclude us from breaking this parameter
	 apart.  */

      j = i + 1;
      while (j < access_count)
	{
	  struct access *ac2 = (*access_vec)[j];
	  if (ac2->offset != access->offset)
	    {
	      /* All or nothing law for parameters.  */
	      if (access->offset + access->size > ac2->offset)
		return NULL;
	      else
		break;
	    }
	  else if (ac2->size != access->size)
	    return NULL;

	  if (access_precludes_ipa_sra_p (ac2, TYPE_ALIGN (access->type))
	      || (ac2->type != access->type
		  && (TREE_ADDRESSABLE (ac2->type)
		      || TREE_ADDRESSABLE (access->type)))
	      || (reference_alias_ptr_type (ac2->expr) != a1_alias_type))
	    return NULL;

	  modification |= ac2->write;
	  ac2->group_representative = access;
	  ac2->next_sibling = access->next_sibling;
	  access->next_sibling = ac2;
	  j++;
	}

      group_count++;
      access->grp_maybe_modified = modification;
      if (!modification)
	*ro_grp = true;
      *prev_acc_ptr = access;
      prev_acc_ptr = &access->next_grp;
      total_size += access->size;
      i = j;
    }

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
  else
    agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
  if (total_size >= agg_size)
    return NULL;

  gcc_assert (group_count > 0);
  return res;
}
4227 /* Decide whether parameters with representative accesses given by REPR should
4228 be reduced into components. */
static int
decide_one_param_reduction (struct access *repr)
{
  int total_size, cur_parm_size, agg_size, new_param_count, parm_size_limit;
  bool by_ref;
  tree parm;

  parm = repr->base;
  cur_parm_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (parm)));
  gcc_assert (cur_parm_size > 0);

  if (POINTER_TYPE_P (TREE_TYPE (parm)))
    {
      by_ref = true;
      agg_size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (TREE_TYPE (parm))));
    }
  else
    {
      by_ref = false;
      agg_size = cur_parm_size;
    }

  if (dump_file)
    {
      struct access *acc;
      fprintf (dump_file, "Evaluating PARAM group sizes for ");
      print_generic_expr (dump_file, parm, 0);
      fprintf (dump_file, " (UID: %u): \n", DECL_UID (parm));
      for (acc = repr; acc; acc = acc->next_grp)
	dump_access (dump_file, acc, true);
    }

  total_size = 0;
  new_param_count = 0;

  for (; repr; repr = repr->next_grp)
    {
      gcc_assert (parm == repr->base);

      /* Taking the address of a non-addressable field is verboten.  */
      if (by_ref && repr->non_addressable)
	return 0;

      /* Do not decompose a non-BLKmode param in a way that would
	 create BLKmode params.  Especially for by-reference passing
	 (thus, pointer-type param) this is hardly worthwhile.  */
      if (DECL_MODE (parm) != BLKmode
	  && TYPE_MODE (repr->type) == BLKmode)
	return 0;

      if (!by_ref || (!repr->grp_maybe_modified
		      && !repr->grp_not_necessarilly_dereferenced))
	total_size += repr->size;
      else
	total_size += cur_parm_size;

      new_param_count++;
    }

  gcc_assert (new_param_count > 0);

  if (optimize_function_for_size_p (cfun))
    parm_size_limit = cur_parm_size;
  else
    parm_size_limit = (PARAM_VALUE (PARAM_IPA_SRA_PTR_GROWTH_FACTOR)
		       * cur_parm_size);

  if (total_size < agg_size
      && total_size <= parm_size_limit)
    {
      if (dump_file)
	fprintf (dump_file, "    ....will be split into %i components\n",
		 new_param_count);
      return new_param_count;
    }
  else
    return 0;
}
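/* A worked instance of the size test above, assuming a 64-bit target and the
   default --param ipa-sra-ptr-growth-factor=2.  For

     struct S { int a, b, c, d; };

     int
     use (struct S *p)
     {
       return p->a + p->b;
     }

   cur_parm_size is 64 (the pointer), agg_size is 128 (the pointee) and the
   two unmodified, always-dereferenced groups give total_size = 64.  Since
   64 < 128 and 64 <= 2 * 64, the function returns 2 and the parameter is
   split into two scalar components.  */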
/* The order of the following enums is important, we need to do extra work for
   UNUSED_PARAMS, BY_VAL_ACCESSES and UNMODIF_BY_REF_ACCESSES.  */
enum ipa_splicing_result { NO_GOOD_ACCESS, UNUSED_PARAMS, BY_VAL_ACCESSES,
			   MODIF_BY_REF_ACCESSES, UNMODIF_BY_REF_ACCESSES };
/* Identify representatives of all accesses to all candidate parameters for
   IPA-SRA.  Return result based on what representatives have been found.  */

static enum ipa_splicing_result
splice_all_param_accesses (vec<access_p> &representatives)
{
  enum ipa_splicing_result result = NO_GOOD_ACCESS;
  tree parm;
  struct access *repr;

  representatives.create (func_param_count);

  for (parm = DECL_ARGUMENTS (current_function_decl);
       parm;
       parm = DECL_CHAIN (parm))
    {
      if (is_unused_scalar_param (parm))
	{
	  representatives.quick_push (&no_accesses_representant);
	  if (result == NO_GOOD_ACCESS)
	    result = UNUSED_PARAMS;
	}
      else if (POINTER_TYPE_P (TREE_TYPE (parm))
	       && is_gimple_reg_type (TREE_TYPE (TREE_TYPE (parm)))
	       && bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	{
	  repr = unmodified_by_ref_scalar_representative (parm);
	  representatives.quick_push (repr);
	  if (repr)
	    result = UNMODIF_BY_REF_ACCESSES;
	}
      else if (bitmap_bit_p (candidate_bitmap, DECL_UID (parm)))
	{
	  bool ro_grp = false;
	  repr = splice_param_accesses (parm, &ro_grp);
	  representatives.quick_push (repr);

	  if (repr && !no_accesses_p (repr))
	    {
	      if (POINTER_TYPE_P (TREE_TYPE (parm)))
		{
		  if (ro_grp)
		    result = UNMODIF_BY_REF_ACCESSES;
		  else if (result < MODIF_BY_REF_ACCESSES)
		    result = MODIF_BY_REF_ACCESSES;
		}
	      else if (result < BY_VAL_ACCESSES)
		result = BY_VAL_ACCESSES;
	    }
	  else if (no_accesses_p (repr) && (result == NO_GOOD_ACCESS))
	    result = UNUSED_PARAMS;
	}
      else
	representatives.quick_push (NULL);
    }

  if (result == NO_GOOD_ACCESS)
    {
      representatives.release ();
      return NO_GOOD_ACCESS;
    }

  return result;
}
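/* The classification above drives the classic IPA-SRA transformation.  As a
   hypothetical sketch (the replacement and clone names below are only
   illustrative; the real ones are derived from the "ISRA" prefix and the
   "isra" clone suffix used later in this file), a pointer to a gimple
   register type that is only read through,

     int f (int *p) { return *p + 1; }

   is classified as UNMODIF_BY_REF_ACCESSES and later rewritten in the clone
   roughly as

     int f.isra.0 (int p_val) { return p_val + 1; }

   with every caller loading *p and passing the value directly.  */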
/* Return the index of BASE in PARMS.  Abort if it is not found.  */

static inline int
get_param_index (tree base, vec<tree> parms)
{
  int i, len;

  len = parms.length ();
  for (i = 0; i < len; i++)
    if (parms[i] == base)
      return i;
  gcc_unreachable ();
}
/* Convert the decisions made at the representative level into compact
   parameter adjustments.  REPRESENTATIVES are pointers to the first
   representative of each parameter's accesses, ADJUSTMENTS_COUNT is the
   expected final number of adjustments.  */
static ipa_parm_adjustment_vec
turn_representatives_into_adjustments (vec<access_p> representatives,
				       int adjustments_count)
{
  vec<tree> parms;
  ipa_parm_adjustment_vec adjustments;
  tree parm;
  int i;

  gcc_assert (adjustments_count > 0);
  parms = ipa_get_vector_of_formal_parms (current_function_decl);
  adjustments.create (adjustments_count);
  parm = DECL_ARGUMENTS (current_function_decl);
  for (i = 0; i < func_param_count; i++, parm = DECL_CHAIN (parm))
    {
      struct access *repr = representatives[i];

      if (!repr || no_accesses_p (repr))
	{
	  struct ipa_parm_adjustment adj;

	  memset (&adj, 0, sizeof (adj));
	  adj.base_index = get_param_index (parm, parms);
	  adj.base = parm;
	  if (!repr)
	    adj.op = IPA_PARM_OP_COPY;
	  else
	    adj.op = IPA_PARM_OP_REMOVE;
	  adj.arg_prefix = "ISRA";
	  adjustments.quick_push (adj);
	}
      else
	{
	  struct ipa_parm_adjustment adj;
	  int index = get_param_index (parm, parms);

	  for (; repr; repr = repr->next_grp)
	    {
	      memset (&adj, 0, sizeof (adj));
	      gcc_assert (repr->base == parm);
	      adj.base_index = index;
	      adj.base = repr->base;
	      adj.type = repr->type;
	      adj.alias_ptr_type = reference_alias_ptr_type (repr->expr);
	      adj.offset = repr->offset;
	      adj.by_ref = (POINTER_TYPE_P (TREE_TYPE (repr->base))
			    && (repr->grp_maybe_modified
				|| repr->grp_not_necessarilly_dereferenced));
	      adj.arg_prefix = "ISRA";
	      adjustments.quick_push (adj);
	    }
	}
    }
  parms.release ();
  return adjustments;
}
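/* A sketch of the resulting vector (hypothetical): for

     void g (int unused, struct S *p);

   where "unused" is never read and only p->a and p->b (at bit offsets 0 and
   32) are accessed, the vector would hold one IPA_PARM_OP_REMOVE entry for
   "unused" followed by two component entries with base_index 1, offsets 0
   and 32 and arg_prefix "ISRA"; their by_ref flags are false when the groups
   are known to be unmodified and always dereferenced.  */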
/* Analyze the collected accesses and produce a plan for what to do with the
   parameters in the form of adjustments, NULL meaning nothing.  */
static ipa_parm_adjustment_vec
analyze_all_param_acesses (void)
{
  enum ipa_splicing_result repr_state;
  bool proceed = false;
  int i, adjustments_count = 0;
  vec<access_p> representatives;
  ipa_parm_adjustment_vec adjustments;

  repr_state = splice_all_param_accesses (representatives);
  if (repr_state == NO_GOOD_ACCESS)
    return ipa_parm_adjustment_vec ();

  /* If there are any parameters passed by reference which are not modified
     directly, we need to check whether they can be modified indirectly.  */
  if (repr_state == UNMODIF_BY_REF_ACCESSES)
    {
      analyze_caller_dereference_legality (representatives);
      analyze_modified_params (representatives);
    }

  for (i = 0; i < func_param_count; i++)
    {
      struct access *repr = representatives[i];

      if (repr && !no_accesses_p (repr))
	{
	  if (repr->grp_scalar_ptr)
	    {
	      adjustments_count++;
	      if (repr->grp_not_necessarilly_dereferenced
		  || repr->grp_maybe_modified)
		representatives[i] = NULL;
	      else
		{
		  proceed = true;
		  sra_stats.scalar_by_ref_to_by_val++;
		}
	    }
	  else
	    {
	      int new_components = decide_one_param_reduction (repr);

	      if (new_components == 0)
		{
		  representatives[i] = NULL;
		  adjustments_count++;
		}
	      else
		{
		  adjustments_count += new_components;
		  sra_stats.aggregate_params_reduced++;
		  sra_stats.param_reductions_created += new_components;
		  proceed = true;
		}
	    }
	}
      else
	{
	  if (no_accesses_p (repr))
	    {
	      proceed = true;
	      sra_stats.deleted_unused_parameters++;
	    }
	  adjustments_count++;
	}
    }

  if (!proceed && dump_file)
    fprintf (dump_file, "NOT proceeding to change params.\n");

  if (proceed)
    adjustments = turn_representatives_into_adjustments (representatives,
							 adjustments_count);
  else
    adjustments = ipa_parm_adjustment_vec ();

  representatives.release ();
  return adjustments;
}
/* If a parameter replacement identified by ADJ does not yet exist in the form
   of a declaration, create it and record it, otherwise return the previously
   created one.  */

static tree
get_replaced_param_substitute (struct ipa_parm_adjustment *adj)
{
  tree repl;
  if (!adj->new_ssa_base)
    {
      char *pretty_name = make_fancy_name (adj->base);

      repl = create_tmp_reg (TREE_TYPE (adj->base), "ISR");
      DECL_NAME (repl) = get_identifier (pretty_name);
      obstack_free (&name_obstack, pretty_name);

      adj->new_ssa_base = repl;
    }
  else
    repl = adj->new_ssa_base;
  return repl;
}
/* Find the first adjustment for a particular parameter BASE in a vector of
   ADJUSTMENTS which is not a copy_param.  Return NULL if there is no such
   adjustment.  */

static struct ipa_parm_adjustment *
get_adjustment_for_base (ipa_parm_adjustment_vec adjustments, tree base)
{
  int i, len;

  len = adjustments.length ();
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];
      if (adj->op != IPA_PARM_OP_COPY && adj->base == base)
	return adj;
    }

  return NULL;
}
/* If the statement STMT defines an SSA_NAME of a parameter which is to be
   removed because its value is not used, replace the SSA_NAME with one
   relating to a created VAR_DECL, redirect all of its uses, and return true.
   ADJUSTMENTS is the adjustments vector.  */
static bool
replace_removed_params_ssa_names (gimple stmt,
				  ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *adj;
  tree lhs, decl, repl, name;

  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else if (is_gimple_assign (stmt))
    lhs = gimple_assign_lhs (stmt);
  else if (is_gimple_call (stmt))
    lhs = gimple_call_lhs (stmt);
  else
    gcc_unreachable ();

  if (TREE_CODE (lhs) != SSA_NAME)
    return false;

  decl = SSA_NAME_VAR (lhs);
  if (decl == NULL_TREE
      || TREE_CODE (decl) != PARM_DECL)
    return false;

  adj = get_adjustment_for_base (adjustments, decl);
  if (!adj)
    return false;

  repl = get_replaced_param_substitute (adj);
  name = make_ssa_name (repl, stmt);

  if (dump_file)
    {
      fprintf (dump_file, "replacing an SSA name of a removed param ");
      print_generic_expr (dump_file, lhs, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, name, 0);
      fprintf (dump_file, "\n");
    }

  if (is_gimple_assign (stmt))
    gimple_assign_set_lhs (stmt, name);
  else if (is_gimple_call (stmt))
    gimple_call_set_lhs (stmt, name);
  else
    gimple_phi_set_result (as_a <gphi *> (stmt), name);

  replace_uses_by (lhs, name);
  release_ssa_name (lhs);
  return true;
}
/* If the statement STMT contains any expressions that need to be replaced
   with a different one as noted by ADJUSTMENTS, do so.  Handle any potential
   type incompatibilities (GSI is used to accommodate conversion statements
   and must point to the statement).  Return true iff the statement was
   modified.  */
static bool
sra_ipa_modify_assign (gimple stmt, gimple_stmt_iterator *gsi,
		       ipa_parm_adjustment_vec adjustments)
{
  tree *lhs_p, *rhs_p;
  bool any;

  if (!gimple_assign_single_p (stmt))
    return false;

  rhs_p = gimple_assign_rhs1_ptr (stmt);
  lhs_p = gimple_assign_lhs_ptr (stmt);

  any = ipa_modify_expr (rhs_p, false, adjustments);
  any |= ipa_modify_expr (lhs_p, false, adjustments);
  if (any)
    {
      tree new_rhs = NULL_TREE;

      if (!useless_type_conversion_p (TREE_TYPE (*lhs_p), TREE_TYPE (*rhs_p)))
	{
	  if (TREE_CODE (*rhs_p) == CONSTRUCTOR)
	    {
	      /* V_C_Es of constructors can cause trouble (PR 42714).  */
	      if (is_gimple_reg_type (TREE_TYPE (*lhs_p)))
		*rhs_p = build_zero_cst (TREE_TYPE (*lhs_p));
	      else
		*rhs_p = build_constructor (TREE_TYPE (*lhs_p), NULL);
	    }
	  else
	    new_rhs = fold_build1_loc (gimple_location (stmt),
				       VIEW_CONVERT_EXPR, TREE_TYPE (*lhs_p),
				       *rhs_p);
	}
      else if (REFERENCE_CLASS_P (*rhs_p)
	       && is_gimple_reg_type (TREE_TYPE (*lhs_p))
	       && !is_gimple_reg (*lhs_p))
	/* This can happen when an assignment in between two single field
	   structures is turned into an assignment in between two pointers to
	   scalars (PR 42237).  */
	new_rhs = *rhs_p;

      if (new_rhs)
	{
	  tree tmp = force_gimple_operand_gsi (gsi, new_rhs, true, NULL_TREE,
					       true, GSI_SAME_STMT);

	  gimple_assign_set_rhs_from_tree (gsi, tmp);
	}

      return true;
    }

  return false;
}
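/* An illustration of the PR 42237 situation mentioned above (hypothetical):

     struct W { float f; };

     void
     h (struct W *p, struct W *q)
     {
       *p = *q;
     }

   After both parameters are reduced to their single float components, the
   aggregate assignment turns into an assignment between scalar replacements;
   when the rewritten RHS is still a memory reference, force_gimple_operand_gsi
   above inserts the load as a separate statement so the result remains valid
   gimple.  */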
/* Traverse the function body and perform all modifications as described in
   ADJUSTMENTS.  Return true iff the CFG has been changed.  */
static bool
ipa_sra_modify_function_body (ipa_parm_adjustment_vec adjustments)
{
  bool cfg_changed = false;
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	replace_removed_params_ssa_names (gsi_stmt (gsi), adjustments);

      gsi = gsi_start_bb (bb);
      while (!gsi_end_p (gsi))
	{
	  gimple stmt = gsi_stmt (gsi);
	  bool modified = false;
	  tree *t;
	  unsigned i;

	  switch (gimple_code (stmt))
	    {
	    case GIMPLE_RETURN:
	      t = gimple_return_retval_ptr (as_a <greturn *> (stmt));
	      if (*t != NULL_TREE)
		modified |= ipa_modify_expr (t, true, adjustments);
	      break;

	    case GIMPLE_ASSIGN:
	      modified |= sra_ipa_modify_assign (stmt, &gsi, adjustments);
	      modified |= replace_removed_params_ssa_names (stmt, adjustments);
	      break;

	    case GIMPLE_CALL:
	      /* Operands must be processed before the lhs.  */
	      for (i = 0; i < gimple_call_num_args (stmt); i++)
		{
		  t = gimple_call_arg_ptr (stmt, i);
		  modified |= ipa_modify_expr (t, true, adjustments);
		}

	      if (gimple_call_lhs (stmt))
		{
		  t = gimple_call_lhs_ptr (stmt);
		  modified |= ipa_modify_expr (t, false, adjustments);
		  modified |= replace_removed_params_ssa_names (stmt,
								adjustments);
		}
	      break;

	    case GIMPLE_ASM:
	      {
		gasm *asm_stmt = as_a <gasm *> (stmt);
		for (i = 0; i < gimple_asm_ninputs (asm_stmt); i++)
		  {
		    t = &TREE_VALUE (gimple_asm_input_op (asm_stmt, i));
		    modified |= ipa_modify_expr (t, true, adjustments);
		  }
		for (i = 0; i < gimple_asm_noutputs (asm_stmt); i++)
		  {
		    t = &TREE_VALUE (gimple_asm_output_op (asm_stmt, i));
		    modified |= ipa_modify_expr (t, false, adjustments);
		  }
	      }
	      break;

	    default:
	      break;
	    }

	  if (modified)
	    {
	      update_stmt (stmt);
	      if (maybe_clean_eh_stmt (stmt)
		  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
		cfg_changed = true;
	    }
	  gsi_next (&gsi);
	}
    }

  return cfg_changed;
}
/* Call gimple_debug_bind_reset_value on all debug statements describing
   gimple register parameters that are being removed or replaced.  */
static void
sra_ipa_reset_debug_stmts (ipa_parm_adjustment_vec adjustments)
{
  int i, len;
  gimple_stmt_iterator *gsip = NULL, gsi;

  if (MAY_HAVE_DEBUG_STMTS && single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (cfun)))
    {
      gsi = gsi_after_labels (single_succ (ENTRY_BLOCK_PTR_FOR_FN (cfun)));
      gsip = &gsi;
    }
  len = adjustments.length ();
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      imm_use_iterator ui;
      gimple stmt;
      gdebug *def_temp;
      tree name, vexpr, copy = NULL_TREE;
      use_operand_p use_p;

      adj = &adjustments[i];
      if (adj->op == IPA_PARM_OP_COPY || !is_gimple_reg (adj->base))
	continue;
      name = ssa_default_def (cfun, adj->base);
      vexpr = NULL;
      if (name)
	FOR_EACH_IMM_USE_STMT (stmt, ui, name)
	  {
	    if (gimple_clobber_p (stmt))
	      {
		gimple_stmt_iterator cgsi = gsi_for_stmt (stmt);
		unlink_stmt_vdef (stmt);
		gsi_remove (&cgsi, true);
		release_defs (stmt);
		continue;
	      }
	    /* All other users must have been removed by
	       ipa_sra_modify_function_body.  */
	    gcc_assert (is_gimple_debug (stmt));
	    if (vexpr == NULL && gsip != NULL)
	      {
		gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
		vexpr = make_node (DEBUG_EXPR_DECL);
		def_temp = gimple_build_debug_source_bind (vexpr, adj->base,
							   NULL);
		DECL_ARTIFICIAL (vexpr) = 1;
		TREE_TYPE (vexpr) = TREE_TYPE (name);
		DECL_MODE (vexpr) = DECL_MODE (adj->base);
		gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
	      }
	    if (vexpr)
	      {
		FOR_EACH_IMM_USE_ON_STMT (use_p, ui)
		  SET_USE (use_p, vexpr);
	      }
	    else
	      gimple_debug_bind_reset_value (stmt);
	    update_stmt (stmt);
	  }
      /* Create a VAR_DECL for debug info purposes.  */
      if (!DECL_IGNORED_P (adj->base))
	{
	  copy = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
			     VAR_DECL, DECL_NAME (adj->base),
			     TREE_TYPE (adj->base));
	  if (DECL_PT_UID_SET_P (adj->base))
	    SET_DECL_PT_UID (copy, DECL_PT_UID (adj->base));
	  TREE_ADDRESSABLE (copy) = TREE_ADDRESSABLE (adj->base);
	  TREE_READONLY (copy) = TREE_READONLY (adj->base);
	  TREE_THIS_VOLATILE (copy) = TREE_THIS_VOLATILE (adj->base);
	  DECL_GIMPLE_REG_P (copy) = DECL_GIMPLE_REG_P (adj->base);
	  DECL_ARTIFICIAL (copy) = DECL_ARTIFICIAL (adj->base);
	  DECL_IGNORED_P (copy) = DECL_IGNORED_P (adj->base);
	  DECL_ABSTRACT_ORIGIN (copy) = DECL_ORIGIN (adj->base);
	  DECL_SEEN_IN_BIND_EXPR_P (copy) = 1;
	  SET_DECL_RTL (copy, 0);
	  TREE_USED (copy) = 1;
	  DECL_CONTEXT (copy) = current_function_decl;
	  add_local_decl (cfun, copy);
	  DECL_CHAIN (copy)
	    = BLOCK_VARS (DECL_INITIAL (current_function_decl));
	  BLOCK_VARS (DECL_INITIAL (current_function_decl)) = copy;
	}
      if (gsip != NULL && copy && target_for_debug_bind (adj->base))
	{
	  gcc_assert (TREE_CODE (adj->base) == PARM_DECL);
	  if (vexpr)
	    def_temp = gimple_build_debug_bind (copy, vexpr, NULL);
	  else
	    def_temp = gimple_build_debug_source_bind (copy, adj->base,
						       NULL);
	  gsi_insert_before (gsip, def_temp, GSI_SAME_STMT);
	}
    }
}
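/* What this preserves at -g (a rough sketch of the gimple dump format, not
   exact compiler output): for a removed parameter "i", a DEBUG_EXPR_DECL is
   source-bound to the original PARM_DECL at the top of the function and the
   debug uses are redirected to it, approximately

     # DEBUG D#1 s=> i
     # DEBUG i => D#1

   so a debugger can still print "i" wherever its value is recoverable.  */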
/* Return true if some caller of NODE passes fewer actual arguments than
   there are formal parameters of the current function, or if argument types
   do not match; return false if every call site matches.  */
static bool
some_callers_have_mismatched_arguments_p (struct cgraph_node *node,
					  void *data ATTRIBUTE_UNUSED)
{
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!cs->call_stmt || !callsite_arguments_match_p (cs->call_stmt))
      return true;

  return false;
}
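/* A hypothetical call that trips this check: with an unprototyped
   declaration, a caller may pass fewer (or differently typed) arguments than
   the definition expects,

     extern int k ();                          <-- caller's view, no prototype
     int use_k (void) { return k (); }

     int k (int a, int b) { return a + b; }    <-- actual definition

   callsite_arguments_match_p fails for such an edge and IPA-SRA must leave
   the signature alone.  */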
/* Return true if some caller of NODE has no VUSE attached to its call
   statement; return false if all call statements have one.  */
static bool
some_callers_have_no_vuse_p (struct cgraph_node *node,
			     void *data ATTRIBUTE_UNUSED)
{
  struct cgraph_edge *cs;
  for (cs = node->callers; cs; cs = cs->next_caller)
    if (!cs->call_stmt || !gimple_vuse (cs->call_stmt))
      return true;

  return false;
}
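/* A hypothetical way a call can lack a VUSE: when the caller sees a
   declaration carrying a "const" attribute, the call statement is built
   without virtual operands,

     extern int dist (struct P *p) __attribute__ ((const));
     int d = dist (&p);      <-- no VUSE on this call

   but rewriting such a call site would require inserting loads of *p whose
   virtual operands cannot be hooked up, so these callers make the pass give
   up.  The attribute mismatch is only illustrative; any call without a VUSE
   has the same effect.  */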
/* Convert all callers of NODE.  */
static bool
convert_callers_for_node (struct cgraph_node *node,
			  void *data)
{
  ipa_parm_adjustment_vec *adjustments = (ipa_parm_adjustment_vec *) data;
  bitmap recomputed_callers = BITMAP_ALLOC (NULL);
  struct cgraph_edge *cs;

  for (cs = node->callers; cs; cs = cs->next_caller)
    {
      push_cfun (DECL_STRUCT_FUNCTION (cs->caller->decl));

      if (dump_file)
	fprintf (dump_file, "Adjusting call %s/%i -> %s/%i\n",
		 xstrdup (cs->caller->name ()),
		 cs->caller->order,
		 xstrdup (cs->callee->name ()),
		 cs->callee->order);

      ipa_modify_call_arguments (cs, cs->call_stmt, *adjustments);

      pop_cfun ();
    }

  for (cs = node->callers; cs; cs = cs->next_caller)
    if (bitmap_set_bit (recomputed_callers, cs->caller->uid)
	&& gimple_in_ssa_p (DECL_STRUCT_FUNCTION (cs->caller->decl)))
      compute_inline_parameters (cs->caller, true);
  BITMAP_FREE (recomputed_callers);

  return true;
}
/* Convert all callers of NODE to pass parameters as given in ADJUSTMENTS.  */
static void
convert_callers (struct cgraph_node *node, tree old_decl,
		 ipa_parm_adjustment_vec adjustments)
{
  basic_block this_block;

  node->call_for_symbol_and_aliases (convert_callers_for_node,
				     &adjustments, false);

  if (!encountered_recursive_call)
    return;

  FOR_EACH_BB_FN (this_block, cfun)
    {
      gimple_stmt_iterator gsi;

      for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gcall *stmt;
	  tree call_fndecl;
	  stmt = dyn_cast <gcall *> (gsi_stmt (gsi));
	  if (!stmt)
	    continue;
	  call_fndecl = gimple_call_fndecl (stmt);
	  if (call_fndecl == old_decl)
	    {
	      if (dump_file)
		fprintf (dump_file, "Adjusting recursive call");
	      gimple_call_set_fndecl (stmt, node->decl);
	      ipa_modify_call_arguments (NULL, stmt, adjustments);
	    }
	}
    }
}
/* Perform all the modifications required in IPA-SRA for NODE to have
   parameters as given in ADJUSTMENTS.  Return true iff the CFG has been
   changed.  */
static bool
modify_function (struct cgraph_node *node, ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *new_node;
  bool cfg_changed;

  cgraph_edge::rebuild_edges ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();

  /* This must be done after rebuilding cgraph edges for node above.
     Otherwise any recursive calls to node that are recorded in
     redirect_callers will be corrupted.  */
  vec<cgraph_edge *> redirect_callers = node->collect_callers ();
  new_node = node->create_version_clone_with_body (redirect_callers, NULL,
						   NULL, false, NULL, NULL,
						   "isra");
  redirect_callers.release ();

  push_cfun (DECL_STRUCT_FUNCTION (new_node->decl));
  ipa_modify_formal_parameters (current_function_decl, adjustments);
  cfg_changed = ipa_sra_modify_function_body (adjustments);
  sra_ipa_reset_debug_stmts (adjustments);
  convert_callers (new_node, node->decl, adjustments);
  new_node->make_local ();
  return cfg_changed;
}
/* Means of communication between ipa_sra_check_caller and
   ipa_sra_preliminary_function_checks.  */

struct ipa_sra_check_caller_data
{
  bool has_callers;
  bool bad_arg_alignment;
  bool has_thunk;
};

/* If NODE has a caller, mark that fact in DATA, which is a pointer to
   ipa_sra_check_caller_data.  Also check all aggregate arguments in all known
   calls if they are unit aligned and if not, set the appropriate flag in DATA
   too.  */
static bool
ipa_sra_check_caller (struct cgraph_node *node, void *data)
{
  if (!node->callers)
    return false;

  struct ipa_sra_check_caller_data *iscc;
  iscc = (struct ipa_sra_check_caller_data *) data;
  iscc->has_callers = true;

  for (cgraph_edge *cs = node->callers; cs; cs = cs->next_caller)
    {
      if (cs->caller->thunk.thunk_p)
	{
	  iscc->has_thunk = true;
	  return true;
	}
      gimple call_stmt = cs->call_stmt;
      unsigned count = gimple_call_num_args (call_stmt);
      for (unsigned i = 0; i < count; i++)
	{
	  tree arg = gimple_call_arg (call_stmt, i);
	  if (is_gimple_reg (arg))
	    continue;

	  tree offset;
	  HOST_WIDE_INT bitsize, bitpos;
	  machine_mode mode;
	  int unsignedp, volatilep = 0;
	  get_inner_reference (arg, &bitsize, &bitpos, &offset, &mode,
			       &unsignedp, &volatilep, false);
	  if (bitpos % BITS_PER_UNIT)
	    {
	      iscc->bad_arg_alignment = true;
	      return true;
	    }
	}
    }

  return false;
}
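/* A hypothetical C++ situation that sets has_thunk: with multiple
   inheritance,

     struct A { virtual int f (int); };
     struct B { virtual int g (int); };
     struct C : A, B { virtual int g (int); };

   the B-in-C thunk of C::g shows up as a caller with thunk.thunk_p set;
   thunks cannot be redirected to a clone with a different signature, so the
   preliminary checks below refuse to transform such functions.  */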
/* Return false if the function is apparently unsuitable for IPA-SRA based on
   its attributes, return true otherwise.  NODE is the cgraph node of the
   current function.  */
static bool
ipa_sra_preliminary_function_checks (struct cgraph_node *node)
{
  if (!node->can_be_local_p ())
    {
      if (dump_file)
	fprintf (dump_file, "Function not local to this compilation unit.\n");
      return false;
    }

  if (!node->local.can_change_signature)
    {
      if (dump_file)
	fprintf (dump_file, "Function can not change signature.\n");
      return false;
    }

  if (!tree_versionable_function_p (node->decl))
    {
      if (dump_file)
	fprintf (dump_file, "Function is not versionable.\n");
      return false;
    }

  if (!opt_for_fn (node->decl, optimize)
      || !opt_for_fn (node->decl, flag_ipa_sra))
    {
      if (dump_file)
	fprintf (dump_file, "Function not optimized.\n");
      return false;
    }

  if (DECL_VIRTUAL_P (current_function_decl))
    {
      if (dump_file)
	fprintf (dump_file, "Function is a virtual method.\n");
      return false;
    }

  if ((DECL_ONE_ONLY (node->decl) || DECL_EXTERNAL (node->decl))
      && inline_summaries->get (node)->size >= MAX_INLINE_INSNS_AUTO)
    {
      if (dump_file)
	fprintf (dump_file, "Function too big to be made truly local.\n");
      return false;
    }

  if (cfun->stdarg)
    {
      if (dump_file)
	fprintf (dump_file, "Function uses stdarg.\n");
      return false;
    }

  if (TYPE_ATTRIBUTES (TREE_TYPE (node->decl)))
    return false;

  if (DECL_DISREGARD_INLINE_LIMITS (node->decl))
    {
      if (dump_file)
	fprintf (dump_file, "Always inline function will be inlined "
		 "anyway.\n");
      return false;
    }

  struct ipa_sra_check_caller_data iscc;
  memset (&iscc, 0, sizeof (iscc));
  node->call_for_symbol_and_aliases (ipa_sra_check_caller, &iscc, true);
  if (!iscc.has_callers)
    {
      if (dump_file)
	fprintf (dump_file,
		 "Function has no callers in this compilation unit.\n");
      return false;
    }

  if (iscc.bad_arg_alignment)
    {
      if (dump_file)
	fprintf (dump_file,
		 "A function call has an argument with non-unit alignment.\n");
      return false;
    }

  if (iscc.has_thunk)
    {
      if (dump_file)
	fprintf (dump_file, "A caller of the function is a thunk.\n");
      return false;
    }

  return true;
}
/* Perform early interprocedural SRA.  */
static unsigned int
ipa_early_sra (void)
{
  struct cgraph_node *node = cgraph_node::get (current_function_decl);
  ipa_parm_adjustment_vec adjustments;
  int ret = 0;

  if (!ipa_sra_preliminary_function_checks (node))
    return 0;

  sra_initialize ();
  sra_mode = SRA_MODE_EARLY_IPA;

  if (!find_param_candidates ())
    {
      if (dump_file)
	fprintf (dump_file, "Function has no IPA-SRA candidates.\n");
      goto simple_out;
    }

  if (node->call_for_symbol_and_aliases
      (some_callers_have_mismatched_arguments_p, NULL, true))
    {
      if (dump_file)
	fprintf (dump_file, "There are callers with insufficient number of "
		 "arguments or arguments with type mismatches.\n");
      goto simple_out;
    }

  if (node->call_for_symbol_and_aliases
      (some_callers_have_no_vuse_p, NULL, true))
    {
      if (dump_file)
	fprintf (dump_file, "There are callers with no VUSE attached "
		 "to a call stmt.\n");
      goto simple_out;
    }

  bb_dereferences = XCNEWVEC (HOST_WIDE_INT,
			      func_param_count
			      * last_basic_block_for_fn (cfun));
  final_bbs = BITMAP_ALLOC (NULL);

  scan_function ();
  if (encountered_apply_args)
    {
      if (dump_file)
	fprintf (dump_file, "Function calls __builtin_apply_args().\n");
      goto out;
    }

  if (encountered_unchangable_recursive_call)
    {
      if (dump_file)
	fprintf (dump_file, "Function calls itself with insufficient "
		 "number of arguments.\n");
      goto out;
    }

  adjustments = analyze_all_param_acesses ();
  if (!adjustments.exists ())
    goto out;
  if (dump_file)
    ipa_dump_param_adjustments (dump_file, adjustments, current_function_decl);

  if (modify_function (node, adjustments))
    ret = TODO_update_ssa | TODO_cleanup_cfg;
  else
    ret = TODO_update_ssa;
  adjustments.release ();

  statistics_counter_event (cfun, "Unused parameters deleted",
			    sra_stats.deleted_unused_parameters);
  statistics_counter_event (cfun, "Scalar parameters converted to by-value",
			    sra_stats.scalar_by_ref_to_by_val);
  statistics_counter_event (cfun, "Aggregate parameters broken up",
			    sra_stats.aggregate_params_reduced);
  statistics_counter_event (cfun, "Aggregate parameter components created",
			    sra_stats.param_reductions_created);

 out:
  BITMAP_FREE (final_bbs);
  free (bb_dereferences);
 simple_out:
  sra_deinitialize ();
  return ret;
}
namespace {

const pass_data pass_data_early_ipa_sra =
{
  GIMPLE_PASS, /* type */
  "eipa_sra", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_IPA_SRA, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_dump_symtab, /* todo_flags_finish */
};

class pass_early_ipa_sra : public gimple_opt_pass
{
public:
  pass_early_ipa_sra (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_early_ipa_sra, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *) { return flag_ipa_sra && dbg_cnt (eipa_sra); }
  virtual unsigned int execute (function *) { return ipa_early_sra (); }

}; // class pass_early_ipa_sra

} // anon namespace

gimple_opt_pass *
make_pass_early_ipa_sra (gcc::context *ctxt)
{
  return new pass_early_ipa_sra (ctxt);
}
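/* Usage sketch: the work of this pass can be observed with the dump option
   derived from the pass name registered above, e.g.

     gcc -O2 -fdump-tree-eipa_sra-details test.c

   and the whole transformation can be disabled with -fno-ipa-sra, which
   makes the gate above return false.  */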