1 /* Scalar Replacement of Aggregates (SRA) converts some structure
2 references into scalar references, exposing them to the scalar
3 optimizers.
4 Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008
5 Free Software Foundation, Inc.
6 Contributed by Diego Novillo <dnovillo@redhat.com>
7
8 This file is part of GCC.
9
10 GCC is free software; you can redistribute it and/or modify it
11 under the terms of the GNU General Public License as published by the
12 Free Software Foundation; either version 3, or (at your option) any
13 later version.
14
15 GCC is distributed in the hope that it will be useful, but WITHOUT
16 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
19
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
23
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "ggc.h"
29 #include "tree.h"
30
31 /* These RTL headers are needed for basic-block.h. */
32 #include "rtl.h"
33 #include "tm_p.h"
34 #include "hard-reg-set.h"
35 #include "basic-block.h"
36 #include "diagnostic.h"
37 #include "langhooks.h"
38 #include "tree-inline.h"
39 #include "tree-flow.h"
40 #include "tree-gimple.h"
41 #include "tree-dump.h"
42 #include "tree-pass.h"
43 #include "timevar.h"
44 #include "flags.h"
45 #include "bitmap.h"
46 #include "obstack.h"
47 #include "target.h"
48 /* expr.h is needed for MOVE_RATIO. */
49 #include "expr.h"
50 #include "params.h"
51
52
53 /* The objective of this pass is to replace a non-addressable aggregate with a
54 set of independent variables. Most of the time, all of these variables
55 will be scalars. But a secondary objective is to break up larger
56 aggregates into smaller aggregates. In the process we may find that some
57 bits of the larger aggregate can be deleted as unreferenced.
58
59 This substitution is done globally. More localized substitutions would
60 be the purview of a load-store motion pass.
61
62 The optimization proceeds in phases:
63
64 (1) Identify variables that have types that are candidates for
65 decomposition.
66
67 (2) Scan the function looking for the ways these variables are used.
68 In particular we're interested in the number of times a variable
69 (or member) is needed as a complete unit, and the number of times
70 a variable (or member) is copied.
71
72 (3) Based on the usage profile, instantiate substitution variables.
73
74 (4) Scan the function making replacements.
75 */
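
/* Illustrative example (not part of the original sources; the names are
   made up): given a local aggregate that is never taken by address, e.g.

     struct point { int x; int y; } p;
     p.x = a;
     p.y = b;
     return p.x + p.y;

   this pass may replace the members with independent scalar temporaries,
   conceptually

     p$x = a;
     p$y = b;
     return p$x + p$y;

   after which P itself is unreferenced and can be removed.  */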
76
77
78 /* True if this is the "early" pass, before inlining. */
79 static bool early_sra;
80
81 /* The set of todo flags to return from tree_sra. */
82 static unsigned int todoflags;
83
84 /* The set of aggregate variables that are candidates for scalarization. */
85 static bitmap sra_candidates;
86
87 /* Set of scalarizable PARM_DECLs that need copy-in operations at the
88 beginning of the function. */
89 static bitmap needs_copy_in;
90
91 /* Sets of bit pairs that cache type decomposition and instantiation. */
92 static bitmap sra_type_decomp_cache;
93 static bitmap sra_type_inst_cache;
94
95 /* One of these structures is created for each candidate aggregate and
96 each (accessed) member or group of members of such an aggregate. */
97 struct sra_elt
98 {
99 /* A tree of the elements. Used when we want to traverse everything. */
100 struct sra_elt *parent;
101 struct sra_elt *groups;
102 struct sra_elt *children;
103 struct sra_elt *sibling;
104
105 /* If this element is a root, then this is the VAR_DECL. If this is
106 a sub-element, this is some token used to identify the reference.
107 In the case of COMPONENT_REF, this is the FIELD_DECL. In the case
108 of an ARRAY_REF, this is the (constant) index. In the case of an
109 ARRAY_RANGE_REF, this is the (constant) RANGE_EXPR. In the case
110 of a complex number, this is a zero or one. */
111 tree element;
112
113 /* The type of the element. */
114 tree type;
115
116 /* A VAR_DECL, for any sub-element we've decided to replace. */
117 tree replacement;
118
119 /* The number of times the element is referenced as a whole. I.e.
120 given "a.b.c", this would be incremented for C, but not for A or B. */
121 unsigned int n_uses;
122
123 /* The number of times the element is copied to or from another
124 scalarizable element. */
125 unsigned int n_copies;
126
127 /* True if TYPE is scalar. */
128 bool is_scalar;
129
130 /* True if this element is a group of members of its parent. */
131 bool is_group;
132
133 /* True if we saw something about this element that prevents scalarization,
134 such as non-constant indexing. */
135 bool cannot_scalarize;
136
137 /* True if we've decided that structure-to-structure assignment
138 should happen via memcpy and not per-element. */
139 bool use_block_copy;
140
141 /* True if everything under this element has been marked TREE_NO_WARNING. */
142 bool all_no_warning;
143
144 /* A flag for use with/after random access traversals. */
145 bool visited;
146
147 /* True if there is a BIT_FIELD_REF on the LHS whose base is a vector. */
148 bool is_vector_lhs;
149
150 /* 1 if the element is a field that is part of a bit-field block, 2 if the
151 element is the block itself, 0 if it is neither. */
152 char in_bitfld_block;
153 };
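
/* Illustrative sketch of the element tree (not part of the original
   sources; the declaration is made up): for a candidate such as

     struct { _Complex double c; int a[2]; } v;

   the root element for V has a child for the FIELD_DECL C, which in turn
   has children keyed by integer 0 and 1 for the real and imaginary parts,
   and a child for the FIELD_DECL A with children keyed by the constant
   indices 0 and 1.  Each node carries its own n_uses/n_copies counts.  */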
154
155 #define IS_ELEMENT_FOR_GROUP(ELEMENT) (TREE_CODE (ELEMENT) == RANGE_EXPR)
156
157 #define FOR_EACH_ACTUAL_CHILD(CHILD, ELT) \
158 for ((CHILD) = (ELT)->is_group \
159 ? next_child_for_group (NULL, (ELT)) \
160 : (ELT)->children; \
161 (CHILD); \
162 (CHILD) = (ELT)->is_group \
163 ? next_child_for_group ((CHILD), (ELT)) \
164 : (CHILD)->sibling)
165
166 /* Helper function for above macro. Return next child in group. */
167 static struct sra_elt *
168 next_child_for_group (struct sra_elt *child, struct sra_elt *group)
169 {
170 gcc_assert (group->is_group);
171
172 /* Find the next child in the parent. */
173 if (child)
174 child = child->sibling;
175 else
176 child = group->parent->children;
177
178 /* Skip siblings that do not belong to the group. */
179 while (child)
180 {
181 tree g_elt = group->element;
182 if (TREE_CODE (g_elt) == RANGE_EXPR)
183 {
184 if (!tree_int_cst_lt (child->element, TREE_OPERAND (g_elt, 0))
185 && !tree_int_cst_lt (TREE_OPERAND (g_elt, 1), child->element))
186 break;
187 }
188 else
189 gcc_unreachable ();
190
191 child = child->sibling;
192 }
193
194 return child;
195 }
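
/* Example use of the iterator above (illustrative only; later phases of
   the pass use it in the same way):

     struct sra_elt *c;
     FOR_EACH_ACTUAL_CHILD (c, elt)
       if (c->replacement == NULL)
         ...

   When ELT is a group, this visits only those children of ELT's parent
   whose constant index lies within the group's RANGE_EXPR bounds.  */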
196
197 /* Random access to the child of a parent is performed by hashing.
198 This prevents quadratic behavior, and allows SRA to function
199 reasonably on larger records. */
200 static htab_t sra_map;
201
202 /* All structures are allocated out of the following obstack. */
203 static struct obstack sra_obstack;
204
205 /* Debugging functions. */
206 static void dump_sra_elt_name (FILE *, struct sra_elt *);
207 extern void debug_sra_elt_name (struct sra_elt *);
208
209 /* Forward declarations. */
210 static tree generate_element_ref (struct sra_elt *);
211 static tree sra_build_assignment (tree dst, tree src);
212 static void mark_all_v_defs (tree list);
213
214 \f
215 /* Return true if DECL is an SRA candidate. */
216
217 static bool
218 is_sra_candidate_decl (tree decl)
219 {
220 return DECL_P (decl) && bitmap_bit_p (sra_candidates, DECL_UID (decl));
221 }
222
223 /* Return true if TYPE is a scalar type. */
224
225 static bool
226 is_sra_scalar_type (tree type)
227 {
228 enum tree_code code = TREE_CODE (type);
229 return (code == INTEGER_TYPE || code == REAL_TYPE || code == VECTOR_TYPE
230 || code == FIXED_POINT_TYPE
231 || code == ENUMERAL_TYPE || code == BOOLEAN_TYPE
232 || code == POINTER_TYPE || code == OFFSET_TYPE
233 || code == REFERENCE_TYPE);
234 }
235
236 /* Return true if TYPE can be decomposed into a set of independent variables.
237
238 Note that this doesn't imply that all elements of TYPE can be
239 instantiated, just that if we decide to break up the type into
240 separate pieces that it can be done. */
241
242 bool
243 sra_type_can_be_decomposed_p (tree type)
244 {
245 unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
246 tree t;
247
248 /* Avoid searching the same type twice. */
249 if (bitmap_bit_p (sra_type_decomp_cache, cache+0))
250 return true;
251 if (bitmap_bit_p (sra_type_decomp_cache, cache+1))
252 return false;
253
254 /* The type must have a definite nonzero size. */
255 if (TYPE_SIZE (type) == NULL || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST
256 || integer_zerop (TYPE_SIZE (type)))
257 goto fail;
258
259 /* The type must be a non-union aggregate. */
260 switch (TREE_CODE (type))
261 {
262 case RECORD_TYPE:
263 {
264 bool saw_one_field = false;
265
266 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
267 if (TREE_CODE (t) == FIELD_DECL)
268 {
269 /* Reject incorrectly represented bit fields. */
270 if (DECL_BIT_FIELD (t)
271 && INTEGRAL_TYPE_P (TREE_TYPE (t))
272 && (tree_low_cst (DECL_SIZE (t), 1)
273 != TYPE_PRECISION (TREE_TYPE (t))))
274 goto fail;
275
276 saw_one_field = true;
277 }
278
279 /* Record types must have at least one field. */
280 if (!saw_one_field)
281 goto fail;
282 }
283 break;
284
285 case ARRAY_TYPE:
286 /* Array types must have a fixed lower and upper bound. */
287 t = TYPE_DOMAIN (type);
288 if (t == NULL)
289 goto fail;
290 if (TYPE_MIN_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MIN_VALUE (t)))
291 goto fail;
292 if (TYPE_MAX_VALUE (t) == NULL || !TREE_CONSTANT (TYPE_MAX_VALUE (t)))
293 goto fail;
294 break;
295
296 case COMPLEX_TYPE:
297 break;
298
299 default:
300 goto fail;
301 }
302
303 bitmap_set_bit (sra_type_decomp_cache, cache+0);
304 return true;
305
306 fail:
307 bitmap_set_bit (sra_type_decomp_cache, cache+1);
308 return false;
309 }
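
/* Illustrative examples (not part of the original sources) of how the
   checks above classify types:

     struct { int i; double d; }    decomposable record
     int [4]                        decomposable array (constant bounds)
     _Complex double                decomposable
     union { int i; float f; }      fails (not a non-union aggregate)
     int [n], N not constant        fails (no constant size or bounds)

   Either answer is cached per main variant in sra_type_decomp_cache.  */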
310
311 /* Returns true if the TYPE is one of the available va_list types.
312 Otherwise it returns false.
313 Note, that for multiple calling conventions there can be more
314 than just one va_list type present. */
315
316 static bool
317 is_va_list_type (tree type)
318 {
319 tree h;
320
321 if (type == NULL_TREE)
322 return false;
323 h = targetm.canonical_va_list_type (type);
324 if (h == NULL_TREE)
325 return false;
326 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (h))
327 return true;
328 return false;
329 }
330
331 /* Return true if DECL can be decomposed into a set of independent
332 (though not necessarily scalar) variables. */
333
334 static bool
335 decl_can_be_decomposed_p (tree var)
336 {
337 /* Early out for scalars. */
338 if (is_sra_scalar_type (TREE_TYPE (var)))
339 return false;
340
341 /* The variable must not be aliased. */
342 if (!is_gimple_non_addressable (var))
343 {
344 if (dump_file && (dump_flags & TDF_DETAILS))
345 {
346 fprintf (dump_file, "Cannot scalarize variable ");
347 print_generic_expr (dump_file, var, dump_flags);
348 fprintf (dump_file, " because it must live in memory\n");
349 }
350 return false;
351 }
352
353 /* The variable must not be volatile. */
354 if (TREE_THIS_VOLATILE (var))
355 {
356 if (dump_file && (dump_flags & TDF_DETAILS))
357 {
358 fprintf (dump_file, "Cannot scalarize variable ");
359 print_generic_expr (dump_file, var, dump_flags);
360 fprintf (dump_file, " because it is declared volatile\n");
361 }
362 return false;
363 }
364
365 /* We must be able to decompose the variable's type. */
366 if (!sra_type_can_be_decomposed_p (TREE_TYPE (var)))
367 {
368 if (dump_file && (dump_flags & TDF_DETAILS))
369 {
370 fprintf (dump_file, "Cannot scalarize variable ");
371 print_generic_expr (dump_file, var, dump_flags);
372 fprintf (dump_file, " because its type cannot be decomposed\n");
373 }
374 return false;
375 }
376
377 /* HACK: if we decompose a va_list_type_node before inlining, then we'll
378 confuse tree-stdarg.c, and we won't be able to figure out which and
379 how many arguments are accessed. This really should be improved in
380 tree-stdarg.c, as the decomposition is truly a win. This could also
381 be fixed if the stdarg pass ran early, but this can't be done until
382 we have aliasing information early too. See PR 30791. */
383 if (early_sra && is_va_list_type (TREE_TYPE (var)))
384 return false;
385
386 return true;
387 }
388
389 /* Return true if TYPE can be *completely* decomposed into scalars. */
390
391 static bool
392 type_can_instantiate_all_elements (tree type)
393 {
394 if (is_sra_scalar_type (type))
395 return true;
396 if (!sra_type_can_be_decomposed_p (type))
397 return false;
398
399 switch (TREE_CODE (type))
400 {
401 case RECORD_TYPE:
402 {
403 unsigned int cache = TYPE_UID (TYPE_MAIN_VARIANT (type)) * 2;
404 tree f;
405
406 if (bitmap_bit_p (sra_type_inst_cache, cache+0))
407 return true;
408 if (bitmap_bit_p (sra_type_inst_cache, cache+1))
409 return false;
410
411 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
412 if (TREE_CODE (f) == FIELD_DECL)
413 {
414 if (!type_can_instantiate_all_elements (TREE_TYPE (f)))
415 {
416 bitmap_set_bit (sra_type_inst_cache, cache+1);
417 return false;
418 }
419 }
420
421 bitmap_set_bit (sra_type_inst_cache, cache+0);
422 return true;
423 }
424
425 case ARRAY_TYPE:
426 return type_can_instantiate_all_elements (TREE_TYPE (type));
427
428 case COMPLEX_TYPE:
429 return true;
430
431 default:
432 gcc_unreachable ();
433 }
434 }
435
436 /* Test whether ELT or some sub-element cannot be scalarized. */
437
438 static bool
439 can_completely_scalarize_p (struct sra_elt *elt)
440 {
441 struct sra_elt *c;
442
443 if (elt->cannot_scalarize)
444 return false;
445
446 for (c = elt->children; c; c = c->sibling)
447 if (!can_completely_scalarize_p (c))
448 return false;
449
450 for (c = elt->groups; c; c = c->sibling)
451 if (!can_completely_scalarize_p (c))
452 return false;
453
454 return true;
455 }
456
457 \f
458 /* A simplified tree hashing algorithm that only handles the types of
459 trees we expect to find in sra_elt->element. */
460
461 static hashval_t
462 sra_hash_tree (tree t)
463 {
464 hashval_t h;
465
466 switch (TREE_CODE (t))
467 {
468 case VAR_DECL:
469 case PARM_DECL:
470 case RESULT_DECL:
471 h = DECL_UID (t);
472 break;
473
474 case INTEGER_CST:
475 h = TREE_INT_CST_LOW (t) ^ TREE_INT_CST_HIGH (t);
476 break;
477
478 case RANGE_EXPR:
479 h = iterative_hash_expr (TREE_OPERAND (t, 0), 0);
480 h = iterative_hash_expr (TREE_OPERAND (t, 1), h);
481 break;
482
483 case FIELD_DECL:
484 /* We can have types that are compatible, but have different member
485 lists, so we can't hash fields by ID. Use offsets instead. */
486 h = iterative_hash_expr (DECL_FIELD_OFFSET (t), 0);
487 h = iterative_hash_expr (DECL_FIELD_BIT_OFFSET (t), h);
488 break;
489
490 case BIT_FIELD_REF:
491 /* Don't take operand 0 into account, that's our parent. */
492 h = iterative_hash_expr (TREE_OPERAND (t, 1), 0);
493 h = iterative_hash_expr (TREE_OPERAND (t, 2), h);
494 break;
495
496 default:
497 gcc_unreachable ();
498 }
499
500 return h;
501 }
502
503 /* Hash function for type SRA_PAIR. */
504
505 static hashval_t
506 sra_elt_hash (const void *x)
507 {
508 const struct sra_elt *const e = (const struct sra_elt *) x;
509 const struct sra_elt *p;
510 hashval_t h;
511
512 h = sra_hash_tree (e->element);
513
514 /* Take into account everything except bitfield blocks back up the
515 chain. Given that chain lengths are rarely very long, this
516 should be acceptable. If we truly identify this as a performance
517 problem, it should work to hash the pointer value
518 "e->parent". */
519 for (p = e->parent; p ; p = p->parent)
520 if (!p->in_bitfld_block)
521 h = (h * 65521) ^ sra_hash_tree (p->element);
522
523 return h;
524 }
525
526 /* Equality function for type SRA_PAIR. */
527
528 static int
529 sra_elt_eq (const void *x, const void *y)
530 {
531 const struct sra_elt *const a = (const struct sra_elt *) x;
532 const struct sra_elt *const b = (const struct sra_elt *) y;
533 tree ae, be;
534 const struct sra_elt *ap = a->parent;
535 const struct sra_elt *bp = b->parent;
536
537 if (ap)
538 while (ap->in_bitfld_block)
539 ap = ap->parent;
540 if (bp)
541 while (bp->in_bitfld_block)
542 bp = bp->parent;
543
544 if (ap != bp)
545 return false;
546
547 ae = a->element;
548 be = b->element;
549
550 if (ae == be)
551 return true;
552 if (TREE_CODE (ae) != TREE_CODE (be))
553 return false;
554
555 switch (TREE_CODE (ae))
556 {
557 case VAR_DECL:
558 case PARM_DECL:
559 case RESULT_DECL:
560 /* These are all pointer unique. */
561 return false;
562
563 case INTEGER_CST:
564 /* Integers are not pointer unique, so compare their values. */
565 return tree_int_cst_equal (ae, be);
566
567 case RANGE_EXPR:
568 return
569 tree_int_cst_equal (TREE_OPERAND (ae, 0), TREE_OPERAND (be, 0))
570 && tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1));
571
572 case FIELD_DECL:
573 /* Fields are unique within a record, but not between
574 compatible records. */
575 if (DECL_FIELD_CONTEXT (ae) == DECL_FIELD_CONTEXT (be))
576 return false;
577 return fields_compatible_p (ae, be);
578
579 case BIT_FIELD_REF:
580 return
581 tree_int_cst_equal (TREE_OPERAND (ae, 1), TREE_OPERAND (be, 1))
582 && tree_int_cst_equal (TREE_OPERAND (ae, 2), TREE_OPERAND (be, 2));
583
584 default:
585 gcc_unreachable ();
586 }
587 }
588
589 /* Create or return the SRA_ELT structure for CHILD in PARENT. PARENT
590 may be null, in which case CHILD must be a DECL. */
591
592 static struct sra_elt *
593 lookup_element (struct sra_elt *parent, tree child, tree type,
594 enum insert_option insert)
595 {
596 struct sra_elt dummy;
597 struct sra_elt **slot;
598 struct sra_elt *elt;
599
600 if (parent)
601 dummy.parent = parent->is_group ? parent->parent : parent;
602 else
603 dummy.parent = NULL;
604 dummy.element = child;
605
606 slot = (struct sra_elt **) htab_find_slot (sra_map, &dummy, insert);
607 if (!slot && insert == NO_INSERT)
608 return NULL;
609
610 elt = *slot;
611 if (!elt && insert == INSERT)
612 {
613 *slot = elt = XOBNEW (&sra_obstack, struct sra_elt);
614 memset (elt, 0, sizeof (*elt));
615
616 elt->parent = parent;
617 elt->element = child;
618 elt->type = type;
619 elt->is_scalar = is_sra_scalar_type (type);
620
621 if (parent)
622 {
623 if (IS_ELEMENT_FOR_GROUP (elt->element))
624 {
625 elt->is_group = true;
626 elt->sibling = parent->groups;
627 parent->groups = elt;
628 }
629 else
630 {
631 elt->sibling = parent->children;
632 parent->children = elt;
633 }
634 }
635
636 /* If this is a parameter, then if we want to scalarize, we have
637 one copy from the true function parameter. Count it now. */
638 if (TREE_CODE (child) == PARM_DECL)
639 {
640 elt->n_copies = 1;
641 bitmap_set_bit (needs_copy_in, DECL_UID (child));
642 }
643 }
644
645 return elt;
646 }
647
648 /* Create or return the SRA_ELT structure for EXPR if the expression
649 refers to a scalarizable variable. */
650
651 static struct sra_elt *
652 maybe_lookup_element_for_expr (tree expr)
653 {
654 struct sra_elt *elt;
655 tree child;
656
657 switch (TREE_CODE (expr))
658 {
659 case VAR_DECL:
660 case PARM_DECL:
661 case RESULT_DECL:
662 if (is_sra_candidate_decl (expr))
663 return lookup_element (NULL, expr, TREE_TYPE (expr), INSERT);
664 return NULL;
665
666 case ARRAY_REF:
667 /* We can't scalarize variable array indices. */
668 if (in_array_bounds_p (expr))
669 child = TREE_OPERAND (expr, 1);
670 else
671 return NULL;
672 break;
673
674 case ARRAY_RANGE_REF:
675 /* We can't scalarize variable array indices. */
676 if (range_in_array_bounds_p (expr))
677 {
678 tree domain = TYPE_DOMAIN (TREE_TYPE (expr));
679 child = build2 (RANGE_EXPR, integer_type_node,
680 TYPE_MIN_VALUE (domain), TYPE_MAX_VALUE (domain));
681 }
682 else
683 return NULL;
684 break;
685
686 case COMPONENT_REF:
687 {
688 tree type = TREE_TYPE (TREE_OPERAND (expr, 0));
689 /* Don't look through unions. */
690 if (TREE_CODE (type) != RECORD_TYPE)
691 return NULL;
692 /* Neither through variable-sized records. */
693 if (TYPE_SIZE (type) == NULL_TREE
694 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
695 return NULL;
696 child = TREE_OPERAND (expr, 1);
697 }
698 break;
699
700 case REALPART_EXPR:
701 child = integer_zero_node;
702 break;
703 case IMAGPART_EXPR:
704 child = integer_one_node;
705 break;
706
707 default:
708 return NULL;
709 }
710
711 elt = maybe_lookup_element_for_expr (TREE_OPERAND (expr, 0));
712 if (elt)
713 return lookup_element (elt, child, TREE_TYPE (expr), INSERT);
714 return NULL;
715 }
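
/* Illustrative walk (not part of the original sources; the expression is
   made up): for "x.f[3]" the recursion above bottoms out at the VAR_DECL X
   and, if X is a candidate, creates or finds the element for X, then the
   element for the FIELD_DECL F beneath it, then the element for the
   constant index 3 beneath that.  A variable index, as in "x.f[i]",
   makes the walk return NULL.  */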
716
717 \f
718 /* Functions to walk just enough of the tree to see all scalarizable
719 references, and categorize them. */
720
721 /* A set of callbacks for phases 2 and 4. They'll be invoked for the
722 various kinds of references seen. In all cases, *BSI is an iterator
723 pointing to the statement being processed. */
724 struct sra_walk_fns
725 {
726 /* Invoked when ELT is required as a unit. Note that ELT might refer to
727 a leaf node, in which case this is a simple scalar reference. *EXPR_P
728 points to the location of the expression. IS_OUTPUT is true if this
729 is a left-hand-side reference. USE_ALL is true if we saw something we
730 couldn't quite identify and had to force the use of the entire object. */
731 void (*use) (struct sra_elt *elt, tree *expr_p,
732 block_stmt_iterator *bsi, bool is_output, bool use_all);
733
734 /* Invoked when we have a copy between two scalarizable references. */
735 void (*copy) (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
736 block_stmt_iterator *bsi);
737
738 /* Invoked when ELT is initialized from a constant. VALUE may be NULL,
739 in which case it should be treated as an empty CONSTRUCTOR. */
740 void (*init) (struct sra_elt *elt, tree value, block_stmt_iterator *bsi);
741
742 /* Invoked when we have a copy between one scalarizable reference ELT
743 and one non-scalarizable reference OTHER without side-effects.
744 IS_OUTPUT is true if ELT is on the left-hand side. */
745 void (*ldst) (struct sra_elt *elt, tree other,
746 block_stmt_iterator *bsi, bool is_output);
747
748 /* True during phase 2, false during phase 4. */
749 /* ??? This is a hack. */
750 bool initial_scan;
751 };
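
/* Rough examples (not part of the original sources; names are made up) of
   how statements map onto the callbacks above, assuming A and B are
   scalarizable aggregates and X is not:

     a = b;                 -> copy (a, b)
     a = x;                 -> ldst (a, x, is_output = true)
     a = (struct S) { 0 };  -> init (a, the CONSTRUCTOR)
     foo (a.f);             -> use of A.F as an input  */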
752
753 #ifdef ENABLE_CHECKING
754 /* Invoked via walk_tree, if *TP contains a candidate decl, return it. */
755
756 static tree
757 sra_find_candidate_decl (tree *tp, int *walk_subtrees,
758 void *data ATTRIBUTE_UNUSED)
759 {
760 tree t = *tp;
761 enum tree_code code = TREE_CODE (t);
762
763 if (code == VAR_DECL || code == PARM_DECL || code == RESULT_DECL)
764 {
765 *walk_subtrees = 0;
766 if (is_sra_candidate_decl (t))
767 return t;
768 }
769 else if (TYPE_P (t))
770 *walk_subtrees = 0;
771
772 return NULL;
773 }
774 #endif
775
776 /* Walk most expressions looking for a scalarizable aggregate.
777 If we find one, invoke FNS->USE. */
778
779 static void
780 sra_walk_expr (tree *expr_p, block_stmt_iterator *bsi, bool is_output,
781 const struct sra_walk_fns *fns)
782 {
783 tree expr = *expr_p;
784 tree inner = expr;
785 bool disable_scalarization = false;
786 bool use_all_p = false;
787
788 /* We're looking to collect a reference expression between EXPR and INNER,
789 such that INNER is a scalarizable decl and all other nodes through EXPR
790 are references that we can scalarize. If we come across something that
791 we can't scalarize, we reset EXPR. This has the effect of making it
792 appear that we're referring to the larger expression as a whole. */
793
794 while (1)
795 switch (TREE_CODE (inner))
796 {
797 case VAR_DECL:
798 case PARM_DECL:
799 case RESULT_DECL:
800 /* If there is a scalarizable decl at the bottom, then process it. */
801 if (is_sra_candidate_decl (inner))
802 {
803 struct sra_elt *elt = maybe_lookup_element_for_expr (expr);
804 if (disable_scalarization)
805 elt->cannot_scalarize = true;
806 else
807 fns->use (elt, expr_p, bsi, is_output, use_all_p);
808 }
809 return;
810
811 case ARRAY_REF:
812 /* Non-constant index means any member may be accessed. Prevent the
813 expression from being scalarized. If we were to treat this as a
814 reference to the whole array, we can wind up with a single dynamic
815 index reference inside a loop being overridden by several constant
816 index references during loop setup. It's possible that this could
817 be avoided by using dynamic usage counts based on BB trip counts
818 (based on loop analysis or profiling), but that hardly seems worth
819 the effort. */
820 /* ??? Hack. Figure out how to push this into the scan routines
821 without duplicating too much code. */
822 if (!in_array_bounds_p (inner))
823 {
824 disable_scalarization = true;
825 goto use_all;
826 }
827 /* ??? Are we assured that non-constant bounds and stride will have
828 the same value everywhere? I don't think Fortran will... */
829 if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
830 goto use_all;
831 inner = TREE_OPERAND (inner, 0);
832 break;
833
834 case ARRAY_RANGE_REF:
835 if (!range_in_array_bounds_p (inner))
836 {
837 disable_scalarization = true;
838 goto use_all;
839 }
840 /* ??? See the note above about non-constant bounds and stride. */
841 if (TREE_OPERAND (inner, 2) || TREE_OPERAND (inner, 3))
842 goto use_all;
843 inner = TREE_OPERAND (inner, 0);
844 break;
845
846 case COMPONENT_REF:
847 {
848 tree type = TREE_TYPE (TREE_OPERAND (inner, 0));
849 /* Don't look through unions. */
850 if (TREE_CODE (type) != RECORD_TYPE)
851 goto use_all;
852 /* Neither through variable-sized records. */
853 if (TYPE_SIZE (type) == NULL_TREE
854 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
855 goto use_all;
856 inner = TREE_OPERAND (inner, 0);
857 }
858 break;
859
860 case REALPART_EXPR:
861 case IMAGPART_EXPR:
862 inner = TREE_OPERAND (inner, 0);
863 break;
864
865 case BIT_FIELD_REF:
866 /* A bit field reference to a specific vector is scalarized, but ones
867 appearing on the left hand side need to be noted, so that when we
868 scalarize the vector we can mark its replacement as non-renamable. */
869 if (is_output
870 && TREE_CODE (TREE_TYPE (TREE_OPERAND (inner, 0))) == VECTOR_TYPE)
871 {
872 struct sra_elt *elt
873 = maybe_lookup_element_for_expr (TREE_OPERAND (inner, 0));
874 if (elt)
875 elt->is_vector_lhs = true;
876 }
877
878 /* A bit field reference (access to *multiple* fields simultaneously)
879 is not currently scalarized. Consider this an access to the full
880 outer element, to which walk_tree will bring us next. */
881 goto use_all;
882
883 case NOP_EXPR:
884 /* Similarly, a nop explicitly wants to look at an object in a
885 type other than the one we've scalarized. */
886 goto use_all;
887
888 case VIEW_CONVERT_EXPR:
889 /* Likewise for a view conversion, but with an additional twist:
890 it can be on the LHS and, in this case, an access to the full
891 outer element would mean a killing def. So we need to punt
892 if we haven't already a full access to the current element,
893 because we cannot pretend to have a killing def if we only
894 have a partial access at some level. */
895 if (is_output && !use_all_p && inner != expr)
896 disable_scalarization = true;
897 goto use_all;
898
899 case WITH_SIZE_EXPR:
900 /* This is a transparent wrapper. The entire inner expression really
901 is being used. */
902 goto use_all;
903
904 use_all:
905 expr_p = &TREE_OPERAND (inner, 0);
906 inner = expr = *expr_p;
907 use_all_p = true;
908 break;
909
910 default:
911 #ifdef ENABLE_CHECKING
912 /* Validate that we're not missing any references. */
913 gcc_assert (!walk_tree (&inner, sra_find_candidate_decl, NULL, NULL));
914 #endif
915 return;
916 }
917 }
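
/* Illustrative trace (not part of the original sources; the reference is
   made up): for "a.b[i].c" with a non-constant index I, the loop above
   peels the COMPONENT_REF for C, reaches the ARRAY_REF, sets
   DISABLE_SCALARIZATION and takes the use_all path, which resets EXPR to
   "a.b".  When it finally reaches the decl A, scalarization has been
   disabled, so the element for A.B is marked cannot_scalarize instead of
   invoking FNS->USE.  */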
918
919 /* Walk a TREE_LIST of values looking for scalarizable aggregates.
920 If we find one, invoke FNS->USE. */
921
922 static void
923 sra_walk_tree_list (tree list, block_stmt_iterator *bsi, bool is_output,
924 const struct sra_walk_fns *fns)
925 {
926 tree op;
927 for (op = list; op ; op = TREE_CHAIN (op))
928 sra_walk_expr (&TREE_VALUE (op), bsi, is_output, fns);
929 }
930
931 /* Walk the arguments of a CALL_EXPR looking for scalarizable aggregates.
932 If we find one, invoke FNS->USE. */
933
934 static void
935 sra_walk_call_expr (tree expr, block_stmt_iterator *bsi,
936 const struct sra_walk_fns *fns)
937 {
938 int i;
939 int nargs = call_expr_nargs (expr);
940 for (i = 0; i < nargs; i++)
941 sra_walk_expr (&CALL_EXPR_ARG (expr, i), bsi, false, fns);
942 }
943
944 /* Walk the inputs and outputs of an ASM_EXPR looking for scalarizable
945 aggregates. If we find one, invoke FNS->USE. */
946
947 static void
948 sra_walk_asm_expr (tree expr, block_stmt_iterator *bsi,
949 const struct sra_walk_fns *fns)
950 {
951 sra_walk_tree_list (ASM_INPUTS (expr), bsi, false, fns);
952 sra_walk_tree_list (ASM_OUTPUTS (expr), bsi, true, fns);
953 }
954
955 /* Walk a GIMPLE_MODIFY_STMT and categorize the assignment appropriately. */
956
957 static void
958 sra_walk_gimple_modify_stmt (tree expr, block_stmt_iterator *bsi,
959 const struct sra_walk_fns *fns)
960 {
961 struct sra_elt *lhs_elt, *rhs_elt;
962 tree lhs, rhs;
963
964 lhs = GIMPLE_STMT_OPERAND (expr, 0);
965 rhs = GIMPLE_STMT_OPERAND (expr, 1);
966 lhs_elt = maybe_lookup_element_for_expr (lhs);
967 rhs_elt = maybe_lookup_element_for_expr (rhs);
968
969 /* If both sides are scalarizable, this is a COPY operation. */
970 if (lhs_elt && rhs_elt)
971 {
972 fns->copy (lhs_elt, rhs_elt, bsi);
973 return;
974 }
975
976 /* If the RHS is scalarizable, handle it. There are only two cases. */
977 if (rhs_elt)
978 {
979 if (!rhs_elt->is_scalar && !TREE_SIDE_EFFECTS (lhs))
980 fns->ldst (rhs_elt, lhs, bsi, false);
981 else
982 fns->use (rhs_elt, &GIMPLE_STMT_OPERAND (expr, 1), bsi, false, false);
983 }
984
985 /* If it isn't scalarizable, there may be scalarizable variables within, so
986 check for a call or else walk the RHS to see if we need to do any
987 copy-in operations. We need to do it before the LHS is scalarized so
988 that the statements get inserted in the proper place, before any
989 copy-out operations. */
990 else
991 {
992 tree call = get_call_expr_in (rhs);
993 if (call)
994 sra_walk_call_expr (call, bsi, fns);
995 else
996 sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 1), bsi, false, fns);
997 }
998
999 /* Likewise, handle the LHS being scalarizable. We have cases similar
1000 to those above, but also want to handle RHS being constant. */
1001 if (lhs_elt)
1002 {
1003 /* If this is an assignment from a constant, or constructor, then
1004 we have access to all of the elements individually. Invoke INIT. */
1005 if (TREE_CODE (rhs) == COMPLEX_EXPR
1006 || TREE_CODE (rhs) == COMPLEX_CST
1007 || TREE_CODE (rhs) == CONSTRUCTOR)
1008 fns->init (lhs_elt, rhs, bsi);
1009
1010 /* If this is an assignment from read-only memory, treat this as if
1011 we'd been passed the constructor directly. Invoke INIT. */
1012 else if (TREE_CODE (rhs) == VAR_DECL
1013 && TREE_STATIC (rhs)
1014 && TREE_READONLY (rhs)
1015 && targetm.binds_local_p (rhs))
1016 fns->init (lhs_elt, DECL_INITIAL (rhs), bsi);
1017
1018 /* If this is a copy from a non-scalarizable lvalue, invoke LDST.
1019 The lvalue requirement prevents us from trying to directly scalarize
1020 the result of a function call, which would result in trying to call
1021 the function multiple times, and other evil things. */
1022 else if (!lhs_elt->is_scalar
1023 && !TREE_SIDE_EFFECTS (rhs) && is_gimple_addressable (rhs))
1024 fns->ldst (lhs_elt, rhs, bsi, true);
1025
1026 /* Otherwise we're being used in some context that requires the
1027 aggregate to be seen as a whole. Invoke USE. */
1028 else
1029 fns->use (lhs_elt, &GIMPLE_STMT_OPERAND (expr, 0), bsi, true, false);
1030 }
1031
1032 /* Similarly to above, LHS_ELT being null only means that the LHS as a
1033 whole is not a scalarizable reference. There may be occurrences of
1034 scalarizable variables within, which implies a USE. */
1035 else
1036 sra_walk_expr (&GIMPLE_STMT_OPERAND (expr, 0), bsi, true, fns);
1037 }
1038
1039 /* Entry point to the walk functions. Search the entire function,
1040 invoking the callbacks in FNS on each of the references to
1041 scalarizable variables. */
1042
1043 static void
1044 sra_walk_function (const struct sra_walk_fns *fns)
1045 {
1046 basic_block bb;
1047 block_stmt_iterator si, ni;
1048
1049 /* ??? Phase 4 could derive some benefit from walking the function in
1050 dominator tree order. */
1051
1052 FOR_EACH_BB (bb)
1053 for (si = bsi_start (bb); !bsi_end_p (si); si = ni)
1054 {
1055 tree stmt, t;
1056 stmt_ann_t ann;
1057
1058 stmt = bsi_stmt (si);
1059 ann = stmt_ann (stmt);
1060
1061 ni = si;
1062 bsi_next (&ni);
1063
1064 /* If the statement has no virtual operands, then it doesn't
1065 make any structure references that we care about. */
1066 if (gimple_aliases_computed_p (cfun)
1067 && ZERO_SSA_OPERANDS (stmt, (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE)))
1068 continue;
1069
1070 switch (TREE_CODE (stmt))
1071 {
1072 case RETURN_EXPR:
1073 /* If we have "return <retval>" then the return value is
1074 already exposed for our pleasure. Walk it as a USE to
1075 force all the components back in place for the return.
1076
1077 If we have an embedded assignment, then <retval> is of
1078 a type that gets returned in registers in this ABI, and
1079 we do not wish to extend their lifetimes. Treat this
1080 as a USE of the variable on the RHS of this assignment. */
1081
1082 t = TREE_OPERAND (stmt, 0);
1083 if (t == NULL_TREE)
1084 ;
1085 else if (TREE_CODE (t) == GIMPLE_MODIFY_STMT)
1086 sra_walk_expr (&GIMPLE_STMT_OPERAND (t, 1), &si, false, fns);
1087 else
1088 sra_walk_expr (&TREE_OPERAND (stmt, 0), &si, false, fns);
1089 break;
1090
1091 case GIMPLE_MODIFY_STMT:
1092 sra_walk_gimple_modify_stmt (stmt, &si, fns);
1093 break;
1094 case CALL_EXPR:
1095 sra_walk_call_expr (stmt, &si, fns);
1096 break;
1097 case ASM_EXPR:
1098 sra_walk_asm_expr (stmt, &si, fns);
1099 break;
1100
1101 default:
1102 break;
1103 }
1104 }
1105 }
1106 \f
1107 /* Phase One: Scan all referenced variables in the program looking for
1108 structures that could be decomposed. */
1109
1110 static bool
1111 find_candidates_for_sra (void)
1112 {
1113 bool any_set = false;
1114 tree var;
1115 referenced_var_iterator rvi;
1116
1117 FOR_EACH_REFERENCED_VAR (var, rvi)
1118 {
1119 if (decl_can_be_decomposed_p (var))
1120 {
1121 bitmap_set_bit (sra_candidates, DECL_UID (var));
1122 any_set = true;
1123 }
1124 }
1125
1126 return any_set;
1127 }
1128
1129 \f
1130 /* Phase Two: Scan all references to scalarizable variables. Count the
1131 number of times they are used or copied respectively. */
1132
1133 /* Callbacks to fill in SRA_WALK_FNS. Everything but USE is
1134 considered a copy, because we can decompose the reference such that
1135 the sub-elements needn't be contiguous. */
1136
1137 static void
1138 scan_use (struct sra_elt *elt, tree *expr_p ATTRIBUTE_UNUSED,
1139 block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
1140 bool is_output ATTRIBUTE_UNUSED, bool use_all ATTRIBUTE_UNUSED)
1141 {
1142 elt->n_uses += 1;
1143 }
1144
1145 static void
1146 scan_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
1147 block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
1148 {
1149 lhs_elt->n_copies += 1;
1150 rhs_elt->n_copies += 1;
1151 }
1152
1153 static void
1154 scan_init (struct sra_elt *lhs_elt, tree rhs ATTRIBUTE_UNUSED,
1155 block_stmt_iterator *bsi ATTRIBUTE_UNUSED)
1156 {
1157 lhs_elt->n_copies += 1;
1158 }
1159
1160 static void
1161 scan_ldst (struct sra_elt *elt, tree other ATTRIBUTE_UNUSED,
1162 block_stmt_iterator *bsi ATTRIBUTE_UNUSED,
1163 bool is_output ATTRIBUTE_UNUSED)
1164 {
1165 elt->n_copies += 1;
1166 }
1167
1168 /* Dump the values we collected during the scanning phase. */
1169
1170 static void
1171 scan_dump (struct sra_elt *elt)
1172 {
1173 struct sra_elt *c;
1174
1175 dump_sra_elt_name (dump_file, elt);
1176 fprintf (dump_file, ": n_uses=%u n_copies=%u\n", elt->n_uses, elt->n_copies);
1177
1178 for (c = elt->children; c ; c = c->sibling)
1179 scan_dump (c);
1180
1181 for (c = elt->groups; c ; c = c->sibling)
1182 scan_dump (c);
1183 }
1184
1185 /* Entry point to phase 2. Scan the entire function, building up
1186 scalarization data structures, recording copies and uses. */
1187
1188 static void
1189 scan_function (void)
1190 {
1191 static const struct sra_walk_fns fns = {
1192 scan_use, scan_copy, scan_init, scan_ldst, true
1193 };
1194 bitmap_iterator bi;
1195
1196 sra_walk_function (&fns);
1197
1198 if (dump_file && (dump_flags & TDF_DETAILS))
1199 {
1200 unsigned i;
1201
1202 fputs ("\nScan results:\n", dump_file);
1203 EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
1204 {
1205 tree var = referenced_var (i);
1206 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
1207 if (elt)
1208 scan_dump (elt);
1209 }
1210 fputc ('\n', dump_file);
1211 }
1212 }
1213 \f
1214 /* Phase Three: Make decisions about which variables to scalarize, if any.
1215 All elements to be scalarized have replacement variables made for them. */
1216
1217 /* A subroutine of build_element_name. Recursively build the element
1218 name on the obstack. */
1219
1220 static void
1221 build_element_name_1 (struct sra_elt *elt)
1222 {
1223 tree t;
1224 char buffer[32];
1225
1226 if (elt->parent)
1227 {
1228 build_element_name_1 (elt->parent);
1229 obstack_1grow (&sra_obstack, '$');
1230
1231 if (TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
1232 {
1233 if (elt->element == integer_zero_node)
1234 obstack_grow (&sra_obstack, "real", 4);
1235 else
1236 obstack_grow (&sra_obstack, "imag", 4);
1237 return;
1238 }
1239 }
1240
1241 t = elt->element;
1242 if (TREE_CODE (t) == INTEGER_CST)
1243 {
1244 /* ??? Eh. Don't bother doing double-wide printing. */
1245 sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (t));
1246 obstack_grow (&sra_obstack, buffer, strlen (buffer));
1247 }
1248 else if (TREE_CODE (t) == BIT_FIELD_REF)
1249 {
1250 sprintf (buffer, "B" HOST_WIDE_INT_PRINT_DEC,
1251 tree_low_cst (TREE_OPERAND (t, 2), 1));
1252 obstack_grow (&sra_obstack, buffer, strlen (buffer));
1253 sprintf (buffer, "F" HOST_WIDE_INT_PRINT_DEC,
1254 tree_low_cst (TREE_OPERAND (t, 1), 1));
1255 obstack_grow (&sra_obstack, buffer, strlen (buffer));
1256 }
1257 else
1258 {
1259 tree name = DECL_NAME (t);
1260 if (name)
1261 obstack_grow (&sra_obstack, IDENTIFIER_POINTER (name),
1262 IDENTIFIER_LENGTH (name));
1263 else
1264 {
1265 sprintf (buffer, "D%u", DECL_UID (t));
1266 obstack_grow (&sra_obstack, buffer, strlen (buffer));
1267 }
1268 }
1269 }
1270
1271 /* Construct a pretty variable name for an element's replacement variable.
1272 The name is built on the obstack. */
1273
1274 static char *
1275 build_element_name (struct sra_elt *elt)
1276 {
1277 build_element_name_1 (elt);
1278 obstack_1grow (&sra_obstack, '\0');
1279 return XOBFINISH (&sra_obstack, char *);
1280 }
1281
1282 /* Instantiate an element as an independent variable. */
1283
1284 static void
1285 instantiate_element (struct sra_elt *elt)
1286 {
1287 struct sra_elt *base_elt;
1288 tree var, base;
1289 bool nowarn = TREE_NO_WARNING (elt->element);
1290
1291 for (base_elt = elt; base_elt->parent; base_elt = base_elt->parent)
1292 if (!nowarn)
1293 nowarn = TREE_NO_WARNING (base_elt->parent->element);
1294 base = base_elt->element;
1295
1296 elt->replacement = var = make_rename_temp (elt->type, "SR");
1297
1298 if (DECL_P (elt->element)
1299 && !tree_int_cst_equal (DECL_SIZE (var), DECL_SIZE (elt->element)))
1300 {
1301 DECL_SIZE (var) = DECL_SIZE (elt->element);
1302 DECL_SIZE_UNIT (var) = DECL_SIZE_UNIT (elt->element);
1303
1304 elt->in_bitfld_block = 1;
1305 elt->replacement = fold_build3 (BIT_FIELD_REF, elt->type, var,
1306 DECL_SIZE (var),
1307 BYTES_BIG_ENDIAN
1308 ? size_binop (MINUS_EXPR,
1309 TYPE_SIZE (elt->type),
1310 DECL_SIZE (var))
1311 : bitsize_int (0));
1312 }
1313
1314 /* If a vector is used on the left hand side with a BIT_FIELD_REF,
1315 its replacement is not a GIMPLE register. */
1316 if (TREE_CODE (TREE_TYPE (var)) == VECTOR_TYPE && elt->is_vector_lhs)
1317 DECL_GIMPLE_REG_P (var) = 0;
1318
1319 DECL_SOURCE_LOCATION (var) = DECL_SOURCE_LOCATION (base);
1320 DECL_ARTIFICIAL (var) = 1;
1321
1322 if (TREE_THIS_VOLATILE (elt->type))
1323 {
1324 TREE_THIS_VOLATILE (var) = 1;
1325 TREE_SIDE_EFFECTS (var) = 1;
1326 }
1327
1328 if (DECL_NAME (base) && !DECL_IGNORED_P (base))
1329 {
1330 char *pretty_name = build_element_name (elt);
1331 DECL_NAME (var) = get_identifier (pretty_name);
1332 obstack_free (&sra_obstack, pretty_name);
1333
1334 SET_DECL_DEBUG_EXPR (var, generate_element_ref (elt));
1335 DECL_DEBUG_EXPR_IS_FROM (var) = 1;
1336
1337 DECL_IGNORED_P (var) = 0;
1338 TREE_NO_WARNING (var) = nowarn;
1339 }
1340 else
1341 {
1342 DECL_IGNORED_P (var) = 1;
1343 /* ??? We can't generate any warning that would be meaningful. */
1344 TREE_NO_WARNING (var) = 1;
1345 }
1346
1347 /* Zero-initialize bit-field scalarization variables, to avoid
1348 triggering undefined behavior. */
1349 if (TREE_CODE (elt->element) == BIT_FIELD_REF
1350 || (var != elt->replacement
1351 && TREE_CODE (elt->replacement) == BIT_FIELD_REF))
1352 {
1353 tree init = sra_build_assignment (var, fold_convert (TREE_TYPE (var),
1354 integer_zero_node));
1355 insert_edge_copies (init, ENTRY_BLOCK_PTR);
1356 mark_all_v_defs (init);
1357 }
1358
1359 if (dump_file)
1360 {
1361 fputs (" ", dump_file);
1362 dump_sra_elt_name (dump_file, elt);
1363 fputs (" -> ", dump_file);
1364 print_generic_expr (dump_file, var, dump_flags);
1365 fputc ('\n', dump_file);
1366 }
1367 }
1368
1369 /* Make one pass across an element tree deciding whether or not it's
1370 profitable to instantiate individual leaf scalars.
1371
1372 PARENT_USES and PARENT_COPIES are the sum of the N_USES and N_COPIES
1373 fields all the way up the tree. */
1374
1375 static void
1376 decide_instantiation_1 (struct sra_elt *elt, unsigned int parent_uses,
1377 unsigned int parent_copies)
1378 {
1379 if (dump_file && !elt->parent)
1380 {
1381 fputs ("Initial instantiation for ", dump_file);
1382 dump_sra_elt_name (dump_file, elt);
1383 fputc ('\n', dump_file);
1384 }
1385
1386 if (elt->cannot_scalarize)
1387 return;
1388
1389 if (elt->is_scalar)
1390 {
1391 /* The decision is simple: instantiate if we're used more frequently
1392 than the parent needs to be seen as a complete unit. */
1393 if (elt->n_uses + elt->n_copies + parent_copies > parent_uses)
1394 instantiate_element (elt);
1395 }
1396 else
1397 {
1398 struct sra_elt *c, *group;
1399 unsigned int this_uses = elt->n_uses + parent_uses;
1400 unsigned int this_copies = elt->n_copies + parent_copies;
1401
1402 /* Consider groups of sub-elements as weighing in favour of
1403 instantiation whatever their size. */
1404 for (group = elt->groups; group ; group = group->sibling)
1405 FOR_EACH_ACTUAL_CHILD (c, group)
1406 {
1407 c->n_uses += group->n_uses;
1408 c->n_copies += group->n_copies;
1409 }
1410
1411 for (c = elt->children; c ; c = c->sibling)
1412 decide_instantiation_1 (c, this_uses, this_copies);
1413 }
1414 }
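
/* Worked example of the heuristic above (the numbers are made up): if a
   leaf A.X has n_uses == 3 and n_copies == 1 while the accumulated
   parent_uses is 2 and parent_copies is 0, then 3 + 1 + 0 > 2 holds and
   A.X gets a replacement.  If A were instead used as a whole 5 times,
   3 + 1 + 0 > 5 fails and A.X is left uninstantiated.  */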
1415
1416 /* Compute the size and number of all instantiated elements below ELT.
1417 We will only care about this if the size of the complete structure
1418 fits in a HOST_WIDE_INT, so we don't have to worry about overflow. */
1419
1420 static unsigned int
1421 sum_instantiated_sizes (struct sra_elt *elt, unsigned HOST_WIDE_INT *sizep)
1422 {
1423 if (elt->replacement)
1424 {
1425 *sizep += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (elt->type));
1426 return 1;
1427 }
1428 else
1429 {
1430 struct sra_elt *c;
1431 unsigned int count = 0;
1432
1433 for (c = elt->children; c ; c = c->sibling)
1434 count += sum_instantiated_sizes (c, sizep);
1435
1436 return count;
1437 }
1438 }
1439
1440 /* Instantiate fields in ELT->TYPE that are not currently present as
1441 children of ELT. */
1442
1443 static void instantiate_missing_elements (struct sra_elt *elt);
1444
1445 static struct sra_elt *
1446 instantiate_missing_elements_1 (struct sra_elt *elt, tree child, tree type)
1447 {
1448 struct sra_elt *sub = lookup_element (elt, child, type, INSERT);
1449 if (sub->is_scalar)
1450 {
1451 if (sub->replacement == NULL)
1452 instantiate_element (sub);
1453 }
1454 else
1455 instantiate_missing_elements (sub);
1456 return sub;
1457 }
1458
1459 /* Obtain the canonical type for field F of ELEMENT. */
1460
1461 static tree
1462 canon_type_for_field (tree f, tree element)
1463 {
1464 tree field_type = TREE_TYPE (f);
1465
1466 /* canonicalize_component_ref() unwidens some bit-field types (not
1467 marked as DECL_BIT_FIELD in C++), so we must do the same, lest we
1468 introduce type mismatches. */
1469 if (INTEGRAL_TYPE_P (field_type)
1470 && DECL_MODE (f) != TYPE_MODE (field_type))
1471 field_type = TREE_TYPE (get_unwidened (build3 (COMPONENT_REF,
1472 field_type,
1473 element,
1474 f, NULL_TREE),
1475 NULL_TREE));
1476
1477 return field_type;
1478 }
1479
1480 /* Look for adjacent fields of ELT starting at F that we'd like to
1481 scalarize as a single variable. Return the last field of the
1482 group. */
1483
1484 static tree
1485 try_instantiate_multiple_fields (struct sra_elt *elt, tree f)
1486 {
1487 int count;
1488 unsigned HOST_WIDE_INT align, bit, size, alchk;
1489 enum machine_mode mode;
1490 tree first = f, prev;
1491 tree type, var;
1492 struct sra_elt *block;
1493
1494 /* Pointer fields are typically best handled as standalone entities. */
1495 if (POINTER_TYPE_P (TREE_TYPE (f)))
1496 return f;
1497
1498 if (!is_sra_scalar_type (TREE_TYPE (f))
1499 || !host_integerp (DECL_FIELD_OFFSET (f), 1)
1500 || !host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)
1501 || !host_integerp (DECL_SIZE (f), 1)
1502 || lookup_element (elt, f, NULL, NO_INSERT))
1503 return f;
1504
1505 block = elt;
1506
1507 /* For complex and array objects, there are going to be integer
1508 literals as child elements. In this case, we can't just take the
1509 alignment and mode of the decl, so we instead rely on the element
1510 type.
1511
1512 ??? We could try to infer additional alignment from the full
1513 object declaration and the location of the sub-elements we're
1514 accessing. */
1515 for (count = 0; !DECL_P (block->element); count++)
1516 block = block->parent;
1517
1518 align = DECL_ALIGN (block->element);
1519 alchk = GET_MODE_BITSIZE (DECL_MODE (block->element));
1520
1521 if (count)
1522 {
1523 type = TREE_TYPE (block->element);
1524 while (count--)
1525 type = TREE_TYPE (type);
1526
1527 align = TYPE_ALIGN (type);
1528 alchk = GET_MODE_BITSIZE (TYPE_MODE (type));
1529 }
1530
1531 if (align < alchk)
1532 align = alchk;
1533
1534 /* Coalescing wider fields is probably pointless and
1535 inefficient. */
1536 if (align > BITS_PER_WORD)
1537 align = BITS_PER_WORD;
1538
1539 bit = tree_low_cst (DECL_FIELD_OFFSET (f), 1) * BITS_PER_UNIT
1540 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
1541 size = tree_low_cst (DECL_SIZE (f), 1);
1542
1543 alchk = align - 1;
1544 alchk = ~alchk;
1545
1546 if ((bit & alchk) != ((bit + size - 1) & alchk))
1547 return f;
1548
1549 /* Find adjacent fields in the same alignment word. */
1550
1551 for (prev = f, f = TREE_CHAIN (f);
1552 f && TREE_CODE (f) == FIELD_DECL
1553 && is_sra_scalar_type (TREE_TYPE (f))
1554 && host_integerp (DECL_FIELD_OFFSET (f), 1)
1555 && host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)
1556 && host_integerp (DECL_SIZE (f), 1)
1557 && !lookup_element (elt, f, NULL, NO_INSERT);
1558 prev = f, f = TREE_CHAIN (f))
1559 {
1560 unsigned HOST_WIDE_INT nbit, nsize;
1561
1562 nbit = tree_low_cst (DECL_FIELD_OFFSET (f), 1) * BITS_PER_UNIT
1563 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
1564 nsize = tree_low_cst (DECL_SIZE (f), 1);
1565
1566 if (bit + size == nbit)
1567 {
1568 if ((bit & alchk) != ((nbit + nsize - 1) & alchk))
1569 {
1570 /* If we're at an alignment boundary, don't bother
1571 growing alignment such that we can include this next
1572 field. */
1573 if ((nbit & alchk)
1574 || GET_MODE_BITSIZE (DECL_MODE (f)) <= align)
1575 break;
1576
1577 align = GET_MODE_BITSIZE (DECL_MODE (f));
1578 alchk = align - 1;
1579 alchk = ~alchk;
1580
1581 if ((bit & alchk) != ((nbit + nsize - 1) & alchk))
1582 break;
1583 }
1584 size += nsize;
1585 }
1586 else if (nbit + nsize == bit)
1587 {
1588 if ((nbit & alchk) != ((bit + size - 1) & alchk))
1589 {
1590 if ((bit & alchk)
1591 || GET_MODE_BITSIZE (DECL_MODE (f)) <= align)
1592 break;
1593
1594 align = GET_MODE_BITSIZE (DECL_MODE (f));
1595 alchk = align - 1;
1596 alchk = ~alchk;
1597
1598 if ((nbit & alchk) != ((bit + size - 1) & alchk))
1599 break;
1600 }
1601 bit = nbit;
1602 size += nsize;
1603 }
1604 else
1605 break;
1606 }
1607
1608 f = prev;
1609
1610 if (f == first)
1611 return f;
1612
1613 gcc_assert ((bit & alchk) == ((bit + size - 1) & alchk));
1614
1615 /* Try to widen the bit range so as to cover padding bits as well. */
1616
1617 if ((bit & ~alchk) || size != align)
1618 {
1619 unsigned HOST_WIDE_INT mbit = bit & alchk;
1620 unsigned HOST_WIDE_INT msize = align;
1621
1622 for (f = TYPE_FIELDS (elt->type);
1623 f; f = TREE_CHAIN (f))
1624 {
1625 unsigned HOST_WIDE_INT fbit, fsize;
1626
1627 /* Skip the fields from first to prev. */
1628 if (f == first)
1629 {
1630 f = prev;
1631 continue;
1632 }
1633
1634 if (!(TREE_CODE (f) == FIELD_DECL
1635 && host_integerp (DECL_FIELD_OFFSET (f), 1)
1636 && host_integerp (DECL_FIELD_BIT_OFFSET (f), 1)))
1637 continue;
1638
1639 fbit = tree_low_cst (DECL_FIELD_OFFSET (f), 1) * BITS_PER_UNIT
1640 + tree_low_cst (DECL_FIELD_BIT_OFFSET (f), 1);
1641
1642 /* If we're past the selected word, we're fine. */
1643 if ((bit & alchk) < (fbit & alchk))
1644 continue;
1645
1646 if (host_integerp (DECL_SIZE (f), 1))
1647 fsize = tree_low_cst (DECL_SIZE (f), 1);
1648 else
1649 /* Assume a variable-sized field takes up all space till
1650 the end of the word. ??? Endianness issues? */
1651 fsize = align - (fbit & alchk);
1652
1653 if ((fbit & alchk) < (bit & alchk))
1654 {
1655 /* A large field might start at a previous word and
1656 extend into the selected word. Exclude those
1657 bits. ??? Endianness issues? */
1658 HOST_WIDE_INT diff = fbit + fsize - mbit;
1659
1660 if (diff <= 0)
1661 continue;
1662
1663 mbit += diff;
1664 msize -= diff;
1665 }
1666 else
1667 {
1668 /* Non-overlapping, great. */
1669 if (fbit + fsize <= mbit
1670 || mbit + msize <= fbit)
1671 continue;
1672
1673 if (fbit <= mbit)
1674 {
1675 unsigned HOST_WIDE_INT diff = fbit + fsize - mbit;
1676 mbit += diff;
1677 msize -= diff;
1678 }
1679 else if (fbit > mbit)
1680 msize -= (mbit + msize - fbit);
1681 else
1682 gcc_unreachable ();
1683 }
1684 }
1685
1686 bit = mbit;
1687 size = msize;
1688 }
1689
1690 /* Now we know the bit range we're interested in. Find the smallest
1691 machine mode we can use to access it. */
1692
1693 for (mode = smallest_mode_for_size (size, MODE_INT);
1694 ;
1695 mode = GET_MODE_WIDER_MODE (mode))
1696 {
1697 gcc_assert (mode != VOIDmode);
1698
1699 alchk = GET_MODE_PRECISION (mode) - 1;
1700 alchk = ~alchk;
1701
1702 if ((bit & alchk) == ((bit + size - 1) & alchk))
1703 break;
1704 }
1705
1706 gcc_assert (~alchk < align);
1707
1708 /* Create the field group as a single variable. */
1709
1710 /* We used to create a type for the mode above, but the size turns
1711 out not to be the mode size. As we need a matching type
1712 to build a BIT_FIELD_REF, use a nonstandard integer type as
1713 fallback. */
1714 type = lang_hooks.types.type_for_size (size, 1);
1715 if (!type || TYPE_PRECISION (type) != size)
1716 type = build_nonstandard_integer_type (size, 1);
1717 gcc_assert (type);
1718 var = build3 (BIT_FIELD_REF, type, NULL_TREE,
1719 bitsize_int (size), bitsize_int (bit));
1720
1721 block = instantiate_missing_elements_1 (elt, var, type);
1722 gcc_assert (block && block->is_scalar);
1723
1724 var = block->replacement;
1725
1726 if ((bit & ~alchk)
1727 || (HOST_WIDE_INT)size != tree_low_cst (DECL_SIZE (var), 1))
1728 {
1729 block->replacement = fold_build3 (BIT_FIELD_REF,
1730 TREE_TYPE (block->element), var,
1731 bitsize_int (size),
1732 bitsize_int (bit & ~alchk));
1733 }
1734
1735 block->in_bitfld_block = 2;
1736
1737 /* Add the member fields to the group, such that they access
1738 portions of the group variable. */
1739
1740 for (f = first; f != TREE_CHAIN (prev); f = TREE_CHAIN (f))
1741 {
1742 tree field_type = canon_type_for_field (f, elt->element);
1743 struct sra_elt *fld = lookup_element (block, f, field_type, INSERT);
1744
1745 gcc_assert (fld && fld->is_scalar && !fld->replacement);
1746
1747 fld->replacement = fold_build3 (BIT_FIELD_REF, field_type, var,
1748 DECL_SIZE (f),
1749 bitsize_int
1750 ((TREE_INT_CST_LOW (DECL_FIELD_OFFSET (f))
1751 * BITS_PER_UNIT
1752 + (TREE_INT_CST_LOW
1753 (DECL_FIELD_BIT_OFFSET (f))))
1754 & ~alchk));
1755 fld->in_bitfld_block = 1;
1756 }
1757
1758 return prev;
1759 }
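
/* Worked example of the masking above (the layout is made up): with
   align == 32, ALCHK is ~ (unsigned HOST_WIDE_INT) 31, a mask that keeps
   only the bits selecting the 32-bit word a position falls in.  Adjacent
   bit-fields at bit == 3, size == 5 and nbit == 8, nsize == 7 satisfy
   (3 & ALCHK) == (14 & ALCHK), so they share a word and are merged into
   one BIT_FIELD_REF-backed group variable; a field running past bit 31
   fails the check and either forces a wider alignment (up to its mode
   size) or terminates the group.  */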
1760
1761 static void
1762 instantiate_missing_elements (struct sra_elt *elt)
1763 {
1764 tree type = elt->type;
1765
1766 switch (TREE_CODE (type))
1767 {
1768 case RECORD_TYPE:
1769 {
1770 tree f;
1771 for (f = TYPE_FIELDS (type); f ; f = TREE_CHAIN (f))
1772 if (TREE_CODE (f) == FIELD_DECL)
1773 {
1774 tree last = try_instantiate_multiple_fields (elt, f);
1775
1776 if (last != f)
1777 {
1778 f = last;
1779 continue;
1780 }
1781
1782 instantiate_missing_elements_1 (elt, f,
1783 canon_type_for_field
1784 (f, elt->element));
1785 }
1786 break;
1787 }
1788
1789 case ARRAY_TYPE:
1790 {
1791 tree i, max, subtype;
1792
1793 i = TYPE_MIN_VALUE (TYPE_DOMAIN (type));
1794 max = TYPE_MAX_VALUE (TYPE_DOMAIN (type));
1795 subtype = TREE_TYPE (type);
1796
1797 while (1)
1798 {
1799 instantiate_missing_elements_1 (elt, i, subtype);
1800 if (tree_int_cst_equal (i, max))
1801 break;
1802 i = int_const_binop (PLUS_EXPR, i, integer_one_node, true);
1803 }
1804
1805 break;
1806 }
1807
1808 case COMPLEX_TYPE:
1809 type = TREE_TYPE (type);
1810 instantiate_missing_elements_1 (elt, integer_zero_node, type);
1811 instantiate_missing_elements_1 (elt, integer_one_node, type);
1812 break;
1813
1814 default:
1815 gcc_unreachable ();
1816 }
1817 }
1818
1819 /* Return true if there is only one non-aggregate field in the record, TYPE.
1820 Return false otherwise. */
1821
1822 static bool
1823 single_scalar_field_in_record_p (tree type)
1824 {
1825 int num_fields = 0;
1826 tree field;
1827 if (TREE_CODE (type) != RECORD_TYPE)
1828 return false;
1829
1830 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1831 if (TREE_CODE (field) == FIELD_DECL)
1832 {
1833 num_fields++;
1834
1835 if (num_fields == 2)
1836 return false;
1837
1838 if (AGGREGATE_TYPE_P (TREE_TYPE (field)))
1839 return false;
1840 }
1841
1842 return true;
1843 }
1844
1845 /* Make one pass across an element tree deciding whether to perform block
1846 or element copies. If we decide on element copies, instantiate all
1847 elements. Return true if there are any instantiated sub-elements. */
1848
1849 static bool
1850 decide_block_copy (struct sra_elt *elt)
1851 {
1852 struct sra_elt *c;
1853 bool any_inst;
1854
1855 /* We shouldn't be invoked on groups of sub-elements as they must
1856 behave like their parent as far as block copy is concerned. */
1857 gcc_assert (!elt->is_group);
1858
1859 /* If scalarization is disabled, respect it. */
1860 if (elt->cannot_scalarize)
1861 {
1862 elt->use_block_copy = 1;
1863
1864 if (dump_file)
1865 {
1866 fputs ("Scalarization disabled for ", dump_file);
1867 dump_sra_elt_name (dump_file, elt);
1868 fputc ('\n', dump_file);
1869 }
1870
1871 /* Disable scalarization of sub-elements */
1872 for (c = elt->children; c; c = c->sibling)
1873 {
1874 c->cannot_scalarize = 1;
1875 decide_block_copy (c);
1876 }
1877
1878 /* Groups behave like their parent. */
1879 for (c = elt->groups; c; c = c->sibling)
1880 {
1881 c->cannot_scalarize = 1;
1882 c->use_block_copy = 1;
1883 }
1884
1885 return false;
1886 }
1887
1888 /* Don't decide if we've no uses and no groups. */
1889 if (elt->n_uses == 0 && elt->n_copies == 0 && elt->groups == NULL)
1890 ;
1891
1892 else if (!elt->is_scalar)
1893 {
1894 tree size_tree = TYPE_SIZE_UNIT (elt->type);
1895 bool use_block_copy = true;
1896
1897 /* Tradeoffs for COMPLEX types pretty much always make it better
1898 to go ahead and split the components. */
1899 if (TREE_CODE (elt->type) == COMPLEX_TYPE)
1900 use_block_copy = false;
1901
1902 /* Don't bother trying to figure out the rest if the structure is
1903 so large we can't do easy arithmetic. This also forces block
1904 copies for variable sized structures. */
1905 else if (host_integerp (size_tree, 1))
1906 {
1907 unsigned HOST_WIDE_INT full_size, inst_size = 0;
1908 unsigned int max_size, max_count, inst_count, full_count;
1909
1910 /* If the sra-max-structure-size parameter is 0, then the
1911 user has not overridden the parameter and we can choose a
1912 sensible default. */
1913 max_size = SRA_MAX_STRUCTURE_SIZE
1914 ? SRA_MAX_STRUCTURE_SIZE
1915 : MOVE_RATIO * UNITS_PER_WORD;
1916 max_count = SRA_MAX_STRUCTURE_COUNT
1917 ? SRA_MAX_STRUCTURE_COUNT
1918 : MOVE_RATIO;
1919
1920 full_size = tree_low_cst (size_tree, 1);
1921 full_count = count_type_elements (elt->type, false);
1922 inst_count = sum_instantiated_sizes (elt, &inst_size);
1923
1924 /* If there is only one scalar field in the record, don't block copy. */
1925 if (single_scalar_field_in_record_p (elt->type))
1926 use_block_copy = false;
1927
1928 /* ??? What to do here. If there are two fields, and we've only
1929 instantiated one, then instantiating the other is clearly a win.
1930 If there are a large number of fields then the size of the copy
1931 is much more of a factor. */
1932
1933 /* If the structure is small, and we've made copies, go ahead
1934 and instantiate, hoping that the copies will go away. */
1935 if (full_size <= max_size
1936 && (full_count - inst_count) <= max_count
1937 && elt->n_copies > elt->n_uses)
1938 use_block_copy = false;
1939 else if (inst_count * 100 >= full_count * SRA_FIELD_STRUCTURE_RATIO
1940 && inst_size * 100 >= full_size * SRA_FIELD_STRUCTURE_RATIO)
1941 use_block_copy = false;
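/* As a worked example of the ratio test, assuming a
   SRA_FIELD_STRUCTURE_RATIO of 75: for a 16-byte structure with four
   fields of which three (12 bytes) are already instantiated, we have
   3 * 100 >= 4 * 75 and 12 * 100 >= 16 * 75, so element copy is chosen.  */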
1942
1943 /* In order to avoid block copy, we have to be able to instantiate
1944 all elements of the type. See if this is possible. */
1945 if (!use_block_copy
1946 && (!can_completely_scalarize_p (elt)
1947 || !type_can_instantiate_all_elements (elt->type)))
1948 use_block_copy = true;
1949 }
1950
1951 elt->use_block_copy = use_block_copy;
1952
1953 /* Groups behave like their parent. */
1954 for (c = elt->groups; c; c = c->sibling)
1955 c->use_block_copy = use_block_copy;
1956
1957 if (dump_file)
1958 {
1959 fprintf (dump_file, "Using %s for ",
1960 use_block_copy ? "block-copy" : "element-copy");
1961 dump_sra_elt_name (dump_file, elt);
1962 fputc ('\n', dump_file);
1963 }
1964
1965 if (!use_block_copy)
1966 {
1967 instantiate_missing_elements (elt);
1968 return true;
1969 }
1970 }
1971
1972 any_inst = elt->replacement != NULL;
1973
1974 for (c = elt->children; c ; c = c->sibling)
1975 any_inst |= decide_block_copy (c);
1976
1977 return any_inst;
1978 }
1979
1980 /* Entry point to phase 3. Instantiate scalar replacement variables. */
1981
1982 static void
1983 decide_instantiations (void)
1984 {
1985 unsigned int i;
1986 bool cleared_any;
1987 bitmap_head done_head;
1988 bitmap_iterator bi;
1989
1990 /* We cannot clear bits from a bitmap we're iterating over,
1991 so save up all the bits to clear until the end. */
1992 bitmap_initialize (&done_head, &bitmap_default_obstack);
1993 cleared_any = false;
1994
1995 EXECUTE_IF_SET_IN_BITMAP (sra_candidates, 0, i, bi)
1996 {
1997 tree var = referenced_var (i);
1998 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
1999 if (elt)
2000 {
2001 decide_instantiation_1 (elt, 0, 0);
2002 if (!decide_block_copy (elt))
2003 elt = NULL;
2004 }
2005 if (!elt)
2006 {
2007 bitmap_set_bit (&done_head, i);
2008 cleared_any = true;
2009 }
2010 }
2011
2012 if (cleared_any)
2013 {
2014 bitmap_and_compl_into (sra_candidates, &done_head);
2015 bitmap_and_compl_into (needs_copy_in, &done_head);
2016 }
2017 bitmap_clear (&done_head);
2018
2019 mark_set_for_renaming (sra_candidates);
2020
2021 if (dump_file)
2022 fputc ('\n', dump_file);
2023 }
2024
2025 \f
2026 /* Phase Four: Update the function to match the replacements created. */
2027
2028 /* Mark all the variables in VDEF/VUSE operators for STMT for
2029 renaming. This becomes necessary when we modify all of a
2030 non-scalar. */
2031
2032 static void
2033 mark_all_v_defs_1 (tree stmt)
2034 {
2035 tree sym;
2036 ssa_op_iter iter;
2037
2038 update_stmt_if_modified (stmt);
2039
2040 FOR_EACH_SSA_TREE_OPERAND (sym, stmt, iter, SSA_OP_ALL_VIRTUALS)
2041 {
2042 if (TREE_CODE (sym) == SSA_NAME)
2043 sym = SSA_NAME_VAR (sym);
2044 mark_sym_for_renaming (sym);
2045 }
2046 }
2047
2048
2049 /* Mark all the variables in virtual operands in all the statements in
2050 LIST for renaming. */
2051
2052 static void
2053 mark_all_v_defs (tree list)
2054 {
2055 if (TREE_CODE (list) != STATEMENT_LIST)
2056 mark_all_v_defs_1 (list);
2057 else
2058 {
2059 tree_stmt_iterator i;
2060 for (i = tsi_start (list); !tsi_end_p (i); tsi_next (&i))
2061 mark_all_v_defs_1 (tsi_stmt (i));
2062 }
2063 }
2064
2065
2066 /* Mark every replacement under ELT with TREE_NO_WARNING. */
2067
2068 static void
2069 mark_no_warning (struct sra_elt *elt)
2070 {
2071 if (!elt->all_no_warning)
2072 {
2073 if (elt->replacement)
2074 TREE_NO_WARNING (elt->replacement) = 1;
2075 else
2076 {
2077 struct sra_elt *c;
2078 FOR_EACH_ACTUAL_CHILD (c, elt)
2079 mark_no_warning (c);
2080 }
2081 elt->all_no_warning = true;
2082 }
2083 }
2084
2085 /* Build a single level component reference to ELT rooted at BASE. */
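/* For example (hypothetical element names), the reference built is:
     RECORD_TYPE base:    base.field        (COMPONENT_REF)
     ARRAY_TYPE base:     base[index]       (ARRAY_REF or ARRAY_RANGE_REF)
     COMPLEX_TYPE base:   __real__ base or __imag__ base  */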
2086
2087 static tree
2088 generate_one_element_ref (struct sra_elt *elt, tree base)
2089 {
2090 switch (TREE_CODE (TREE_TYPE (base)))
2091 {
2092 case RECORD_TYPE:
2093 {
2094 tree field = elt->element;
2095
2096 /* We can't test elt->in_bitfld_block here because, when this is
2097 called from instantiate_element, we haven't set this field
2098 yet. */
2099 if (TREE_CODE (field) == BIT_FIELD_REF)
2100 {
2101 tree ret = unshare_expr (field);
2102 TREE_OPERAND (ret, 0) = base;
2103 return ret;
2104 }
2105
2106 /* Watch out for compatible records with differing field lists. */
2107 if (DECL_FIELD_CONTEXT (field) != TYPE_MAIN_VARIANT (TREE_TYPE (base)))
2108 field = find_compatible_field (TREE_TYPE (base), field);
2109
2110 return build3 (COMPONENT_REF, elt->type, base, field, NULL);
2111 }
2112
2113 case ARRAY_TYPE:
2114 if (TREE_CODE (elt->element) == RANGE_EXPR)
2115 return build4 (ARRAY_RANGE_REF, elt->type, base,
2116 TREE_OPERAND (elt->element, 0), NULL, NULL);
2117 else
2118 return build4 (ARRAY_REF, elt->type, base, elt->element, NULL, NULL);
2119
2120 case COMPLEX_TYPE:
2121 if (elt->element == integer_zero_node)
2122 return build1 (REALPART_EXPR, elt->type, base);
2123 else
2124 return build1 (IMAGPART_EXPR, elt->type, base);
2125
2126 default:
2127 gcc_unreachable ();
2128 }
2129 }
2130
2131 /* Build a full component reference to ELT rooted at its native variable. */
2132
2133 static tree
2134 generate_element_ref (struct sra_elt *elt)
2135 {
2136 if (elt->parent)
2137 return generate_one_element_ref (elt, generate_element_ref (elt->parent));
2138 else
2139 return elt->element;
2140 }
2141
2142 /* Return true if BF is a bit-field that we can handle like a scalar. */
2143
2144 static bool
2145 scalar_bitfield_p (tree bf)
2146 {
2147 return (TREE_CODE (bf) == BIT_FIELD_REF
2148 && (is_gimple_reg (TREE_OPERAND (bf, 0))
2149 || (TYPE_MODE (TREE_TYPE (TREE_OPERAND (bf, 0))) != BLKmode
2150 && (!TREE_SIDE_EFFECTS (TREE_OPERAND (bf, 0))
2151 || (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE
2152 (TREE_OPERAND (bf, 0))))
2153 <= BITS_PER_WORD)))));
2154 }
2155
2156 /* Create an assignment statement from SRC to DST. */
2157
2158 static tree
2159 sra_build_assignment (tree dst, tree src)
2160 {
2161 /* Turning BIT_FIELD_REFs into bit operations enables other passes
2162 to do a much better job at optimizing the code.
2163 From dst = BIT_FIELD_REF <var, sz, off> we produce
2164
2165 SR.1 = (scalar type) var;
2166 SR.2 = SR.1 >> off;
2167 SR.3 = SR.2 & ((1 << sz) - 1);
2168 ... possible sign extension of SR.3 ...
2169 dst = (destination type) SR.3;
2170 */
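/* As a concrete sketch, assuming a 32-bit unsigned VAR on a
   little-endian target and dst = BIT_FIELD_REF <var, 8, 16>:

     SR.1 = var >> 16;
     SR.2 = SR.1 & 255;
     dst = (destination type) SR.2;

   On a big-endian target the shift count becomes 32 - (16 + 8) = 8
   instead.  */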
2171 if (scalar_bitfield_p (src))
2172 {
2173 tree var, shift, width;
2174 tree utype, stype, stmp, utmp, dtmp;
2175 tree list, stmt;
2176 bool unsignedp = (INTEGRAL_TYPE_P (TREE_TYPE (src))
2177 ? TYPE_UNSIGNED (TREE_TYPE (src)) : true);
2178
2179 var = TREE_OPERAND (src, 0);
2180 width = TREE_OPERAND (src, 1);
2181 /* The offset needs to be adjusted to a right shift quantity
2182 depending on the endianness. */
2183 if (BYTES_BIG_ENDIAN)
2184 {
2185 tree tmp = size_binop (PLUS_EXPR, width, TREE_OPERAND (src, 2));
2186 shift = size_binop (MINUS_EXPR, TYPE_SIZE (TREE_TYPE (var)), tmp);
2187 }
2188 else
2189 shift = TREE_OPERAND (src, 2);
2190
2191 /* In weird cases we have non-integral types for the source or
2192 destination object.
2193 ??? For unknown reasons we also want an unsigned scalar type. */
2194 stype = TREE_TYPE (var);
2195 if (!INTEGRAL_TYPE_P (stype))
2196 stype = lang_hooks.types.type_for_size (TREE_INT_CST_LOW
2197 (TYPE_SIZE (stype)), 1);
2198 else if (!TYPE_UNSIGNED (stype))
2199 stype = unsigned_type_for (stype);
2200
2201 utype = TREE_TYPE (dst);
2202 if (!INTEGRAL_TYPE_P (utype))
2203 utype = lang_hooks.types.type_for_size (TREE_INT_CST_LOW
2204 (TYPE_SIZE (utype)), 1);
2205 else if (!TYPE_UNSIGNED (utype))
2206 utype = unsigned_type_for (utype);
2207
2208 list = NULL;
2209 stmp = make_rename_temp (stype, "SR");
2210
2211 /* Convert the base var of the BIT_FIELD_REF to the scalar type
2212 we use for computation if we cannot use it directly. */
2213 if (!useless_type_conversion_p (stype, TREE_TYPE (var)))
2214 {
2215 if (INTEGRAL_TYPE_P (TREE_TYPE (var)))
2216 stmt = build_gimple_modify_stmt (stmp,
2217 fold_convert (stype, var));
2218 else
2219 stmt = build_gimple_modify_stmt (stmp,
2220 fold_build1 (VIEW_CONVERT_EXPR,
2221 stype, var));
2222 append_to_statement_list (stmt, &list);
2223 var = stmp;
2224 }
2225
2226 if (!integer_zerop (shift))
2227 {
2228 stmt = build_gimple_modify_stmt (stmp,
2229 fold_build2 (RSHIFT_EXPR, stype,
2230 var, shift));
2231 append_to_statement_list (stmt, &list);
2232 var = stmp;
2233 }
2234
2235 /* If we need a masking operation, produce one. */
2236 if (TREE_INT_CST_LOW (width) == TYPE_PRECISION (stype))
2237 unsignedp = true;
2238 else
2239 {
2240 tree one = build_int_cst_wide (stype, 1, 0);
2241 tree mask = int_const_binop (LSHIFT_EXPR, one, width, 0);
2242 mask = int_const_binop (MINUS_EXPR, mask, one, 0);
2243
2244 stmt = build_gimple_modify_stmt (stmp,
2245 fold_build2 (BIT_AND_EXPR, stype,
2246 var, mask));
2247 append_to_statement_list (stmt, &list);
2248 var = stmp;
2249 }
2250
2251 /* After shifting and masking, convert to the target type. */
2252 utmp = stmp;
2253 if (!useless_type_conversion_p (utype, stype))
2254 {
2255 utmp = make_rename_temp (utype, "SR");
2256
2257 stmt = build_gimple_modify_stmt (utmp, fold_convert (utype, var));
2258 append_to_statement_list (stmt, &list);
2259
2260 var = utmp;
2261 }
2262
2263 /* Perform sign extension, if required.
2264 ??? This should never be necessary. */
2265 if (!unsignedp)
2266 {
2267 tree signbit = int_const_binop (LSHIFT_EXPR,
2268 build_int_cst_wide (utype, 1, 0),
2269 size_binop (MINUS_EXPR, width,
2270 bitsize_int (1)), 0);
2271
2272 stmt = build_gimple_modify_stmt (utmp,
2273 fold_build2 (BIT_XOR_EXPR, utype,
2274 var, signbit));
2275 append_to_statement_list (stmt, &list);
2276
2277 stmt = build_gimple_modify_stmt (utmp,
2278 fold_build2 (MINUS_EXPR, utype,
2279 utmp, signbit));
2280 append_to_statement_list (stmt, &list);
2281
2282 var = utmp;
2283 }
2284
2285 /* fold_build3 (BIT_FIELD_REF, ...) sometimes returns a cast. */
2286 STRIP_NOPS (dst);
2287
2288 /* Finally, move and convert to the destination. */
2289 if (!useless_type_conversion_p (TREE_TYPE (dst), TREE_TYPE (var)))
2290 {
2291 if (INTEGRAL_TYPE_P (TREE_TYPE (dst)))
2292 var = fold_convert (TREE_TYPE (dst), var);
2293 else
2294 var = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (dst), var);
2295
2296 /* If the destination is not a register the conversion needs
2297 to be a separate statement. */
2298 if (!is_gimple_reg (dst))
2299 {
2300 dtmp = make_rename_temp (TREE_TYPE (dst), "SR");
2301 stmt = build_gimple_modify_stmt (dtmp, var);
2302 append_to_statement_list (stmt, &list);
2303 var = dtmp;
2304 }
2305 }
2306 stmt = build_gimple_modify_stmt (dst, var);
2307 append_to_statement_list (stmt, &list);
2308
2309 return list;
2310 }
2311
2312 /* fold_build3 (BIT_FIELD_REF, ...) sometimes returns a cast. */
2313 if (CONVERT_EXPR_P (dst))
2314 {
2315 STRIP_NOPS (dst);
2316 src = fold_convert (TREE_TYPE (dst), src);
2317 }
2318 /* It was hoped that we could perform some type sanity checking
2319 here, but since front-ends can emit accesses of fields in types
2320 different from their nominal types and copy structures containing
2321 them as a whole, we'd have to handle such differences here.
2322 Since such accesses under different types require compatibility
2323 anyway, there's little point in making tests and/or adding
2324 conversions to ensure the types of src and dst are the same.
2325 So we just assume type differences at this point are ok.
2326 The only exception we make here are pointer types, which can be different
2327 in e.g. structurally equal, but non-identical RECORD_TYPEs. */
2328 else if (POINTER_TYPE_P (TREE_TYPE (dst))
2329 && !useless_type_conversion_p (TREE_TYPE (dst), TREE_TYPE (src)))
2330 src = fold_convert (TREE_TYPE (dst), src);
2331
2332 return build_gimple_modify_stmt (dst, src);
2333 }
2334
2335 /* BIT_FIELD_REFs must not be shared. sra_build_elt_assignment()
2336 takes care of assignments, but we must create copies for uses. */
2337 #define REPLDUP(t) (TREE_CODE (t) != BIT_FIELD_REF ? (t) : unshare_expr (t))
2338
2339 /* Emit an assignment from SRC to DST, but if DST is a scalarizable
2340 BIT_FIELD_REF, turn it into bit operations. */
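/* As a sketch of the generated sequence (little-endian, hypothetical
   constants), for BIT_FIELD_REF <var, 8, 16> = src with a 32-bit
   unsigned var:

     SR.1 = var & 0xff00ffff;       clear the destination bits
     SR.2 = (unsigned) src << 16;   position the new bits
     var  = SR.1 | SR.2;            merge the result back

   Big-endian targets use mirrored shift counts, and extra conversions
   are emitted when the types involved are not already unsigned
   integers of the appropriate width.  */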
2341
2342 static tree
2343 sra_build_bf_assignment (tree dst, tree src)
2344 {
2345 tree var, type, utype, tmp, tmp2, tmp3;
2346 tree list, stmt;
2347 tree cst, cst2, mask;
2348 tree minshift, maxshift;
2349
2350 if (TREE_CODE (dst) != BIT_FIELD_REF)
2351 return sra_build_assignment (dst, src);
2352
2353 var = TREE_OPERAND (dst, 0);
2354
2355 if (!scalar_bitfield_p (dst))
2356 return sra_build_assignment (REPLDUP (dst), src);
2357
2358 list = NULL;
2359
2360 cst = fold_convert (bitsizetype, TREE_OPERAND (dst, 2));
2361 cst2 = size_binop (PLUS_EXPR,
2362 fold_convert (bitsizetype, TREE_OPERAND (dst, 1)),
2363 cst);
2364
2365 if (BYTES_BIG_ENDIAN)
2366 {
2367 maxshift = size_binop (MINUS_EXPR, TYPE_SIZE (TREE_TYPE (var)), cst);
2368 minshift = size_binop (MINUS_EXPR, TYPE_SIZE (TREE_TYPE (var)), cst2);
2369 }
2370 else
2371 {
2372 maxshift = cst2;
2373 minshift = cst;
2374 }
2375
2376 type = TREE_TYPE (var);
2377 if (!INTEGRAL_TYPE_P (type))
2378 type = lang_hooks.types.type_for_size
2379 (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (var))), 1);
2380 if (TYPE_UNSIGNED (type))
2381 utype = type;
2382 else
2383 utype = unsigned_type_for (type);
2384
2385 mask = build_int_cst_wide (utype, 1, 0);
2386 if (TREE_INT_CST_LOW (maxshift) == TYPE_PRECISION (utype))
2387 cst = build_int_cst_wide (utype, 0, 0);
2388 else
2389 cst = int_const_binop (LSHIFT_EXPR, mask, maxshift, true);
2390 if (integer_zerop (minshift))
2391 cst2 = mask;
2392 else
2393 cst2 = int_const_binop (LSHIFT_EXPR, mask, minshift, true);
2394 mask = int_const_binop (MINUS_EXPR, cst, cst2, true);
2395 mask = fold_build1 (BIT_NOT_EXPR, utype, mask);
2396
2397 if (TYPE_MAIN_VARIANT (utype) != TYPE_MAIN_VARIANT (TREE_TYPE (var))
2398 && !integer_zerop (mask))
2399 {
2400 tmp = var;
2401 if (!is_gimple_variable (tmp))
2402 tmp = unshare_expr (var);
2403
2404 tmp2 = make_rename_temp (utype, "SR");
2405
2406 if (INTEGRAL_TYPE_P (TREE_TYPE (var)))
2407 stmt = build_gimple_modify_stmt (tmp2, fold_convert (utype, tmp));
2408 else
2409 stmt = build_gimple_modify_stmt (tmp2, fold_build1 (VIEW_CONVERT_EXPR,
2410 utype, tmp));
2411 append_to_statement_list (stmt, &list);
2412 }
2413 else
2414 tmp2 = var;
2415
2416 if (!integer_zerop (mask))
2417 {
2418 tmp = make_rename_temp (utype, "SR");
2419 stmt = build_gimple_modify_stmt (tmp,
2420 fold_build2 (BIT_AND_EXPR, utype,
2421 tmp2, mask));
2422 append_to_statement_list (stmt, &list);
2423 }
2424 else
2425 tmp = mask;
2426
2427 if (is_gimple_reg (src) && INTEGRAL_TYPE_P (TREE_TYPE (src)))
2428 tmp2 = src;
2429 else if (INTEGRAL_TYPE_P (TREE_TYPE (src)))
2430 {
2431 tmp2 = make_rename_temp (TREE_TYPE (src), "SR");
2432 stmt = sra_build_assignment (tmp2, src);
2433 append_to_statement_list (stmt, &list);
2434 }
2435 else
2436 {
2437 tmp2 = make_rename_temp
2438 (lang_hooks.types.type_for_size
2439 (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (src))),
2440 1), "SR");
2441 stmt = sra_build_assignment (tmp2, fold_build1 (VIEW_CONVERT_EXPR,
2442 TREE_TYPE (tmp2), src));
2443 append_to_statement_list (stmt, &list);
2444 }
2445
2446 if (!TYPE_UNSIGNED (TREE_TYPE (tmp2)))
2447 {
2448 tree ut = unsigned_type_for (TREE_TYPE (tmp2));
2449 tmp3 = make_rename_temp (ut, "SR");
2450 tmp2 = fold_convert (ut, tmp2);
2451 stmt = sra_build_assignment (tmp3, tmp2);
2452 append_to_statement_list (stmt, &list);
2453
2454 tmp2 = fold_build1 (BIT_NOT_EXPR, utype, mask);
2455 tmp2 = int_const_binop (RSHIFT_EXPR, tmp2, minshift, true);
2456 tmp2 = fold_convert (ut, tmp2);
2457 tmp2 = fold_build2 (BIT_AND_EXPR, ut, tmp3, tmp2);
2458
2459 if (tmp3 != tmp2)
2460 {
2461 tmp3 = make_rename_temp (ut, "SR");
2462 stmt = sra_build_assignment (tmp3, tmp2);
2463 append_to_statement_list (stmt, &list);
2464 }
2465
2466 tmp2 = tmp3;
2467 }
2468
2469 if (TYPE_MAIN_VARIANT (TREE_TYPE (tmp2)) != TYPE_MAIN_VARIANT (utype))
2470 {
2471 tmp3 = make_rename_temp (utype, "SR");
2472 tmp2 = fold_convert (utype, tmp2);
2473 stmt = sra_build_assignment (tmp3, tmp2);
2474 append_to_statement_list (stmt, &list);
2475 tmp2 = tmp3;
2476 }
2477
2478 if (!integer_zerop (minshift))
2479 {
2480 tmp3 = make_rename_temp (utype, "SR");
2481 stmt = build_gimple_modify_stmt (tmp3,
2482 fold_build2 (LSHIFT_EXPR, utype,
2483 tmp2, minshift));
2484 append_to_statement_list (stmt, &list);
2485 tmp2 = tmp3;
2486 }
2487
2488 if (utype != TREE_TYPE (var))
2489 tmp3 = make_rename_temp (utype, "SR");
2490 else
2491 tmp3 = var;
2492 stmt = build_gimple_modify_stmt (tmp3,
2493 fold_build2 (BIT_IOR_EXPR, utype,
2494 tmp, tmp2));
2495 append_to_statement_list (stmt, &list);
2496
2497 if (tmp3 != var)
2498 {
2499 if (TREE_TYPE (var) == type)
2500 stmt = build_gimple_modify_stmt (var,
2501 fold_convert (type, tmp3));
2502 else
2503 stmt = build_gimple_modify_stmt (var,
2504 fold_build1 (VIEW_CONVERT_EXPR,
2505 TREE_TYPE (var), tmp3));
2506 append_to_statement_list (stmt, &list);
2507 }
2508
2509 return list;
2510 }
2511
2512 /* Expand an assignment of SRC to the scalarized representation of
2513 ELT. If it is a field group, try to widen the assignment to cover
2514 the full variable. */
2515
2516 static tree
2517 sra_build_elt_assignment (struct sra_elt *elt, tree src)
2518 {
2519 tree dst = elt->replacement;
2520 tree var, tmp, cst, cst2, list, stmt;
2521
2522 if (TREE_CODE (dst) != BIT_FIELD_REF
2523 || !elt->in_bitfld_block)
2524 return sra_build_assignment (REPLDUP (dst), src);
2525
2526 var = TREE_OPERAND (dst, 0);
2527
2528 /* Try to widen the assignment to the entire variable.
2529 We need the source to be a BIT_FIELD_REF as well, so that, for
2530 BIT_FIELD_REF<d,sz,dp> = BIT_FIELD_REF<s,sz,sp>,
2531 the conditions are met by design and we can turn it into
2532 d = BIT_FIELD_REF<s,dw,sp-dp>. */
2533 if (elt->in_bitfld_block == 2
2534 && TREE_CODE (src) == BIT_FIELD_REF)
2535 {
2536 tmp = src;
2537 cst = TYPE_SIZE (TREE_TYPE (var));
2538 cst2 = size_binop (MINUS_EXPR, TREE_OPERAND (src, 2),
2539 TREE_OPERAND (dst, 2));
2540
2541 src = TREE_OPERAND (src, 0);
2542
2543 /* Avoid full-width bit-fields. */
2544 if (integer_zerop (cst2)
2545 && tree_int_cst_equal (cst, TYPE_SIZE (TREE_TYPE (src))))
2546 {
2547 if (INTEGRAL_TYPE_P (TREE_TYPE (src))
2548 && !TYPE_UNSIGNED (TREE_TYPE (src)))
2549 src = fold_convert (unsigned_type_for (TREE_TYPE (src)), src);
2550
2551 /* If a single conversion won't do, we'll need a statement
2552 list. */
2553 if (TYPE_MAIN_VARIANT (TREE_TYPE (var))
2554 != TYPE_MAIN_VARIANT (TREE_TYPE (src)))
2555 {
2556 list = NULL;
2557
2558 if (!INTEGRAL_TYPE_P (TREE_TYPE (src)))
2559 src = fold_build1 (VIEW_CONVERT_EXPR,
2560 lang_hooks.types.type_for_size
2561 (TREE_INT_CST_LOW
2562 (TYPE_SIZE (TREE_TYPE (src))),
2563 1), src);
2564 gcc_assert (TYPE_UNSIGNED (TREE_TYPE (src)));
2565
2566 tmp = make_rename_temp (TREE_TYPE (src), "SR");
2567 stmt = build_gimple_modify_stmt (tmp, src);
2568 append_to_statement_list (stmt, &list);
2569
2570 stmt = sra_build_assignment (var,
2571 fold_convert (TREE_TYPE (var),
2572 tmp));
2573 append_to_statement_list (stmt, &list);
2574
2575 return list;
2576 }
2577
2578 src = fold_convert (TREE_TYPE (var), src);
2579 }
2580 else
2581 {
2582 src = fold_convert (TREE_TYPE (var), tmp);
2583 }
2584
2585 return sra_build_assignment (var, src);
2586 }
2587
2588 return sra_build_bf_assignment (dst, src);
2589 }
2590
2591 /* Generate a set of assignment statements in *LIST_P to copy all
2592 instantiated elements under ELT to or from the equivalent structure
2593 rooted at EXPR. COPY_OUT controls the direction of the copy, with
2594 true meaning to copy out of EXPR into ELT. */
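/* For example, assuming a structure ELT with scalarized members x and y
   whose replacements are named p$x and p$y (hypothetical names):
   with COPY_OUT true the generated statements are
     p$x = EXPR.x;  p$y = EXPR.y;
   and with COPY_OUT false they are
     EXPR.x = p$x;  EXPR.y = p$y;  */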
2595
2596 static void
2597 generate_copy_inout (struct sra_elt *elt, bool copy_out, tree expr,
2598 tree *list_p)
2599 {
2600 struct sra_elt *c;
2601 tree t;
2602
2603 if (!copy_out && TREE_CODE (expr) == SSA_NAME
2604 && TREE_CODE (TREE_TYPE (expr)) == COMPLEX_TYPE)
2605 {
2606 tree r, i;
2607
2608 c = lookup_element (elt, integer_zero_node, NULL, NO_INSERT);
2609 r = c->replacement;
2610 c = lookup_element (elt, integer_one_node, NULL, NO_INSERT);
2611 i = c->replacement;
2612
2613 t = build2 (COMPLEX_EXPR, elt->type, r, i);
2614 t = sra_build_bf_assignment (expr, t);
2615 SSA_NAME_DEF_STMT (expr) = t;
2616 append_to_statement_list (t, list_p);
2617 }
2618 else if (elt->replacement)
2619 {
2620 if (copy_out)
2621 t = sra_build_elt_assignment (elt, expr);
2622 else
2623 t = sra_build_bf_assignment (expr, REPLDUP (elt->replacement));
2624 append_to_statement_list (t, list_p);
2625 }
2626 else
2627 {
2628 FOR_EACH_ACTUAL_CHILD (c, elt)
2629 {
2630 t = generate_one_element_ref (c, unshare_expr (expr));
2631 generate_copy_inout (c, copy_out, t, list_p);
2632 }
2633 }
2634 }
2635
2636 /* Generate a set of assignment statements in *LIST_P to copy all instantiated
2637 elements under SRC to their counterparts under DST. There must be a 1-1
2638 correspondence of instantiated elements. */
2639
2640 static void
2641 generate_element_copy (struct sra_elt *dst, struct sra_elt *src, tree *list_p)
2642 {
2643 struct sra_elt *dc, *sc;
2644
2645 FOR_EACH_ACTUAL_CHILD (dc, dst)
2646 {
2647 sc = lookup_element (src, dc->element, NULL, NO_INSERT);
2648 if (!sc && dc->in_bitfld_block == 2)
2649 {
2650 struct sra_elt *dcs;
2651
2652 FOR_EACH_ACTUAL_CHILD (dcs, dc)
2653 {
2654 sc = lookup_element (src, dcs->element, NULL, NO_INSERT);
2655 gcc_assert (sc);
2656 generate_element_copy (dcs, sc, list_p);
2657 }
2658
2659 continue;
2660 }
2661
2662 /* If DST and SRC are structs with the same elements, but do not have
2663 the same TYPE_MAIN_VARIANT, then lookup of DST FIELD_DECL in SRC
2664 will fail. Try harder by finding the corresponding FIELD_DECL
2665 in SRC. */
2666 if (!sc)
2667 {
2668 tree f;
2669
2670 gcc_assert (useless_type_conversion_p (dst->type, src->type));
2671 gcc_assert (TREE_CODE (dc->element) == FIELD_DECL);
2672 for (f = TYPE_FIELDS (src->type); f ; f = TREE_CHAIN (f))
2673 if (simple_cst_equal (DECL_FIELD_OFFSET (f),
2674 DECL_FIELD_OFFSET (dc->element)) > 0
2675 && simple_cst_equal (DECL_FIELD_BIT_OFFSET (f),
2676 DECL_FIELD_BIT_OFFSET (dc->element)) > 0
2677 && simple_cst_equal (DECL_SIZE (f),
2678 DECL_SIZE (dc->element)) > 0
2679 && (useless_type_conversion_p (TREE_TYPE (dc->element),
2680 TREE_TYPE (f))
2681 || (POINTER_TYPE_P (TREE_TYPE (dc->element))
2682 && POINTER_TYPE_P (TREE_TYPE (f)))))
2683 break;
2684 gcc_assert (f != NULL_TREE);
2685 sc = lookup_element (src, f, NULL, NO_INSERT);
2686 }
2687
2688 generate_element_copy (dc, sc, list_p);
2689 }
2690
2691 if (dst->replacement)
2692 {
2693 tree t;
2694
2695 gcc_assert (src->replacement);
2696
2697 t = sra_build_elt_assignment (dst, REPLDUP (src->replacement));
2698 append_to_statement_list (t, list_p);
2699 }
2700 }
2701
2702 /* Generate a set of assignment statements in *LIST_P to zero all instantiated
2703 elements under ELT. In addition, do not assign to elements that have been
2704 marked VISITED but do reset the visited flag; this allows easy coordination
2705 with generate_element_init. */
2706
2707 static void
2708 generate_element_zero (struct sra_elt *elt, tree *list_p)
2709 {
2710 struct sra_elt *c;
2711
2712 if (elt->visited)
2713 {
2714 elt->visited = false;
2715 return;
2716 }
2717
2718 if (!elt->in_bitfld_block)
2719 FOR_EACH_ACTUAL_CHILD (c, elt)
2720 generate_element_zero (c, list_p);
2721
2722 if (elt->replacement)
2723 {
2724 tree t;
2725
2726 gcc_assert (elt->is_scalar);
2727 t = fold_convert (elt->type, integer_zero_node);
2728
2729 t = sra_build_elt_assignment (elt, t);
2730 append_to_statement_list (t, list_p);
2731 }
2732 }
2733
2734 /* Generate an assignment VAR = INIT, where INIT may need gimplification.
2735 Add the result to *LIST_P. */
2736
2737 static void
2738 generate_one_element_init (struct sra_elt *elt, tree init, tree *list_p)
2739 {
2740 /* The replacement can be almost arbitrarily complex. Gimplify. */
2741 tree stmt = sra_build_elt_assignment (elt, init);
2742 gimplify_and_add (stmt, list_p);
2743 }
2744
2745 /* Generate a set of assignment statements in *LIST_P to set all instantiated
2746 elements under ELT with the contents of the initializer INIT. In addition,
2747 mark all assigned elements VISITED; this allows easy coordination with
2748 generate_element_zero. Return false if we found a case we couldn't
2749 handle. */
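/* For example (hypothetical replacement names): for
     struct s { int a; float b; } x = { 1, 2.0f };
   the CONSTRUCTOR case produces  x$a = 1;  x$b = 2.0f;  while for a
   COMPLEX_CST initializer such as 3.0 + 4.0i the real and imaginary
   replacements receive 3.0 and 4.0 respectively.  */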
2750
2751 static bool
2752 generate_element_init_1 (struct sra_elt *elt, tree init, tree *list_p)
2753 {
2754 bool result = true;
2755 enum tree_code init_code;
2756 struct sra_elt *sub;
2757 tree t;
2758 unsigned HOST_WIDE_INT idx;
2759 tree value, purpose;
2760
2761 /* We can be passed DECL_INITIAL of a static variable. It might have a
2762 conversion, which we strip off here. */
2763 STRIP_USELESS_TYPE_CONVERSION (init);
2764 init_code = TREE_CODE (init);
2765
2766 if (elt->is_scalar)
2767 {
2768 if (elt->replacement)
2769 {
2770 generate_one_element_init (elt, init, list_p);
2771 elt->visited = true;
2772 }
2773 return result;
2774 }
2775
2776 switch (init_code)
2777 {
2778 case COMPLEX_CST:
2779 case COMPLEX_EXPR:
2780 FOR_EACH_ACTUAL_CHILD (sub, elt)
2781 {
2782 if (sub->element == integer_zero_node)
2783 t = (init_code == COMPLEX_EXPR
2784 ? TREE_OPERAND (init, 0) : TREE_REALPART (init));
2785 else
2786 t = (init_code == COMPLEX_EXPR
2787 ? TREE_OPERAND (init, 1) : TREE_IMAGPART (init));
2788 result &= generate_element_init_1 (sub, t, list_p);
2789 }
2790 break;
2791
2792 case CONSTRUCTOR:
2793 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (init), idx, purpose, value)
2794 {
2795 if (TREE_CODE (purpose) == RANGE_EXPR)
2796 {
2797 tree lower = TREE_OPERAND (purpose, 0);
2798 tree upper = TREE_OPERAND (purpose, 1);
2799
2800 while (1)
2801 {
2802 sub = lookup_element (elt, lower, NULL, NO_INSERT);
2803 if (sub != NULL)
2804 result &= generate_element_init_1 (sub, value, list_p);
2805 if (tree_int_cst_equal (lower, upper))
2806 break;
2807 lower = int_const_binop (PLUS_EXPR, lower,
2808 integer_one_node, true);
2809 }
2810 }
2811 else
2812 {
2813 sub = lookup_element (elt, purpose, NULL, NO_INSERT);
2814 if (sub != NULL)
2815 result &= generate_element_init_1 (sub, value, list_p);
2816 }
2817 }
2818 break;
2819
2820 default:
2821 elt->visited = true;
2822 result = false;
2823 }
2824
2825 return result;
2826 }
2827
2828 /* A wrapper function for generate_element_init_1 that handles cleanup after
2829 gimplification. */
2830
2831 static bool
2832 generate_element_init (struct sra_elt *elt, tree init, tree *list_p)
2833 {
2834 bool ret;
2835 struct gimplify_ctx gctx;
2836
2837 push_gimplify_context (&gctx);
2838 ret = generate_element_init_1 (elt, init, list_p);
2839 pop_gimplify_context (NULL);
2840
2841 /* The replacement can expose previously unreferenced variables. */
2842 if (ret && *list_p)
2843 {
2844 tree_stmt_iterator i;
2845
2846 for (i = tsi_start (*list_p); !tsi_end_p (i); tsi_next (&i))
2847 find_new_referenced_vars (tsi_stmt_ptr (i));
2848 }
2849
2850 return ret;
2851 }
2852
2853 /* Insert STMT on all the outgoing edges of BB. Note that if BB
2854 has more than one edge, STMT will be replicated for each edge. Also,
2855 abnormal edges will be ignored. */
2856
2857 void
2858 insert_edge_copies (tree stmt, basic_block bb)
2859 {
2860 edge e;
2861 edge_iterator ei;
2862 bool first_copy;
2863
2864 first_copy = true;
2865 FOR_EACH_EDGE (e, ei, bb->succs)
2866 {
2867 /* We don't need to insert copies on abnormal edges. The
2868 value of the scalar replacement is not guaranteed to
2869 be valid through an abnormal edge. */
2870 if (!(e->flags & EDGE_ABNORMAL))
2871 {
2872 if (first_copy)
2873 {
2874 bsi_insert_on_edge (e, stmt);
2875 first_copy = false;
2876 }
2877 else
2878 bsi_insert_on_edge (e, unsave_expr_now (stmt));
2879 }
2880 }
2881 }
2882
2883 /* Helper function to insert LIST before BSI, and set up line number info. */
2884
2885 void
2886 sra_insert_before (block_stmt_iterator *bsi, tree list)
2887 {
2888 tree stmt = bsi_stmt (*bsi);
2889
2890 if (EXPR_HAS_LOCATION (stmt))
2891 annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
2892 bsi_insert_before (bsi, list, BSI_SAME_STMT);
2893 }
2894
2895 /* Similarly, but insert after BSI. Handles insertion onto edges as well. */
2896
2897 void
2898 sra_insert_after (block_stmt_iterator *bsi, tree list)
2899 {
2900 tree stmt = bsi_stmt (*bsi);
2901
2902 if (EXPR_HAS_LOCATION (stmt))
2903 annotate_all_with_locus (&list, EXPR_LOCATION (stmt));
2904
2905 if (stmt_ends_bb_p (stmt))
2906 insert_edge_copies (list, bsi->bb);
2907 else
2908 bsi_insert_after (bsi, list, BSI_SAME_STMT);
2909 }
2910
2911 /* Similarly, but replace the statement at BSI. */
2912
2913 static void
2914 sra_replace (block_stmt_iterator *bsi, tree list)
2915 {
2916 sra_insert_before (bsi, list);
2917 bsi_remove (bsi, false);
2918 if (bsi_end_p (*bsi))
2919 *bsi = bsi_last (bsi->bb);
2920 else
2921 bsi_prev (bsi);
2922 }
2923
2924 /* Data structure that bitfield_overlaps_p fills in with information
2925 about the element passed in and how much of it overlaps with the
2926 bit-range passed to it. */
2927
2928 struct bitfield_overlap_info
2929 {
2930 /* The bit-length of an element. */
2931 tree field_len;
2932
2933 /* The bit-position of the element in its parent. */
2934 tree field_pos;
2935
2936 /* The number of bits of the element that overlap with the incoming
2937 bit range. */
2938 tree overlap_len;
2939
2940 /* The first bit of the element that overlaps with the incoming bit
2941 range. */
2942 tree overlap_pos;
2943 };
2944
2945 /* Return true if a BIT_FIELD_REF<(FLD->parent), BLEN, BPOS>
2946 expression (referenced as BF below) accesses any of the bits in FLD,
2947 false if it doesn't. If DATA is non-null, its field_len and
2948 field_pos are filled in such that BIT_FIELD_REF<(FLD->parent),
2949 field_len, field_pos> (referenced as BFLD below) represents the
2950 entire field FLD->element, and BIT_FIELD_REF<BFLD, overlap_len,
2951 overlap_pos> represents the portion of the entire field that
2952 overlaps with BF. */
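/* As a worked example (bit positions are hypothetical): let BF cover
   BLEN = 16 bits starting at BPOS = 8.  For a field FLD occupying bits
   16..31 of the parent, the overlap is bits 16..23, so overlap_len is 8
   and overlap_pos is NULL (the overlap starts at the first bit of the
   field).  For a field occupying bits 0..15, overlap_pos is 8 and
   overlap_len is 8 (the overlap covers the last eight bits of the
   field).  */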
2953
2954 static bool
2955 bitfield_overlaps_p (tree blen, tree bpos, struct sra_elt *fld,
2956 struct bitfield_overlap_info *data)
2957 {
2958 tree flen, fpos;
2959 bool ret;
2960
2961 if (TREE_CODE (fld->element) == FIELD_DECL)
2962 {
2963 flen = fold_convert (bitsizetype, DECL_SIZE (fld->element));
2964 fpos = fold_convert (bitsizetype, DECL_FIELD_OFFSET (fld->element));
2965 fpos = size_binop (MULT_EXPR, fpos, bitsize_int (BITS_PER_UNIT));
2966 fpos = size_binop (PLUS_EXPR, fpos, DECL_FIELD_BIT_OFFSET (fld->element));
2967 }
2968 else if (TREE_CODE (fld->element) == BIT_FIELD_REF)
2969 {
2970 flen = fold_convert (bitsizetype, TREE_OPERAND (fld->element, 1));
2971 fpos = fold_convert (bitsizetype, TREE_OPERAND (fld->element, 2));
2972 }
2973 else if (TREE_CODE (fld->element) == INTEGER_CST)
2974 {
2975 flen = fold_convert (bitsizetype, TYPE_SIZE (fld->type));
2976 fpos = fold_convert (bitsizetype, fld->element);
2977 fpos = size_binop (MULT_EXPR, flen, fpos);
2978 }
2979 else
2980 gcc_unreachable ();
2981
2982 gcc_assert (host_integerp (blen, 1)
2983 && host_integerp (bpos, 1)
2984 && host_integerp (flen, 1)
2985 && host_integerp (fpos, 1));
2986
2987 ret = ((!tree_int_cst_lt (fpos, bpos)
2988 && tree_int_cst_lt (size_binop (MINUS_EXPR, fpos, bpos),
2989 blen))
2990 || (!tree_int_cst_lt (bpos, fpos)
2991 && tree_int_cst_lt (size_binop (MINUS_EXPR, bpos, fpos),
2992 flen)));
2993
2994 if (!ret)
2995 return ret;
2996
2997 if (data)
2998 {
2999 tree bend, fend;
3000
3001 data->field_len = flen;
3002 data->field_pos = fpos;
3003
3004 fend = size_binop (PLUS_EXPR, fpos, flen);
3005 bend = size_binop (PLUS_EXPR, bpos, blen);
3006
3007 if (tree_int_cst_lt (bend, fend))
3008 data->overlap_len = size_binop (MINUS_EXPR, bend, fpos);
3009 else
3010 data->overlap_len = NULL;
3011
3012 if (tree_int_cst_lt (fpos, bpos))
3013 {
3014 data->overlap_pos = size_binop (MINUS_EXPR, bpos, fpos);
3015 data->overlap_len = size_binop (MINUS_EXPR,
3016 data->overlap_len
3017 ? data->overlap_len
3018 : data->field_len,
3019 data->overlap_pos);
3020 }
3021 else
3022 data->overlap_pos = NULL;
3023 }
3024
3025 return ret;
3026 }
3027
3028 /* Add to LISTP a sequence of statements that copies BLEN bits between
3029 VAR and the scalarized elements of ELT, starting at bit VPOS of VAR
3030 and at bit BPOS of ELT. The direction of the copy is given by
3031 TO_VAR. */
3032
3033 static void
3034 sra_explode_bitfield_assignment (tree var, tree vpos, bool to_var,
3035 tree *listp, tree blen, tree bpos,
3036 struct sra_elt *elt)
3037 {
3038 struct sra_elt *fld;
3039 struct bitfield_overlap_info flp;
3040
3041 FOR_EACH_ACTUAL_CHILD (fld, elt)
3042 {
3043 tree flen, fpos;
3044
3045 if (!bitfield_overlaps_p (blen, bpos, fld, &flp))
3046 continue;
3047
3048 flen = flp.overlap_len ? flp.overlap_len : flp.field_len;
3049 fpos = flp.overlap_pos ? flp.overlap_pos : bitsize_int (0);
3050
3051 if (fld->replacement)
3052 {
3053 tree infld, invar, st, type;
3054
3055 infld = fld->replacement;
3056
3057 type = TREE_TYPE (infld);
3058 if (TYPE_PRECISION (type) != TREE_INT_CST_LOW (flen))
3059 type = lang_hooks.types.type_for_size (TREE_INT_CST_LOW (flen), 1);
3060 else
3061 type = unsigned_type_for (type);
3062
3063 if (TREE_CODE (infld) == BIT_FIELD_REF)
3064 {
3065 fpos = size_binop (PLUS_EXPR, fpos, TREE_OPERAND (infld, 2));
3066 infld = TREE_OPERAND (infld, 0);
3067 }
3068 else if (BYTES_BIG_ENDIAN && DECL_P (fld->element)
3069 && !tree_int_cst_equal (TYPE_SIZE (TREE_TYPE (infld)),
3070 DECL_SIZE (fld->element)))
3071 {
3072 fpos = size_binop (PLUS_EXPR, fpos,
3073 TYPE_SIZE (TREE_TYPE (infld)));
3074 fpos = size_binop (MINUS_EXPR, fpos,
3075 DECL_SIZE (fld->element));
3076 }
3077
3078 infld = fold_build3 (BIT_FIELD_REF, type, infld, flen, fpos);
3079
3080 invar = size_binop (MINUS_EXPR, flp.field_pos, bpos);
3081 if (flp.overlap_pos)
3082 invar = size_binop (PLUS_EXPR, invar, flp.overlap_pos);
3083 invar = size_binop (PLUS_EXPR, invar, vpos);
3084
3085 invar = fold_build3 (BIT_FIELD_REF, type, var, flen, invar);
3086
3087 if (to_var)
3088 st = sra_build_bf_assignment (invar, infld);
3089 else
3090 st = sra_build_bf_assignment (infld, invar);
3091
3092 append_to_statement_list (st, listp);
3093 }
3094 else
3095 {
3096 tree sub = size_binop (MINUS_EXPR, flp.field_pos, bpos);
3097 sub = size_binop (PLUS_EXPR, vpos, sub);
3098 if (flp.overlap_pos)
3099 sub = size_binop (PLUS_EXPR, sub, flp.overlap_pos);
3100
3101 sra_explode_bitfield_assignment (var, sub, to_var, listp,
3102 flen, fpos, fld);
3103 }
3104 }
3105 }
3106
3107 /* Add to LISTBEFOREP statements that copy scalarized members of ELT
3108 that overlap with BIT_FIELD_REF<(ELT->element), BLEN, BPOS> back
3109 into the full variable, and to LISTAFTERP, if non-NULL, statements
3110 that copy the (presumably modified) overlapping portions of the
3111 full variable back to the scalarized variables. */
3112
3113 static void
3114 sra_sync_for_bitfield_assignment (tree *listbeforep, tree *listafterp,
3115 tree blen, tree bpos,
3116 struct sra_elt *elt)
3117 {
3118 struct sra_elt *fld;
3119 struct bitfield_overlap_info flp;
3120
3121 FOR_EACH_ACTUAL_CHILD (fld, elt)
3122 if (bitfield_overlaps_p (blen, bpos, fld, &flp))
3123 {
3124 if (fld->replacement || (!flp.overlap_len && !flp.overlap_pos))
3125 {
3126 generate_copy_inout (fld, false, generate_element_ref (fld),
3127 listbeforep);
3128 mark_no_warning (fld);
3129 if (listafterp)
3130 generate_copy_inout (fld, true, generate_element_ref (fld),
3131 listafterp);
3132 }
3133 else
3134 {
3135 tree flen = flp.overlap_len ? flp.overlap_len : flp.field_len;
3136 tree fpos = flp.overlap_pos ? flp.overlap_pos : bitsize_int (0);
3137
3138 sra_sync_for_bitfield_assignment (listbeforep, listafterp,
3139 flen, fpos, fld);
3140 }
3141 }
3142 }
3143
3144 /* Scalarize a USE. To recap, this is either a simple reference to ELT,
3145 if ELT is scalar, or some occurrence of ELT that requires a complete
3146 aggregate. IS_OUTPUT is true if ELT is being modified. */
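/* For example (hypothetical replacement name a$x): when ELT has a
   scalar replacement, a statement such as
     t = a.x;   becomes   t = a$x;
     a.x = t;   becomes   a$x = t;
   When ELT has no replacement (the whole aggregate is referenced), the
   instantiated sub-elements are instead synced with the aggregate by
   copies inserted before or after the statement.  */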
3147
3148 static void
3149 scalarize_use (struct sra_elt *elt, tree *expr_p, block_stmt_iterator *bsi,
3150 bool is_output, bool use_all)
3151 {
3152 tree stmt = bsi_stmt (*bsi);
3153 tree bfexpr;
3154
3155 if (elt->replacement)
3156 {
3157 tree replacement = elt->replacement;
3158
3159 /* If we have a replacement, then updating the reference is as
3160 simple as modifying the existing statement in place. */
3161 if (is_output
3162 && TREE_CODE (elt->replacement) == BIT_FIELD_REF
3163 && is_gimple_reg (TREE_OPERAND (elt->replacement, 0))
3164 && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3165 && &GIMPLE_STMT_OPERAND (stmt, 0) == expr_p)
3166 {
3167 tree newstmt = sra_build_elt_assignment
3168 (elt, GIMPLE_STMT_OPERAND (stmt, 1));
3169 if (TREE_CODE (newstmt) != STATEMENT_LIST)
3170 {
3171 tree list = NULL;
3172 append_to_statement_list (newstmt, &list);
3173 newstmt = list;
3174 }
3175 sra_replace (bsi, newstmt);
3176 return;
3177 }
3178 else if (!is_output
3179 && TREE_CODE (elt->replacement) == BIT_FIELD_REF
3180 && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3181 && &GIMPLE_STMT_OPERAND (stmt, 1) == expr_p)
3182 {
3183 tree tmp = make_rename_temp
3184 (TREE_TYPE (GIMPLE_STMT_OPERAND (stmt, 0)), "SR");
3185 tree newstmt = sra_build_assignment (tmp, REPLDUP (elt->replacement));
3186
3187 if (TREE_CODE (newstmt) != STATEMENT_LIST)
3188 {
3189 tree list = NULL;
3190 append_to_statement_list (newstmt, &list);
3191 newstmt = list;
3192 }
3193 sra_insert_before (bsi, newstmt);
3194 replacement = tmp;
3195 }
3196 if (is_output)
3197 mark_all_v_defs (stmt);
3198 *expr_p = REPLDUP (replacement);
3199 update_stmt (stmt);
3200 }
3201 else if (use_all && is_output
3202 && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3203 && TREE_CODE (bfexpr
3204 = GIMPLE_STMT_OPERAND (stmt, 0)) == BIT_FIELD_REF
3205 && &TREE_OPERAND (bfexpr, 0) == expr_p
3206 && INTEGRAL_TYPE_P (TREE_TYPE (bfexpr))
3207 && TREE_CODE (TREE_TYPE (*expr_p)) == RECORD_TYPE)
3208 {
3209 tree listbefore = NULL, listafter = NULL;
3210 tree blen = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 1));
3211 tree bpos = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 2));
3212 bool update = false;
3213
3214 if (!elt->use_block_copy)
3215 {
3216 tree type = TREE_TYPE (bfexpr);
3217 tree var = make_rename_temp (type, "SR"), tmp, st, vpos;
3218
3219 GIMPLE_STMT_OPERAND (stmt, 0) = var;
3220 update = true;
3221
3222 if (!TYPE_UNSIGNED (type))
3223 {
3224 type = unsigned_type_for (type);
3225 tmp = make_rename_temp (type, "SR");
3226 st = build_gimple_modify_stmt (tmp,
3227 fold_convert (type, var));
3228 append_to_statement_list (st, &listafter);
3229 var = tmp;
3230 }
3231
3232 /* If VAR is wider than BLEN bits, it is padded at the
3233 most-significant end. We want to set VPOS such that
3234 <BIT_FIELD_REF VAR BLEN VPOS> would refer to the
3235 least-significant BLEN bits of VAR. */
3236 if (BYTES_BIG_ENDIAN)
3237 vpos = size_binop (MINUS_EXPR, TYPE_SIZE (type), blen);
3238 else
3239 vpos = bitsize_int (0);
3240 sra_explode_bitfield_assignment
3241 (var, vpos, false, &listafter, blen, bpos, elt);
3242 }
3243 else
3244 sra_sync_for_bitfield_assignment
3245 (&listbefore, &listafter, blen, bpos, elt);
3246
3247 if (listbefore)
3248 {
3249 mark_all_v_defs (listbefore);
3250 sra_insert_before (bsi, listbefore);
3251 }
3252 if (listafter)
3253 {
3254 mark_all_v_defs (listafter);
3255 sra_insert_after (bsi, listafter);
3256 }
3257
3258 if (update)
3259 update_stmt (stmt);
3260 }
3261 else if (use_all && !is_output
3262 && TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
3263 && TREE_CODE (bfexpr
3264 = GIMPLE_STMT_OPERAND (stmt, 1)) == BIT_FIELD_REF
3265 && &TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0) == expr_p
3266 && INTEGRAL_TYPE_P (TREE_TYPE (bfexpr))
3267 && TREE_CODE (TREE_TYPE (*expr_p)) == RECORD_TYPE)
3268 {
3269 tree list = NULL;
3270 tree blen = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 1));
3271 tree bpos = fold_convert (bitsizetype, TREE_OPERAND (bfexpr, 2));
3272 bool update = false;
3273
3274 if (!elt->use_block_copy)
3275 {
3276 tree type = TREE_TYPE (bfexpr);
3277 tree var, vpos;
3278
3279 if (!TYPE_UNSIGNED (type))
3280 type = unsigned_type_for (type);
3281
3282 var = make_rename_temp (type, "SR");
3283
3284 append_to_statement_list (build_gimple_modify_stmt
3285 (var, build_int_cst_wide (type, 0, 0)),
3286 &list);
3287
3288 /* If VAR is wider than BLEN bits, it is padded at the
3289 most-significant end. We want to set VPOS such that
3290 <BIT_FIELD_REF VAR BLEN VPOS> would refer to the
3291 least-significant BLEN bits of VAR. */
3292 if (BYTES_BIG_ENDIAN)
3293 vpos = size_binop (MINUS_EXPR, TYPE_SIZE (type), blen);
3294 else
3295 vpos = bitsize_int (0);
3296 sra_explode_bitfield_assignment
3297 (var, vpos, true, &list, blen, bpos, elt);
3298
3299 GIMPLE_STMT_OPERAND (stmt, 1) = var;
3300 update = true;
3301 }
3302 else
3303 sra_sync_for_bitfield_assignment
3304 (&list, NULL, blen, bpos, elt);
3305
3306 if (list)
3307 {
3308 mark_all_v_defs (list);
3309 sra_insert_before (bsi, list);
3310 }
3311
3312 if (update)
3313 update_stmt (stmt);
3314 }
3315 else
3316 {
3317 tree list = NULL;
3318
3319 /* Otherwise we need some copies. If ELT is being read, then we
3320 want to store all (modified) sub-elements back into the
3321 structure before the reference takes place. If ELT is being
3322 written, then we want to load the changed values back into
3323 our shadow variables. */
3324 /* ??? We don't check modified for reads, we just always write all of
3325 the values. We should be able to record the SSA number of the VOP
3326 for which the values were last read. If that number matches the
3327 SSA number of the VOP in the current statement, then we needn't
3328 emit an assignment. This would also eliminate double writes when
3329 a structure is passed as more than one argument to a function call.
3330 This optimization would be most effective if sra_walk_function
3331 processed the blocks in dominator order. */
3332
3333 generate_copy_inout (elt, is_output, generate_element_ref (elt), &list);
3334 if (list == NULL)
3335 return;
3336 mark_all_v_defs (list);
3337 if (is_output)
3338 sra_insert_after (bsi, list);
3339 else
3340 {
3341 sra_insert_before (bsi, list);
3342 if (use_all)
3343 mark_no_warning (elt);
3344 }
3345 }
3346 }
3347
3348 /* Scalarize a COPY. To recap, this is an assignment statement between
3349 two scalarizable references, LHS_ELT and RHS_ELT. */
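/* For example, for an assignment a = b between two structures with
   members x and y (hypothetical replacement names): if both sides are
   fully instantiated the statement is replaced by
     a$x = b$x;  a$y = b$y;
   If either side requires block copy, the RHS replacements are first
   stored back into b, the block copy is kept, and the LHS replacements
   are then reloaded from a.  */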
3350
3351 static void
3352 scalarize_copy (struct sra_elt *lhs_elt, struct sra_elt *rhs_elt,
3353 block_stmt_iterator *bsi)
3354 {
3355 tree list, stmt;
3356
3357 if (lhs_elt->replacement && rhs_elt->replacement)
3358 {
3359 /* If we have two scalar operands, modify the existing statement. */
3360 stmt = bsi_stmt (*bsi);
3361
3362 /* See the commentary in sra_walk_function concerning
3363 RETURN_EXPR, and why we should never see one here. */
3364 gcc_assert (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT);
3365
3366 GIMPLE_STMT_OPERAND (stmt, 0) = lhs_elt->replacement;
3367 GIMPLE_STMT_OPERAND (stmt, 1) = REPLDUP (rhs_elt->replacement);
3368 update_stmt (stmt);
3369 }
3370 else if (lhs_elt->use_block_copy || rhs_elt->use_block_copy)
3371 {
3372 /* If either side requires a block copy, then sync the RHS back
3373 to the original structure, leave the original assignment
3374 statement (which will perform the block copy), then load the
3375 LHS values out of its now-updated original structure. */
3376 /* ??? Could perform a modified pair-wise element copy. That
3377 would at least allow those elements that are instantiated in
3378 both structures to be optimized well. */
3379
3380 list = NULL;
3381 generate_copy_inout (rhs_elt, false,
3382 generate_element_ref (rhs_elt), &list);
3383 if (list)
3384 {
3385 mark_all_v_defs (list);
3386 sra_insert_before (bsi, list);
3387 }
3388
3389 list = NULL;
3390 generate_copy_inout (lhs_elt, true,
3391 generate_element_ref (lhs_elt), &list);
3392 if (list)
3393 {
3394 mark_all_v_defs (list);
3395 sra_insert_after (bsi, list);
3396 }
3397 }
3398 else
3399 {
3400 /* Otherwise both sides must be fully instantiated. In which
3401 case perform pair-wise element assignments and replace the
3402 original block copy statement. */
3403
3404 stmt = bsi_stmt (*bsi);
3405 mark_all_v_defs (stmt);
3406
3407 list = NULL;
3408 generate_element_copy (lhs_elt, rhs_elt, &list);
3409 gcc_assert (list);
3410 mark_all_v_defs (list);
3411 sra_replace (bsi, list);
3412 }
3413 }
3414
3415 /* Scalarize an INIT. To recap, this is an assignment to a scalarizable
3416 reference from some form of constructor: CONSTRUCTOR, COMPLEX_CST or
3417 COMPLEX_EXPR. If RHS is NULL, it should be treated as an empty
3418 CONSTRUCTOR. */
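/* For example (hypothetical replacement names), for
     s = (struct s) { .a = 1 };
   with members a and b fully instantiated, the statement is replaced by
     s$b = 0;  s$a = 1;
   since a CONSTRUCTOR zero-fills every member it does not mention and
   the zeroing statements are emitted ahead of the constant ones.  */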
3419
3420 static void
3421 scalarize_init (struct sra_elt *lhs_elt, tree rhs, block_stmt_iterator *bsi)
3422 {
3423 bool result = true;
3424 tree list = NULL, init_list = NULL;
3425
3426 /* Generate initialization statements for all members extant in the RHS. */
3427 if (rhs)
3428 {
3429 /* Unshare the expression just in case this is from a decl's initial. */
3430 rhs = unshare_expr (rhs);
3431 result = generate_element_init (lhs_elt, rhs, &init_list);
3432 }
3433
3434 /* CONSTRUCTOR is defined such that any member not mentioned is assigned
3435 a zero value. Initialize the rest of the instantiated elements. */
3436 generate_element_zero (lhs_elt, &list);
3437 append_to_statement_list (init_list, &list);
3438
3439 if (!result)
3440 {
3441 /* If we failed to convert the entire initializer, then we must
3442 leave the structure assignment in place and must load values
3443 from the structure into the slots for which we did not find
3444 constants. The easiest way to do this is to generate a complete
3445 copy-out, and then follow that with the constant assignments
3446 that we were able to build. DCE will clean things up. */
3447 tree list0 = NULL;
3448 generate_copy_inout (lhs_elt, true, generate_element_ref (lhs_elt),
3449 &list0);
3450 append_to_statement_list (list, &list0);
3451 list = list0;
3452 }
3453
3454 if (lhs_elt->use_block_copy || !result)
3455 {
3456 /* Since LHS is not fully instantiated, we must leave the structure
3457 assignment in place. Treating this case differently from a USE
3458 exposes constants to later optimizations. */
3459 if (list)
3460 {
3461 mark_all_v_defs (list);
3462 sra_insert_after (bsi, list);
3463 }
3464 }
3465 else
3466 {
3467 /* The LHS is fully instantiated. The list of initializations
3468 replaces the original structure assignment. */
3469 gcc_assert (list);
3470 mark_all_v_defs (bsi_stmt (*bsi));
3471 mark_all_v_defs (list);
3472 sra_replace (bsi, list);
3473 }
3474 }
3475
3476 /* A subroutine of scalarize_ldst called via walk_tree. Set TREE_THIS_NOTRAP
3477 on all INDIRECT_REFs. */
3478
3479 static tree
3480 mark_notrap (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
3481 {
3482 tree t = *tp;
3483
3484 if (TREE_CODE (t) == INDIRECT_REF)
3485 {
3486 TREE_THIS_NOTRAP (t) = 1;
3487 *walk_subtrees = 0;
3488 }
3489 else if (IS_TYPE_OR_DECL_P (t))
3490 *walk_subtrees = 0;
3491
3492 return NULL;
3493 }
3494
3495 /* Scalarize a LDST. To recap, this is an assignment between one scalarizable
3496 reference ELT and one non-scalarizable reference OTHER. IS_OUTPUT is true
3497 if ELT is on the left-hand side. */
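/* For example (hypothetical replacement names), for *q = p where P is
   fully instantiated with members a and b and *q cannot be scalarized,
   the block copy is replaced by
     q->a = p$a;  q->b = p$b;
   and for p = *q by
     p$a = q->a;  p$b = q->b;
   avoiding the aggregate block copy.  */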
3498
3499 static void
3500 scalarize_ldst (struct sra_elt *elt, tree other,
3501 block_stmt_iterator *bsi, bool is_output)
3502 {
3503 /* Shouldn't have gotten called for a scalar. */
3504 gcc_assert (!elt->replacement);
3505
3506 if (elt->use_block_copy)
3507 {
3508 /* Since ELT is not fully instantiated, we have to leave the
3509 block copy in place. Treat this as a USE. */
3510 scalarize_use (elt, NULL, bsi, is_output, false);
3511 }
3512 else
3513 {
3514 /* The interesting case is when ELT is fully instantiated. In this
3515 case we can have each element stored/loaded directly to/from the
3516 corresponding slot in OTHER. This avoids a block copy. */
3517
3518 tree list = NULL, stmt = bsi_stmt (*bsi);
3519
3520 mark_all_v_defs (stmt);
3521 generate_copy_inout (elt, is_output, other, &list);
3522 gcc_assert (list);
3523 mark_all_v_defs (list);
3524
3525 /* Preserve EH semantics. */
3526 if (stmt_ends_bb_p (stmt))
3527 {
3528 tree_stmt_iterator tsi;
3529 tree first, blist = NULL;
3530 bool thr = tree_could_throw_p (stmt);
3531
3532 /* If the last statement of this BB created an EH edge
3533 before scalarization, we have to locate the first
3534 statement that can throw in the new statement list and
3535 use that as the last statement of this BB, such that EH
3536 semantics is preserved. All statements up to this one
3537 are added to the same BB. All other statements in the
3538 list will be added to normal outgoing edges of the same
3539 BB. If they access any memory, it's the same memory, so
3540 we can assume they won't throw. */
3541 tsi = tsi_start (list);
3542 for (first = tsi_stmt (tsi);
3543 thr && !tsi_end_p (tsi) && !tree_could_throw_p (first);
3544 first = tsi_stmt (tsi))
3545 {
3546 tsi_delink (&tsi);
3547 append_to_statement_list (first, &blist);
3548 }
3549
3550 /* Extract the first remaining statement from LIST, this is
3551 the EH statement if there is one. */
3552 tsi_delink (&tsi);
3553
3554 if (blist)
3555 sra_insert_before (bsi, blist);
3556
3557 /* Replace the old statement with this new representative. */
3558 bsi_replace (bsi, first, true);
3559
3560 if (!tsi_end_p (tsi))
3561 {
3562 /* If any reference would trap, then they all would. And more
3563 to the point, the first would. Therefore none of the rest
3564 will trap since the first didn't. Indicate this by
3565 iterating over the remaining statements and setting
3566 TREE_THIS_NOTRAP in all INDIRECT_REFs. */
3567 do
3568 {
3569 walk_tree (tsi_stmt_ptr (tsi), mark_notrap, NULL, NULL);
3570 tsi_next (&tsi);
3571 }
3572 while (!tsi_end_p (tsi));
3573
3574 insert_edge_copies (list, bsi->bb);
3575 }
3576 }
3577 else
3578 sra_replace (bsi, list);
3579 }
3580 }
3581
3582 /* Generate initializations for all scalarizable parameters. */
3583
3584 static void
3585 scalarize_parms (void)
3586 {
3587 tree list = NULL;
3588 unsigned i;
3589 bitmap_iterator bi;
3590
3591 EXECUTE_IF_SET_IN_BITMAP (needs_copy_in, 0, i, bi)
3592 {
3593 tree var = referenced_var (i);
3594 struct sra_elt *elt = lookup_element (NULL, var, NULL, NO_INSERT);
3595 generate_copy_inout (elt, true, var, &list);
3596 }
3597
3598 if (list)
3599 {
3600 insert_edge_copies (list, ENTRY_BLOCK_PTR);
3601 mark_all_v_defs (list);
3602 }
3603 }
3604
3605 /* Entry point to phase 4. Update the function to match replacements. */
3606
3607 static void
3608 scalarize_function (void)
3609 {
3610 static const struct sra_walk_fns fns = {
3611 scalarize_use, scalarize_copy, scalarize_init, scalarize_ldst, false
3612 };
3613
3614 sra_walk_function (&fns);
3615 scalarize_parms ();
3616 bsi_commit_edge_inserts ();
3617 }
3618
3619 \f
3620 /* Debug helper function. Print ELT in a nice human-readable format. */
3621
3622 static void
3623 dump_sra_elt_name (FILE *f, struct sra_elt *elt)
3624 {
3625 if (elt->parent && TREE_CODE (elt->parent->type) == COMPLEX_TYPE)
3626 {
3627 fputs (elt->element == integer_zero_node ? "__real__ " : "__imag__ ", f);
3628 dump_sra_elt_name (f, elt->parent);
3629 }
3630 else
3631 {
3632 if (elt->parent)
3633 dump_sra_elt_name (f, elt->parent);
3634 if (DECL_P (elt->element))
3635 {
3636 if (TREE_CODE (elt->element) == FIELD_DECL)
3637 fputc ('.', f);
3638 print_generic_expr (f, elt->element, dump_flags);
3639 }
3640 else if (TREE_CODE (elt->element) == BIT_FIELD_REF)
3641 fprintf (f, "$B" HOST_WIDE_INT_PRINT_DEC "F" HOST_WIDE_INT_PRINT_DEC,
3642 tree_low_cst (TREE_OPERAND (elt->element, 2), 1),
3643 tree_low_cst (TREE_OPERAND (elt->element, 1), 1));
3644 else if (TREE_CODE (elt->element) == RANGE_EXPR)
3645 fprintf (f, "["HOST_WIDE_INT_PRINT_DEC".."HOST_WIDE_INT_PRINT_DEC"]",
3646 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 0)),
3647 TREE_INT_CST_LOW (TREE_OPERAND (elt->element, 1)));
3648 else
3649 fprintf (f, "[" HOST_WIDE_INT_PRINT_DEC "]",
3650 TREE_INT_CST_LOW (elt->element));
3651 }
3652 }
3653
3654 /* Likewise, but callable from the debugger. */
3655
3656 void
3657 debug_sra_elt_name (struct sra_elt *elt)
3658 {
3659 dump_sra_elt_name (stderr, elt);
3660 fputc ('\n', stderr);
3661 }
3662
3663 void
3664 sra_init_cache (void)
3665 {
3666 if (sra_type_decomp_cache)
3667 return;
3668
3669 sra_type_decomp_cache = BITMAP_ALLOC (NULL);
3670 sra_type_inst_cache = BITMAP_ALLOC (NULL);
3671 }
3672
3673 /* Main entry point. */
3674
3675 static unsigned int
3676 tree_sra (void)
3677 {
3678 /* Initialize local variables. */
3679 todoflags = 0;
3680 gcc_obstack_init (&sra_obstack);
3681 sra_candidates = BITMAP_ALLOC (NULL);
3682 needs_copy_in = BITMAP_ALLOC (NULL);
3683 sra_init_cache ();
3684 sra_map = htab_create (101, sra_elt_hash, sra_elt_eq, NULL);
3685
3686 /* Scan. If we find anything, instantiate and scalarize. */
3687 if (find_candidates_for_sra ())
3688 {
3689 scan_function ();
3690 decide_instantiations ();
3691 scalarize_function ();
3692 if (!bitmap_empty_p (sra_candidates))
3693 todoflags |= TODO_rebuild_alias;
3694 }
3695
3696 /* Free allocated memory. */
3697 htab_delete (sra_map);
3698 sra_map = NULL;
3699 BITMAP_FREE (sra_candidates);
3700 BITMAP_FREE (needs_copy_in);
3701 BITMAP_FREE (sra_type_decomp_cache);
3702 BITMAP_FREE (sra_type_inst_cache);
3703 obstack_free (&sra_obstack, NULL);
3704 return todoflags;
3705 }
3706
3707 static unsigned int
3708 tree_sra_early (void)
3709 {
3710 unsigned int ret;
3711
3712 early_sra = true;
3713 ret = tree_sra ();
3714 early_sra = false;
3715
3716 return ret & ~TODO_rebuild_alias;
3717 }
3718
3719 static bool
3720 gate_sra (void)
3721 {
3722 return flag_tree_sra != 0;
3723 }
3724
3725 struct gimple_opt_pass pass_sra_early =
3726 {
3727 {
3728 GIMPLE_PASS,
3729 "esra", /* name */
3730 gate_sra, /* gate */
3731 tree_sra_early, /* execute */
3732 NULL, /* sub */
3733 NULL, /* next */
3734 0, /* static_pass_number */
3735 TV_TREE_SRA, /* tv_id */
3736 PROP_cfg | PROP_ssa, /* properties_required */
3737 0, /* properties_provided */
3738 0, /* properties_destroyed */
3739 0, /* todo_flags_start */
3740 TODO_dump_func
3741 | TODO_update_ssa
3742 | TODO_ggc_collect
3743 | TODO_verify_ssa /* todo_flags_finish */
3744 }
3745 };
3746
3747 struct gimple_opt_pass pass_sra =
3748 {
3749 {
3750 GIMPLE_PASS,
3751 "sra", /* name */
3752 gate_sra, /* gate */
3753 tree_sra, /* execute */
3754 NULL, /* sub */
3755 NULL, /* next */
3756 0, /* static_pass_number */
3757 TV_TREE_SRA, /* tv_id */
3758 PROP_cfg | PROP_ssa, /* properties_required */
3759 0, /* properties_provided */
3760 0, /* properties_destroyed */
3761 0, /* todo_flags_start */
3762 TODO_dump_func
3763 | TODO_update_ssa
3764 | TODO_ggc_collect
3765 | TODO_verify_ssa /* todo_flags_finish */
3766 }
3767 };