/* [Scraped git web-viewer header removed:
   gcc.gnu.org Git - gcc.git / blob / gcc/tree-ssa-alias.c
   "c-common.h (enum rid): Add RID_CXX_COMPAT_WARN."]  */
1 /* Alias analysis for trees.
2 Copyright (C) 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
3 Contributed by Diego Novillo <dnovillo@redhat.com>
4
5 This file is part of GCC.
6
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "timevar.h"
31 #include "expr.h"
32 #include "ggc.h"
33 #include "langhooks.h"
34 #include "flags.h"
35 #include "function.h"
36 #include "diagnostic.h"
37 #include "tree-dump.h"
38 #include "tree-gimple.h"
39 #include "tree-flow.h"
40 #include "tree-inline.h"
41 #include "tree-pass.h"
42 #include "tree-ssa-structalias.h"
43 #include "convert.h"
44 #include "params.h"
45 #include "ipa-type-escape.h"
46 #include "vec.h"
47 #include "bitmap.h"
48 #include "vecprim.h"
49 #include "pointer-set.h"
50 #include "alloc-pool.h"
51
52 /* Broad overview of how aliasing works:
53
54 First we compute points-to sets, which is done in
55 tree-ssa-structalias.c
56
57 During points-to set constraint finding, a bunch of little bits of
58 information is collected.
59 This is not done because it is necessary for points-to, but because
60 points-to has to walk every statement anyway. The function performing
61 this collecting is update_alias_info.
62
63 Bits update_alias_info collects include:
64 1. Directly escaping variables and variables whose value escapes
65 (using is_escape_site). This is the set of variables and values that
66 escape prior to transitive closure of the clobbers.
67 2. The set of variables dereferenced on the LHS (into
68 dereferenced_ptr_stores)
69 3. The set of variables dereferenced on the RHS (into
70 dereferenced_ptr_loads)
71 4. The set of all pointers we saw.
72 5. The number of loads and stores for each variable
73 6. The number of statements touching memory
74 7. The set of address taken variables.
75
76
77 #1 is computed by a combination of is_escape_site, and counting the
78 number of uses/deref operators. This function properly accounts for
79 situations like &ptr->field, which is *not* a dereference.
80
81 After points-to sets are computed, the sets themselves still
82 contain points-to specific variables, such as a variable that says
83 the pointer points to anything, a variable that says the pointer
84 points to readonly memory, etc.
85
86 These are eliminated in a later phase, as we will see.
87
88 The rest of the phases are located in tree-ssa-alias.c
89
90 The next phase after points-to set computation is called
91 "setup_pointers_and_addressables"
92
93 This pass does 3 main things:
94
95 1. All variables that can have TREE_ADDRESSABLE removed safely (IE
96 non-globals whose address is not taken), have TREE_ADDRESSABLE
97 removed.
98 2. All variables that may be aliased (which is the set of addressable
99 variables and globals) at all, are marked for renaming, and have
100 symbol memory tags created for them.
101 3. All variables which are stored into have their SMT's added to
102 written vars.
103
104
105 After this function is run, all variables that will ever have an
106 SMT, have one, though its aliases are not filled in.
107
108 The next phase is to compute flow-insensitive aliasing, which in
109    our case, is a misnomer. It is really computing aliasing that
110 requires no transitive closure to be correct. In particular, it
111 uses stack vs non-stack, TBAA, etc, to determine whether two
112    symbols could *ever* alias. This phase works by going through all
113 the pointers we collected during update_alias_info, and for every
114 addressable variable in the program, seeing if they alias. If so,
115 the addressable variable is added to the symbol memory tag for the
116 pointer.
117
118 As part of this, we handle symbol memory tags that conflict but
119 have no aliases in common, by forcing them to have a symbol in
120 common (through unioning alias sets or adding one as an alias of
121    the other). The case of
122 conflicts with no aliases in common occurs mainly due to aliasing
123 we cannot see. In particular, it generally means we have a load
124 through a pointer whose value came from outside the function.
125 Without an addressable symbol to point to, they would get the wrong
126 answer.
127
128 After flow insensitive aliasing is computed, we compute name tags
129 (called compute_flow_sensitive_info). We walk each pointer we
130 collected and see if it has a usable points-to set. If so, we
131 generate a name tag using that pointer, and make an alias bitmap for
132 it. Name tags are shared between all things with the same alias
133 bitmap. The alias bitmap will be translated from what points-to
134 computed. In particular, the "anything" variable in points-to will be
135 transformed into a pruned set of SMT's and their aliases that
136 compute_flow_insensitive_aliasing computed.
137 Note that since 4.3, every pointer that points-to computed a solution for
138 will get a name tag (whereas before 4.3, only those whose set did
139 *not* include the anything variable would). At the point where name
140 tags are all assigned, symbol memory tags are dead, and could be
141 deleted, *except* on global variables. Global variables still use
142 symbol memory tags as of right now.
143
144 After name tags are computed, the set of clobbered variables is
145 transitively closed. In particular, we compute the set of clobbered
146 variables based on the initial set of clobbers, plus the aliases of
147 pointers which either escape, or have their value escape.
148
149 After this, maybe_create_global_var is run, which handles a corner
150 case where we have no call clobbered variables, but have pure and
151 non-pure functions.
152
153 Staring at this function, I now remember it is a hack for the fact
154 that we do not mark all globals in the program as call clobbered for a
155 function unless they are actually used in that function. Instead, we
156 only mark the set that is actually clobbered. As a result, you can
157 end up with situations where you have no call clobbered vars set.
158
159 After maybe_create_global_var, we set pointers with the REF_ALL flag
160 to have alias sets that include all clobbered
161 memory tags and variables.
162
163 After this, memory partitioning is computed (by the function
164 compute_memory_partitions) and alias sets are reworked accordingly.
165
166 Lastly, we delete partitions with no symbols, and clean up after
167 ourselves. */
168
169
170 /* Alias information used by compute_may_aliases and its helpers. */
171 struct alias_info
172 {
173 /* SSA names visited while collecting points-to information. If bit I
174 is set, it means that SSA variable with version I has already been
175 visited. */
176 sbitmap ssa_names_visited;
177
178 /* Array of SSA_NAME pointers processed by the points-to collector. */
179 VEC(tree,heap) *processed_ptrs;
180
181 /* ADDRESSABLE_VARS contains all the global variables and locals that
182 have had their address taken. */
183 struct alias_map_d **addressable_vars;
184 size_t num_addressable_vars;
185
186 /* POINTERS contains all the _DECL pointers with unique memory tags
187 that have been referenced in the program. */
188 struct alias_map_d **pointers;
189 size_t num_pointers;
190
191 /* Variables that have been written to directly (i.e., not through a
192 pointer dereference). */
193 struct pointer_set_t *written_vars;
194
195 /* Pointers that have been used in an indirect store operation. */
196 struct pointer_set_t *dereferenced_ptrs_store;
197
198 /* Pointers that have been used in an indirect load operation. */
199 struct pointer_set_t *dereferenced_ptrs_load;
200 };
201
202
/* Structure to map a variable to its alias set.  */
struct alias_map_d
{
  /* Variable being mapped.  */
  tree var;

  /* Alias set for VAR.  */
  alias_set_type set;
};
210
211
/* Counters used to display statistics on alias analysis.  */
struct alias_stats_d
{
  /* Total number of alias queries and their two possible outcomes.  */
  unsigned int alias_queries;
  unsigned int alias_mayalias;
  unsigned int alias_noalias;

  /* Queries attempted/resolved by the quick "simple" checks.  */
  unsigned int simple_queries;
  unsigned int simple_resolved;

  /* Queries attempted/resolved by type-based (TBAA) checks.  */
  unsigned int tbaa_queries;
  unsigned int tbaa_resolved;

  /* Queries attempted/resolved by the struct-no-address check.  */
  unsigned int structnoaddress_queries;
  unsigned int structnoaddress_resolved;
};
225
226
227 /* Local variables. */
228 static struct alias_stats_d alias_stats;
229 static bitmap_obstack alias_bitmap_obstack;
230
231 /* Local functions. */
232 static void compute_flow_insensitive_aliasing (struct alias_info *);
233 static void dump_alias_stats (FILE *);
234 static tree create_memory_tag (tree type, bool is_type_tag);
235 static tree get_smt_for (tree, struct alias_info *);
236 static tree get_nmt_for (tree);
237 static void add_may_alias (tree, tree);
238 static struct alias_info *init_alias_info (void);
239 static void delete_alias_info (struct alias_info *);
240 static void compute_flow_sensitive_aliasing (struct alias_info *);
241 static void setup_pointers_and_addressables (struct alias_info *);
242 static void update_alias_info (struct alias_info *);
243 static void create_global_var (void);
244 static void maybe_create_global_var (void);
245 static void set_pt_anything (tree);
246
247 void debug_mp_info (VEC(mem_sym_stats_t,heap) *);
248
249 static alloc_pool mem_sym_stats_pool;
250
251 /* Return memory reference stats for symbol VAR. Create a new slot in
252 cfun->gimple_df->mem_sym_stats if needed. */
253
254 static struct mem_sym_stats_d *
255 get_mem_sym_stats_for (tree var)
256 {
257 void **slot;
258 struct mem_sym_stats_d *stats;
259 struct pointer_map_t *map = gimple_mem_ref_stats (cfun)->mem_sym_stats;
260
261 gcc_assert (map);
262
263 slot = pointer_map_insert (map, var);
264 if (*slot == NULL)
265 {
266 stats = (struct mem_sym_stats_d *) pool_alloc (mem_sym_stats_pool);
267 memset (stats, 0, sizeof (*stats));
268 stats->var = var;
269 *slot = (void *) stats;
270 }
271 else
272 stats = (struct mem_sym_stats_d *) *slot;
273
274 return stats;
275 }
276
277
278 /* Return memory reference statistics for variable VAR in function FN.
279 This is computed by alias analysis, but it is not kept
280 incrementally up-to-date. So, these stats are only accurate if
281 pass_may_alias has been run recently. If no alias information
282 exists, this function returns NULL. */
283
284 static mem_sym_stats_t
285 mem_sym_stats (struct function *fn, tree var)
286 {
287 void **slot;
288 struct pointer_map_t *stats_map = gimple_mem_ref_stats (fn)->mem_sym_stats;
289
290 if (stats_map == NULL)
291 return NULL;
292
293 slot = pointer_map_contains (stats_map, var);
294 if (slot == NULL)
295 return NULL;
296
297 return (mem_sym_stats_t) *slot;
298 }
299
300
/* Set MPT to be the memory partition associated with symbol SYM.
   Passing NULL_TREE for MPT clears SYM's partition annotation.  */

static inline void
set_memory_partition (tree sym, tree mpt)
{
#if defined ENABLE_CHECKING
  /* Only memory partition tags may be assigned, and only to symbols
     that are not GIMPLE registers.  */
  if (mpt)
    gcc_assert (TREE_CODE (mpt) == MEMORY_PARTITION_TAG
	        && !is_gimple_reg (sym));
#endif

  var_ann (sym)->mpt = mpt;
  if (mpt)
    {
      /* Lazily allocate the partition's symbol set.  */
      if (MPT_SYMBOLS (mpt) == NULL)
	MPT_SYMBOLS (mpt) = BITMAP_ALLOC (&alias_bitmap_obstack);

      bitmap_set_bit (MPT_SYMBOLS (mpt), DECL_UID (sym));

      /* MPT inherits the call-clobbering attributes from SYM.  */
      if (is_call_clobbered (sym))
	{
	  MTAG_GLOBAL (mpt) = 1;
	  mark_call_clobbered (mpt, ESCAPE_IS_GLOBAL);
	}
    }
}
328
329
/* Mark variable VAR as being non-addressable.  Clears its
   TREE_ADDRESSABLE and call-clobbered state, and detaches it from its
   memory partition, if any.  */

static void
mark_non_addressable (tree var)
{
  tree mpt;

  /* Nothing to do if VAR's address was never taken.  */
  if (!TREE_ADDRESSABLE (var))
    return;

  mpt = memory_partition (var);

  clear_call_clobbered (var);
  TREE_ADDRESSABLE (var) = 0;

  if (mpt)
    {
      /* Note that it's possible for a symbol to have an associated
	 MPT and the MPT have a NULL empty set.  During
	 init_alias_info, all MPTs get their sets cleared out, but the
	 symbols still point to the old MPTs that used to hold them.
	 This is done so that compute_memory_partitions can know which
	 symbols are losing or changing partitions and mark them for
	 renaming.  */
      if (MPT_SYMBOLS (mpt))
	bitmap_clear_bit (MPT_SYMBOLS (mpt), DECL_UID (var));
      set_memory_partition (var, NULL_TREE);
    }
}
359
360
361 /* qsort comparison function to sort type/name tags by DECL_UID. */
362
363 static int
364 sort_tags_by_id (const void *pa, const void *pb)
365 {
366 const_tree const a = *(const_tree const *)pa;
367 const_tree const b = *(const_tree const *)pb;
368
369 return DECL_UID (a) - DECL_UID (b);
370 }
371
372 /* Initialize WORKLIST to contain those memory tags that are marked call
373 clobbered. Initialized WORKLIST2 to contain the reasons these
374 memory tags escaped. */
375
376 static void
377 init_transitive_clobber_worklist (VEC (tree, heap) **worklist,
378 VEC (int, heap) **worklist2,
379 bitmap on_worklist)
380 {
381 referenced_var_iterator rvi;
382 tree curr;
383
384 FOR_EACH_REFERENCED_VAR (curr, rvi)
385 {
386 if (MTAG_P (curr) && is_call_clobbered (curr))
387 {
388 VEC_safe_push (tree, heap, *worklist, curr);
389 VEC_safe_push (int, heap, *worklist2,
390 var_ann (curr)->escape_mask);
391 bitmap_set_bit (on_worklist, DECL_UID (curr));
392 }
393 }
394 }
395
396 /* Add ALIAS to WORKLIST (and the reason for escaping REASON to WORKLIST2) if
397 ALIAS is not already marked call clobbered, and is a memory
398 tag. */
399
400 static void
401 add_to_worklist (tree alias, VEC (tree, heap) **worklist,
402 VEC (int, heap) **worklist2, int reason,
403 bitmap on_worklist)
404 {
405 if (MTAG_P (alias) && !is_call_clobbered (alias)
406 && !bitmap_bit_p (on_worklist, DECL_UID (alias)))
407 {
408 VEC_safe_push (tree, heap, *worklist, alias);
409 VEC_safe_push (int, heap, *worklist2, reason);
410 bitmap_set_bit (on_worklist, DECL_UID (alias));
411 }
412 }
413
/* Mark aliases of TAG as call clobbered, and place any tags on the
   alias list that were not already call clobbered on WORKLIST.
   ON_WORKLIST tracks queued tags by DECL_UID.  */

static void
mark_aliases_call_clobbered (tree tag, VEC (tree, heap) **worklist,
			     VEC (int, heap) **worklist2, bitmap on_worklist)
{
  bitmap aliases;
  bitmap_iterator bi;
  unsigned int i;
  tree entry;
  var_ann_t ta = var_ann (tag);

  /* Only memory tags carry may-alias sets.  */
  if (!MTAG_P (tag))
    return;
  aliases = may_aliases (tag);
  if (!aliases)
    return;

  EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
    {
      entry = referenced_var (i);
      /* If you clobber one part of a structure, you
	 clobber the entire thing.  While this does not make
	 the world a particularly nice place, it is necessary
	 in order to allow C/C++ tricks that involve
	 pointer arithmetic to work.  */
      if (!unmodifiable_var_p (entry))
	{
	  /* Propagate TAG's escape reason to the alias, and queue the
	     alias so its own aliases get clobbered transitively.  */
	  add_to_worklist (entry, worklist, worklist2, ta->escape_mask,
			   on_worklist);
	  mark_call_clobbered (entry, ta->escape_mask);
	}
    }
}
449
/* Tags containing global vars need to be marked as global.
   Tags containing call clobbered vars need to be marked as call
   clobbered.  Iterates to a fixed point, since tags may appear in
   each other's may-alias sets.  */

static void
compute_tag_properties (void)
{
  referenced_var_iterator rvi;
  tree tag;
  bool changed = true;
  VEC (tree, heap) *taglist = NULL;

  /* Collect every memory tag in the function.  */
  FOR_EACH_REFERENCED_VAR (tag, rvi)
    {
      if (!MTAG_P (tag))
	continue;
      VEC_safe_push (tree, heap, taglist, tag);
    }

  /* We sort the taglist by DECL_UID, for two reasons.
     1. To get a sequential ordering to make the bitmap accesses
     faster.
     2. Because of the way we compute aliases, it's more likely that
     an earlier tag is included in a later tag, and this will reduce
     the number of iterations.

     If we had a real tag graph, we would just topo-order it and be
     done with it.  */
  qsort (VEC_address (tree, taglist),
	 VEC_length (tree, taglist),
	 sizeof (tree),
	 sort_tags_by_id);

  /* Go through each tag not marked as global, and if it aliases
     global vars, mark it global.

     If the tag contains call clobbered vars, mark it call
     clobbered.

     This loop iterates because tags may appear in the may-aliases
     list of other tags when we group.  */

  while (changed)
    {
      unsigned int k;

      changed = false;
      for (k = 0; VEC_iterate (tree, taglist, k, tag); k++)
	{
	  bitmap ma;
	  bitmap_iterator bi;
	  unsigned int i;
	  tree entry;
	  bool tagcc = is_call_clobbered (tag);
	  bool tagglobal = MTAG_GLOBAL (tag);

	  /* Both properties already set; nothing more to learn.  */
	  if (tagcc && tagglobal)
	    continue;

	  ma = may_aliases (tag);
	  if (!ma)
	    continue;

	  EXECUTE_IF_SET_IN_BITMAP (ma, 0, i, bi)
	    {
	      entry = referenced_var (i);
	      /* Call clobbered entries cause the tag to be marked
		 call clobbered.  */
	      if (!tagcc && is_call_clobbered (entry))
		{
		  mark_call_clobbered (tag, var_ann (entry)->escape_mask);
		  tagcc = true;
		  changed = true;
		}

	      /* Global vars cause the tag to be marked global.  */
	      if (!tagglobal && is_global_var (entry))
		{
		  MTAG_GLOBAL (tag) = true;
		  changed = true;
		  tagglobal = true;
		}

	      /* Early exit once both global and cc are set, since the
		 loop can't do any more than that.  */
	      if (tagcc && tagglobal)
		break;
	    }
	}
    }
  VEC_free (tree, heap, taglist);
}
542
/* Set up the initial variable clobbers, call-uses and globalness.
   When this function completes, only tags whose aliases need to be
   clobbered will be set clobbered.  Tags clobbered because they
   contain call clobbered vars are handled in compute_tag_properties.  */

static void
set_initial_properties (struct alias_info *ai)
{
  unsigned int i;
  referenced_var_iterator rvi;
  tree var;
  tree ptr;
  bool any_pt_anything = false;
  enum escape_type pt_anything_mask = 0;

  /* Globals are clobbered by calls; the default definition of each
     pointer PARM_DECL is marked as a value that escapes via the
     caller (ESCAPE_IS_PARM).  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      if (is_global_var (var))
	{
	  if (!unmodifiable_var_p (var))
	    mark_call_clobbered (var, ESCAPE_IS_GLOBAL);
	}
      else if (TREE_CODE (var) == PARM_DECL
	       && gimple_default_def (cfun, var)
	       && POINTER_TYPE_P (TREE_TYPE (var)))
	{
	  tree def = gimple_default_def (cfun, var);
	  get_ptr_info (def)->value_escapes_p = 1;
	  get_ptr_info (def)->escape_mask |= ESCAPE_IS_PARM;
	}
    }

  /* If clobber_what_escaped fails, fall back to clobbering every
     addressable variable (handled at the bottom of this function).  */
  if (!clobber_what_escaped ())
    {
      any_pt_anything = true;
      pt_anything_mask |= ESCAPE_TO_CALL;
    }

  compute_call_used_vars ();

  for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
    {
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
      tree tag = symbol_mem_tag (SSA_NAME_VAR (ptr));

      /* A pointer that only escapes via a function return does not
	 add to the call clobber or call used solution.
	 To exclude ESCAPE_TO_PURE_CONST we would need to track
	 call used variables separately or compute those properly
	 in the operand scanner.  */
      if (pi->value_escapes_p
	  && pi->escape_mask & ~ESCAPE_TO_RETURN)
	{
	  /* If PTR escapes then its associated memory tags and
	     pointed-to variables are call-clobbered.  */
	  if (pi->name_mem_tag)
	    mark_call_clobbered (pi->name_mem_tag, pi->escape_mask);

	  if (tag)
	    mark_call_clobbered (tag, pi->escape_mask);
	}

      /* If the name tag is call clobbered, so is the symbol tag
	 associated with the base VAR_DECL.  */
      if (pi->name_mem_tag
	  && tag
	  && is_call_clobbered (pi->name_mem_tag))
	mark_call_clobbered (tag, pi->escape_mask);

      /* Name tags and symbol tags that we don't know where they point
	 to, might point to global memory, and thus, are clobbered.

	 FIXME:  This is not quite right.  They should only be
	 clobbered if value_escapes_p is true, regardless of whether
	 they point to global memory or not.
	 So removing this code and fixing all the bugs would be nice.
	 It is the cause of a bunch of clobbering.  */
      if ((pi->pt_global_mem || pi->pt_anything)
	  && pi->memory_tag_needed && pi->name_mem_tag)
	{
	  mark_call_clobbered (pi->name_mem_tag, ESCAPE_IS_GLOBAL);
	  MTAG_GLOBAL (pi->name_mem_tag) = true;
	}

      if ((pi->pt_global_mem || pi->pt_anything)
	  && pi->memory_tag_needed
	  && tag)
	{
	  mark_call_clobbered (tag, ESCAPE_IS_GLOBAL);
	  MTAG_GLOBAL (tag) = true;
	}
    }

  /* If a pt_anything pointer escaped we need to mark all addressable
     variables call clobbered.  */
  if (any_pt_anything)
    {
      bitmap_iterator bi;
      unsigned int j;

      EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, j, bi)
	{
	  tree var = referenced_var (j);
	  if (!unmodifiable_var_p (var))
	    mark_call_clobbered (var, pt_anything_mask);
	}
    }
}
651
/* Compute which variables need to be marked call clobbered because
   their tag is call clobbered, and which tags need to be marked
   global because they contain global variables.  */

static void
compute_call_clobbered (struct alias_info *ai)
{
  VEC (tree, heap) *worklist = NULL;
  VEC (int,heap) *worklist2 = NULL;
  bitmap on_worklist;

  timevar_push (TV_CALL_CLOBBER);
  on_worklist = BITMAP_ALLOC (NULL);

  set_initial_properties (ai);
  init_transitive_clobber_worklist (&worklist, &worklist2, on_worklist);
  /* Transitive closure: clobbering a tag clobbers its aliases, which
     may themselves be tags whose aliases must be clobbered in turn.
     WORKLIST2 carries, in parallel, the escape reason for each tag
     popped from WORKLIST.  */
  while (VEC_length (tree, worklist) != 0)
    {
      tree curr = VEC_pop (tree, worklist);
      int reason = VEC_pop (int, worklist2);

      bitmap_clear_bit (on_worklist, DECL_UID (curr));
      mark_call_clobbered (curr, reason);
      mark_aliases_call_clobbered (curr, &worklist, &worklist2, on_worklist);
    }
  VEC_free (tree, heap, worklist);
  VEC_free (int, heap, worklist2);
  BITMAP_FREE (on_worklist);
  compute_tag_properties ();
  timevar_pop (TV_CALL_CLOBBER);
}
683
684
/* Dump memory partition information to FILE.  */

static void
dump_memory_partitions (FILE *file)
{
  unsigned i, npart;
  unsigned long nsyms;
  tree mpt;

  fprintf (file, "\nMemory partitions\n\n");
  for (i = 0, npart = 0, nsyms = 0;
       VEC_iterate (tree, gimple_ssa_operands (cfun)->mpt_table, i, mpt);
       i++)
    {
      /* The MPT table may contain NULL slots; skip them.  */
      if (mpt)
	{
	  bitmap syms = MPT_SYMBOLS (mpt);
	  unsigned long n = (syms) ? bitmap_count_bits (syms) : 0;

	  fprintf (file, "#%u: ", i);
	  print_generic_expr (file, mpt, 0);
	  fprintf (file, ": %lu elements: ", n);
	  dump_decl_set (file, syms);
	  npart++;
	  nsyms += n;
	}
    }

  fprintf (file, "\n%u memory partitions holding %lu symbols\n", npart, nsyms);
}
715
716
/* Dump memory partition information to stderr.  */

void
debug_memory_partitions (void)
{
  /* Convenience wrapper for use from the debugger.  */
  dump_memory_partitions (stderr);
}
724
725
726 /* Return true if memory partitioning is required given the memory
727 reference estimates in STATS. */
728
729 static inline bool
730 need_to_partition_p (struct mem_ref_stats_d *stats)
731 {
732 long num_vops = stats->num_vuses + stats->num_vdefs;
733 long avg_vops = CEIL (num_vops, stats->num_mem_stmts);
734 return (num_vops > (long) MAX_ALIASED_VOPS
735 && avg_vops > (long) AVG_ALIASED_VOPS);
736 }
737
738
/* Count the actual number of virtual operators in CFUN.  Note that
   this is only meaningful after virtual operands have been populated,
   so it should be invoked at the end of compute_may_aliases.

   The number of virtual operators are stored in *NUM_VDEFS_P and
   *NUM_VUSES_P, the number of partitioned symbols in
   *NUM_PARTITIONED_P and the number of unpartitioned symbols in
   *NUM_UNPARTITIONED_P.

   If any of these pointers is NULL the corresponding count is not
   computed.  */

static void
count_mem_refs (long *num_vuses_p, long *num_vdefs_p,
		long *num_partitioned_p, long *num_unpartitioned_p)
{
  block_stmt_iterator bsi;
  basic_block bb;
  long num_vdefs, num_vuses, num_partitioned, num_unpartitioned;
  referenced_var_iterator rvi;
  tree sym;

  num_vuses = num_vdefs = num_partitioned = num_unpartitioned = 0;

  /* Walk every statement only if a VUSE/VDEF count was requested.  */
  if (num_vuses_p || num_vdefs_p)
    FOR_EACH_BB (bb)
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  tree stmt = bsi_stmt (bsi);
	  if (stmt_references_memory_p (stmt))
	    {
	      num_vuses += NUM_SSA_OPERANDS (stmt, SSA_OP_VUSE);
	      num_vdefs += NUM_SSA_OPERANDS (stmt, SSA_OP_VDEF);
	    }
	}

  /* Likewise, walk the symbol table only if partitioning counts were
     requested.  */
  if (num_partitioned_p || num_unpartitioned_p)
    FOR_EACH_REFERENCED_VAR (sym, rvi)
      {
	/* GIMPLE registers are never partitioned.  */
	if (is_gimple_reg (sym))
	  continue;

	if (memory_partition (sym))
	  num_partitioned++;
	else
	  num_unpartitioned++;
      }

  if (num_vdefs_p)
    *num_vdefs_p = num_vdefs;

  if (num_vuses_p)
    *num_vuses_p = num_vuses;

  if (num_partitioned_p)
    *num_partitioned_p = num_partitioned;

  if (num_unpartitioned_p)
    *num_unpartitioned_p = num_unpartitioned;
}
799
800
801 /* The list is sorted by increasing partitioning score (PSCORE).
802 This score is computed such that symbols with high scores are
803 those that are least likely to be partitioned. Given a symbol
804 MP->VAR, PSCORE(S) is the result of the following weighted sum
805
806 PSCORE(S) = FW * 64 + FR * 32
807 + DW * 16 + DR * 8
808 + IW * 4 + IR * 2
809 + NO_ALIAS
810
811 where
812
813 FW Execution frequency of writes to S
814 FR Execution frequency of reads from S
815 DW Number of direct writes to S
816 DR Number of direct reads from S
817 IW Number of indirect writes to S
818 IR Number of indirect reads from S
819 NO_ALIAS State of the NO_ALIAS* flags
820
821 The basic idea here is that symbols that are frequently
822 written-to in hot paths of the code are the last to be considered
823 for partitioning. */
824
825 static inline long
826 mem_sym_score (mem_sym_stats_t mp)
827 {
828 return mp->frequency_writes * 64 + mp->frequency_reads * 32
829 + mp->num_direct_writes * 16 + mp->num_direct_reads * 8
830 + mp->num_indirect_writes * 4 + mp->num_indirect_reads * 2
831 + var_ann (mp->var)->noalias_state;
832 }
833
834
/* Dump memory reference stats for function CFUN to FILE.  */

void
dump_mem_ref_stats (FILE *file)
{
  long actual_num_vuses, actual_num_vdefs;
  long num_partitioned, num_unpartitioned;
  struct mem_ref_stats_d *stats;

  stats = gimple_mem_ref_stats (cfun);

  /* Recount the virtual operands actually in the IL so they can be
     compared against the estimates stored in STATS.  */
  count_mem_refs (&actual_num_vuses, &actual_num_vdefs, &num_partitioned,
		  &num_unpartitioned);

  fprintf (file, "\nMemory reference statistics for %s\n\n",
	   lang_hooks.decl_printable_name (current_function_decl, 2));

  fprintf (file, "Number of memory statements: %ld\n",
	   stats->num_mem_stmts);
  fprintf (file, "Number of call sites: %ld\n",
	   stats->num_call_sites);
  fprintf (file, "Number of pure/const call sites: %ld\n",
	   stats->num_pure_const_call_sites);
  fprintf (file, "Number of asm sites: %ld\n",
	   stats->num_asm_sites);
  fprintf (file, "Estimated number of loads: %ld (%ld/stmt)\n",
	   stats->num_vuses,
	   (stats->num_mem_stmts)
	   ? CEIL (stats->num_vuses, stats->num_mem_stmts)
	   : 0);
  fprintf (file, "Actual number of loads: %ld (%ld/stmt)\n",
	   actual_num_vuses,
	   (stats->num_mem_stmts)
	   ? CEIL (actual_num_vuses, stats->num_mem_stmts)
	   : 0);

  /* Warn if the estimate undershoots the real count by > 25%.  */
  if (actual_num_vuses > stats->num_vuses + (stats->num_vuses / 25))
    fprintf (file, "\t(warning: estimation is lower by more than 25%%)\n");

  fprintf (file, "Estimated number of stores: %ld (%ld/stmt)\n",
	   stats->num_vdefs,
	   (stats->num_mem_stmts)
	   ? CEIL (stats->num_vdefs, stats->num_mem_stmts)
	   : 0);
  fprintf (file, "Actual number of stores: %ld (%ld/stmt)\n",
	   actual_num_vdefs,
	   (stats->num_mem_stmts)
	   ? CEIL (actual_num_vdefs, stats->num_mem_stmts)
	   : 0);

  /* Warn if the estimate undershoots the real count by > 25%.  */
  if (actual_num_vdefs > stats->num_vdefs + (stats->num_vdefs / 25))
    fprintf (file, "\t(warning: estimation is lower by more than 25%%)\n");

  fprintf (file, "Partitioning thresholds: MAX = %d AVG = %d "
	   "(%sNEED TO PARTITION)\n", MAX_ALIASED_VOPS, AVG_ALIASED_VOPS,
	   stats->num_mem_stmts && need_to_partition_p (stats) ? "" : "NO ");
  fprintf (file, "Number of partitioned symbols: %ld\n", num_partitioned);
  fprintf (file, "Number of unpartitioned symbols: %ld\n", num_unpartitioned);
}
894
895
/* Dump memory reference stats for the current function (CFUN) to
   stderr.  */

void
debug_mem_ref_stats (void)
{
  /* Convenience wrapper for use from the debugger.  */
  dump_mem_ref_stats (stderr);
}
903
904
/* Dump memory reference stats for variable VAR to FILE.  */

static void
dump_mem_sym_stats (FILE *file, tree var)
{
  mem_sym_stats_t stats = mem_sym_stats (cfun, var);

  /* No alias information recorded for VAR; nothing to print.  */
  if (stats == NULL)
    return;

  fprintf (file, "read frequency: %6ld, write frequency: %6ld, "
           "direct reads: %3ld, direct writes: %3ld, "
	   "indirect reads: %4ld, indirect writes: %4ld, symbol: ",
	   stats->frequency_reads, stats->frequency_writes,
	   stats->num_direct_reads, stats->num_direct_writes,
	   stats->num_indirect_reads, stats->num_indirect_writes);
  print_generic_expr (file, stats->var, 0);
  fprintf (file, ", tags: ");
  dump_decl_set (file, stats->parent_tags);
}
925
926
/* Dump memory reference stats for variable VAR to stderr.  */

void
debug_mem_sym_stats (tree var)
{
  /* Convenience wrapper for use from the debugger.  */
  dump_mem_sym_stats (stderr, var);
}
934
/* Dump memory reference stats for variable VAR to FILE.  For use
   of tree-dfa.c:dump_variable.  */

void
dump_mem_sym_stats_for_var (FILE *file, tree var)
{
  mem_sym_stats_t stats = mem_sym_stats (cfun, var);

  /* Stats only exist after pass_may_alias has run.  */
  if (stats == NULL)
    return;

  fprintf (file, ", score: %ld", mem_sym_score (stats));
  fprintf (file, ", direct reads: %ld", stats->num_direct_reads);
  fprintf (file, ", direct writes: %ld", stats->num_direct_writes);
  fprintf (file, ", indirect reads: %ld", stats->num_indirect_reads);
  fprintf (file, ", indirect writes: %ld", stats->num_indirect_writes);
}
952
953 /* Dump memory reference stats for all memory symbols to FILE. */
954
955 static void
956 dump_all_mem_sym_stats (FILE *file)
957 {
958 referenced_var_iterator rvi;
959 tree sym;
960
961 FOR_EACH_REFERENCED_VAR (sym, rvi)
962 {
963 if (is_gimple_reg (sym))
964 continue;
965
966 dump_mem_sym_stats (file, sym);
967 }
968 }
969
970
/* Dump memory reference stats for all memory symbols to stderr.  */

void
debug_all_mem_sym_stats (void)
{
  /* Convenience wrapper for use from the debugger.  */
  dump_all_mem_sym_stats (stderr);
}
978
979
980 /* Dump the MP_INFO array to FILE. */
981
982 static void
983 dump_mp_info (FILE *file, VEC(mem_sym_stats_t,heap) *mp_info)
984 {
985 unsigned i;
986 mem_sym_stats_t mp_p;
987
988 for (i = 0; VEC_iterate (mem_sym_stats_t, mp_info, i, mp_p); i++)
989 if (!mp_p->partitioned_p)
990 dump_mem_sym_stats (file, mp_p->var);
991 }
992
993
/* Dump the MP_INFO array to stderr.  */

void
debug_mp_info (VEC(mem_sym_stats_t,heap) *mp_info)
{
  /* Convenience wrapper for use from the debugger.  */
  dump_mp_info (stderr, mp_info);
}
1001
1002
1003 /* Update memory reference stats for symbol VAR in statement STMT.
1004 NUM_DIRECT_READS and NUM_DIRECT_WRITES specify the number of times
1005 that VAR is read/written in STMT (indirect reads/writes are not
1006 recorded by this function, see compute_memory_partitions). */
1007
1008 void
1009 update_mem_sym_stats_from_stmt (tree var, tree stmt, long num_direct_reads,
1010 long num_direct_writes)
1011 {
1012 mem_sym_stats_t stats;
1013
1014 gcc_assert (num_direct_reads >= 0 && num_direct_writes >= 0);
1015
1016 stats = get_mem_sym_stats_for (var);
1017
1018 stats->num_direct_reads += num_direct_reads;
1019 stats->frequency_reads += ((long) bb_for_stmt (stmt)->frequency
1020 * num_direct_reads);
1021
1022 stats->num_direct_writes += num_direct_writes;
1023 stats->frequency_writes += ((long) bb_for_stmt (stmt)->frequency
1024 * num_direct_writes);
1025 }
1026
1027
1028 /* Given two MP_INFO entries MP1 and MP2, return -1 if MP1->VAR should
1029 be partitioned before MP2->VAR, 0 if they are the same or 1 if
1030 MP1->VAR should be partitioned after MP2->VAR. */
1031
1032 static inline int
1033 compare_mp_info_entries (mem_sym_stats_t mp1, mem_sym_stats_t mp2)
1034 {
1035 long pscore1 = mem_sym_score (mp1);
1036 long pscore2 = mem_sym_score (mp2);
1037
1038 if (pscore1 < pscore2)
1039 return -1;
1040 else if (pscore1 > pscore2)
1041 return 1;
1042 else
1043 return DECL_UID (mp1->var) - DECL_UID (mp2->var);
1044 }
1045
1046
1047 /* Comparison routine for qsort. The list is sorted by increasing
1048 partitioning score (PSCORE). This score is computed such that
1049 symbols with high scores are those that are least likely to be
1050 partitioned. */
1051
1052 static int
1053 mp_info_cmp (const void *p, const void *q)
1054 {
1055 mem_sym_stats_t e1 = *((const mem_sym_stats_t *) p);
1056 mem_sym_stats_t e2 = *((const mem_sym_stats_t *) q);
1057 return compare_mp_info_entries (e1, e2);
1058 }
1059
1060
1061 /* Sort the array of reference counts used to compute memory partitions.
1062 Elements are sorted in ascending order of execution frequency and
1063 descending order of virtual operators needed. */
1064
1065 static inline void
1066 sort_mp_info (VEC(mem_sym_stats_t,heap) *list)
1067 {
1068 unsigned num = VEC_length (mem_sym_stats_t, list);
1069
1070 if (num < 2)
1071 return;
1072
1073 if (num == 2)
1074 {
1075 if (compare_mp_info_entries (VEC_index (mem_sym_stats_t, list, 0),
1076 VEC_index (mem_sym_stats_t, list, 1)) > 0)
1077 {
1078 /* Swap elements if they are in the wrong order. */
1079 mem_sym_stats_t tmp = VEC_index (mem_sym_stats_t, list, 0);
1080 VEC_replace (mem_sym_stats_t, list, 0,
1081 VEC_index (mem_sym_stats_t, list, 1));
1082 VEC_replace (mem_sym_stats_t, list, 1, tmp);
1083 }
1084
1085 return;
1086 }
1087
1088 /* There are 3 or more elements, call qsort. */
1089 qsort (VEC_address (mem_sym_stats_t, list),
1090 VEC_length (mem_sym_stats_t, list),
1091 sizeof (mem_sym_stats_t),
1092 mp_info_cmp);
1093 }
1094
1095
1096 /* Return the memory partition tag (MPT) associated with memory
1097 symbol SYM. */
1098
1099 static tree
1100 get_mpt_for (tree sym)
1101 {
1102 tree mpt;
1103
1104 /* Don't create a new tag unnecessarily. */
1105 mpt = memory_partition (sym);
1106 if (mpt == NULL_TREE)
1107 {
1108 mpt = create_tag_raw (MEMORY_PARTITION_TAG, TREE_TYPE (sym), "MPT");
1109 TREE_ADDRESSABLE (mpt) = 0;
1110 add_referenced_var (mpt);
1111 VEC_safe_push (tree, heap, gimple_ssa_operands (cfun)->mpt_table, mpt);
1112 gcc_assert (MPT_SYMBOLS (mpt) == NULL);
1113 set_memory_partition (sym, mpt);
1114 }
1115
1116 return mpt;
1117 }
1118
1119
/* Add MP_P->VAR to a memory partition and return the partition.

   An existing partition is preferred, searched in this order:
   (1) a partition tag with no symbols yet, (2) one whose members
   share a parent memory tag with MP_P->VAR and have the same
   call-clobber status, (3) any partition already grouping
   call-clobbered symbols when MP_P->VAR is itself call-clobbered.
   If none qualifies, a fresh partition is created.  */

static tree
find_partition_for (mem_sym_stats_t mp_p)
{
  unsigned i;
  VEC(tree,heap) *mpt_table;
  tree mpt;

  mpt_table = gimple_ssa_operands (cfun)->mpt_table;
  mpt = NULL_TREE;

  /* Find an existing partition for MP_P->VAR.  */
  for (i = 0; VEC_iterate (tree, mpt_table, i, mpt); i++)
    {
      mem_sym_stats_t mpt_stats;

      /* If MPT does not have any symbols yet, use it.  */
      if (MPT_SYMBOLS (mpt) == NULL)
	break;

      /* Otherwise, see if MPT has common parent tags with MP_P->VAR,
	 but avoid grouping clobbered variables with non-clobbered
	 variables (otherwise, this tends to create a single memory
	 partition because other call-clobbered variables may have
	 common parent tags with non-clobbered ones).  */
      mpt_stats = get_mem_sym_stats_for (mpt);
      if (mp_p->parent_tags
	  && mpt_stats->parent_tags
	  && is_call_clobbered (mpt) == is_call_clobbered (mp_p->var)
	  && bitmap_intersect_p (mpt_stats->parent_tags, mp_p->parent_tags))
	break;

      /* If no common parent tags are found, see if both MPT and
	 MP_P->VAR are call-clobbered.  */
      if (is_call_clobbered (mpt) && is_call_clobbered (mp_p->var))
	break;
    }

  /* If the loop above ran off the end of the table without breaking,
     MPT is NULL and a brand new partition is created for VAR.  */
  if (mpt == NULL_TREE)
    mpt = get_mpt_for (mp_p->var);
  else
    set_memory_partition (mp_p->var, mpt);

  mp_p->partitioned_p = true;

  /* Partition membership changed, so both symbols need their SSA
     form recomputed.  */
  mark_sym_for_renaming (mp_p->var);
  mark_sym_for_renaming (mpt);

  return mpt;
}
1171
1172
/* Rewrite the alias set for TAG to use the newly created partitions.
   Every member of TAG's alias set that has been assigned to a memory
   partition is replaced by that partition's tag; unpartitioned
   members are kept as-is.  NEW_ALIASES is a scratch bitmap used to
   build the new set of aliases for TAG; the caller is responsible for
   clearing it between calls.  TAG must be a memory tag with a
   non-NULL MTAG_ALIASES set.  */

static void
rewrite_alias_set_for (tree tag, bitmap new_aliases)
{
  bitmap_iterator bi;
  unsigned i;
  tree mpt, sym;

  EXECUTE_IF_SET_IN_BITMAP (MTAG_ALIASES (tag), 0, i, bi)
    {
      sym = referenced_var (i);
      mpt = memory_partition (sym);
      if (mpt)
	bitmap_set_bit (new_aliases, DECL_UID (mpt));
      else
	bitmap_set_bit (new_aliases, DECL_UID (sym));
    }

  /* Rebuild the may-alias array for TAG.  */
  bitmap_copy (MTAG_ALIASES (tag), new_aliases);
}
1198
1199
/* Determine how many virtual operands can be saved by partitioning
   MP_P->VAR into MPT.  When a symbol S is thrown inside a partition
   P, every virtual operand that used to reference S will now
   reference P.  Whether it reduces the number of virtual operands
   depends on:

   1- Direct references to S are never saved.  Instead of the virtual
      operand to S, we will now have a virtual operand to P.

   2- Indirect references to S are reduced only for those memory tags
      holding S that already had other symbols partitioned into P.
      For instance, if a memory tag T has the alias set { a b S c },
      the first time we partition S into P, the alias set will become
      { a b P c }, so no virtual operands will be saved.  However, if
      we now partition symbol 'c' into P, then the alias set for T
      will become { a b P }, so we will be saving one virtual operand
      for every indirect reference to 'c'.

   3- If S is call-clobbered, we save as many virtual operands as
      call/asm sites exist in the code, but only if other
      call-clobbered symbols have been grouped into P.  The first
      call-clobbered symbol that we group does not produce any
      savings.

   The savings are applied by decrementing MEM_REF_STATS->num_vuses
   and ->num_vdefs in place.  MEM_REF_STATS points to CFUN's memory
   reference information.  */

static void
estimate_vop_reduction (struct mem_ref_stats_d *mem_ref_stats,
                        mem_sym_stats_t mp_p, tree mpt)
{
  unsigned i;
  bitmap_iterator bi;
  mem_sym_stats_t mpt_stats;

  /* We should only get symbols with indirect references here.  */
  gcc_assert (mp_p->num_indirect_reads > 0 || mp_p->num_indirect_writes > 0);

  /* Note that the only statistics we keep for MPT is the set of
     parent tags to know which memory tags have had alias members
     partitioned, and the indicator has_call_clobbered_vars.
     Reference counts are not important for MPT.  */
  mpt_stats = get_mem_sym_stats_for (mpt);

  /* Traverse all the parent tags for MP_P->VAR.  For every tag T, if
     partition P is already grouping aliases of T, then reduce the
     number of virtual operands by the number of direct references
     to T.  */
  if (mp_p->parent_tags)
    {
      if (mpt_stats->parent_tags == NULL)
	mpt_stats->parent_tags = BITMAP_ALLOC (&alias_bitmap_obstack);

      EXECUTE_IF_SET_IN_BITMAP (mp_p->parent_tags, 0, i, bi)
	{
	  if (bitmap_bit_p (mpt_stats->parent_tags, i))
	    {
	      /* Partition MPT is already partitioning symbols in the
		 alias set for TAG.  This means that we are now saving
		 1 virtual operand for every direct reference to TAG.  */
	      tree tag = referenced_var (i);
	      mem_sym_stats_t tag_stats = mem_sym_stats (cfun, tag);
	      mem_ref_stats->num_vuses -= tag_stats->num_direct_reads;
	      mem_ref_stats->num_vdefs -= tag_stats->num_direct_writes;
	    }
	  else
	    {
	      /* This is the first symbol in tag I's alias set that is
		 being grouped under MPT.  We will not save any
		 virtual operands this time, but record that MPT is
		 grouping a symbol from TAG's alias set so that the
		 next time we get the savings.  */
	      bitmap_set_bit (mpt_stats->parent_tags, i);
	    }
	}
    }

  /* If MP_P->VAR is call-clobbered, and MPT is already grouping
     call-clobbered symbols, then we will save as many virtual
     operands as asm/call sites there are.  */
  if (is_call_clobbered (mp_p->var))
    {
      if (mpt_stats->has_call_clobbered_vars)
	mem_ref_stats->num_vdefs -= mem_ref_stats->num_call_sites
	                            + mem_ref_stats->num_asm_sites;
      else
	mpt_stats->has_call_clobbered_vars = true;
    }
}
1288
1289
/* Helper for compute_memory_partitions.  Transfer reference counts
   from pointers to their pointed-to sets.  Counters for pointers were
   computed by update_alias_info.  MEM_REF_STATS points to CFUN's
   memory reference information.

   Three kinds of transfers are performed:
   1- Dereferences of each pointer become direct references to the
      pointer's memory tag and indirect references to every member of
      that tag's alias set.
   2- Call-clobbered symbols get one indirect write per call/asm site.
   3- Addressable symbols get one indirect write per asm site (an
      over-estimation, see below).  */

static void
update_reference_counts (struct mem_ref_stats_d *mem_ref_stats)
{
  unsigned i;
  bitmap_iterator bi;
  mem_sym_stats_t sym_stats;

  /* SSA name 0 is never a user name; start at 1.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree ptr;
      struct ptr_info_def *pi;

      ptr = ssa_name (i);
      if (ptr
	  && POINTER_TYPE_P (TREE_TYPE (ptr))
	  && (pi = SSA_NAME_PTR_INFO (ptr)) != NULL
	  && pi->memory_tag_needed)
	{
	  unsigned j;
	  bitmap_iterator bj;
	  tree tag;
	  mem_sym_stats_t ptr_stats, tag_stats;

	  /* If PTR has flow-sensitive points-to information, use
	     PTR's name tag, otherwise use the symbol tag associated
	     with PTR's symbol.  */
	  if (pi->name_mem_tag)
	    tag = pi->name_mem_tag;
	  else
	    tag = symbol_mem_tag (SSA_NAME_VAR (ptr));

	  ptr_stats = get_mem_sym_stats_for (ptr);
	  tag_stats = get_mem_sym_stats_for (tag);

	  /* TAG has as many direct references as dereferences we
	     found for its parent pointer.  */
	  tag_stats->num_direct_reads += ptr_stats->num_direct_reads;
	  tag_stats->num_direct_writes += ptr_stats->num_direct_writes;

	  /* All the dereferences of pointer PTR are considered direct
	     references to PTR's memory tag (TAG).  In turn,
	     references to TAG will become virtual operands for every
	     symbol in TAG's alias set.  So, for every symbol ALIAS in
	     TAG's alias set, add as many indirect references to ALIAS
	     as direct references there are for TAG.  */
	  if (MTAG_ALIASES (tag))
	    EXECUTE_IF_SET_IN_BITMAP (MTAG_ALIASES (tag), 0, j, bj)
	      {
		tree alias = referenced_var (j);
		sym_stats = get_mem_sym_stats_for (alias);

		/* All the direct references to TAG are indirect references
		   to ALIAS.  */
		sym_stats->num_indirect_reads += ptr_stats->num_direct_reads;
		sym_stats->num_indirect_writes += ptr_stats->num_direct_writes;
		sym_stats->frequency_reads += ptr_stats->frequency_reads;
		sym_stats->frequency_writes += ptr_stats->frequency_writes;

		/* Indicate that TAG is one of ALIAS's parent tags.  */
		if (sym_stats->parent_tags == NULL)
		  sym_stats->parent_tags = BITMAP_ALLOC (&alias_bitmap_obstack);
		bitmap_set_bit (sym_stats->parent_tags, DECL_UID (tag));
	      }
	}
    }

  /* Call-clobbered symbols are indirectly written at every
     call/asm site.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_call_clobbered_vars (cfun), 0, i, bi)
    {
      tree sym = referenced_var (i);
      sym_stats = get_mem_sym_stats_for (sym);
      sym_stats->num_indirect_writes += mem_ref_stats->num_call_sites
	                                + mem_ref_stats->num_asm_sites;
    }

  /* Addressable symbols are indirectly written at some ASM sites.
     Since only ASM sites that clobber memory actually affect
     addressable symbols, this is an over-estimation.  */
  EXECUTE_IF_SET_IN_BITMAP (gimple_addressable_vars (cfun), 0, i, bi)
    {
      tree sym = referenced_var (i);
      sym_stats = get_mem_sym_stats_for (sym);
      sym_stats->num_indirect_writes += mem_ref_stats->num_asm_sites;
    }
}
1381
1382
/* Helper for compute_memory_partitions.  Add all memory symbols to
   *MP_INFO_P and compute the initial estimate for the total number of
   virtual operands needed (accumulated into MEM_REF_STATS->num_vuses
   and ->num_vdefs).  MEM_REF_STATS points to CFUN's memory reference
   information.  On exit, *TAGS_P will contain the list of memory tags
   whose alias set need to be rewritten after partitioning.

   As a side effect, any symbol still attached to a partition from a
   previous round is detached and marked for SSA renaming.  */

static void
build_mp_info (struct mem_ref_stats_d *mem_ref_stats,
               VEC(mem_sym_stats_t,heap) **mp_info_p,
	       VEC(tree,heap) **tags_p)
{
  tree var;
  referenced_var_iterator rvi;

  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      mem_sym_stats_t sym_stats;
      tree old_mpt;

      /* We are only interested in memory symbols other than MPTs.  */
      if (is_gimple_reg (var) || TREE_CODE (var) == MEMORY_PARTITION_TAG)
	continue;

      /* Collect memory tags into the TAGS array so that we can
	 rewrite their alias sets after partitioning.  */
      if (MTAG_P (var) && MTAG_ALIASES (var))
	VEC_safe_push (tree, heap, *tags_p, var);

      /* Since we are going to re-compute partitions, any symbols that
	 used to belong to a partition must be detached from it and
	 marked for renaming.  */
      if ((old_mpt = memory_partition (var)) != NULL)
	{
	  mark_sym_for_renaming (old_mpt);
	  set_memory_partition (var, NULL_TREE);
	  mark_sym_for_renaming (var);
	}

      sym_stats = get_mem_sym_stats_for (var);

      /* Add VAR's reference info to MP_INFO.  Note that the only
	 symbols that make sense to partition are those that have
	 indirect references.  If a symbol S is always directly
	 referenced, partitioning it will not reduce the number of
	 virtual operators.  The only symbols that are profitable to
	 partition are those that belong to alias sets and/or are
	 call-clobbered.  */
      if (sym_stats->num_indirect_reads > 0
	  || sym_stats->num_indirect_writes > 0)
	VEC_safe_push (mem_sym_stats_t, heap, *mp_info_p, sym_stats);

      /* Update the number of estimated VOPS.  Note that direct
	 references to memory tags are always counted as indirect
	 references to their alias set members, so if a memory tag has
	 aliases, do not count its direct references to avoid double
	 accounting.  */
      if (!MTAG_P (var) || !MTAG_ALIASES (var))
	{
	  mem_ref_stats->num_vuses += sym_stats->num_direct_reads;
	  mem_ref_stats->num_vdefs += sym_stats->num_direct_writes;
	}

      mem_ref_stats->num_vuses += sym_stats->num_indirect_reads;
      mem_ref_stats->num_vdefs += sym_stats->num_indirect_writes;
    }
}
1450
1451
/* Compute memory partitions.  A memory partition (MPT) is an
   arbitrary grouping of memory symbols, such that references to one
   member of the group is considered a reference to all the members of
   the group.

   As opposed to alias sets in memory tags, the grouping into
   partitions is completely arbitrary and only done to reduce the
   number of virtual operands.  The only rule that needs to be
   observed when creating memory partitions is that given two memory
   partitions MPT.i and MPT.j, they must not contain symbols in
   common.

   Memory partitions are used when putting the program into Memory-SSA
   form.  In particular, in Memory-SSA PHI nodes are not computed for
   individual memory symbols.  They are computed for memory
   partitions.  This reduces the amount of PHI nodes in the SSA graph
   at the expense of precision (i.e., it makes unrelated stores affect
   each other).

   However, it is possible to increase precision by changing this
   partitioning scheme.  For instance, if the partitioning scheme is
   such that get_mpt_for is the identity function (that is,
   get_mpt_for (s) = s), this will result in ultimate precision at the
   expense of huge SSA webs.

   At the other extreme, a partitioning scheme that groups all the
   symbols in the same set results in minimal SSA webs and almost
   total loss of precision.

   The partitioning heuristic uses three parameters to decide the
   order in which symbols are processed.  The list of symbols is
   sorted so that symbols that are more likely to be partitioned are
   near the top of the list:

   - Execution frequency.  If a memory references is in a frequently
     executed code path, grouping it into a partition may block useful
     transformations and cause sub-optimal code generation.  So, the
     partition heuristic tries to avoid grouping symbols with high
     execution frequency scores.  Execution frequency is taken
     directly from the basic blocks where every reference is made (see
     update_mem_sym_stats_from_stmt), which in turn uses the
     profile guided machinery, so if the program is compiled with PGO
     enabled, more accurate partitioning decisions will be made.

   - Number of references.  Symbols with few references in the code,
     are partitioned before symbols with many references.

   - NO_ALIAS attributes.  Symbols with any of the NO_ALIAS*
     attributes are partitioned after symbols marked MAY_ALIAS.

   Once the list is sorted, the partitioning proceeds as follows:

   1- For every symbol S in MP_INFO, create a new memory partition MP,
      if necessary.  To avoid memory partitions that contain symbols
      from non-conflicting alias sets, memory partitions are
      associated to the memory tag that holds S in its alias set.  So,
      when looking for a memory partition for S, the memory partition
      associated with one of the memory tags holding S is chosen.  If
      none exists, a new one is created.

   2- Add S to memory partition MP.

   3- Reduce by 1 the number of VOPS for every memory tag holding S.

   4- If the total number of VOPS is less than MAX_ALIASED_VOPS or the
      average number of VOPS per statement is less than
      AVG_ALIASED_VOPS, stop.  Otherwise, go to the next symbol in the
      list.  */

static void
compute_memory_partitions (void)
{
  tree tag;
  unsigned i;
  mem_sym_stats_t mp_p;
  VEC(mem_sym_stats_t,heap) *mp_info;
  bitmap new_aliases;
  VEC(tree,heap) *tags;
  struct mem_ref_stats_d *mem_ref_stats;
  int prev_max_aliased_vops;

  mem_ref_stats = gimple_mem_ref_stats (cfun);
  gcc_assert (mem_ref_stats->num_vuses == 0 && mem_ref_stats->num_vdefs == 0);

  /* Nothing to do for functions with no memory statements.  */
  if (mem_ref_stats->num_mem_stmts == 0)
    return;

  timevar_push (TV_MEMORY_PARTITIONING);

  mp_info = NULL;
  tags = NULL;

  /* MAX_ALIASED_VOPS is a global --param; save it so it can be
     restored before returning.  */
  prev_max_aliased_vops = MAX_ALIASED_VOPS;

  /* Since we clearly cannot lower the number of virtual operators
     below the total number of memory statements in the function, we
     may need to adjust MAX_ALIASED_VOPS beforehand.  */
  if (MAX_ALIASED_VOPS < mem_ref_stats->num_mem_stmts)
    MAX_ALIASED_VOPS = mem_ref_stats->num_mem_stmts;

  /* Update reference stats for all the pointed-to variables and
     memory tags.  */
  update_reference_counts (mem_ref_stats);

  /* Add all the memory symbols to MP_INFO.  */
  build_mp_info (mem_ref_stats, &mp_info, &tags);

  /* No partitions required if we are below the threshold.  */
  if (!need_to_partition_p (mem_ref_stats))
    {
      if (dump_file)
	fprintf (dump_file, "\nMemory partitioning NOT NEEDED for %s\n",
	         get_name (current_function_decl));
      goto done;
    }

  /* Sort the MP_INFO array so that symbols that should be partitioned
     first are near the top of the list.  */
  sort_mp_info (mp_info);

  if (dump_file)
    {
      fprintf (dump_file, "\nMemory partitioning NEEDED for %s\n\n",
	       get_name (current_function_decl));
      fprintf (dump_file, "Memory symbol references before partitioning:\n");
      dump_mp_info (dump_file, mp_info);
    }

  /* Create partitions for variables in MP_INFO until we have enough
     to lower the total number of VOPS below MAX_ALIASED_VOPS or if
     the average number of VOPS per statement is below
     AVG_ALIASED_VOPS.  */
  for (i = 0; VEC_iterate (mem_sym_stats_t, mp_info, i, mp_p); i++)
    {
      tree mpt;

      /* If we are below the threshold, stop.  */
      if (!need_to_partition_p (mem_ref_stats))
	break;

      mpt = find_partition_for (mp_p);
      estimate_vop_reduction (mem_ref_stats, mp_p, mpt);
    }

  /* After partitions have been created, rewrite alias sets to use
     them instead of the original symbols.  This way, if the alias set
     was computed as { a b c d e f }, and the subset { b e f } was
     grouped into partition MPT.3, then the new alias set for the tag
     will be  { a c d MPT.3 }.

     Note that this is not strictly necessary.  The operand scanner
     will always check if a symbol belongs to a partition when adding
     virtual operands.  However, by reducing the size of the alias
     sets to be scanned, the work needed inside the operand scanner is
     significantly reduced.  */
  new_aliases = BITMAP_ALLOC (&alias_bitmap_obstack);

  for (i = 0; VEC_iterate (tree, tags, i, tag); i++)
    {
      rewrite_alias_set_for (tag, new_aliases);
      bitmap_clear (new_aliases);
    }

  BITMAP_FREE (new_aliases);

  if (dump_file)
    {
      fprintf (dump_file, "\nMemory symbol references after partitioning:\n");
      dump_mp_info (dump_file, mp_info);
    }

 done:
  /* Free allocated memory.  */
  VEC_free (mem_sym_stats_t, heap, mp_info);
  VEC_free (tree, heap, tags);

  /* Restore the saved --param value.  */
  MAX_ALIASED_VOPS = prev_max_aliased_vops;

  timevar_pop (TV_MEMORY_PARTITIONING);
}
1631
1632
/* Compute may-alias information for every variable referenced in function
   FNDECL.

   Alias analysis proceeds in 3 main phases:

   1- Points-to and escape analysis.

   This phase walks the use-def chains in the SSA web looking for three
   things:

	* Assignments of the form P_i = &VAR
	* Assignments of the form P_i = malloc()
	* Pointers and ADDR_EXPR that escape the current function.

   The concept of 'escaping' is the same one used in the Java world.  When
   a pointer or an ADDR_EXPR escapes, it means that it has been exposed
   outside of the current function.  So, assignment to global variables,
   function arguments and returning a pointer are all escape sites, as are
   conversions between pointers and integers.

   This is where we are currently limited.  Since not everything is renamed
   into SSA, we lose track of escape properties when a pointer is stashed
   inside a field in a structure, for instance.  In those cases, we are
   assuming that the pointer does escape.

   We use escape analysis to determine whether a variable is
   call-clobbered.  Simply put, if an ADDR_EXPR escapes, then the variable
   is call-clobbered.  If a pointer P_i escapes, then all the variables
   pointed-to by P_i (and its memory tag) also escape.

   2- Compute flow-sensitive aliases

   We have two classes of memory tags.  Memory tags associated with the
   pointed-to data type of the pointers in the program.  These tags are
   called "symbol memory tag" (SMT).  The other class are those associated
   with SSA_NAMEs, called "name memory tag" (NMT).  The basic idea is that
   when adding operands for an INDIRECT_REF *P_i, we will first check
   whether P_i has a name tag, if it does we use it, because that will have
   more precise aliasing information.  Otherwise, we use the standard symbol
   tag.

   In this phase, we go through all the pointers we found in points-to
   analysis and create alias sets for the name memory tags associated with
   each pointer P_i.  If P_i escapes, we mark call-clobbered the variables
   it points to and its tag.


   3- Compute flow-insensitive aliases

   This pass will compare the alias set of every symbol memory tag and
   every addressable variable found in the program.  Given a symbol
   memory tag SMT and an addressable variable V.  If the alias sets of
   SMT and V conflict (as computed by may_alias_p), then V is marked
   as an alias tag and added to the alias set of SMT.

   For instance, consider the following function:

	    foo (int i)
	    {
	      int *p, a, b;

	      if (i > 10)
	        p = &a;
	      else
	        p = &b;

	      *p = 3;
	      a = b + 2;
	      return *p;
	    }

   After aliasing analysis has finished, the symbol memory tag for pointer
   'p' will have two aliases, namely variables 'a' and 'b'.  Every time
   pointer 'p' is dereferenced, we want to mark the operation as a
   potential reference to 'a' and 'b'.

	    foo (int i)
	    {
	      int *p, a, b;

	      if (i_2 > 10)
		p_4 = &a;
	      else
		p_6 = &b;
	      # p_1 = PHI <p_4(1), p_6(2)>;

	      # a_7 = VDEF <a_3>;
	      # b_8 = VDEF <b_5>;
	      *p_1 = 3;

	      # a_9 = VDEF <a_7>
	      # VUSE <b_8>
	      a_9 = b_8 + 2;

	      # VUSE <a_9>;
	      # VUSE <b_8>;
	      return *p_1;
	    }

   In certain cases, the list of may aliases for a pointer may grow too
   large.  This may cause an explosion in the number of virtual operands
   inserted in the code.  Resulting in increased memory consumption and
   compilation time.

   When the number of virtual operands needed to represent aliased
   loads and stores grows too large (configurable with option --param
   max-aliased-vops and --param avg-aliased-vops), alias sets are
   grouped to avoid severe compile-time slow downs and memory
   consumption.  See compute_memory_partitions.  */

unsigned int
compute_may_aliases (void)
{
  struct alias_info *ai;

  timevar_push (TV_TREE_MAY_ALIAS);

  memset (&alias_stats, 0, sizeof (alias_stats));

  /* Initialize aliasing information.  */
  ai = init_alias_info ();

  /* For each pointer P_i, determine the sets of variables that P_i may
     point-to.  For every addressable variable V, determine whether the
     address of V escapes the current function, making V call-clobbered
     (i.e., whether &V is stored in a global variable or if it's passed as a
     function call argument).  */
  compute_points_to_sets ();

  /* Update various related attributes like escaped addresses,
     pointer dereferences for loads and stores.  This is used
     when creating name tags and alias sets.  */
  update_alias_info (ai);

  /* Collect all pointers and addressable variables, compute alias sets,
     create memory tags for pointers and promote variables whose address is
     not needed anymore.  */
  setup_pointers_and_addressables (ai);

  /* Compute type-based flow-insensitive aliasing for all the type
     memory tags.  */
  compute_flow_insensitive_aliasing (ai);

  /* Compute flow-sensitive, points-to based aliasing for all the name
     memory tags.  */
  compute_flow_sensitive_aliasing (ai);

  /* Compute call clobbering information.  */
  compute_call_clobbered (ai);

  /* If the program makes no reference to global variables, but it
     contains a mixture of pure and non-pure functions, then we need
     to create use-def and def-def links between these functions to
     avoid invalid transformations on them.  */
  maybe_create_global_var ();

  /* Compute memory partitions for every memory variable.  */
  compute_memory_partitions ();

  /* Remove partitions with no symbols.  Partitions may end up with an
     empty MPT_SYMBOLS set if a previous round of alias analysis
     needed to partition more symbols.  Since we don't need those
     partitions anymore, remove them to free up the space.  */
  {
    tree mpt;
    unsigned i;
    VEC(tree,heap) *mpt_table;

    mpt_table = gimple_ssa_operands (cfun)->mpt_table;
    i = 0;
    while (i < VEC_length (tree, mpt_table))
      {
	mpt = VEC_index (tree, mpt_table, i);
	if (MPT_SYMBOLS (mpt) == NULL)
	  VEC_unordered_remove (tree, mpt_table, i);
	else
	  i++;
      }
  }

  /* Populate all virtual operands and newly promoted register operands.  */
  {
    block_stmt_iterator bsi;
    basic_block bb;
    FOR_EACH_BB (bb)
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	update_stmt_if_modified (bsi_stmt (bsi));
  }

  /* Debugging dumps.  */
  if (dump_file)
    {
      dump_mem_ref_stats (dump_file);
      dump_alias_info (dump_file);
      dump_points_to_info (dump_file);

      if (dump_flags & TDF_STATS)
	dump_alias_stats (dump_file);

      if (dump_flags & TDF_DETAILS)
	dump_referenced_vars (dump_file);
    }

  /* Report strict aliasing violations.  */
  strict_aliasing_warning_backend ();

  /* Deallocate memory used by aliasing data structures.  */
  delete_alias_info (ai);

  /* Renaming may have been queued by partitioning and promotion;
     flush it before finishing the pass.  */
  if (need_ssa_update_p ())
    update_ssa (TODO_update_ssa);

  timevar_pop (TV_TREE_MAY_ALIAS);

  return 0;
}
1849
/* Data structure used to count the number of dereferences to PTR
   inside an expression.  Passed as the DATA argument to
   count_ptr_derefs via walk_tree.  */
struct count_ptr_d
{
  /* The pointer being searched for.  */
  tree ptr;

  /* Number of INDIRECT_REFs of PTR found so far.  */
  unsigned count;
};
1857
1858
1859 /* Helper for count_uses_and_derefs. Called by walk_tree to look for
1860 (ALIGN/MISALIGNED_)INDIRECT_REF nodes for the pointer passed in DATA. */
1861
1862 static tree
1863 count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
1864 {
1865 struct count_ptr_d *count_p = (struct count_ptr_d *) data;
1866
1867 /* Do not walk inside ADDR_EXPR nodes. In the expression &ptr->fld,
1868 pointer 'ptr' is *not* dereferenced, it is simply used to compute
1869 the address of 'fld' as 'ptr + offsetof(fld)'. */
1870 if (TREE_CODE (*tp) == ADDR_EXPR)
1871 {
1872 *walk_subtrees = 0;
1873 return NULL_TREE;
1874 }
1875
1876 if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
1877 count_p->count++;
1878
1879 return NULL_TREE;
1880 }
1881
1882
/* Count the number of direct and indirect uses for pointer PTR in
   statement STMT.  The number of direct uses is stored in
   *NUM_USES_P.  Indirect references are counted separately depending
   on whether they are store or load operations.  The counts are
   stored in *NUM_STORES_P and *NUM_LOADS_P.  All three counters are
   reset to zero on entry.  */

void
count_uses_and_derefs (tree ptr, tree stmt, unsigned *num_uses_p,
		       unsigned *num_loads_p, unsigned *num_stores_p)
{
  ssa_op_iter i;
  tree use;

  *num_uses_p = 0;
  *num_loads_p = 0;
  *num_stores_p = 0;

  /* Find out the total number of uses of PTR in STMT.  */
  FOR_EACH_SSA_TREE_OPERAND (use, stmt, i, SSA_OP_USE)
    if (use == ptr)
      (*num_uses_p)++;

  /* Now count the number of indirect references to PTR.  This is
     truly awful, but we don't have much choice.  There are no parent
     pointers inside INDIRECT_REFs, so an expression like
     '*x_1 = foo (x_1, *x_1)' needs to be traversed piece by piece to
     find all the indirect and direct uses of x_1 inside.  The only
     shortcut we can take is the fact that GIMPLE only allows
     INDIRECT_REFs inside the expressions below.  */
  if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
      || (TREE_CODE (stmt) == RETURN_EXPR
	  && TREE_CODE (TREE_OPERAND (stmt, 0)) == GIMPLE_MODIFY_STMT)
      || TREE_CODE (stmt) == ASM_EXPR
      || TREE_CODE (stmt) == CALL_EXPR)
    {
      tree lhs, rhs;

      /* Split STMT into an LHS (where dereferences are stores) and an
	 RHS (where dereferences are loads) according to its kind.  */
      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
	{
	  lhs = GIMPLE_STMT_OPERAND (stmt, 0);
	  rhs = GIMPLE_STMT_OPERAND (stmt, 1);
	}
      else if (TREE_CODE (stmt) == RETURN_EXPR)
	{
	  /* The interesting operands are inside the embedded
	     GIMPLE_MODIFY_STMT.  */
	  tree e = TREE_OPERAND (stmt, 0);
	  lhs = GIMPLE_STMT_OPERAND (e, 0);
	  rhs = GIMPLE_STMT_OPERAND (e, 1);
	}
      else if (TREE_CODE (stmt) == ASM_EXPR)
	{
	  /* Asm outputs are stores, asm inputs are loads.  */
	  lhs = ASM_OUTPUTS (stmt);
	  rhs = ASM_INPUTS (stmt);
	}
      else
	{
	  /* A bare CALL_EXPR: everything counts as a load.  */
	  lhs = NULL_TREE;
	  rhs = stmt;
	}

      /* Dereferences of PTR on the LHS are stores.  */
      if (lhs
	  && (TREE_CODE (lhs) == TREE_LIST
	      || EXPR_P (lhs)
	      || GIMPLE_STMT_P (lhs)))
	{
	  struct count_ptr_d count;
	  count.ptr = ptr;
	  count.count = 0;
	  walk_tree (&lhs, count_ptr_derefs, &count, NULL);
	  *num_stores_p = count.count;
	}

      /* Dereferences of PTR on the RHS are loads.  */
      if (rhs
	  && (TREE_CODE (rhs) == TREE_LIST
	      || EXPR_P (rhs)
	      || GIMPLE_STMT_P (rhs)))
	{
	  struct count_ptr_d count;
	  count.ptr = ptr;
	  count.count = 0;
	  walk_tree (&rhs, count_ptr_derefs, &count, NULL);
	  *num_loads_p = count.count;
	}
    }

  /* Every dereference of PTR is also a use of PTR, so the totals must
     be consistent.  */
  gcc_assert (*num_uses_p >= *num_loads_p + *num_stores_p);
}
1969
1970 /* Remove memory references stats for function FN. */
1971
1972 void
1973 delete_mem_ref_stats (struct function *fn)
1974 {
1975 if (gimple_mem_ref_stats (fn)->mem_sym_stats)
1976 {
1977 free_alloc_pool (mem_sym_stats_pool);
1978 pointer_map_destroy (gimple_mem_ref_stats (fn)->mem_sym_stats);
1979 }
1980 gimple_mem_ref_stats (fn)->mem_sym_stats = NULL;
1981 }
1982
1983
1984 /* Initialize memory reference stats. */
1985
1986 static void
1987 init_mem_ref_stats (void)
1988 {
1989 struct mem_ref_stats_d *mem_ref_stats = gimple_mem_ref_stats (cfun);
1990
1991 mem_sym_stats_pool = create_alloc_pool ("Mem sym stats",
1992 sizeof (struct mem_sym_stats_d),
1993 100);
1994 memset (mem_ref_stats, 0, sizeof (struct mem_ref_stats_d));
1995 mem_ref_stats->mem_sym_stats = pointer_map_create ();
1996 }
1997
1998
1999 /* Helper for init_alias_info. Reset existing aliasing information. */
2000
static void
reset_alias_info (void)
{
  referenced_var_iterator rvi;
  tree var;
  unsigned i;
  bitmap active_nmts, all_nmts;

  /* Clear the set of addressable variables.  We do not need to clear
     the TREE_ADDRESSABLE bit on every symbol because we are going to
     re-compute addressability here.  */
  bitmap_clear (gimple_addressable_vars (cfun));

  active_nmts = BITMAP_ALLOC (&alias_bitmap_obstack);
  all_nmts = BITMAP_ALLOC (&alias_bitmap_obstack);

  /* Clear flow-insensitive alias information from each symbol.  */
  FOR_EACH_REFERENCED_VAR (var, rvi)
    {
      /* GIMPLE registers carry no alias information.  */
      if (is_gimple_reg (var))
	continue;

      if (MTAG_P (var))
	MTAG_ALIASES (var) = NULL;

      /* Memory partition information will be computed from scratch.  */
      if (TREE_CODE (var) == MEMORY_PARTITION_TAG)
	MPT_SYMBOLS (var) = NULL;

      /* Collect all the name tags to determine if we have any
	 orphaned that need to be removed from the IL.  A name tag
	 will be orphaned if it is not associated with any active SSA
	 name.  */
      if (TREE_CODE (var) == NAME_MEMORY_TAG)
	bitmap_set_bit (all_nmts, DECL_UID (var));

      /* Since we are about to re-discover call-clobbered
	 variables, clear the call-clobbered flag.  */
      clear_call_clobbered (var);
    }

  /* There should be no call-clobbered variable left.  */
  gcc_assert (bitmap_empty_p (gimple_call_clobbered_vars (cfun)));

  /* Clear the call-used variables.  */
  bitmap_clear (gimple_call_used_vars (cfun));

  /* Clear flow-sensitive points-to information from each SSA name
     (slot 0 of the SSA name table is skipped).  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);

      /* Released names leave NULL holes in the table; only
	 pointer-typed names carry points-to information.  */
      if (!name || !POINTER_TYPE_P (TREE_TYPE (name)))
	continue;

      if (SSA_NAME_PTR_INFO (name))
	{
	  struct ptr_info_def *pi = SSA_NAME_PTR_INFO (name);

	  /* Clear all the flags but keep the name tag to
	     avoid creating new temporaries unnecessarily.  If
	     this pointer is found to point to a subset or
	     superset of its former points-to set, then a new
	     tag will need to be created in create_name_tags.  */
	  pi->pt_anything = 0;
	  pi->pt_null = 0;
	  pi->value_escapes_p = 0;
	  pi->memory_tag_needed = 0;
	  pi->is_dereferenced = 0;
	  if (pi->pt_vars)
	    bitmap_clear (pi->pt_vars);

	  /* Add NAME's name tag to the set of active tags.  */
	  if (pi->name_mem_tag)
	    bitmap_set_bit (active_nmts, DECL_UID (pi->name_mem_tag));
	}
    }

  /* Name memory tags that are no longer associated with an SSA name
     are considered stale and should be removed from the IL.  All the
     name tags that are in the set ALL_NMTS but not in ACTIVE_NMTS are
     considered stale and marked for renaming.  */
  bitmap_and_compl_into (all_nmts, active_nmts);
  mark_set_for_renaming (all_nmts);

  BITMAP_FREE (all_nmts);
  BITMAP_FREE (active_nmts);
}
2089
2090
2091 /* Initialize the data structures used for alias analysis. */
2092
2093 static struct alias_info *
2094 init_alias_info (void)
2095 {
2096 struct alias_info *ai;
2097 referenced_var_iterator rvi;
2098 tree var;
2099
2100 ai = XCNEW (struct alias_info);
2101 ai->ssa_names_visited = sbitmap_alloc (num_ssa_names);
2102 sbitmap_zero (ai->ssa_names_visited);
2103 ai->processed_ptrs = VEC_alloc (tree, heap, 50);
2104 ai->written_vars = pointer_set_create ();
2105 ai->dereferenced_ptrs_store = pointer_set_create ();
2106 ai->dereferenced_ptrs_load = pointer_set_create ();
2107
2108 /* Clear out all memory reference stats. */
2109 init_mem_ref_stats ();
2110
2111 /* If aliases have been computed before, clear existing information. */
2112 if (gimple_aliases_computed_p (cfun))
2113 reset_alias_info ();
2114 else
2115 {
2116 /* If this is the first time we compute aliasing information,
2117 every non-register symbol will need to be put into SSA form
2118 (the initial SSA form only operates on GIMPLE registers). */
2119 FOR_EACH_REFERENCED_VAR (var, rvi)
2120 if (!is_gimple_reg (var))
2121 mark_sym_for_renaming (var);
2122 }
2123
2124 /* Next time, we will need to reset alias information. */
2125 cfun->gimple_df->aliases_computed_p = true;
2126 if (alias_bitmap_obstack.elements != NULL)
2127 bitmap_obstack_release (&alias_bitmap_obstack);
2128 bitmap_obstack_initialize (&alias_bitmap_obstack);
2129
2130 return ai;
2131 }
2132
2133
2134 /* Deallocate memory used by alias analysis. */
2135
2136 static void
2137 delete_alias_info (struct alias_info *ai)
2138 {
2139 size_t i;
2140
2141 sbitmap_free (ai->ssa_names_visited);
2142
2143 VEC_free (tree, heap, ai->processed_ptrs);
2144
2145 for (i = 0; i < ai->num_addressable_vars; i++)
2146 free (ai->addressable_vars[i]);
2147 free (ai->addressable_vars);
2148
2149 for (i = 0; i < ai->num_pointers; i++)
2150 free (ai->pointers[i]);
2151 free (ai->pointers);
2152
2153 pointer_set_destroy (ai->written_vars);
2154 pointer_set_destroy (ai->dereferenced_ptrs_store);
2155 pointer_set_destroy (ai->dereferenced_ptrs_load);
2156 free (ai);
2157
2158 delete_mem_ref_stats (cfun);
2159 delete_points_to_sets ();
2160 }
2161
2162
2163 /* Used for hashing to identify pointer infos with identical
2164 pt_vars bitmaps. */
2165
2166 static int
2167 eq_ptr_info (const void *p1, const void *p2)
2168 {
2169 const struct ptr_info_def *n1 = (const struct ptr_info_def *) p1;
2170 const struct ptr_info_def *n2 = (const struct ptr_info_def *) p2;
2171 return bitmap_equal_p (n1->pt_vars, n2->pt_vars);
2172 }
2173
2174 static hashval_t
2175 ptr_info_hash (const void *p)
2176 {
2177 const struct ptr_info_def *n = (const struct ptr_info_def *) p;
2178 return bitmap_hash (n->pt_vars);
2179 }
2180
2181
2182 /* Create name tags for all the pointers that have been dereferenced.
2183 We only create a name tag for a pointer P if P is found to point to
2184 a set of variables (so that we can alias them to *P) or if it is
2185 the result of a call to malloc (which means that P cannot point to
2186 anything else nor alias any other variable).
2187
2188 If two pointers P and Q point to the same set of variables, they
2189 are assigned the same name tag. */
2190
2191 static void
2192 create_name_tags (void)
2193 {
2194 size_t i;
2195 VEC (tree, heap) *with_ptvars = NULL;
2196 tree ptr;
2197 htab_t ptr_hash;
2198
2199 /* Collect the list of pointers with a non-empty points to set. */
2200 for (i = 1; i < num_ssa_names; i++)
2201 {
2202 tree ptr = ssa_name (i);
2203 struct ptr_info_def *pi;
2204
2205 if (!ptr
2206 || !POINTER_TYPE_P (TREE_TYPE (ptr))
2207 || !SSA_NAME_PTR_INFO (ptr))
2208 continue;
2209
2210 pi = SSA_NAME_PTR_INFO (ptr);
2211
2212 if (pi->pt_anything || !pi->memory_tag_needed)
2213 {
2214 /* No name tags for pointers that have not been
2215 dereferenced or point to an arbitrary location. */
2216 pi->name_mem_tag = NULL_TREE;
2217 continue;
2218 }
2219
2220 /* Set pt_anything on the pointers without pt_vars filled in so
2221 that they are assigned a symbol tag. */
2222 if (pi->pt_vars && !bitmap_empty_p (pi->pt_vars))
2223 VEC_safe_push (tree, heap, with_ptvars, ptr);
2224 else
2225 set_pt_anything (ptr);
2226 }
2227
2228 /* If we didn't find any pointers with pt_vars set, we're done. */
2229 if (!with_ptvars)
2230 return;
2231
2232 ptr_hash = htab_create (10, ptr_info_hash, eq_ptr_info, NULL);
2233
2234 /* Now go through the pointers with pt_vars, and find a name tag
2235 with the same pt_vars as this pointer, or create one if one
2236 doesn't exist. */
2237 for (i = 0; VEC_iterate (tree, with_ptvars, i, ptr); i++)
2238 {
2239 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2240 tree old_name_tag = pi->name_mem_tag;
2241 struct ptr_info_def **slot;
2242
2243 /* If PTR points to a set of variables, check if we don't
2244 have another pointer Q with the same points-to set before
2245 creating a tag. If so, use Q's tag instead of creating a
2246 new one.
2247
2248 This is important for not creating unnecessary symbols
2249 and also for copy propagation. If we ever need to
2250 propagate PTR into Q or vice-versa, we would run into
2251 problems if they both had different name tags because
2252 they would have different SSA version numbers (which
2253 would force us to take the name tags in and out of SSA). */
2254 slot = (struct ptr_info_def **) htab_find_slot (ptr_hash, pi, INSERT);
2255 if (*slot)
2256 pi->name_mem_tag = (*slot)->name_mem_tag;
2257 else
2258 {
2259 *slot = pi;
2260
2261 /* If we didn't find a pointer with the same points-to set
2262 as PTR, create a new name tag if needed. */
2263 if (pi->name_mem_tag == NULL_TREE)
2264 pi->name_mem_tag = get_nmt_for (ptr);
2265 }
2266
2267 /* If the new name tag computed for PTR is different than
2268 the old name tag that it used to have, then the old tag
2269 needs to be removed from the IL, so we mark it for
2270 renaming. */
2271 if (old_name_tag && old_name_tag != pi->name_mem_tag)
2272 mark_sym_for_renaming (old_name_tag);
2273
2274 /* Inherit volatility from the pointed-to type. */
2275 TREE_THIS_VOLATILE (pi->name_mem_tag)
2276 |= TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (ptr)));
2277
2278 /* Mark the new name tag for renaming. */
2279 mark_sym_for_renaming (pi->name_mem_tag);
2280 }
2281
2282 htab_delete (ptr_hash);
2283
2284 VEC_free (tree, heap, with_ptvars);
2285 }
2286
2287
2288 /* Union the alias set SET into the may-aliases for TAG. */
2289
2290 static void
2291 union_alias_set_into (tree tag, bitmap set)
2292 {
2293 bitmap ma = MTAG_ALIASES (tag);
2294
2295 if (bitmap_empty_p (set))
2296 return;
2297
2298 if (!ma)
2299 ma = MTAG_ALIASES (tag) = BITMAP_ALLOC (&alias_bitmap_obstack);
2300 bitmap_ior_into (ma, set);
2301 }
2302
2303
2304 /* For every pointer P_i in AI->PROCESSED_PTRS, create may-alias sets for
2305 the name memory tag (NMT) associated with P_i. If P_i escapes, then its
2306 name tag and the variables it points-to are call-clobbered. Finally, if
2307 P_i escapes and we could not determine where it points to, then all the
2308 variables in the same alias set as *P_i are marked call-clobbered. This
2309 is necessary because we must assume that P_i may take the address of any
2310 variable in the same alias set. */
2311
2312 static void
2313 compute_flow_sensitive_aliasing (struct alias_info *ai)
2314 {
2315 size_t i;
2316 tree ptr;
2317
2318 timevar_push (TV_FLOW_SENSITIVE);
2319
2320 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
2321 {
2322 if (!find_what_p_points_to (ptr))
2323 set_pt_anything (ptr);
2324 }
2325
2326 create_name_tags ();
2327
2328 for (i = 0; VEC_iterate (tree, ai->processed_ptrs, i, ptr); i++)
2329 {
2330 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
2331
2332 /* Set up aliasing information for PTR's name memory tag (if it has
2333 one). Note that only pointers that have been dereferenced will
2334 have a name memory tag. */
2335 if (pi->name_mem_tag && pi->pt_vars)
2336 {
2337 if (!bitmap_empty_p (pi->pt_vars))
2338 union_alias_set_into (pi->name_mem_tag, pi->pt_vars);
2339 }
2340 }
2341 timevar_pop (TV_FLOW_SENSITIVE);
2342 }
2343
2344
2345 /* Return TRUE if at least one symbol in TAG2's alias set is also
2346 present in TAG1's alias set. */
2347
2348 static bool
2349 have_common_aliases_p (bitmap tag1aliases, bitmap tag2aliases)
2350 {
2351
2352 /* This is the old behavior of have_common_aliases_p, which is to
2353 return false if both sets are empty, or one set is and the other
2354 isn't. */
2355 if (tag1aliases == NULL || tag2aliases == NULL)
2356 return false;
2357
2358 return bitmap_intersect_p (tag1aliases, tag2aliases);
2359 }
2360
2361 /* Compute type-based alias sets. Traverse all the pointers and
2362 addressable variables found in setup_pointers_and_addressables.
2363
2364 For every pointer P in AI->POINTERS and addressable variable V in
2365 AI->ADDRESSABLE_VARS, add V to the may-alias sets of P's symbol
2366 memory tag (SMT) if their alias sets conflict. V is then marked as
2367 an aliased symbol so that the operand scanner knows that statements
2368 containing V have aliased operands. */
2369
2370 static void
2371 compute_flow_insensitive_aliasing (struct alias_info *ai)
2372 {
2373 referenced_var_iterator rvi;
2374 tree var;
2375 size_t i;
2376
2377 timevar_push (TV_FLOW_INSENSITIVE);
2378 /* For every pointer P, determine which addressable variables may alias
2379 with P's symbol memory tag. */
2380 for (i = 0; i < ai->num_pointers; i++)
2381 {
2382 size_t j;
2383 struct alias_map_d *p_map = ai->pointers[i];
2384 tree tag = symbol_mem_tag (p_map->var);
2385 tree var;
2386
2387 for (j = 0; j < ai->num_addressable_vars; j++)
2388 {
2389 struct alias_map_d *v_map;
2390 var_ann_t v_ann;
2391 bool tag_stored_p, var_stored_p;
2392
2393 v_map = ai->addressable_vars[j];
2394 var = v_map->var;
2395 v_ann = var_ann (var);
2396
2397 /* Skip memory tags and variables that have never been
2398 written to. We also need to check if the variables are
2399 call-clobbered because they may be overwritten by
2400 function calls. */
2401 tag_stored_p = pointer_set_contains (ai->written_vars, tag)
2402 || is_call_clobbered (tag);
2403 var_stored_p = pointer_set_contains (ai->written_vars, var)
2404 || is_call_clobbered (var);
2405 if (!tag_stored_p && !var_stored_p)
2406 continue;
2407
2408 if (may_alias_p (p_map->var, p_map->set, var, v_map->set, false))
2409 {
2410 /* Add VAR to TAG's may-aliases set. */
2411 add_may_alias (tag, var);
2412 }
2413 }
2414 }
2415
2416 /* Since this analysis is based exclusively on symbols, it fails to
2417 handle cases where two pointers P and Q have different memory
2418 tags with conflicting alias set numbers but no aliased symbols in
2419 common.
2420
2421 For example, suppose that we have two memory tags SMT.1 and SMT.2
2422 such that
2423
2424 may-aliases (SMT.1) = { a }
2425 may-aliases (SMT.2) = { b }
2426
2427 and the alias set number of SMT.1 conflicts with that of SMT.2.
2428 Since they don't have symbols in common, loads and stores from
2429 SMT.1 and SMT.2 will seem independent of each other, which will
2430 lead to the optimizers making invalid transformations (see
2431 testsuite/gcc.c-torture/execute/pr15262-[12].c).
2432
2433 To avoid this problem, we do a final traversal of AI->POINTERS
2434 looking for pairs of pointers that have no aliased symbols in
2435 common and yet have conflicting alias set numbers. */
2436 for (i = 0; i < ai->num_pointers; i++)
2437 {
2438 size_t j;
2439 struct alias_map_d *p_map1 = ai->pointers[i];
2440 tree tag1 = symbol_mem_tag (p_map1->var);
2441 bitmap may_aliases1 = MTAG_ALIASES (tag1);
2442
2443 for (j = 0; j < ai->num_pointers; j++)
2444 {
2445 struct alias_map_d *p_map2 = ai->pointers[j];
2446 tree tag2 = symbol_mem_tag (p_map2->var);
2447 bitmap may_aliases2 = may_aliases (tag2);
2448
2449 /* By convention tags don't alias themselves. */
2450 if (tag1 == tag2)
2451 continue;
2452
2453 /* If the pointers may not point to each other, do nothing. */
2454 if (!may_alias_p (p_map1->var, p_map1->set, tag2, p_map2->set, true))
2455 continue;
2456
2457 /* The two pointers may alias each other. If they already have
2458 symbols in common, do nothing. */
2459 if (have_common_aliases_p (may_aliases1, may_aliases2))
2460 continue;
2461
2462 add_may_alias (tag1, tag2);
2463 }
2464 }
2465
2466 /* We have to add all HEAP variables to all SMTs aliases bitmaps.
2467 As we don't know which effective type the HEAP will have we cannot
2468 do better here and we need the conflicts with obfuscated pointers
2469 (a simple (*(int[n] *)ptr)[i] will do, with ptr from a VLA array
2470 allocation). */
2471 for (i = 0; i < ai->num_pointers; i++)
2472 {
2473 struct alias_map_d *p_map = ai->pointers[i];
2474 tree tag = symbol_mem_tag (p_map->var);
2475
2476 FOR_EACH_REFERENCED_VAR (var, rvi)
2477 {
2478 if (var_ann (var)->is_heapvar)
2479 add_may_alias (tag, var);
2480 }
2481 }
2482
2483 timevar_pop (TV_FLOW_INSENSITIVE);
2484 }
2485
2486
2487 /* Create a new alias set entry for VAR in AI->ADDRESSABLE_VARS. */
2488
2489 static void
2490 create_alias_map_for (tree var, struct alias_info *ai)
2491 {
2492 struct alias_map_d *alias_map;
2493 alias_map = XCNEW (struct alias_map_d);
2494 alias_map->var = var;
2495 alias_map->set = get_alias_set (var);
2496 ai->addressable_vars[ai->num_addressable_vars++] = alias_map;
2497 }
2498
2499
2500 /* Update related alias information kept in AI. This is used when
2501 building name tags, alias sets and deciding grouping heuristics.
2502 STMT is the statement to process. This function also updates
2503 ADDRESSABLE_VARS. */
2504
2505 static void
2506 update_alias_info_1 (tree stmt, struct alias_info *ai)
2507 {
2508 bitmap addr_taken;
2509 use_operand_p use_p;
2510 ssa_op_iter iter;
2511 bool stmt_dereferences_ptr_p;
2512 enum escape_type stmt_escape_type = is_escape_site (stmt);
2513 struct mem_ref_stats_d *mem_ref_stats = gimple_mem_ref_stats (cfun);
2514
2515 stmt_dereferences_ptr_p = false;
2516
2517 if (stmt_escape_type == ESCAPE_TO_CALL
2518 || stmt_escape_type == ESCAPE_TO_PURE_CONST)
2519 {
2520 mem_ref_stats->num_call_sites++;
2521 if (stmt_escape_type == ESCAPE_TO_PURE_CONST)
2522 mem_ref_stats->num_pure_const_call_sites++;
2523 }
2524 else if (stmt_escape_type == ESCAPE_TO_ASM)
2525 mem_ref_stats->num_asm_sites++;
2526
2527 /* Mark all the variables whose address are taken by the statement. */
2528 addr_taken = addresses_taken (stmt);
2529 if (addr_taken)
2530 bitmap_ior_into (gimple_addressable_vars (cfun), addr_taken);
2531
2532 /* Process each operand use. For pointers, determine whether they
2533 are dereferenced by the statement, or whether their value
2534 escapes, etc. */
2535 FOR_EACH_PHI_OR_STMT_USE (use_p, stmt, iter, SSA_OP_USE)
2536 {
2537 tree op, var;
2538 var_ann_t v_ann;
2539 struct ptr_info_def *pi;
2540 unsigned num_uses, num_loads, num_stores;
2541
2542 op = USE_FROM_PTR (use_p);
2543
2544 /* If STMT is a PHI node, OP may be an ADDR_EXPR. If so, add it
2545 to the set of addressable variables. */
2546 if (TREE_CODE (op) == ADDR_EXPR)
2547 {
2548 bitmap addressable_vars = gimple_addressable_vars (cfun);
2549
2550 gcc_assert (TREE_CODE (stmt) == PHI_NODE);
2551 gcc_assert (addressable_vars);
2552
2553 /* PHI nodes don't have annotations for pinning the set
2554 of addresses taken, so we collect them here.
2555
2556 FIXME, should we allow PHI nodes to have annotations
2557 so that they can be treated like regular statements?
2558 Currently, they are treated as second-class
2559 statements. */
2560 add_to_addressable_set (TREE_OPERAND (op, 0), &addressable_vars);
2561 continue;
2562 }
2563
2564 /* Ignore constants (they may occur in PHI node arguments). */
2565 if (TREE_CODE (op) != SSA_NAME)
2566 continue;
2567
2568 var = SSA_NAME_VAR (op);
2569 v_ann = var_ann (var);
2570
2571 /* The base variable of an SSA name must be a GIMPLE register, and thus
2572 it cannot be aliased. */
2573 gcc_assert (!may_be_aliased (var));
2574
2575 /* We are only interested in pointers. */
2576 if (!POINTER_TYPE_P (TREE_TYPE (op)))
2577 continue;
2578
2579 pi = get_ptr_info (op);
2580
2581 /* Add OP to AI->PROCESSED_PTRS, if it's not there already. */
2582 if (!TEST_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op)))
2583 {
2584 SET_BIT (ai->ssa_names_visited, SSA_NAME_VERSION (op));
2585 VEC_safe_push (tree, heap, ai->processed_ptrs, op);
2586 }
2587
2588 /* If STMT is a PHI node, then it will not have pointer
2589 dereferences and it will not be an escape point. */
2590 if (TREE_CODE (stmt) == PHI_NODE)
2591 continue;
2592
2593 /* Determine whether OP is a dereferenced pointer, and if STMT
2594 is an escape point, whether OP escapes. */
2595 count_uses_and_derefs (op, stmt, &num_uses, &num_loads, &num_stores);
2596
2597 /* For directly dereferenced pointers we can apply
2598 TBAA-pruning to their points-to set. We may not count the
2599 implicit dereferences &PTR->FLD here. */
2600 if (num_loads + num_stores > 0)
2601 pi->is_dereferenced = 1;
2602
2603 /* Handle a corner case involving address expressions of the
2604 form '&PTR->FLD'. The problem with these expressions is that
2605 they do not represent a dereference of PTR. However, if some
2606 other transformation propagates them into an INDIRECT_REF
2607 expression, we end up with '*(&PTR->FLD)' which is folded
2608 into 'PTR->FLD'.
2609
2610 So, if the original code had no other dereferences of PTR,
2611 the aliaser will not create memory tags for it, and when
2612 &PTR->FLD gets propagated to INDIRECT_REF expressions, the
2613 memory operations will receive no VDEF/VUSE operands.
2614
2615 One solution would be to have count_uses_and_derefs consider
2616 &PTR->FLD a dereference of PTR. But that is wrong, since it
2617 is not really a dereference but an offset calculation.
2618
2619 What we do here is to recognize these special ADDR_EXPR
2620 nodes. Since these expressions are never GIMPLE values (they
2621 are not GIMPLE invariants), they can only appear on the RHS
2622 of an assignment and their base address is always an
2623 INDIRECT_REF expression. */
2624 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT
2625 && TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == ADDR_EXPR
2626 && !is_gimple_val (GIMPLE_STMT_OPERAND (stmt, 1)))
2627 {
2628 /* If the RHS if of the form &PTR->FLD and PTR == OP, then
2629 this represents a potential dereference of PTR. */
2630 tree rhs = GIMPLE_STMT_OPERAND (stmt, 1);
2631 tree base = get_base_address (TREE_OPERAND (rhs, 0));
2632 if (TREE_CODE (base) == INDIRECT_REF
2633 && TREE_OPERAND (base, 0) == op)
2634 num_loads++;
2635 }
2636
2637 if (num_loads + num_stores > 0)
2638 {
2639 /* Mark OP as dereferenced. In a subsequent pass,
2640 dereferenced pointers that point to a set of
2641 variables will be assigned a name tag to alias
2642 all the variables OP points to. */
2643 pi->memory_tag_needed = 1;
2644
2645 /* ??? For always executed direct dereferences we can
2646 apply TBAA-pruning to their escape set. */
2647
2648 /* If this is a store operation, mark OP as being
2649 dereferenced to store, otherwise mark it as being
2650 dereferenced to load. */
2651 if (num_stores > 0)
2652 pointer_set_insert (ai->dereferenced_ptrs_store, var);
2653 else
2654 pointer_set_insert (ai->dereferenced_ptrs_load, var);
2655
2656 /* Update the frequency estimate for all the dereferences of
2657 pointer OP. */
2658 update_mem_sym_stats_from_stmt (op, stmt, num_loads, num_stores);
2659
2660 /* Indicate that STMT contains pointer dereferences. */
2661 stmt_dereferences_ptr_p = true;
2662 }
2663
2664 if (stmt_escape_type != NO_ESCAPE && num_loads + num_stores < num_uses)
2665 {
2666 /* If STMT is an escape point and STMT contains at
2667 least one direct use of OP, then the value of OP
2668 escapes and so the pointed-to variables need to
2669 be marked call-clobbered. */
2670 pi->value_escapes_p = 1;
2671 pi->escape_mask |= stmt_escape_type;
2672
2673 /* If the statement makes a function call, assume
2674 that pointer OP will be dereferenced in a store
2675 operation inside the called function. */
2676 if (get_call_expr_in (stmt)
2677 || stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
2678 {
2679 pointer_set_insert (ai->dereferenced_ptrs_store, var);
2680 pi->memory_tag_needed = 1;
2681 }
2682 }
2683 }
2684
2685 if (TREE_CODE (stmt) == PHI_NODE)
2686 return;
2687
2688 /* Mark stored variables in STMT as being written to and update the
2689 memory reference stats for all memory symbols referenced by STMT. */
2690 if (stmt_references_memory_p (stmt))
2691 {
2692 unsigned i;
2693 bitmap_iterator bi;
2694
2695 mem_ref_stats->num_mem_stmts++;
2696
2697 /* Notice that we only update memory reference stats for symbols
2698 loaded and stored by the statement if the statement does not
2699 contain pointer dereferences and it is not a call/asm site.
2700 This is to avoid double accounting problems when creating
2701 memory partitions. After computing points-to information,
2702 pointer dereference statistics are used to update the
2703 reference stats of the pointed-to variables, so here we
2704 should only update direct references to symbols.
2705
2706 Indirect references are not updated here for two reasons: (1)
2707 The first time we compute alias information, the sets
2708 LOADED/STORED are empty for pointer dereferences, (2) After
2709 partitioning, LOADED/STORED may have references to
2710 partitions, not the original pointed-to variables. So, if we
2711 always counted LOADED/STORED here and during partitioning, we
2712 would count many symbols more than once.
2713
2714 This does cause some imprecision when a statement has a
2715 combination of direct symbol references and pointer
2716 dereferences (e.g., MEMORY_VAR = *PTR) or if a call site has
2717 memory symbols in its argument list, but these cases do not
2718 occur so frequently as to constitute a serious problem. */
2719 if (STORED_SYMS (stmt))
2720 EXECUTE_IF_SET_IN_BITMAP (STORED_SYMS (stmt), 0, i, bi)
2721 {
2722 tree sym = referenced_var (i);
2723 pointer_set_insert (ai->written_vars, sym);
2724 if (!stmt_dereferences_ptr_p
2725 && stmt_escape_type != ESCAPE_TO_CALL
2726 && stmt_escape_type != ESCAPE_TO_PURE_CONST
2727 && stmt_escape_type != ESCAPE_TO_ASM)
2728 update_mem_sym_stats_from_stmt (sym, stmt, 0, 1);
2729 }
2730
2731 if (!stmt_dereferences_ptr_p
2732 && LOADED_SYMS (stmt)
2733 && stmt_escape_type != ESCAPE_TO_CALL
2734 && stmt_escape_type != ESCAPE_TO_PURE_CONST
2735 && stmt_escape_type != ESCAPE_TO_ASM)
2736 EXECUTE_IF_SET_IN_BITMAP (LOADED_SYMS (stmt), 0, i, bi)
2737 update_mem_sym_stats_from_stmt (referenced_var (i), stmt, 1, 0);
2738 }
2739 }
2740
2741 /* Update various related attributes like escaped addresses,
2742 pointer dereferences for loads and stores. This is used
2743 when creating name tags and alias sets. */
2744
2745 static void
2746 update_alias_info (struct alias_info *ai)
2747 {
2748 basic_block bb;
2749
2750 FOR_EACH_BB (bb)
2751 {
2752 block_stmt_iterator bsi;
2753 tree phi;
2754
2755 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
2756 if (is_gimple_reg (PHI_RESULT (phi)))
2757 update_alias_info_1 (phi, ai);
2758
2759 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
2760 update_alias_info_1 (bsi_stmt (bsi), ai);
2761 }
2762 }
2763
2764 /* Create memory tags for all the dereferenced pointers and build the
2765 ADDRESSABLE_VARS and POINTERS arrays used for building the may-alias
2766 sets. Based on the address escape and points-to information collected
2767 earlier, this pass will also clear the TREE_ADDRESSABLE flag from those
2768 variables whose address is not needed anymore. */
2769
2770 static void
2771 setup_pointers_and_addressables (struct alias_info *ai)
2772 {
2773 size_t num_addressable_vars, num_pointers;
2774 referenced_var_iterator rvi;
2775 tree var;
2776 VEC (tree, heap) *varvec = NULL;
2777 safe_referenced_var_iterator srvi;
2778
2779 /* Size up the arrays ADDRESSABLE_VARS and POINTERS. */
2780 num_addressable_vars = num_pointers = 0;
2781
2782 FOR_EACH_REFERENCED_VAR (var, rvi)
2783 {
2784 if (may_be_aliased (var))
2785 num_addressable_vars++;
2786
2787 if (POINTER_TYPE_P (TREE_TYPE (var)))
2788 {
2789 /* Since we don't keep track of volatile variables, assume that
2790 these pointers are used in indirect store operations. */
2791 if (TREE_THIS_VOLATILE (var))
2792 pointer_set_insert (ai->dereferenced_ptrs_store, var);
2793
2794 num_pointers++;
2795 }
2796 }
2797
2798 /* Create ADDRESSABLE_VARS and POINTERS. Note that these arrays are
2799 always going to be slightly bigger than we actually need them
2800 because some TREE_ADDRESSABLE variables will be marked
2801 non-addressable below and only pointers with unique symbol tags are
2802 going to be added to POINTERS. */
2803 ai->addressable_vars = XCNEWVEC (struct alias_map_d *, num_addressable_vars);
2804 ai->pointers = XCNEWVEC (struct alias_map_d *, num_pointers);
2805 ai->num_addressable_vars = 0;
2806 ai->num_pointers = 0;
2807
2808 FOR_EACH_REFERENCED_VAR_SAFE (var, varvec, srvi)
2809 {
2810 /* Name memory tags already have flow-sensitive aliasing
2811 information, so they need not be processed by
2812 compute_flow_insensitive_aliasing. Similarly, symbol memory
2813 tags are already accounted for when we process their
2814 associated pointer.
2815
2816 Structure fields, on the other hand, have to have some of this
2817 information processed for them, but it's pointless to mark them
2818 non-addressable (since they are fake variables anyway). */
2819 if (MTAG_P (var))
2820 continue;
2821
2822 /* Remove the ADDRESSABLE flag from every addressable variable whose
2823 address is not needed anymore. This is caused by the propagation
2824 of ADDR_EXPR constants into INDIRECT_REF expressions and the
2825 removal of dead pointer assignments done by the early scalar
2826 cleanup passes. */
2827 if (TREE_ADDRESSABLE (var))
2828 {
2829 if (!bitmap_bit_p (gimple_addressable_vars (cfun), DECL_UID (var))
2830 && TREE_CODE (var) != RESULT_DECL
2831 && !is_global_var (var))
2832 {
2833 bool okay_to_mark = true;
2834
2835 /* Since VAR is now a regular GIMPLE register, we will need
2836 to rename VAR into SSA afterwards. */
2837 mark_sym_for_renaming (var);
2838
2839 /* The address of VAR is not needed, remove the
2840 addressable bit, so that it can be optimized as a
2841 regular variable. */
2842 if (okay_to_mark)
2843 {
2844 /* The memory partition holding VAR will no longer
2845 contain VAR, and statements referencing it will need
2846 to be updated. */
2847 if (memory_partition (var))
2848 mark_sym_for_renaming (memory_partition (var));
2849
2850 mark_non_addressable (var);
2851 }
2852 }
2853 }
2854
2855 /* Global variables and addressable locals may be aliased. Create an
2856 entry in ADDRESSABLE_VARS for VAR. */
2857 if (may_be_aliased (var))
2858 {
2859 create_alias_map_for (var, ai);
2860 mark_sym_for_renaming (var);
2861 }
2862
2863 /* Add pointer variables that have been dereferenced to the POINTERS
2864 array and create a symbol memory tag for them. */
2865 if (POINTER_TYPE_P (TREE_TYPE (var)))
2866 {
2867 if ((pointer_set_contains (ai->dereferenced_ptrs_store, var)
2868 || pointer_set_contains (ai->dereferenced_ptrs_load, var)))
2869 {
2870 tree tag, old_tag;
2871 var_ann_t t_ann;
2872
2873 /* If pointer VAR still doesn't have a memory tag
2874 associated with it, create it now or re-use an
2875 existing one. */
2876 tag = get_smt_for (var, ai);
2877 t_ann = var_ann (tag);
2878
2879 /* The symbol tag will need to be renamed into SSA
2880 afterwards. Note that we cannot do this inside
2881 get_smt_for because aliasing may run multiple times
2882 and we only create symbol tags the first time. */
2883 mark_sym_for_renaming (tag);
2884
2885 /* Similarly, if pointer VAR used to have another type
2886 tag, we will need to process it in the renamer to
2887 remove the stale virtual operands. */
2888 old_tag = symbol_mem_tag (var);
2889 if (old_tag)
2890 mark_sym_for_renaming (old_tag);
2891
2892 /* Associate the tag with pointer VAR. */
2893 set_symbol_mem_tag (var, tag);
2894
2895 /* If pointer VAR has been used in a store operation,
2896 then its memory tag must be marked as written-to. */
2897 if (pointer_set_contains (ai->dereferenced_ptrs_store, var))
2898 pointer_set_insert (ai->written_vars, tag);
2899 }
2900 else
2901 {
2902 /* The pointer has not been dereferenced. If it had a
2903 symbol memory tag, remove it and mark the old tag for
2904 renaming to remove it out of the IL. */
2905 tree tag = symbol_mem_tag (var);
2906 if (tag)
2907 {
2908 mark_sym_for_renaming (tag);
2909 set_symbol_mem_tag (var, NULL_TREE);
2910 }
2911 }
2912 }
2913 }
2914
2915 VEC_free (tree, heap, varvec);
2916 }
2917
2918
2919 /* Determine whether to use .GLOBAL_VAR to model call clobbering
2920 semantics. If the function makes no references to global
2921 variables and contains at least one call to a non-pure function,
2922 then we need to mark the side-effects of the call using .GLOBAL_VAR
2923 to represent all possible global memory referenced by the callee. */
2924
2925 static void
2926 maybe_create_global_var (void)
2927 {
2928 /* No need to create it, if we have one already. */
2929 if (gimple_global_var (cfun) == NULL_TREE)
2930 {
2931 struct mem_ref_stats_d *stats = gimple_mem_ref_stats (cfun);
2932
2933 /* Create .GLOBAL_VAR if there are no call-clobbered
2934 variables and the program contains a mixture of pure/const
2935 and regular function calls. This is to avoid the problem
2936 described in PR 20115:
2937
2938 int X;
2939 int func_pure (void) { return X; }
2940 int func_non_pure (int a) { X += a; }
2941 int foo ()
2942 {
2943 int a = func_pure ();
2944 func_non_pure (a);
2945 a = func_pure ();
2946 return a;
2947 }
2948
2949 Since foo() has no call-clobbered variables, there is
2950 no relationship between the calls to func_pure and
2951 func_non_pure. Since func_pure has no side-effects, value
2952 numbering optimizations elide the second call to func_pure.
2953 So, if we have some pure/const and some regular calls in the
2954 program we create .GLOBAL_VAR to avoid missing these
2955 relations. */
2956 if (bitmap_empty_p (gimple_call_clobbered_vars (cfun))
2957 && stats->num_call_sites > 0
2958 && stats->num_pure_const_call_sites > 0
2959 && stats->num_call_sites > stats->num_pure_const_call_sites)
2960 create_global_var ();
2961 }
2962 }
2963
2964
/* Return TRUE if pointer PTR may point to variable VAR.

   MEM_ALIAS_SET is the alias set for the memory location pointed-to by PTR.
   This is needed because when checking for type conflicts we are
   interested in the alias set of the memory location pointed-to by
   PTR.  The alias set of PTR itself is irrelevant.

   VAR_ALIAS_SET is the alias set for VAR.

   ALIAS_SET_ONLY, when true, restricts the query to alias-set facts
   and skips the IPA type-escape structure heuristics at the end.

   Every early exit increments the matching alias_stats counters so
   that dump_alias_stats can report how queries were resolved.  */

bool
may_alias_p (tree ptr, alias_set_type mem_alias_set,
	     tree var, alias_set_type var_alias_set,
	     bool alias_set_only)
{
  tree mem;

  alias_stats.alias_queries++;
  alias_stats.simple_queries++;

  /* By convention, a variable cannot alias itself.  */
  mem = symbol_mem_tag (ptr);
  if (mem == var)
    {
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;
      return false;
    }

  /* If -fargument-noalias-global is > 2, pointer arguments may
     not point to anything else.  */
  if (flag_argument_noalias > 2 && TREE_CODE (ptr) == PARM_DECL)
    {
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;
      return false;
    }

  /* If -fargument-noalias-global is > 1, pointer arguments may
     not point to global variables.  */
  if (flag_argument_noalias > 1 && is_global_var (var)
      && TREE_CODE (ptr) == PARM_DECL)
    {
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;
      return false;
    }

  /* If either MEM or VAR is a read-only global and the other one
     isn't, then PTR cannot point to VAR.  */
  if ((unmodifiable_var_p (mem) && !unmodifiable_var_p (var))
      || (unmodifiable_var_p (var) && !unmodifiable_var_p (mem)))
    {
      alias_stats.alias_noalias++;
      alias_stats.simple_resolved++;
      return false;
    }

  /* If the pointed to memory has alias set zero, or the pointer
     is ref-all, or the pointer decl is marked that no TBAA is to
     be applied, the MEM can alias VAR.  TBAA cannot disambiguate
     in any of these cases.  */
  if (mem_alias_set == 0
      || DECL_POINTER_ALIAS_SET (ptr) == 0
      || TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (ptr))
      || DECL_NO_TBAA_P (ptr))
    {
      alias_stats.alias_mayalias++;
      alias_stats.simple_resolved++;
      return true;
    }

  gcc_assert (TREE_CODE (mem) == SYMBOL_MEMORY_TAG);

  alias_stats.tbaa_queries++;

  /* If the alias sets don't conflict then MEM cannot alias VAR.
     Subset membership is also checked because a conflict does not
     require equality of the two sets.  */
  if (mem_alias_set != var_alias_set
      && !alias_set_subset_of (mem_alias_set, var_alias_set))
    {
      alias_stats.alias_noalias++;
      alias_stats.tbaa_resolved++;
      return false;
    }

  /* If VAR is a record or union type, PTR cannot point into VAR
     unless there is some explicit address operation in the
     program that can reference a field of the type pointed-to by
     PTR.  This also assumes that the types of both VAR and PTR
     are contained within the compilation unit, and that there is
     no fancy addressing arithmetic associated with any of the
     types involved.  */
  if (mem_alias_set != 0 && var_alias_set != 0)
    {
      tree ptr_type = TREE_TYPE (ptr);
      tree var_type = TREE_TYPE (var);

      /* The star count is -1 if the type at the end of the
	 pointer_to chain is not a record or union type.  */
      if (!alias_set_only
	  && ipa_type_escape_star_count_of_interesting_type (var_type) >= 0)
	{
	  int ptr_star_count = 0;

	  /* ipa_type_escape_star_count_of_interesting_type is a
	     little too restrictive for the pointer type, need to
	     allow pointers to primitive types as long as those
	     types cannot be pointers to everything.  */
	  while (POINTER_TYPE_P (ptr_type))
	    {
	      /* Strip the *s off.  */
	      ptr_type = TREE_TYPE (ptr_type);
	      ptr_star_count++;
	    }

	  /* There does not appear to be a better test to see if
	     the pointer type was one of the pointer to everything
	     types.  */
	  if (ptr_star_count > 0)
	    {
	      alias_stats.structnoaddress_queries++;
	      if (ipa_type_escape_field_does_not_clobber_p (var_type,
							    TREE_TYPE (ptr)))
		{
		  alias_stats.structnoaddress_resolved++;
		  alias_stats.alias_noalias++;
		  return false;
		}
	    }
	  else if (ptr_star_count == 0)
	    {
	      /* If PTR_TYPE was not really a pointer to type, it cannot
		 alias.  */
	      alias_stats.structnoaddress_queries++;
	      alias_stats.structnoaddress_resolved++;
	      alias_stats.alias_noalias++;
	      return false;
	    }
	}
    }

  /* No disambiguation rule applied; conservatively assume aliasing.  */
  alias_stats.alias_mayalias++;
  return true;
}
3107
3108 /* Return true, if PTR may point to a global variable. */
3109
3110 bool
3111 may_point_to_global_var (tree ptr)
3112 {
3113 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3114
3115 /* If we do not have points-to information for this variable,
3116 we have to punt. */
3117 if (!pi
3118 || !pi->name_mem_tag)
3119 return true;
3120
3121 /* The name memory tag is marked as global variable if the points-to
3122 set contains a global variable. */
3123 return is_global_var (pi->name_mem_tag);
3124 }
3125
/* Add ALIAS to the set of variables that may alias VAR.  VAR must be
   a memory tag; the membership is recorded in VAR's MTAG_ALIASES
   bitmap, keyed by ALIAS's DECL_UID.  */

static void
add_may_alias (tree var, tree alias)
{
  /* Don't allow self-referential aliases.  */
  gcc_assert (var != alias);

  /* ALIAS must be addressable if it's being added to an alias set.
     The disabled branch below would assert this instead of forcing
     the bit; forcing it is the currently chosen behavior.  */
#if 1
  TREE_ADDRESSABLE (alias) = 1;
#else
  gcc_assert (may_be_aliased (alias));
#endif

  /* VAR must be a symbol or a name tag.  */
  gcc_assert (TREE_CODE (var) == SYMBOL_MEMORY_TAG
	      || TREE_CODE (var) == NAME_MEMORY_TAG);

  /* Allocate the alias bitmap lazily, on the shared alias obstack.  */
  if (MTAG_ALIASES (var) == NULL)
    MTAG_ALIASES (var) = BITMAP_ALLOC (&alias_bitmap_obstack);

  bitmap_set_bit (MTAG_ALIASES (var), DECL_UID (alias));
}
3150
3151
3152 /* Mark pointer PTR as pointing to an arbitrary memory location. */
3153
3154 static void
3155 set_pt_anything (tree ptr)
3156 {
3157 struct ptr_info_def *pi = get_ptr_info (ptr);
3158
3159 pi->pt_anything = 1;
3160 /* Anything includes global memory. */
3161 pi->pt_global_mem = 1;
3162 pi->pt_vars = NULL;
3163
3164 /* The pointer used to have a name tag, but we now found it pointing
3165 to an arbitrary location. The name tag needs to be renamed and
3166 disassociated from PTR. */
3167 if (pi->name_mem_tag)
3168 {
3169 mark_sym_for_renaming (pi->name_mem_tag);
3170 pi->name_mem_tag = NULL_TREE;
3171 }
3172 }
3173
3174
/* Return true if STMT is an "escape" site from the current function.  Escape
   sites are those statements which might expose the address of a variable
   outside the current function.  STMT is an escape site iff:

   	1- STMT is a function call, or
	2- STMT is an __asm__ expression, or
	3- STMT is an assignment to a non-local variable, or
	4- STMT is a return statement.

   Return the type of escape site found, if we found one, or NO_ESCAPE
   if none.  */

enum escape_type
is_escape_site (tree stmt)
{
  tree call = get_call_expr_in (stmt);
  if (call != NULL_TREE)
    {
      /* Calls without side effects (pure/const) are distinguished so
	 callers can treat them more precisely.  */
      if (!TREE_SIDE_EFFECTS (call))
	return ESCAPE_TO_PURE_CONST;

      return ESCAPE_TO_CALL;
    }
  else if (TREE_CODE (stmt) == ASM_EXPR)
    return ESCAPE_TO_ASM;
  else if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
    {
      tree lhs = GIMPLE_STMT_OPERAND (stmt, 0);

      /* Get to the base of _REF nodes.  */
      if (TREE_CODE (lhs) != SSA_NAME)
	lhs = get_base_address (lhs);

      /* If we couldn't recognize the LHS of the assignment, assume that it
	 is a non-local store.  */
      if (lhs == NULL_TREE)
	return ESCAPE_UNKNOWN;

      if (CONVERT_EXPR_P (GIMPLE_STMT_OPERAND (stmt, 1))
	  || TREE_CODE (GIMPLE_STMT_OPERAND (stmt, 1)) == VIEW_CONVERT_EXPR)
	{
	  tree from
	    = TREE_TYPE (TREE_OPERAND (GIMPLE_STMT_OPERAND (stmt, 1), 0));
	  tree to = TREE_TYPE (GIMPLE_STMT_OPERAND (stmt, 1));

	  /* If the RHS is a conversion between a pointer and an integer, the
	     pointer escapes since we can't track the integer.  */
	  if (POINTER_TYPE_P (from) && !POINTER_TYPE_P (to))
	    return ESCAPE_BAD_CAST;
	}

      /* If the LHS is an SSA name, it can't possibly represent a non-local
	 memory store.  */
      if (TREE_CODE (lhs) == SSA_NAME)
	return NO_ESCAPE;

      /* If the LHS is a non-global decl, it isn't a non-local memory store.
	 If the LHS escapes, the RHS escape is dealt with in the PTA solver.  */
      if (DECL_P (lhs)
	  && !is_global_var (lhs))
	return NO_ESCAPE;

      /* FIXME: LHS is not an SSA_NAME.  Even if it's an assignment to a
	 local variables we cannot be sure if it will escape, because we
	 don't have information about objects not in SSA form.  Need to
	 implement something along the lines of

	 J.-D. Choi, M. Gupta, M. J. Serrano, V. C. Sreedhar, and S. P.
	 Midkiff, ``Escape analysis for java,'' in Proceedings of the
	 Conference on Object-Oriented Programming Systems, Languages, and
	 Applications (OOPSLA), pp. 1-19, 1999.  */
      return ESCAPE_STORED_IN_GLOBAL;
    }
  else if (TREE_CODE (stmt) == RETURN_EXPR)
    return ESCAPE_TO_RETURN;

  return NO_ESCAPE;
}
3253
3254 /* Create a new memory tag of type TYPE.
3255 Does NOT push it into the current binding. */
3256
3257 tree
3258 create_tag_raw (enum tree_code code, tree type, const char *prefix)
3259 {
3260 tree tmp_var;
3261
3262 tmp_var = build_decl (code, create_tmp_var_name (prefix), type);
3263
3264 /* Memory tags are always writable and non-static. */
3265 TREE_READONLY (tmp_var) = 0;
3266 TREE_STATIC (tmp_var) = 0;
3267
3268 /* It doesn't start out global. */
3269 MTAG_GLOBAL (tmp_var) = 0;
3270 TREE_USED (tmp_var) = 1;
3271
3272 return tmp_var;
3273 }
3274
3275 /* Create a new memory tag of type TYPE. If IS_TYPE_TAG is true, the tag
3276 is considered to represent all the pointers whose pointed-to types are
3277 in the same alias set class. Otherwise, the tag represents a single
3278 SSA_NAME pointer variable. */
3279
3280 static tree
3281 create_memory_tag (tree type, bool is_type_tag)
3282 {
3283 tree tag = create_tag_raw (is_type_tag ? SYMBOL_MEMORY_TAG : NAME_MEMORY_TAG,
3284 type, (is_type_tag) ? "SMT" : "NMT");
3285
3286 /* By default, memory tags are local variables. Alias analysis will
3287 determine whether they should be considered globals. */
3288 DECL_CONTEXT (tag) = current_function_decl;
3289
3290 /* Memory tags are by definition addressable. */
3291 TREE_ADDRESSABLE (tag) = 1;
3292
3293 set_symbol_mem_tag (tag, NULL_TREE);
3294
3295 /* Add the tag to the symbol table. */
3296 add_referenced_var (tag);
3297
3298 return tag;
3299 }
3300
3301
3302 /* Create a name memory tag to represent a specific SSA_NAME pointer P_i.
3303 This is used if P_i has been found to point to a specific set of
3304 variables or to a non-aliased memory location like the address returned
3305 by malloc functions. */
3306
3307 static tree
3308 get_nmt_for (tree ptr)
3309 {
3310 struct ptr_info_def *pi = get_ptr_info (ptr);
3311 tree tag = pi->name_mem_tag;
3312
3313 if (tag == NULL_TREE)
3314 tag = create_memory_tag (TREE_TYPE (TREE_TYPE (ptr)), false);
3315 return tag;
3316 }
3317
3318
3319 /* Return the symbol memory tag associated to pointer PTR. A memory
3320 tag is an artificial variable that represents the memory location
3321 pointed-to by PTR. It is used to model the effects of pointer
3322 de-references on addressable variables.
3323
3324 AI points to the data gathered during alias analysis. This
3325 function populates the array AI->POINTERS. */
3326
3327 static tree
3328 get_smt_for (tree ptr, struct alias_info *ai)
3329 {
3330 size_t i;
3331 tree tag;
3332 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
3333 alias_set_type tag_set = get_alias_set (tag_type);
3334
3335 /* To avoid creating unnecessary memory tags, only create one memory tag
3336 per alias set class. Note that it may be tempting to group
3337 memory tags based on conflicting alias sets instead of
3338 equivalence. That would be wrong because alias sets are not
3339 necessarily transitive (as demonstrated by the libstdc++ test
3340 23_containers/vector/cons/4.cc). Given three alias sets A, B, C
3341 such that conflicts (A, B) == true and conflicts (A, C) == true,
3342 it does not necessarily follow that conflicts (B, C) == true. */
3343 for (i = 0, tag = NULL_TREE; i < ai->num_pointers; i++)
3344 {
3345 struct alias_map_d *curr = ai->pointers[i];
3346 tree curr_tag = symbol_mem_tag (curr->var);
3347 if (tag_set == curr->set)
3348 {
3349 tag = curr_tag;
3350 break;
3351 }
3352 }
3353
3354 /* If VAR cannot alias with any of the existing memory tags, create a new
3355 tag for PTR and add it to the POINTERS array. */
3356 if (tag == NULL_TREE)
3357 {
3358 struct alias_map_d *alias_map;
3359
3360 /* If PTR did not have a symbol tag already, create a new SMT.*
3361 artificial variable representing the memory location
3362 pointed-to by PTR. */
3363 tag = symbol_mem_tag (ptr);
3364 if (tag == NULL_TREE)
3365 tag = create_memory_tag (tag_type, true);
3366
3367 /* Add PTR to the POINTERS array. Note that we are not interested in
3368 PTR's alias set. Instead, we cache the alias set for the memory that
3369 PTR points to. */
3370 alias_map = XCNEW (struct alias_map_d);
3371 alias_map->var = ptr;
3372 alias_map->set = tag_set;
3373 ai->pointers[ai->num_pointers++] = alias_map;
3374 }
3375
3376 /* If the pointed-to type is volatile, so is the tag. */
3377 TREE_THIS_VOLATILE (tag) |= TREE_THIS_VOLATILE (tag_type);
3378
3379 /* Make sure that the symbol tag has the same alias set as the
3380 pointed-to type or at least accesses through the pointer will
3381 alias that set. The latter can happen after the vectorizer
3382 created pointers of vector type. */
3383 gcc_assert (tag_set == get_alias_set (tag)
3384 || alias_set_subset_of (tag_set, get_alias_set (tag)));
3385
3386 return tag;
3387 }
3388
3389
3390 /* Create GLOBAL_VAR, an artificial global variable to act as a
3391 representative of all the variables that may be clobbered by function
3392 calls. */
3393
3394 static void
3395 create_global_var (void)
3396 {
3397 tree global_var = build_decl (VAR_DECL, get_identifier (".GLOBAL_VAR"),
3398 void_type_node);
3399 DECL_ARTIFICIAL (global_var) = 1;
3400 TREE_READONLY (global_var) = 0;
3401 DECL_EXTERNAL (global_var) = 1;
3402 TREE_STATIC (global_var) = 1;
3403 TREE_USED (global_var) = 1;
3404 DECL_CONTEXT (global_var) = NULL_TREE;
3405 TREE_THIS_VOLATILE (global_var) = 0;
3406 TREE_ADDRESSABLE (global_var) = 0;
3407
3408 create_var_ann (global_var);
3409 mark_call_clobbered (global_var, ESCAPE_UNKNOWN);
3410 add_referenced_var (global_var);
3411 mark_sym_for_renaming (global_var);
3412 cfun->gimple_df->global_var = global_var;
3413 }
3414
3415
3416 /* Dump alias statistics on FILE. */
3417
3418 static void
3419 dump_alias_stats (FILE *file)
3420 {
3421 const char *funcname
3422 = lang_hooks.decl_printable_name (current_function_decl, 2);
3423 fprintf (file, "\nAlias statistics for %s\n\n", funcname);
3424 fprintf (file, "Total alias queries:\t%u\n", alias_stats.alias_queries);
3425 fprintf (file, "Total alias mayalias results:\t%u\n",
3426 alias_stats.alias_mayalias);
3427 fprintf (file, "Total alias noalias results:\t%u\n",
3428 alias_stats.alias_noalias);
3429 fprintf (file, "Total simple queries:\t%u\n",
3430 alias_stats.simple_queries);
3431 fprintf (file, "Total simple resolved:\t%u\n",
3432 alias_stats.simple_resolved);
3433 fprintf (file, "Total TBAA queries:\t%u\n",
3434 alias_stats.tbaa_queries);
3435 fprintf (file, "Total TBAA resolved:\t%u\n",
3436 alias_stats.tbaa_resolved);
3437 fprintf (file, "Total non-addressable structure type queries:\t%u\n",
3438 alias_stats.structnoaddress_queries);
3439 fprintf (file, "Total non-addressable structure type resolved:\t%u\n",
3440 alias_stats.structnoaddress_resolved);
3441 }
3442
3443
3444 /* Dump alias information on FILE. */
3445
3446 void
3447 dump_alias_info (FILE *file)
3448 {
3449 size_t i;
3450 const char *funcname
3451 = lang_hooks.decl_printable_name (current_function_decl, 2);
3452 referenced_var_iterator rvi;
3453 tree var;
3454
3455 fprintf (file, "\nAlias information for %s\n\n", funcname);
3456
3457 dump_memory_partitions (file);
3458
3459 fprintf (file, "\nFlow-insensitive alias information for %s\n\n", funcname);
3460
3461 fprintf (file, "Aliased symbols\n\n");
3462
3463 FOR_EACH_REFERENCED_VAR (var, rvi)
3464 {
3465 if (may_be_aliased (var))
3466 dump_variable (file, var);
3467 }
3468
3469 fprintf (file, "\nDereferenced pointers\n\n");
3470
3471 FOR_EACH_REFERENCED_VAR (var, rvi)
3472 if (symbol_mem_tag (var))
3473 dump_variable (file, var);
3474
3475 fprintf (file, "\nSymbol memory tags\n\n");
3476
3477 FOR_EACH_REFERENCED_VAR (var, rvi)
3478 {
3479 if (TREE_CODE (var) == SYMBOL_MEMORY_TAG)
3480 dump_variable (file, var);
3481 }
3482
3483 fprintf (file, "\n\nFlow-sensitive alias information for %s\n\n", funcname);
3484
3485 fprintf (file, "SSA_NAME pointers\n\n");
3486 for (i = 1; i < num_ssa_names; i++)
3487 {
3488 tree ptr = ssa_name (i);
3489 struct ptr_info_def *pi;
3490
3491 if (ptr == NULL_TREE)
3492 continue;
3493
3494 pi = SSA_NAME_PTR_INFO (ptr);
3495 if (!SSA_NAME_IN_FREE_LIST (ptr)
3496 && pi
3497 && pi->name_mem_tag)
3498 dump_points_to_info_for (file, ptr);
3499 }
3500
3501 fprintf (file, "\nName memory tags\n\n");
3502
3503 FOR_EACH_REFERENCED_VAR (var, rvi)
3504 {
3505 if (TREE_CODE (var) == NAME_MEMORY_TAG)
3506 dump_variable (file, var);
3507 }
3508
3509 fprintf (file, "\n");
3510 }
3511
3512
/* Convenience wrapper: dump alias information to stderr (intended for
   use from a debugger).  */

void
debug_alias_info (void)
{
  dump_alias_info (stderr);
}
3520
3521
3522 /* Return the alias information associated with pointer T. It creates a
3523 new instance if none existed. */
3524
3525 struct ptr_info_def *
3526 get_ptr_info (tree t)
3527 {
3528 struct ptr_info_def *pi;
3529
3530 gcc_assert (POINTER_TYPE_P (TREE_TYPE (t)));
3531
3532 pi = SSA_NAME_PTR_INFO (t);
3533 if (pi == NULL)
3534 {
3535 pi = GGC_CNEW (struct ptr_info_def);
3536 SSA_NAME_PTR_INFO (t) = pi;
3537 }
3538
3539 return pi;
3540 }
3541
3542
3543 /* Dump points-to information for SSA_NAME PTR into FILE. */
3544
3545 void
3546 dump_points_to_info_for (FILE *file, tree ptr)
3547 {
3548 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
3549
3550 print_generic_expr (file, ptr, dump_flags);
3551
3552 if (pi)
3553 {
3554 if (pi->name_mem_tag)
3555 {
3556 fprintf (file, ", name memory tag: ");
3557 print_generic_expr (file, pi->name_mem_tag, dump_flags);
3558 }
3559
3560 if (pi->is_dereferenced)
3561 fprintf (file, ", is dereferenced");
3562 else if (pi->memory_tag_needed)
3563 fprintf (file, ", is dereferenced in call");
3564
3565 if (pi->value_escapes_p)
3566 fprintf (file, ", its value escapes");
3567
3568 if (pi->pt_anything)
3569 fprintf (file, ", points-to anything");
3570
3571 if (pi->pt_null)
3572 fprintf (file, ", points-to NULL");
3573
3574 if (pi->pt_vars)
3575 {
3576 fprintf (file, ", points-to vars: ");
3577 dump_decl_set (file, pi->pt_vars);
3578 }
3579 }
3580
3581 fprintf (file, "\n");
3582 }
3583
3584
3585 /* Dump points-to information for VAR into stderr. */
3586
3587 void
3588 debug_points_to_info_for (tree var)
3589 {
3590 dump_points_to_info_for (stderr, var);
3591 }
3592
3593
3594 /* Dump points-to information into FILE. NOTE: This function is slow, as
3595 it needs to traverse the whole CFG looking for pointer SSA_NAMEs. */
3596
3597 void
3598 dump_points_to_info (FILE *file)
3599 {
3600 basic_block bb;
3601 block_stmt_iterator si;
3602 ssa_op_iter iter;
3603 const char *fname =
3604 lang_hooks.decl_printable_name (current_function_decl, 2);
3605 referenced_var_iterator rvi;
3606 tree var;
3607
3608 fprintf (file, "\n\nPointed-to sets for pointers in %s\n\n", fname);
3609
3610 /* First dump points-to information for the default definitions of
3611 pointer variables. This is necessary because default definitions are
3612 not part of the code. */
3613 FOR_EACH_REFERENCED_VAR (var, rvi)
3614 {
3615 if (POINTER_TYPE_P (TREE_TYPE (var)))
3616 {
3617 tree def = gimple_default_def (cfun, var);
3618 if (def)
3619 dump_points_to_info_for (file, def);
3620 }
3621 }
3622
3623 /* Dump points-to information for every pointer defined in the program. */
3624 FOR_EACH_BB (bb)
3625 {
3626 tree phi;
3627
3628 for (phi = phi_nodes (bb); phi; phi = PHI_CHAIN (phi))
3629 {
3630 tree ptr = PHI_RESULT (phi);
3631 if (POINTER_TYPE_P (TREE_TYPE (ptr)))
3632 dump_points_to_info_for (file, ptr);
3633 }
3634
3635 for (si = bsi_start (bb); !bsi_end_p (si); bsi_next (&si))
3636 {
3637 tree stmt = bsi_stmt (si);
3638 tree def;
3639 FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
3640 if (TREE_CODE (def) == SSA_NAME
3641 && POINTER_TYPE_P (TREE_TYPE (def)))
3642 dump_points_to_info_for (file, def);
3643 }
3644 }
3645
3646 fprintf (file, "\n");
3647 }
3648
3649
/* Convenience wrapper: dump all points-to information to stderr
   (intended for use from a debugger).  */

void
debug_points_to_info (void)
{
  dump_points_to_info (stderr);
}
3657
3658 /* Dump to FILE the list of variables that may be aliasing VAR. */
3659
3660 void
3661 dump_may_aliases_for (FILE *file, tree var)
3662 {
3663 bitmap aliases;
3664
3665 aliases = MTAG_ALIASES (var);
3666 if (aliases)
3667 {
3668 bitmap_iterator bi;
3669 unsigned int i;
3670 tree al;
3671
3672 fprintf (file, "{ ");
3673 EXECUTE_IF_SET_IN_BITMAP (aliases, 0, i, bi)
3674 {
3675 al = referenced_var (i);
3676 print_generic_expr (file, al, dump_flags);
3677 fprintf (file, " ");
3678 }
3679 fprintf (file, "}");
3680 }
3681 }
3682
3683
3684 /* Dump to stderr the list of variables that may be aliasing VAR. */
3685
3686 void
3687 debug_may_aliases_for (tree var)
3688 {
3689 dump_may_aliases_for (stderr, var);
3690 }
3691
3692
3693 /* Return true if VAR may be aliased. */
3694
3695 bool
3696 may_be_aliased (tree var)
3697 {
3698 /* Obviously. */
3699 if (TREE_ADDRESSABLE (var))
3700 return true;
3701
3702 /* Globally visible variables can have their addresses taken by other
3703 translation units. */
3704 if (MTAG_P (var)
3705 && MTAG_GLOBAL (var))
3706 return true;
3707 else if (!MTAG_P (var)
3708 && (DECL_EXTERNAL (var) || TREE_PUBLIC (var)))
3709 return true;
3710
3711 /* Automatic variables can't have their addresses escape any other
3712 way. This must be after the check for global variables, as
3713 extern declarations do not have TREE_STATIC set. */
3714 if (!TREE_STATIC (var))
3715 return false;
3716
3717 /* If we're in unit-at-a-time mode, then we must have seen all
3718 occurrences of address-of operators, and so we can trust
3719 TREE_ADDRESSABLE. Otherwise we can only be sure the variable
3720 isn't addressable if it's local to the current function. */
3721 if (flag_unit_at_a_time)
3722 return false;
3723
3724 if (decl_function_context (var) == current_function_decl)
3725 return false;
3726
3727 return true;
3728 }
3729
3730 /* The following is based on code in add_stmt_operand to ensure that the
3731 same defs/uses/vdefs/vuses will be found after replacing a reference
3732 to var (or ARRAY_REF to var) with an INDIRECT_REF to ptr whose value
3733 is the address of var. Return a memtag for the ptr, after adding the
3734 proper may_aliases to it (which are the aliases of var, if it has any,
3735 or var itself). */
3736
3737 static tree
3738 add_may_alias_for_new_tag (tree tag, tree var)
3739 {
3740 bitmap aliases = NULL;
3741
3742 if (MTAG_P (var))
3743 aliases = may_aliases (var);
3744
3745 /* Case 1: |aliases| == 1 */
3746 if (aliases
3747 && bitmap_single_bit_set_p (aliases))
3748 {
3749 tree ali = referenced_var (bitmap_first_set_bit (aliases));
3750 if (TREE_CODE (ali) == SYMBOL_MEMORY_TAG)
3751 return ali;
3752 }
3753
3754 /* Case 2: |aliases| == 0 */
3755 if (aliases == NULL)
3756 add_may_alias (tag, var);
3757 else
3758 {
3759 /* Case 3: |aliases| > 1 */
3760 union_alias_set_into (tag, aliases);
3761 }
3762 return tag;
3763 }
3764
3765 /* Create a new symbol tag for PTR. Construct the may-alias list of
3766 this type tag so that it has the aliasing of VAR according to the
3767 location accessed by EXPR.
3768
3769 Note, the set of aliases represented by the new symbol tag are not
3770 marked for renaming. */
3771
3772 void
3773 new_type_alias (tree ptr, tree var, tree expr)
3774 {
3775 tree tag_type = TREE_TYPE (TREE_TYPE (ptr));
3776 tree tag;
3777 tree ali = NULL_TREE;
3778 HOST_WIDE_INT offset, size, maxsize;
3779 tree ref;
3780
3781 gcc_assert (symbol_mem_tag (ptr) == NULL_TREE);
3782 gcc_assert (!MTAG_P (var));
3783
3784 ref = get_ref_base_and_extent (expr, &offset, &size, &maxsize);
3785 gcc_assert (ref);
3786
3787 tag = create_memory_tag (tag_type, true);
3788 set_symbol_mem_tag (ptr, tag);
3789
3790 ali = add_may_alias_for_new_tag (tag, var);
3791
3792 set_symbol_mem_tag (ptr, ali);
3793 MTAG_GLOBAL (tag) = is_global_var (var);
3794 }
3795
3796
3797 /* Reset the call_clobbered flags on our referenced vars. In
3798 theory, this only needs to be done for globals. */
3799
3800 static unsigned int
3801 reset_cc_flags (void)
3802 {
3803 tree var;
3804 referenced_var_iterator rvi;
3805
3806 FOR_EACH_REFERENCED_VAR (var, rvi)
3807 var_ann (var)->call_clobbered = false;
3808 return 0;
3809 }
3810
/* Pass descriptor: clears the call-clobbered flag on every referenced
   variable so that clobbering information can be recomputed afresh.
   Anonymous (no name/gate), so it always runs where scheduled.  */
struct gimple_opt_pass pass_reset_cc_flags =
{
 {
  GIMPLE_PASS,
  NULL,					/* name */
  NULL,					/* gate */
  reset_cc_flags,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  0,					/* tv_id */
  PROP_referenced_vars |PROP_cfg,	/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  0					/* todo_flags_finish */
 }
};
3829
3830
/* A dummy pass to cause aliases to be computed via TODO_rebuild_alias.
   It has no execute function of its own; all the work happens through
   the TODO flag machinery after the (empty) pass "runs".  */

struct gimple_opt_pass pass_build_alias =
{
 {
  GIMPLE_PASS,
  "alias",			    /* name */
  NULL,				    /* gate */
  NULL,				    /* execute */
  NULL,				    /* sub */
  NULL,				    /* next */
  0,				    /* static_pass_number */
  0,				    /* tv_id */
  PROP_cfg | PROP_ssa,		    /* properties_required */
  PROP_alias,			    /* properties_provided */
  0,				    /* properties_destroyed */
  0,				    /* todo_flags_start */
  TODO_rebuild_alias | TODO_dump_func  /* todo_flags_finish */
 }
};
This page took 0.20107 seconds and 5 git commands to generate.