]> gcc.gnu.org Git - gcc.git/blob - gcc/tree-ssa-structalias.c
re PR middle-end/36509 (gcc.dg/Wstrict-aliasing-float-ptr-int-obj.c)
[gcc.git] / gcc / tree-ssa-structalias.c
1 /* Tree based points-to analysis
2 Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dberlin@dberlin.org>
4
5 This file is part of GCC.
6
GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3 of the License, or
10 (at your option) any later version.
11
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
16
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "ggc.h"
26 #include "obstack.h"
27 #include "bitmap.h"
28 #include "flags.h"
29 #include "rtl.h"
30 #include "tm_p.h"
31 #include "hard-reg-set.h"
32 #include "basic-block.h"
33 #include "output.h"
34 #include "tree.h"
35 #include "c-common.h"
36 #include "tree-flow.h"
37 #include "tree-inline.h"
38 #include "varray.h"
39 #include "c-tree.h"
40 #include "diagnostic.h"
41 #include "toplev.h"
42 #include "gimple.h"
43 #include "hashtab.h"
44 #include "function.h"
45 #include "cgraph.h"
46 #include "tree-pass.h"
47 #include "timevar.h"
48 #include "alloc-pool.h"
49 #include "splay-tree.h"
50 #include "params.h"
51 #include "tree-ssa-structalias.h"
52 #include "cgraph.h"
53 #include "alias.h"
54 #include "pointer-set.h"
55
56 /* The idea behind this analyzer is to generate set constraints from the
57 program, then solve the resulting constraints in order to generate the
58 points-to sets.
59
60 Set constraints are a way of modeling program analysis problems that
61 involve sets. They consist of an inclusion constraint language,
62 describing the variables (each variable is a set) and operations that
63 are involved on the variables, and a set of rules that derive facts
64 from these operations. To solve a system of set constraints, you derive
65 all possible facts under the rules, which gives you the correct sets
66 as a consequence.
67
68 See "Efficient Field-sensitive pointer analysis for C" by "David
69 J. Pearce and Paul H. J. Kelly and Chris Hankin, at
70 http://citeseer.ist.psu.edu/pearce04efficient.html
71
72 Also see "Ultra-fast Aliasing Analysis using CLA: A Million Lines
of C Code in a Second" by "Nevin Heintze and Olivier Tardieu" at
74 http://citeseer.ist.psu.edu/heintze01ultrafast.html
75
76 There are three types of real constraint expressions, DEREF,
77 ADDRESSOF, and SCALAR. Each constraint expression consists
78 of a constraint type, a variable, and an offset.
79
80 SCALAR is a constraint expression type used to represent x, whether
81 it appears on the LHS or the RHS of a statement.
82 DEREF is a constraint expression type used to represent *x, whether
83 it appears on the LHS or the RHS of a statement.
84 ADDRESSOF is a constraint expression used to represent &x, whether
85 it appears on the LHS or the RHS of a statement.
86
87 Each pointer variable in the program is assigned an integer id, and
88 each field of a structure variable is assigned an integer id as well.
89
90 Structure variables are linked to their list of fields through a "next
91 field" in each variable that points to the next field in offset
92 order.
93 Each variable for a structure field has
94
95 1. "size", that tells the size in bits of that field.
2. "fullsize", that tells the size in bits of the entire structure.
97 3. "offset", that tells the offset in bits from the beginning of the
98 structure to this field.
99
100 Thus,
101 struct f
102 {
103 int a;
104 int b;
105 } foo;
106 int *bar;
107
108 looks like
109
110 foo.a -> id 1, size 32, offset 0, fullsize 64, next foo.b
111 foo.b -> id 2, size 32, offset 32, fullsize 64, next NULL
112 bar -> id 3, size 32, offset 0, fullsize 32, next NULL
113
114
115 In order to solve the system of set constraints, the following is
116 done:
117
118 1. Each constraint variable x has a solution set associated with it,
119 Sol(x).
120
121 2. Constraints are separated into direct, copy, and complex.
122 Direct constraints are ADDRESSOF constraints that require no extra
123 processing, such as P = &Q
124 Copy constraints are those of the form P = Q.
125 Complex constraints are all the constraints involving dereferences
126 and offsets (including offsetted copies).
127
128 3. All direct constraints of the form P = &Q are processed, such
129 that Q is added to Sol(P)
130
131 4. All complex constraints for a given constraint variable are stored in a
132 linked list attached to that variable's node.
133
134 5. A directed graph is built out of the copy constraints. Each
135 constraint variable is a node in the graph, and an edge from
136 Q to P is added for each copy constraint of the form P = Q
137
138 6. The graph is then walked, and solution sets are
139 propagated along the copy edges, such that an edge from Q to P
140 causes Sol(P) <- Sol(P) union Sol(Q).
141
142 7. As we visit each node, all complex constraints associated with
143 that node are processed by adding appropriate copy edges to the graph, or the
144 appropriate variables to the solution set.
145
146 8. The process of walking the graph is iterated until no solution
147 sets change.
148
149 Prior to walking the graph in steps 6 and 7, We perform static
150 cycle elimination on the constraint graph, as well
151 as off-line variable substitution.
152
153 TODO: Adding offsets to pointer-to-structures can be handled (IE not punted
154 on and turned into anything), but isn't. You can just see what offset
155 inside the pointed-to struct it's going to access.
156
157 TODO: Constant bounded arrays can be handled as if they were structs of the
158 same number of elements.
159
160 TODO: Modeling heap and incoming pointers becomes much better if we
161 add fields to them as we discover them, which we could do.
162
163 TODO: We could handle unions, but to be honest, it's probably not
164 worth the pain or slowdown. */
165
166 static GTY ((if_marked ("tree_map_marked_p"), param_is (struct tree_map)))
167 htab_t heapvar_for_stmt;
168
169 static bool use_field_sensitive = true;
170 static int in_ipa_mode = 0;
171
172 /* Used for predecessor bitmaps. */
173 static bitmap_obstack predbitmap_obstack;
174
175 /* Used for points-to sets. */
176 static bitmap_obstack pta_obstack;
177
178 /* Used for oldsolution members of variables. */
179 static bitmap_obstack oldpta_obstack;
180
181 /* Used for per-solver-iteration bitmaps. */
182 static bitmap_obstack iteration_obstack;
183
184 static unsigned int create_variable_info_for (tree, const char *);
185 typedef struct constraint_graph *constraint_graph_t;
186 static void unify_nodes (constraint_graph_t, unsigned int, unsigned int, bool);
187
188 DEF_VEC_P(constraint_t);
189 DEF_VEC_ALLOC_P(constraint_t,heap);
190
191 #define EXECUTE_IF_IN_NONNULL_BITMAP(a, b, c, d) \
192 if (a) \
193 EXECUTE_IF_SET_IN_BITMAP (a, b, c, d)
194
195 static struct constraint_stats
196 {
197 unsigned int total_vars;
198 unsigned int nonpointer_vars;
199 unsigned int unified_vars_static;
200 unsigned int unified_vars_dynamic;
201 unsigned int iterations;
202 unsigned int num_edges;
203 unsigned int num_implicit_edges;
204 unsigned int points_to_sets_created;
205 } stats;
206
207 struct variable_info
208 {
209 /* ID of this variable */
210 unsigned int id;
211
212 /* True if this is a variable created by the constraint analysis, such as
213 heap variables and constraints we had to break up. */
214 unsigned int is_artificial_var:1;
215
216 /* True if this is a special variable whose solution set should not be
217 changed. */
218 unsigned int is_special_var:1;
219
220 /* True for variables whose size is not known or variable. */
221 unsigned int is_unknown_size_var:1;
222
223 /* True for (sub-)fields that represent a whole variable. */
224 unsigned int is_full_var : 1;
225
226 /* True if this is a heap variable. */
227 unsigned int is_heap_var:1;
228
229 /* True if we may not use TBAA to prune references to this
230 variable. This is used for C++ placement new. */
231 unsigned int no_tbaa_pruning : 1;
232
233 /* Variable id this was collapsed to due to type unsafety. Zero if
234 this variable was not collapsed. This should be unused completely
235 after build_succ_graph, or something is broken. */
236 unsigned int collapsed_to;
237
238 /* A link to the variable for the next field in this structure. */
239 struct variable_info *next;
240
241 /* Offset of this variable, in bits, from the base variable */
242 unsigned HOST_WIDE_INT offset;
243
244 /* Size of the variable, in bits. */
245 unsigned HOST_WIDE_INT size;
246
247 /* Full size of the base variable, in bits. */
248 unsigned HOST_WIDE_INT fullsize;
249
250 /* Name of this variable */
251 const char *name;
252
253 /* Tree that this variable is associated with. */
254 tree decl;
255
256 /* Points-to set for this variable. */
257 bitmap solution;
258
259 /* Old points-to set for this variable. */
260 bitmap oldsolution;
261 };
262 typedef struct variable_info *varinfo_t;
263
264 static varinfo_t first_vi_for_offset (varinfo_t, unsigned HOST_WIDE_INT);
265 static varinfo_t lookup_vi_for_tree (tree);
266
267 /* Pool of variable info structures. */
268 static alloc_pool variable_info_pool;
269
270 DEF_VEC_P(varinfo_t);
271
272 DEF_VEC_ALLOC_P(varinfo_t, heap);
273
274 /* Table of variable info structures for constraint variables.
275 Indexed directly by variable info id. */
276 static VEC(varinfo_t,heap) *varmap;
277
/* Return the varmap element N, i.e. the variable info structure whose
   id is N.  */

static inline varinfo_t
get_varinfo (unsigned int n)
{
  return VEC_index (varinfo_t, varmap, n);
}
285
/* Return the varmap element N, following the collapsed_to link, i.e.
   return the variable N was collapsed into if it was collapsed due to
   type unsafety, otherwise N itself.  */

static inline varinfo_t
get_varinfo_fc (unsigned int n)
{
  varinfo_t v = VEC_index (varinfo_t, varmap, n);

  /* collapsed_to == 0 means "not collapsed"; id 0 is the special
     NOTHING variable, which is never a collapse target.  */
  if (v->collapsed_to != 0)
    return get_varinfo (v->collapsed_to);
  return v;
}
297
298 /* Static IDs for the special variables. */
299 enum { nothing_id = 0, anything_id = 1, readonly_id = 2,
300 escaped_id = 3, nonlocal_id = 4, callused_id = 5, integer_id = 6 };
301
302 /* Variable that represents the unknown pointer. */
303 static varinfo_t var_anything;
304 static tree anything_tree;
305
306 /* Variable that represents the NULL pointer. */
307 static varinfo_t var_nothing;
308 static tree nothing_tree;
309
310 /* Variable that represents read only memory. */
311 static varinfo_t var_readonly;
312 static tree readonly_tree;
313
314 /* Variable that represents escaped memory. */
315 static varinfo_t var_escaped;
316 static tree escaped_tree;
317
318 /* Variable that represents nonlocal memory. */
319 static varinfo_t var_nonlocal;
320 static tree nonlocal_tree;
321
322 /* Variable that represents call-used memory. */
323 static varinfo_t var_callused;
324 static tree callused_tree;
325
326 /* Variable that represents integers. This is used for when people do things
327 like &0->a.b. */
328 static varinfo_t var_integer;
329 static tree integer_tree;
330
331 /* Lookup a heap var for FROM, and return it if we find one. */
332
333 static tree
334 heapvar_lookup (tree from)
335 {
336 struct tree_map *h, in;
337 in.base.from = from;
338
339 h = (struct tree_map *) htab_find_with_hash (heapvar_for_stmt, &in,
340 htab_hash_pointer (from));
341 if (h)
342 return h->to;
343 return NULL_TREE;
344 }
345
/* Insert a mapping FROM->TO in the heap var for statement
   hashtable.  */

static void
heapvar_insert (tree from, tree to)
{
  struct tree_map *h;
  void **loc;

  /* The entry lives in GC memory because the table itself is
     GTY-marked (see heapvar_for_stmt above).  */
  h = GGC_NEW (struct tree_map);
  h->hash = htab_hash_pointer (from);
  h->base.from = from;
  h->to = to;
  loc = htab_find_slot_with_hash (heapvar_for_stmt, h, h->hash, INSERT);
  *(struct tree_map **) loc = h;
}
362
/* Return a new variable info structure for the variable T, with id ID
   and name NAME.  All flags start out clear and the solution sets
   start out empty.  */

static varinfo_t
new_var_info (tree t, unsigned int id, const char *name)
{
  varinfo_t ret = (varinfo_t) pool_alloc (variable_info_pool);
  tree var;

  ret->id = id;
  ret->name = name;
  ret->decl = t;
  ret->is_artificial_var = false;
  ret->is_heap_var = false;
  ret->is_special_var = false;
  ret->is_unknown_size_var = false;
  ret->is_full_var = false;
  /* For SSA names the no-TBAA property lives on the underlying
     decl, so look through the SSA name first.  */
  var = t;
  if (TREE_CODE (var) == SSA_NAME)
    var = SSA_NAME_VAR (var);
  ret->no_tbaa_pruning = (DECL_P (var)
			  && POINTER_TYPE_P (TREE_TYPE (var))
			  && DECL_NO_TBAA_P (var));
  ret->solution = BITMAP_ALLOC (&pta_obstack);
  ret->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
  ret->next = NULL;
  ret->collapsed_to = 0;
  return ret;
}
392
393 typedef enum {SCALAR, DEREF, ADDRESSOF} constraint_expr_type;
394
395 /* An expression that appears in a constraint. */
396
397 struct constraint_expr
398 {
399 /* Constraint type. */
400 constraint_expr_type type;
401
402 /* Variable we are referring to in the constraint. */
403 unsigned int var;
404
405 /* Offset, in bits, of this constraint from the beginning of
406 variables it ends up referring to.
407
408 IOW, in a deref constraint, we would deref, get the result set,
409 then add OFFSET to each member. */
410 unsigned HOST_WIDE_INT offset;
411 };
412
413 typedef struct constraint_expr ce_s;
414 DEF_VEC_O(ce_s);
415 DEF_VEC_ALLOC_O(ce_s, heap);
416 static void get_constraint_for_1 (tree, VEC(ce_s, heap) **, bool);
417 static void get_constraint_for (tree, VEC(ce_s, heap) **);
418 static void do_deref (VEC (ce_s, heap) **);
419
420 /* Our set constraints are made up of two constraint expressions, one
421 LHS, and one RHS.
422
423 As described in the introduction, our set constraints each represent an
424 operation between set valued variables.
425 */
426 struct constraint
427 {
428 struct constraint_expr lhs;
429 struct constraint_expr rhs;
430 };
431
432 /* List of constraints that we use to build the constraint graph from. */
433
434 static VEC(constraint_t,heap) *constraints;
435 static alloc_pool constraint_pool;
436
437
438 DEF_VEC_I(int);
439 DEF_VEC_ALLOC_I(int, heap);
440
441 /* The constraint graph is represented as an array of bitmaps
442 containing successor nodes. */
443
444 struct constraint_graph
445 {
446 /* Size of this graph, which may be different than the number of
447 nodes in the variable map. */
448 unsigned int size;
449
450 /* Explicit successors of each node. */
451 bitmap *succs;
452
453 /* Implicit predecessors of each node (Used for variable
454 substitution). */
455 bitmap *implicit_preds;
456
457 /* Explicit predecessors of each node (Used for variable substitution). */
458 bitmap *preds;
459
460 /* Indirect cycle representatives, or -1 if the node has no indirect
461 cycles. */
462 int *indirect_cycles;
463
464 /* Representative node for a node. rep[a] == a unless the node has
465 been unified. */
466 unsigned int *rep;
467
468 /* Equivalence class representative for a label. This is used for
469 variable substitution. */
470 int *eq_rep;
471
472 /* Pointer equivalence label for a node. All nodes with the same
473 pointer equivalence label can be unified together at some point
474 (either during constraint optimization or after the constraint
475 graph is built). */
476 unsigned int *pe;
477
478 /* Pointer equivalence representative for a label. This is used to
479 handle nodes that are pointer equivalent but not location
480 equivalent. We can unite these once the addressof constraints
481 are transformed into initial points-to sets. */
482 int *pe_rep;
483
484 /* Pointer equivalence label for each node, used during variable
485 substitution. */
486 unsigned int *pointer_label;
487
488 /* Location equivalence label for each node, used during location
489 equivalence finding. */
490 unsigned int *loc_label;
491
492 /* Pointed-by set for each node, used during location equivalence
493 finding. This is pointed-by rather than pointed-to, because it
494 is constructed using the predecessor graph. */
495 bitmap *pointed_by;
496
497 /* Points to sets for pointer equivalence. This is *not* the actual
498 points-to sets for nodes. */
499 bitmap *points_to;
500
501 /* Bitmap of nodes where the bit is set if the node is a direct
502 node. Used for variable substitution. */
503 sbitmap direct_nodes;
504
505 /* Bitmap of nodes where the bit is set if the node is address
506 taken. Used for variable substitution. */
507 bitmap address_taken;
508
509 /* Vector of complex constraints for each graph node. Complex
510 constraints are those involving dereferences or offsets that are
511 not 0. */
512 VEC(constraint_t,heap) **complex;
513 };
514
515 static constraint_graph_t graph;
516
517 /* During variable substitution and the offline version of indirect
518 cycle finding, we create nodes to represent dereferences and
519 address taken constraints. These represent where these start and
520 end. */
521 #define FIRST_REF_NODE (VEC_length (varinfo_t, varmap))
522 #define LAST_REF_NODE (FIRST_REF_NODE + (FIRST_REF_NODE - 1))
523
524 /* Return the representative node for NODE, if NODE has been unioned
525 with another NODE.
526 This function performs path compression along the way to finding
527 the representative. */
528
529 static unsigned int
530 find (unsigned int node)
531 {
532 gcc_assert (node < graph->size);
533 if (graph->rep[node] != node)
534 return graph->rep[node] = find (graph->rep[node]);
535 return node;
536 }
537
538 /* Union the TO and FROM nodes to the TO nodes.
539 Note that at some point in the future, we may want to do
540 union-by-rank, in which case we are going to have to return the
541 node we unified to. */
542
543 static bool
544 unite (unsigned int to, unsigned int from)
545 {
546 gcc_assert (to < graph->size && from < graph->size);
547 if (to != from && graph->rep[from] != to)
548 {
549 graph->rep[from] = to;
550 return true;
551 }
552 return false;
553 }
554
/* Create a new constraint consisting of LHS and RHS expressions,
   allocated from CONSTRAINT_POOL.  */

static constraint_t
new_constraint (const struct constraint_expr lhs,
		const struct constraint_expr rhs)
{
  constraint_t ret = (constraint_t) pool_alloc (constraint_pool);
  ret->lhs = lhs;
  ret->rhs = rhs;
  return ret;
}
566
567 /* Print out constraint C to FILE. */
568
569 void
570 dump_constraint (FILE *file, constraint_t c)
571 {
572 if (c->lhs.type == ADDRESSOF)
573 fprintf (file, "&");
574 else if (c->lhs.type == DEREF)
575 fprintf (file, "*");
576 fprintf (file, "%s", get_varinfo_fc (c->lhs.var)->name);
577 if (c->lhs.offset != 0)
578 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->lhs.offset);
579 fprintf (file, " = ");
580 if (c->rhs.type == ADDRESSOF)
581 fprintf (file, "&");
582 else if (c->rhs.type == DEREF)
583 fprintf (file, "*");
584 fprintf (file, "%s", get_varinfo_fc (c->rhs.var)->name);
585 if (c->rhs.offset != 0)
586 fprintf (file, " + " HOST_WIDE_INT_PRINT_DEC, c->rhs.offset);
587 fprintf (file, "\n");
588 }
589
/* Print out constraint C to stderr.  Intended to be callable from
   the debugger.  */

void
debug_constraint (constraint_t c)
{
  dump_constraint (stderr, c);
}
597
598 /* Print out all constraints to FILE */
599
600 void
601 dump_constraints (FILE *file)
602 {
603 int i;
604 constraint_t c;
605 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
606 dump_constraint (file, c);
607 }
608
/* Print out all constraints to stderr.  Intended to be callable from
   the debugger.  */

void
debug_constraints (void)
{
  dump_constraints (stderr);
}
616
617 /* Print out to FILE the edge in the constraint graph that is created by
618 constraint c. The edge may have a label, depending on the type of
619 constraint that it represents. If complex1, e.g: a = *b, then the label
620 is "=*", if complex2, e.g: *a = b, then the label is "*=", if
621 complex with an offset, e.g: a = b + 8, then the label is "+".
622 Otherwise the edge has no label. */
623
624 void
625 dump_constraint_edge (FILE *file, constraint_t c)
626 {
627 if (c->rhs.type != ADDRESSOF)
628 {
629 const char *src = get_varinfo_fc (c->rhs.var)->name;
630 const char *dst = get_varinfo_fc (c->lhs.var)->name;
631 fprintf (file, " \"%s\" -> \"%s\" ", src, dst);
632 /* Due to preprocessing of constraints, instructions like *a = *b are
633 illegal; thus, we do not have to handle such cases. */
634 if (c->lhs.type == DEREF)
635 fprintf (file, " [ label=\"*=\" ] ;\n");
636 else if (c->rhs.type == DEREF)
637 fprintf (file, " [ label=\"=*\" ] ;\n");
638 else
639 {
640 /* We must check the case where the constraint is an offset.
641 In this case, it is treated as a complex constraint. */
642 if (c->rhs.offset != c->lhs.offset)
643 fprintf (file, " [ label=\"+\" ] ;\n");
644 else
645 fprintf (file, " ;\n");
646 }
647 }
648 }
649
/* Print the constraint graph in dot format.  */

void
dump_constraint_graph (FILE *file)
{
  unsigned int i=0, size;
  constraint_t c;

  /* Only print the graph if it has already been initialized:  */
  if (!graph)
    return;

  /* Print the constraints used to produce the constraint graph. The
     constraints will be printed as comments in the dot file:  */
  fprintf (file, "\n\n/* Constraints used in the constraint graph:\n");
  dump_constraints (file);
  fprintf (file, "*/\n");

  /* Prints the header of the dot file:  */
  fprintf (file, "\n\n// The constraint graph in dot format:\n");
  fprintf (file, "strict digraph {\n");
  fprintf (file, " node [\n shape = box\n ]\n");
  fprintf (file, " edge [\n fontsize = \"12\"\n ]\n");
  fprintf (file, "\n // List of nodes in the constraint graph:\n");

  /* The next lines print the nodes in the graph. In order to get the
     number of nodes in the graph, we must choose the minimum between the
     vector VEC (varinfo_t, varmap) and graph->size. If the graph has not
     yet been initialized, then graph->size == 0, otherwise we must only
     read nodes that have an entry in VEC (varinfo_t, varmap).  */
  size = VEC_length (varinfo_t, varmap);
  size = size < graph->size ? size : graph->size;
  for (i = 0; i < size; i++)
    {
      /* Each node is printed under its current representative's name.  */
      const char *name = get_varinfo_fc (graph->rep[i])->name;
      fprintf (file, " \"%s\" ;\n", name);
    }

  /* Go over the list of constraints printing the edges in the constraint
     graph.  */
  fprintf (file, "\n // The constraint edges:\n");
  for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
    if (c)
      dump_constraint_edge (file, c);

  /* Prints the tail of the dot file. By now, only the closing bracket.  */
  fprintf (file, "}\n\n\n");
}
698
/* Print out the constraint graph to stderr.  Intended to be callable
   from the debugger.  */

void
debug_constraint_graph (void)
{
  dump_constraint_graph (stderr);
}
706
707 /* SOLVER FUNCTIONS
708
709 The solver is a simple worklist solver, that works on the following
710 algorithm:
711
712 sbitmap changed_nodes = all zeroes;
713 changed_count = 0;
714 For each node that is not already collapsed:
715 changed_count++;
716 set bit in changed nodes
717
718 while (changed_count > 0)
719 {
720 compute topological ordering for constraint graph
721
722 find and collapse cycles in the constraint graph (updating
723 changed if necessary)
724
725 for each node (n) in the graph in topological order:
726 changed_count--;
727
728 Process each complex constraint associated with the node,
729 updating changed if necessary.
730
731 For each outgoing edge from n, propagate the solution from n to
732 the destination of the edge, updating changed as necessary.
733
734 } */
735
/* Return true if two constraint expressions A and B are equal, i.e.
   they agree on type, variable and offset.  */

static bool
constraint_expr_equal (struct constraint_expr a, struct constraint_expr b)
{
  return a.type == b.type && a.var == b.var && a.offset == b.offset;
}
743
744 /* Return true if constraint expression A is less than constraint expression
745 B. This is just arbitrary, but consistent, in order to give them an
746 ordering. */
747
748 static bool
749 constraint_expr_less (struct constraint_expr a, struct constraint_expr b)
750 {
751 if (a.type == b.type)
752 {
753 if (a.var == b.var)
754 return a.offset < b.offset;
755 else
756 return a.var < b.var;
757 }
758 else
759 return a.type < b.type;
760 }
761
762 /* Return true if constraint A is less than constraint B. This is just
763 arbitrary, but consistent, in order to give them an ordering. */
764
765 static bool
766 constraint_less (const constraint_t a, const constraint_t b)
767 {
768 if (constraint_expr_less (a->lhs, b->lhs))
769 return true;
770 else if (constraint_expr_less (b->lhs, a->lhs))
771 return false;
772 else
773 return constraint_expr_less (a->rhs, b->rhs);
774 }
775
/* Return true if two constraints A and B are equal, i.e. both their
   LHS and RHS expressions compare equal.  */

static bool
constraint_equal (struct constraint a, struct constraint b)
{
  return constraint_expr_equal (a.lhs, b.lhs)
    && constraint_expr_equal (a.rhs, b.rhs);
}
784
785
/* Find a constraint LOOKFOR in the sorted constraint vector VEC.
   Return NULL if VEC is empty or LOOKFOR is not present.  */

static constraint_t
constraint_vec_find (VEC(constraint_t,heap) *vec,
		     struct constraint lookfor)
{
  unsigned int place;
  constraint_t found;

  if (vec == NULL)
    return NULL;

  /* Binary-search for the would-be insertion point, then check
     whether the element at that position really matches.  */
  place = VEC_lower_bound (constraint_t, vec, &lookfor, constraint_less);
  if (place >= VEC_length (constraint_t, vec))
    return NULL;
  found = VEC_index (constraint_t, vec, place);
  if (!constraint_equal (*found, lookfor))
    return NULL;
  return found;
}
806
/* Union two constraint vectors, TO and FROM.  Put the result in TO.
   Both vectors are kept sorted by constraint_less, and elements
   already present in TO are not inserted a second time.  */

static void
constraint_set_union (VEC(constraint_t,heap) **to,
		      VEC(constraint_t,heap) **from)
{
  int i;
  constraint_t c;

  for (i = 0; VEC_iterate (constraint_t, *from, i, c); i++)
    {
      if (constraint_vec_find (*to, *c) == NULL)
	{
	  /* Insert at the sorted position to keep TO ordered.  */
	  unsigned int place = VEC_lower_bound (constraint_t, *to, c,
						constraint_less);
	  VEC_safe_insert (constraint_t, heap, *to, place, c);
	}
    }
}
826
/* Take a solution set SET, add OFFSET to each member of the set, and
   overwrite SET with the result when done.  Adding an offset maps
   each variable to the field of its base variable that lies OFFSET
   bits further along.  */

static void
solution_set_add (bitmap set, unsigned HOST_WIDE_INT offset)
{
  bitmap result = BITMAP_ALLOC (&iteration_obstack);
  unsigned int i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (set, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* If this is a variable with just one field just set its bit
	 in the result.  */
      if (vi->is_artificial_var
	  || vi->is_unknown_size_var
	  || vi->is_full_var)
	bitmap_set_bit (result, i);
      else
	{
	  unsigned HOST_WIDE_INT fieldoffset = vi->offset + offset;
	  varinfo_t v = first_vi_for_offset (vi, fieldoffset);
	  /* If the result is outside of the variable use the last field.  */
	  if (!v)
	    {
	      v = vi;
	      while (v->next != NULL)
		v = v->next;
	    }
	  bitmap_set_bit (result, v->id);
	  /* If the result is not exactly at fieldoffset include the next
	     field as well.  See get_constraint_for_ptr_offset for more
	     rationale.  */
	  if (v->offset != fieldoffset
	      && v->next != NULL)
	    bitmap_set_bit (result, v->next->id);
	}
    }

  bitmap_copy (set, result);
  BITMAP_FREE (result);
}
871
872 /* Union solution sets TO and FROM, and add INC to each member of FROM in the
873 process. */
874
875 static bool
876 set_union_with_increment (bitmap to, bitmap from, unsigned HOST_WIDE_INT inc)
877 {
878 if (inc == 0)
879 return bitmap_ior_into (to, from);
880 else
881 {
882 bitmap tmp;
883 bool res;
884
885 tmp = BITMAP_ALLOC (&iteration_obstack);
886 bitmap_copy (tmp, from);
887 solution_set_add (tmp, inc);
888 res = bitmap_ior_into (to, tmp);
889 BITMAP_FREE (tmp);
890 return res;
891 }
892 }
893
/* Insert constraint C into the list of complex constraints for graph
   node VAR, keeping the list sorted and free of duplicates.  */

static void
insert_into_complex (constraint_graph_t graph,
		     unsigned int var, constraint_t c)
{
  VEC (constraint_t, heap) *complex = graph->complex[var];
  unsigned int place = VEC_lower_bound (constraint_t, complex, c,
					constraint_less);

  /* Only insert constraints that do not already exist.  */
  if (place >= VEC_length (constraint_t, complex)
      || !constraint_equal (*c, *VEC_index (constraint_t, complex, place)))
    VEC_safe_insert (constraint_t, heap, graph->complex[var], place, c);
}
910
911
/* Condense two variable nodes into a single variable node, by moving
   all associated info from FROM to TO.  FROM must already have TO as
   its representative.  */

static void
merge_node_constraints (constraint_graph_t graph, unsigned int to,
			unsigned int from)
{
  unsigned int i;
  constraint_t c;

  gcc_assert (find (from) == to);

  /* Move all complex constraints from the FROM node into the TO node,
     rewriting the variable they are attached to.  */
  for (i = 0; VEC_iterate (constraint_t, graph->complex[from], i, c); i++)
    {
      /* In complex constraints for node FROM, we may have either
	 a = *FROM, and *FROM = a, or an offseted constraint which are
	 always added to the rhs node's constraints.  */

      if (c->rhs.type == DEREF)
	c->rhs.var = to;
      else if (c->lhs.type == DEREF)
	c->lhs.var = to;
      else
	c->rhs.var = to;
    }
  /* Union the rewritten list into TO's sorted vector and release
     FROM's copy.  */
  constraint_set_union (&graph->complex[to], &graph->complex[from]);
  VEC_free (constraint_t, heap, graph->complex[from]);
  graph->complex[from] = NULL;
}
942
943
/* Remove edges involving NODE from GRAPH by freeing NODE's successor
   bitmap.  */

static void
clear_edges_for_node (constraint_graph_t graph, unsigned int node)
{
  if (graph->succs[node])
    BITMAP_FREE (graph->succs[node]);
}
952
/* Merge GRAPH nodes FROM and TO into node TO, combining their
   indirect-cycle information and successor edges.  */

static void
merge_graph_nodes (constraint_graph_t graph, unsigned int to,
		   unsigned int from)
{
  if (graph->indirect_cycles[from] != -1)
    {
      /* If we have indirect cycles with the from node, and we have
	 none on the to node, the to node has indirect cycles from the
	 from node now that they are unified.
	 If indirect cycles exist on both, unify the nodes that they
	 are in a cycle with, since we know they are in a cycle with
	 each other.  */
      if (graph->indirect_cycles[to] == -1)
	graph->indirect_cycles[to] = graph->indirect_cycles[from];
    }

  /* Merge all the successor edges.  */
  if (graph->succs[from])
    {
      if (!graph->succs[to])
	graph->succs[to] = BITMAP_ALLOC (&pta_obstack);
      bitmap_ior_into (graph->succs[to],
		       graph->succs[from]);
    }

  /* FROM's edges are no longer needed once merged in.  */
  clear_edges_for_node (graph, from);
}
982
983
/* Add an indirect graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  Implicit predecessor edges
   are only used by the offline variable substitution phase.  */

static void
add_implicit_graph_edge (constraint_graph_t graph, unsigned int to,
			 unsigned int from)
{
  /* Self edges are never recorded.  */
  if (to == from)
    return;

  /* Predecessor bitmaps are allocated lazily.  */
  if (!graph->implicit_preds[to])
    graph->implicit_preds[to] = BITMAP_ALLOC (&predbitmap_obstack);

  /* bitmap_set_bit returns true only when the bit was newly set.  */
  if (bitmap_set_bit (graph->implicit_preds[to], from))
    stats.num_implicit_edges++;
}
1000
/* Add a predecessor graph edge to GRAPH, going from TO to FROM if
   it doesn't exist in the graph already.  */

static void
add_pred_graph_edge (constraint_graph_t graph, unsigned int to,
		     unsigned int from)
{
  /* Predecessor bitmaps are allocated lazily.  */
  if (!graph->preds[to])
    graph->preds[to] = BITMAP_ALLOC (&predbitmap_obstack);
  bitmap_set_bit (graph->preds[to], from);
}
1013
1014 /* Add a graph edge to GRAPH, going from FROM to TO if
1015 it doesn't exist in the graph already.
1016 Return false if the edge already existed, true otherwise. */
1017
1018 static bool
1019 add_graph_edge (constraint_graph_t graph, unsigned int to,
1020 unsigned int from)
1021 {
1022 if (to == from)
1023 {
1024 return false;
1025 }
1026 else
1027 {
1028 bool r = false;
1029
1030 if (!graph->succs[from])
1031 graph->succs[from] = BITMAP_ALLOC (&pta_obstack);
1032 if (bitmap_set_bit (graph->succs[from], to))
1033 {
1034 r = true;
1035 if (to < FIRST_REF_NODE && from < FIRST_REF_NODE)
1036 stats.num_edges++;
1037 }
1038 return r;
1039 }
1040 }
1041
1042
1043 /* Return true if {DEST.SRC} is an existing graph edge in GRAPH. */
1044
1045 static bool
1046 valid_graph_edge (constraint_graph_t graph, unsigned int src,
1047 unsigned int dest)
1048 {
1049 return (graph->succs[dest]
1050 && bitmap_bit_p (graph->succs[dest], src));
1051 }
1052
/* Initialize the constraint graph structure to contain SIZE nodes.
   The graph is stored in the file-scope variable "graph".  */

static void
init_graph (unsigned int size)
{
  unsigned int j;

  graph = XCNEW (struct constraint_graph);
  graph->size = size;
  graph->succs = XCNEWVEC (bitmap, graph->size);
  graph->indirect_cycles = XNEWVEC (int, graph->size);
  graph->rep = XNEWVEC (unsigned int, graph->size);
  graph->complex = XCNEWVEC (VEC(constraint_t, heap) *, size);
  graph->pe = XCNEWVEC (unsigned int, graph->size);
  graph->pe_rep = XNEWVEC (int, graph->size);

  /* Initially every node is its own representative, has no pointer
     equivalence representative, and is in no indirect cycle.  */
  for (j = 0; j < graph->size; j++)
    {
      graph->rep[j] = j;
      graph->pe_rep[j] = -1;
      graph->indirect_cycles[j] = -1;
    }
}
1076
/* Build the constraint graph, adding only predecessor edges right now.
   This also records the per-node data (points_to, pointed_by, direct
   nodes, address-taken set) used by offline variable substitution.  */

static void
build_pred_graph (void)
{
  int i;
  constraint_t c;
  unsigned int j;

  graph->implicit_preds = XCNEWVEC (bitmap, graph->size);
  graph->preds = XCNEWVEC (bitmap, graph->size);
  graph->pointer_label = XCNEWVEC (unsigned int, graph->size);
  graph->loc_label = XCNEWVEC (unsigned int, graph->size);
  graph->pointed_by = XCNEWVEC (bitmap, graph->size);
  graph->points_to = XCNEWVEC (bitmap, graph->size);
  graph->eq_rep = XNEWVEC (int, graph->size);
  graph->direct_nodes = sbitmap_alloc (graph->size);
  graph->address_taken = BITMAP_ALLOC (&predbitmap_obstack);
  sbitmap_zero (graph->direct_nodes);

  /* All non-special nodes start out direct; constraints below may
     demote them.  */
  for (j = 0; j < FIRST_REF_NODE; j++)
    {
      if (!get_varinfo (j)->is_special_var)
	SET_BIT (graph->direct_nodes, j);
    }

  for (j = 0; j < graph->size; j++)
    graph->eq_rep[j] = -1;

  for (j = 0; j < VEC_length (varinfo_t, varmap); j++)
    graph->indirect_cycles[j] = -1;

  /* Classify each constraint and add the corresponding predecessor
     (and implicit-predecessor) edges.  */
  for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = get_varinfo_fc (lhs.var)->id;
      unsigned int rhsvar = get_varinfo_fc (rhs.var)->id;

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_pred_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_pred_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	  else
	    /* A non-simple dereference makes the lhs indirect.  */
	    RESET_BIT (graph->direct_nodes, lhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y.  */
	  if (graph->points_to[lhsvar] == NULL)
	    graph->points_to[lhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->points_to[lhsvar], rhsvar);

	  if (graph->pointed_by[rhsvar] == NULL)
	    graph->pointed_by[rhsvar] = BITMAP_ALLOC (&predbitmap_obstack);
	  bitmap_set_bit (graph->pointed_by[rhsvar], lhsvar);

	  /* Implicitly, *x = y.  */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);

	  /* Taking the address of Y makes it indirect.  */
	  RESET_BIT (graph->direct_nodes, rhsvar);
	  bitmap_set_bit (graph->address_taken, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y.  */
	  add_pred_graph_edge (graph, lhsvar, rhsvar);
	  /* Implicitly, *x = *y.  */
	  add_implicit_graph_edge (graph, FIRST_REF_NODE + lhsvar,
				   FIRST_REF_NODE + rhsvar);
	}
      else if (lhs.offset != 0 || rhs.offset != 0)
	{
	  /* Copies involving offsets demote the offset side from
	     direct status.  */
	  if (rhs.offset != 0)
	    RESET_BIT (graph->direct_nodes, lhs.var);
	  else if (lhs.offset != 0)
	    RESET_BIT (graph->direct_nodes, rhs.var);
	}
    }
}
1165
/* Build the constraint graph, adding successor edges.  Unlike
   build_pred_graph this runs after variable substitution, so node ids
   are mapped through find () first.  */

static void
build_succ_graph (void)
{
  int i;
  constraint_t c;

  for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
    {
      struct constraint_expr lhs;
      struct constraint_expr rhs;
      unsigned int lhsvar;
      unsigned int rhsvar;

      /* Skip NULL entries in the constraint vector.  */
      if (!c)
	continue;

      lhs = c->lhs;
      rhs = c->rhs;
      /* Work on representative nodes.  */
      lhsvar = find (get_varinfo_fc (lhs.var)->id);
      rhsvar = find (get_varinfo_fc (rhs.var)->id);

      if (lhs.type == DEREF)
	{
	  /* *x = y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && rhs.type == SCALAR)
	    add_graph_edge (graph, FIRST_REF_NODE + lhsvar, rhsvar);
	}
      else if (rhs.type == DEREF)
	{
	  /* x = *y.  */
	  if (rhs.offset == 0 && lhs.offset == 0 && lhs.type == SCALAR)
	    add_graph_edge (graph, lhsvar, FIRST_REF_NODE + rhsvar);
	}
      else if (rhs.type == ADDRESSOF)
	{
	  /* x = &y: no edge needed, Y goes straight into X's initial
	     solution.  */
	  gcc_assert (find (get_varinfo_fc (rhs.var)->id)
		      == get_varinfo_fc (rhs.var)->id);
	  bitmap_set_bit (get_varinfo (lhsvar)->solution, rhsvar);
	}
      else if (lhsvar > anything_id
	       && lhsvar != rhsvar && lhs.offset == 0 && rhs.offset == 0)
	{
	  /* x = y.  */
	  add_graph_edge (graph, lhsvar, rhsvar);
	}
    }
}
1213
1214
/* Changed variables on the last iteration.  CHANGED_COUNT tracks how
   many bits are set in CHANGED.  */
static unsigned int changed_count;
static sbitmap changed;

/* Heap-allocated vector of unsigned ints, used for the SCC and
   topological-order work stacks below.  */
DEF_VEC_I(unsigned);
DEF_VEC_ALLOC_I(unsigned,heap);
1221
1222
/* Strongly Connected Component visitation info.  */

struct scc_info
{
  /* Nodes already visited by the DFS.  */
  sbitmap visited;
  /* Nodes whose SCC has been completely processed.  */
  sbitmap deleted;
  /* DFS discovery number for each node.  */
  unsigned int *dfs;
  /* Mapping from node to its SCC representative.  */
  unsigned int *node_mapping;
  /* Next DFS number to hand out.  */
  int current_index;
  /* Stack of candidate SCC member nodes.  */
  VEC(unsigned,heap) *scc_stack;
};
1234
1235
/* Recursive routine to find strongly connected components in GRAPH.
   SI is the SCC info to store the information in, and N is the id of current
   graph node we are processing.

   This is Tarjan's strongly connected component finding algorithm, as
   modified by Nuutila to keep only non-root nodes on the stack.
   The algorithm can be found in "On finding the strongly connected
   components in a directed graph" by Esko Nuutila and Eljas
   Soisalon-Soininen, in Information Processing Letters volume 49,
   number 1, pages 9-14.  */

static void
scc_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  SET_BIT (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the successors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[n], 0, i, bi)
    {
      unsigned int w;

      /* Bits above LAST_REF_NODE are not graph node ids; stop.  */
      if (i > LAST_REF_NODE)
	break;

      w = find (i);
      if (TEST_BIT (si->deleted, w))
	continue;

      if (!TEST_BIT (si->visited, w))
	scc_visit (graph, si, w);
      /* NOTE: this block intentionally executes whether or not W was
	 visited just above -- it is the "lowlink" update of Tarjan's
	 algorithm, folding W's DFS number into N's.  */
      {
	unsigned int t = find (w);
	unsigned int nnode = find (n);
	gcc_assert (nnode == n);

	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      if (VEC_length (unsigned, si->scc_stack) > 0
	  && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
	{
	  bitmap scc = BITMAP_ALLOC (NULL);
	  bool have_ref_node = n >= FIRST_REF_NODE;
	  unsigned int lowest_node;
	  bitmap_iterator bi;

	  bitmap_set_bit (scc, n);

	  /* Pop every member of N's SCC off the work stack.  */
	  while (VEC_length (unsigned, si->scc_stack) != 0
		 && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
	    {
	      unsigned int w = VEC_pop (unsigned, si->scc_stack);

	      bitmap_set_bit (scc, w);
	      if (w >= FIRST_REF_NODE)
		have_ref_node = true;
	    }

	  /* The lowest-numbered member becomes the representative.  */
	  lowest_node = bitmap_first_set_bit (scc);
	  gcc_assert (lowest_node < FIRST_REF_NODE);

	  /* Collapse the SCC nodes into a single node, and mark the
	     indirect cycles.  */
	  EXECUTE_IF_SET_IN_BITMAP (scc, 0, i, bi)
	    {
	      if (i < FIRST_REF_NODE)
		{
		  if (unite (lowest_node, i))
		    unify_nodes (graph, lowest_node, i, false);
		}
	      else
		{
		  unite (lowest_node, i);
		  graph->indirect_cycles[i - FIRST_REF_NODE] = lowest_node;
		}
	    }
	}
      SET_BIT (si->deleted, n);
    }
  else
    VEC_safe_push (unsigned, heap, si->scc_stack, n);
}
1329
/* Unify node FROM into node TO, updating the changed count if
   necessary when UPDATE_CHANGED is true.  TO must already be its own
   representative (find (to) == to).  */

static void
unify_nodes (constraint_graph_t graph, unsigned int to, unsigned int from,
	     bool update_changed)
{

  gcc_assert (to != from && find (to) == to);
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Unifying %s to %s\n",
	     get_varinfo (from)->name,
	     get_varinfo (to)->name);

  if (update_changed)
    stats.unified_vars_dynamic++;
  else
    stats.unified_vars_static++;

  /* Fold FROM's edges and per-node constraints into TO.  */
  merge_graph_nodes (graph, to, from);
  merge_node_constraints (graph, to, from);

  /* TO loses TBAA pruning if FROM had it disabled.  */
  if (get_varinfo (from)->no_tbaa_pruning)
    get_varinfo (to)->no_tbaa_pruning = true;

  /* Mark TO as changed if FROM was changed.  If TO was already marked
     as changed, decrease the changed count.  */

  if (update_changed && TEST_BIT (changed, from))
    {
      RESET_BIT (changed, from);
      if (!TEST_BIT (changed, to))
	SET_BIT (changed, to);
      else
	{
	  /* Both bits were set: the net number of changed nodes
	     shrinks by one.  */
	  gcc_assert (changed_count > 0);
	  changed_count--;
	}
    }
  if (get_varinfo (from)->solution)
    {
      /* If the solution changes because of the merging, we need to mark
	 the variable as changed.  */
      if (bitmap_ior_into (get_varinfo (to)->solution,
			   get_varinfo (from)->solution))
	{
	  if (update_changed && !TEST_BIT (changed, to))
	    {
	      SET_BIT (changed, to);
	      changed_count++;
	    }
	}

      BITMAP_FREE (get_varinfo (from)->solution);
      BITMAP_FREE (get_varinfo (from)->oldsolution);

      /* Once solving has started, give TO a fresh (empty) old
	 solution so the merged bits get propagated again.  */
      if (stats.iterations > 0)
	{
	  BITMAP_FREE (get_varinfo (to)->oldsolution);
	  get_varinfo (to)->oldsolution = BITMAP_ALLOC (&oldpta_obstack);
	}
    }
  /* A self edge on TO is useless now; drop it.  */
  if (valid_graph_edge (graph, to, to))
    {
      if (graph->succs[to])
	bitmap_clear_bit (graph->succs[to], to);
    }
}
1398
/* Information needed to compute the topological ordering of a graph.  */

struct topo_info
{
  /* sbitmap of visited nodes.  */
  sbitmap visited;
  /* Array that stores the topological order of the graph, *in
     reverse* (nodes are pushed in postorder by topo_visit).  */
  VEC(unsigned,heap) *topo_order;
};
1409
1410
1411 /* Initialize and return a topological info structure. */
1412
1413 static struct topo_info *
1414 init_topo_info (void)
1415 {
1416 size_t size = graph->size;
1417 struct topo_info *ti = XNEW (struct topo_info);
1418 ti->visited = sbitmap_alloc (size);
1419 sbitmap_zero (ti->visited);
1420 ti->topo_order = VEC_alloc (unsigned, heap, 1);
1421 return ti;
1422 }
1423
1424
/* Free the topological sort info pointed to by TI, including the
   visited bitmap and the order vector.  */

static void
free_topo_info (struct topo_info *ti)
{
  sbitmap_free (ti->visited);
  VEC_free (unsigned, heap, ti->topo_order);
  free (ti);
}
1434
1435 /* Visit the graph in topological order, and store the order in the
1436 topo_info structure. */
1437
1438 static void
1439 topo_visit (constraint_graph_t graph, struct topo_info *ti,
1440 unsigned int n)
1441 {
1442 bitmap_iterator bi;
1443 unsigned int j;
1444
1445 SET_BIT (ti->visited, n);
1446
1447 if (graph->succs[n])
1448 EXECUTE_IF_SET_IN_BITMAP (graph->succs[n], 0, j, bi)
1449 {
1450 if (!TEST_BIT (ti->visited, j))
1451 topo_visit (graph, ti, j);
1452 }
1453
1454 VEC_safe_push (unsigned, heap, ti->topo_order, n);
1455 }
1456
1457 /* Return true if variable N + OFFSET is a legal field of N. */
1458
1459 static bool
1460 type_safe (unsigned int n, unsigned HOST_WIDE_INT *offset)
1461 {
1462 varinfo_t ninfo = get_varinfo (n);
1463
1464 /* For things we've globbed to single variables, any offset into the
1465 variable acts like the entire variable, so that it becomes offset
1466 0. */
1467 if (ninfo->is_special_var
1468 || ninfo->is_artificial_var
1469 || ninfo->is_unknown_size_var
1470 || ninfo->is_full_var)
1471 {
1472 *offset = 0;
1473 return true;
1474 }
1475 return (get_varinfo (n)->offset + *offset) < get_varinfo (n)->fullsize;
1476 }
1477
/* Process a constraint C that represents x = *y, using DELTA as the
   starting solution (the current points-to set of y).  Adds edges
   from each pointed-to variable to x and unions their solutions into
   Sol(x).  */

static void
do_sd_constraint (constraint_graph_t graph, constraint_t c,
		  bitmap delta)
{
  unsigned int lhs = c->lhs.var;
  bool flag = false;
  bitmap sol = get_varinfo (lhs)->solution;
  unsigned int j;
  bitmap_iterator bi;

  /* If y points to ANYTHING, x does too; no need to look further.  */
  if (bitmap_bit_p (delta, anything_id))
    {
      flag |= bitmap_set_bit (sol, anything_id);
      goto done;
    }

  /* For x = *ESCAPED and x = *CALLUSED we want to compute the
     reachability set of the rhs var.  As a pointer to a sub-field
     of a variable can also reach all other fields of the variable
     we simply have to expand the solution to contain all sub-fields
     if one sub-field is contained.  */
  if (c->rhs.var == escaped_id
      || c->rhs.var == callused_id)
    {
      bitmap vars = NULL;
      /* In a first pass record all variables we need to add all
	 sub-fields off.  This avoids quadratic behavior.  */
      EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
	{
	  varinfo_t v = get_varinfo (j);
	  /* Single-blob variables have no sub-fields to add.  */
	  if (v->is_full_var)
	    continue;

	  v = lookup_vi_for_tree (v->decl);
	  if (v->next != NULL)
	    {
	      if (vars == NULL)
		vars = BITMAP_ALLOC (NULL);
	      bitmap_set_bit (vars, v->id);
	    }
	}
      /* In the second pass now do the addition to the solution and
	 to speed up solving add it to the delta as well.  */
      if (vars != NULL)
	{
	  EXECUTE_IF_SET_IN_BITMAP (vars, 0, j, bi)
	    {
	      varinfo_t v = get_varinfo (j);
	      /* Walk the field chain, adding every sub-field.  */
	      for (; v != NULL; v = v->next)
		{
		  if (bitmap_set_bit (sol, v->id))
		    {
		      flag = true;
		      bitmap_set_bit (delta, v->id);
		    }
		}
	    }
	  BITMAP_FREE (vars);
	}
    }

  /* For each variable j in delta (Sol(y)), add
     an edge in the graph from j to x, and union Sol(j) into Sol(x).  */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      unsigned HOST_WIDE_INT roffset = c->rhs.offset;
      if (type_safe (j, &roffset))
	{
	  varinfo_t v;
	  unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + roffset;
	  unsigned int t;

	  v = first_vi_for_offset (get_varinfo (j), fieldoffset);
	  /* If the access is outside of the variable we can ignore it.  */
	  if (!v)
	    continue;
	  /* Use the field's representative node.  */
	  t = find (v->id);

	  /* Adding edges from the special vars is pointless.
	     They don't have sets that can change.  */
	  if (get_varinfo (t)->is_special_var)
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	  /* Merging the solution from ESCAPED needlessly increases
	     the set.  Use ESCAPED as representative instead.
	     Same for CALLUSED.  */
	  else if (get_varinfo (t)->id == escaped_id
		   || get_varinfo (t)->id == callused_id)
	    flag |= bitmap_set_bit (sol, get_varinfo (t)->id);
	  else if (add_graph_edge (graph, lhs, t))
	    flag |= bitmap_ior_into (sol, get_varinfo (t)->solution);
	}
    }

done:
  /* If the LHS solution changed, mark the var as changed.  */
  if (flag)
    {
      get_varinfo (lhs)->solution = sol;
      if (!TEST_BIT (changed, lhs))
	{
	  SET_BIT (changed, lhs);
	  changed_count++;
	}
    }
}
1586
/* Process a constraint C that represents *x = y.  DELTA is the
   current points-to set of x; Sol(y) gets unioned into the solution
   of every variable x may point to.  */

static void
do_ds_constraint (constraint_t c, bitmap delta)
{
  unsigned int rhs = c->rhs.var;
  bitmap sol = get_varinfo (rhs)->solution;
  unsigned int j;
  bitmap_iterator bi;

  /* If y's solution contains ANYTHING, everything x points to gets
     ANYTHING added to its solution.  */
  if (bitmap_bit_p (sol, anything_id))
    {
      EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
	{
	  varinfo_t jvi = get_varinfo (j);
	  unsigned int t;
	  unsigned int loff = c->lhs.offset;
	  unsigned HOST_WIDE_INT fieldoffset = jvi->offset + loff;
	  varinfo_t v;

	  v = get_varinfo (j);
	  if (!v->is_full_var)
	    {
	      v = first_vi_for_offset (v, fieldoffset);
	      /* If the access is outside of the variable we can ignore it.  */
	      if (!v)
		continue;
	    }
	  t = find (v->id);

	  if (bitmap_set_bit (get_varinfo (t)->solution, anything_id)
	      && !TEST_BIT (changed, t))
	    {
	      SET_BIT (changed, t);
	      changed_count++;
	    }
	}
      return;
    }

  /* For each member j of delta (Sol(x)), add an edge from y to j and
     union Sol(y) into Sol(j) */
  EXECUTE_IF_SET_IN_BITMAP (delta, 0, j, bi)
    {
      unsigned HOST_WIDE_INT loff = c->lhs.offset;
      if (type_safe (j, &loff) && !(get_varinfo (j)->is_special_var))
	{
	  varinfo_t v;
	  unsigned int t;
	  unsigned HOST_WIDE_INT fieldoffset = get_varinfo (j)->offset + loff;
	  bitmap tmp;

	  v = first_vi_for_offset (get_varinfo (j), fieldoffset);
	  /* If the access is outside of the variable we can ignore it.  */
	  if (!v)
	    continue;
	  t = find (v->id);
	  tmp = get_varinfo (t)->solution;

	  if (set_union_with_increment (tmp, sol, 0))
	    {
	      get_varinfo (t)->solution = tmp;
	      /* If we just grew y's own solution, refresh our cached
		 view of it.  */
	      if (t == rhs)
		sol = get_varinfo (rhs)->solution;
	      if (!TEST_BIT (changed, t))
		{
		  SET_BIT (changed, t);
		  changed_count++;
		}
	    }
	}
    }
}
1660
1661 /* Handle a non-simple (simple meaning requires no iteration),
1662 constraint (IE *x = &y, x = *y, *x = y, and x = y with offsets involved). */
1663
1664 static void
1665 do_complex_constraint (constraint_graph_t graph, constraint_t c, bitmap delta)
1666 {
1667 if (c->lhs.type == DEREF)
1668 {
1669 if (c->rhs.type == ADDRESSOF)
1670 {
1671 gcc_unreachable();
1672 }
1673 else
1674 {
1675 /* *x = y */
1676 do_ds_constraint (c, delta);
1677 }
1678 }
1679 else if (c->rhs.type == DEREF)
1680 {
1681 /* x = *y */
1682 if (!(get_varinfo (c->lhs.var)->is_special_var))
1683 do_sd_constraint (graph, c, delta);
1684 }
1685 else
1686 {
1687 bitmap tmp;
1688 bitmap solution;
1689 bool flag = false;
1690
1691 gcc_assert (c->rhs.type == SCALAR && c->lhs.type == SCALAR);
1692 solution = get_varinfo (c->rhs.var)->solution;
1693 tmp = get_varinfo (c->lhs.var)->solution;
1694
1695 flag = set_union_with_increment (tmp, solution, c->rhs.offset);
1696
1697 if (flag)
1698 {
1699 get_varinfo (c->lhs.var)->solution = tmp;
1700 if (!TEST_BIT (changed, c->lhs.var))
1701 {
1702 SET_BIT (changed, c->lhs.var);
1703 changed_count++;
1704 }
1705 }
1706 }
1707 }
1708
1709 /* Initialize and return a new SCC info structure. */
1710
1711 static struct scc_info *
1712 init_scc_info (size_t size)
1713 {
1714 struct scc_info *si = XNEW (struct scc_info);
1715 size_t i;
1716
1717 si->current_index = 0;
1718 si->visited = sbitmap_alloc (size);
1719 sbitmap_zero (si->visited);
1720 si->deleted = sbitmap_alloc (size);
1721 sbitmap_zero (si->deleted);
1722 si->node_mapping = XNEWVEC (unsigned int, size);
1723 si->dfs = XCNEWVEC (unsigned int, size);
1724
1725 for (i = 0; i < size; i++)
1726 si->node_mapping[i] = i;
1727
1728 si->scc_stack = VEC_alloc (unsigned, heap, 1);
1729 return si;
1730 }
1731
/* Free an SCC info structure pointed to by SI, including all of its
   bitmaps, arrays and the work stack.  */

static void
free_scc_info (struct scc_info *si)
{
  sbitmap_free (si->visited);
  sbitmap_free (si->deleted);
  free (si->node_mapping);
  free (si->dfs);
  VEC_free (unsigned, heap, si->scc_stack);
  free (si);
}
1744
1745
1746 /* Find indirect cycles in GRAPH that occur, using strongly connected
1747 components, and note them in the indirect cycles map.
1748
1749 This technique comes from Ben Hardekopf and Calvin Lin,
1750 "It Pays to be Lazy: Fast and Accurate Pointer Analysis for Millions of
1751 Lines of Code", submitted to PLDI 2007. */
1752
1753 static void
1754 find_indirect_cycles (constraint_graph_t graph)
1755 {
1756 unsigned int i;
1757 unsigned int size = graph->size;
1758 struct scc_info *si = init_scc_info (size);
1759
1760 for (i = 0; i < MIN (LAST_REF_NODE, size); i ++ )
1761 if (!TEST_BIT (si->visited, i) && find (i) == i)
1762 scc_visit (graph, si, i);
1763
1764 free_scc_info (si);
1765 }
1766
1767 /* Compute a topological ordering for GRAPH, and store the result in the
1768 topo_info structure TI. */
1769
1770 static void
1771 compute_topo_order (constraint_graph_t graph,
1772 struct topo_info *ti)
1773 {
1774 unsigned int i;
1775 unsigned int size = graph->size;
1776
1777 for (i = 0; i != size; ++i)
1778 if (!TEST_BIT (ti->visited, i) && find (i) == i)
1779 topo_visit (graph, ti, i);
1780 }
1781
/* Structure used for hash value numbering of pointer equivalence
   classes.  */

typedef struct equiv_class_label
{
  /* The equivalence class this label set maps to.  */
  unsigned int equivalence_class;
  /* The set of labels that forms the hash key.  */
  bitmap labels;
  /* Cached hash of LABELS.  */
  hashval_t hashcode;
} *equiv_class_label_t;
typedef const struct equiv_class_label *const_equiv_class_label_t;
1792
/* A hashtable mapping a bitmap of labels to a pointer equivalence
   class.  */
static htab_t pointer_equiv_class_table;

/* A hashtable mapping a bitmap of labels to a location equivalence
   class.  */
static htab_t location_equiv_class_table;
1800
1801 /* Hash function for a equiv_class_label_t */
1802
1803 static hashval_t
1804 equiv_class_label_hash (const void *p)
1805 {
1806 const_equiv_class_label_t const ecl = (const_equiv_class_label_t) p;
1807 return ecl->hashcode;
1808 }
1809
1810 /* Equality function for two equiv_class_label_t's. */
1811
1812 static int
1813 equiv_class_label_eq (const void *p1, const void *p2)
1814 {
1815 const_equiv_class_label_t const eql1 = (const_equiv_class_label_t) p1;
1816 const_equiv_class_label_t const eql2 = (const_equiv_class_label_t) p2;
1817 return bitmap_equal_p (eql1->labels, eql2->labels);
1818 }
1819
1820 /* Lookup a equivalence class in TABLE by the bitmap of LABELS it
1821 contains. */
1822
1823 static unsigned int
1824 equiv_class_lookup (htab_t table, bitmap labels)
1825 {
1826 void **slot;
1827 struct equiv_class_label ecl;
1828
1829 ecl.labels = labels;
1830 ecl.hashcode = bitmap_hash (labels);
1831
1832 slot = htab_find_slot_with_hash (table, &ecl,
1833 ecl.hashcode, NO_INSERT);
1834 if (!slot)
1835 return 0;
1836 else
1837 return ((equiv_class_label_t) *slot)->equivalence_class;
1838 }
1839
1840
1841 /* Add an equivalence class named EQUIVALENCE_CLASS with labels LABELS
1842 to TABLE. */
1843
1844 static void
1845 equiv_class_add (htab_t table, unsigned int equivalence_class,
1846 bitmap labels)
1847 {
1848 void **slot;
1849 equiv_class_label_t ecl = XNEW (struct equiv_class_label);
1850
1851 ecl->labels = labels;
1852 ecl->equivalence_class = equivalence_class;
1853 ecl->hashcode = bitmap_hash (labels);
1854
1855 slot = htab_find_slot_with_hash (table, ecl,
1856 ecl->hashcode, INSERT);
1857 gcc_assert (!*slot);
1858 *slot = (void *) ecl;
1859 }
1860
1861 /* Perform offline variable substitution.
1862
1863 This is a worst case quadratic time way of identifying variables
1864 that must have equivalent points-to sets, including those caused by
1865 static cycles, and single entry subgraphs, in the constraint graph.
1866
   The technique is described in "Exploiting Pointer and Location
   Equivalence to Optimize Pointer Analysis", in the 14th International
   Static Analysis Symposium (SAS), August 2007.  It is known as the
1870 "HU" algorithm, and is equivalent to value numbering the collapsed
1871 constraint graph including evaluating unions.
1872
1873 The general method of finding equivalence classes is as follows:
1874 Add fake nodes (REF nodes) and edges for *a = b and a = *b constraints.
1875 Initialize all non-REF nodes to be direct nodes.
1876 For each constraint a = a U {b}, we set pts(a) = pts(a) u {fresh
1877 variable}
1878 For each constraint containing the dereference, we also do the same
1879 thing.
1880
1881 We then compute SCC's in the graph and unify nodes in the same SCC,
1882 including pts sets.
1883
1884 For each non-collapsed node x:
   Visit all unvisited explicit incoming edges.
   Ignoring all non-pointers, set pts(x) = Union of pts(y) for y
   where y->x.
1888 Lookup the equivalence class for pts(x).
1889 If we found one, equivalence_class(x) = found class.
1890 Otherwise, equivalence_class(x) = new class, and new_class is
1891 added to the lookup table.
1892
1893 All direct nodes with the same equivalence class can be replaced
1894 with a single representative node.
1895 All unlabeled nodes (label == 0) are not pointers and all edges
1896 involving them can be eliminated.
1897 We perform these optimizations during rewrite_constraints
1898
1899 In addition to pointer equivalence class finding, we also perform
1900 location equivalence class finding. This is the set of variables
1901 that always appear together in points-to sets. We use this to
1902 compress the size of the points-to sets. */
1903
/* Current maximum pointer equivalence class id; 0 is reserved to
   mean "no label" (see equiv_class_lookup).  */
static int pointer_equiv_class;

/* Current maximum location equivalence class id.  */
static int location_equiv_class;
1909
/* Recursive routine to find strongly connected components in GRAPH,
   and label its nodes with DFS numbers.  Like scc_visit, but walks
   the predecessor graph (explicit and implicit edges) and folds the
   per-node substitution data into each SCC's representative.  */

static void
condense_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned int my_dfs;

  gcc_assert (si->node_mapping[n] == n);
  SET_BIT (si->visited, n);
  si->dfs[n] = si->current_index ++;
  my_dfs = si->dfs[n];

  /* Visit all the explicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (TEST_BIT (si->deleted, w))
	continue;

      if (!TEST_BIT (si->visited, w))
	condense_visit (graph, si, w);
      /* NOTE: this block intentionally runs whether or not W was
	 visited just above -- it is the "lowlink" update of Tarjan's
	 algorithm.  */
      {
	unsigned int t = si->node_mapping[w];
	unsigned int nnode = si->node_mapping[n];
	gcc_assert (nnode == n);

	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* Visit all the implicit predecessors.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->implicit_preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];

      if (TEST_BIT (si->deleted, w))
	continue;

      if (!TEST_BIT (si->visited, w))
	condense_visit (graph, si, w);
      /* Lowlink update, as above.  */
      {
	unsigned int t = si->node_mapping[w];
	unsigned int nnode = si->node_mapping[n];
	gcc_assert (nnode == n);

	if (si->dfs[t] < si->dfs[nnode])
	  si->dfs[n] = si->dfs[t];
      }
    }

  /* See if any components have been identified.  */
  if (si->dfs[n] == my_dfs)
    {
      /* Pop every member of N's SCC off the stack, remapping each to
	 N and folding its data into N.  */
      while (VEC_length (unsigned, si->scc_stack) != 0
	     && si->dfs[VEC_last (unsigned, si->scc_stack)] >= my_dfs)
	{
	  unsigned int w = VEC_pop (unsigned, si->scc_stack);
	  si->node_mapping[w] = n;

	  /* N stays direct only if every member of the SCC is.  */
	  if (!TEST_BIT (graph->direct_nodes, w))
	    RESET_BIT (graph->direct_nodes, n);

	  /* Unify our nodes.  */
	  if (graph->preds[w])
	    {
	      if (!graph->preds[n])
		graph->preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->preds[n], graph->preds[w]);
	    }
	  if (graph->implicit_preds[w])
	    {
	      if (!graph->implicit_preds[n])
		graph->implicit_preds[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->implicit_preds[n],
			       graph->implicit_preds[w]);
	    }
	  if (graph->points_to[w])
	    {
	      if (!graph->points_to[n])
		graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);
	      bitmap_ior_into (graph->points_to[n],
			       graph->points_to[w]);
	    }
	}
      SET_BIT (si->deleted, n);
    }
  else
    VEC_safe_push (unsigned, heap, si->scc_stack, n);
}
2004
/* Label pointer equivalences: assign node N of GRAPH a pointer
   equivalence class based on the union of its predecessors'
   points-to sets.  Nodes with an empty set keep label 0.  */

static void
label_visit (constraint_graph_t graph, struct scc_info *si, unsigned int n)
{
  unsigned int i;
  bitmap_iterator bi;
  SET_BIT (si->visited, n);

  if (!graph->points_to[n])
    graph->points_to[n] = BITMAP_ALLOC (&predbitmap_obstack);

  /* Label and union our incoming edges's points to sets.  */
  EXECUTE_IF_IN_NONNULL_BITMAP (graph->preds[n], 0, i, bi)
    {
      unsigned int w = si->node_mapping[i];
      /* Depth-first: make sure the predecessor is labeled first.  */
      if (!TEST_BIT (si->visited, w))
	label_visit (graph, si, w);

      /* Skip unused edges (self edges and non-pointer
	 predecessors).  */
      if (w == n || graph->pointer_label[w] == 0)
	continue;

      if (graph->points_to[w])
	bitmap_ior_into(graph->points_to[n], graph->points_to[w]);
    }
  /* Indirect nodes get fresh variables.  */
  if (!TEST_BIT (graph->direct_nodes, n))
    bitmap_set_bit (graph->points_to[n], FIRST_REF_NODE + n);

  /* Look up the equivalence class for N's points-to set, creating a
     new class if this set has not been seen before.  */
  if (!bitmap_empty_p (graph->points_to[n]))
    {
      unsigned int label = equiv_class_lookup (pointer_equiv_class_table,
					       graph->points_to[n]);
      if (!label)
	{
	  label = pointer_equiv_class++;
	  equiv_class_add (pointer_equiv_class_table,
			   label, graph->points_to[n]);
	}
      graph->pointer_label[n] = label;
    }
}
2048
/* Perform offline variable substitution, discovering equivalence
   classes, and eliminating non-pointer variables.  Returns the SCC
   info, which the caller later releases with
   free_var_substitution_info.  */

static struct scc_info *
perform_var_substitution (constraint_graph_t graph)
{
  unsigned int i;
  unsigned int size = graph->size;
  struct scc_info *si = init_scc_info (size);

  bitmap_obstack_initialize (&iteration_obstack);
  /* Both tables own their entries: "free" is passed as the delete
     function.  */
  pointer_equiv_class_table = htab_create (511, equiv_class_label_hash,
					   equiv_class_label_eq, free);
  location_equiv_class_table = htab_create (511, equiv_class_label_hash,
					    equiv_class_label_eq, free);
  /* Class id 0 is reserved to mean "no label".  */
  pointer_equiv_class = 1;
  location_equiv_class = 1;

  /* Condense the nodes, which means to find SCC's, count incoming
     predecessors, and unite nodes in SCC's.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    if (!TEST_BIT (si->visited, si->node_mapping[i]))
      condense_visit (graph, si, si->node_mapping[i]);

  sbitmap_zero (si->visited);
  /* Actually label the nodes for pointer equivalences.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    if (!TEST_BIT (si->visited, si->node_mapping[i]))
      label_visit (graph, si, si->node_mapping[i]);

  /* Calculate location equivalence labels.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      bitmap pointed_by;
      bitmap_iterator bi;
      unsigned int j;
      unsigned int label;

      if (!graph->pointed_by[i])
	continue;
      pointed_by = BITMAP_ALLOC (&iteration_obstack);

      /* Translate the pointed-by mapping for pointer equivalence
	 labels.  */
      EXECUTE_IF_SET_IN_BITMAP (graph->pointed_by[i], 0, j, bi)
	{
	  bitmap_set_bit (pointed_by,
			  graph->pointer_label[si->node_mapping[j]]);
	}
      /* The original pointed_by is now dead.  */
      BITMAP_FREE (graph->pointed_by[i]);

      /* Look up the location equivalence label if one exists, or make
	 one otherwise.  */
      label = equiv_class_lookup (location_equiv_class_table,
				  pointed_by);
      if (label == 0)
	{
	  label = location_equiv_class++;
	  equiv_class_add (location_equiv_class_table,
			   label, pointed_by);
	}
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Found location equivalence for node %s\n",
		     get_varinfo (i)->name);
	  BITMAP_FREE (pointed_by);
	}
      graph->loc_label[i] = label;

    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    for (i = 0; i < FIRST_REF_NODE; i++)
      {
	bool direct_node = TEST_BIT (graph->direct_nodes, i);
	fprintf (dump_file,
		 "Equivalence classes for %s node id %d:%s are pointer: %d"
		 ", location:%d\n",
		 direct_node ? "Direct node" : "Indirect node", i,
		 get_varinfo (i)->name,
		 graph->pointer_label[si->node_mapping[i]],
		 graph->loc_label[si->node_mapping[i]]);
      }

  /* Quickly eliminate our non-pointer variables.  */

  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      unsigned int node = si->node_mapping[i];

      /* Pointer label 0 means the variable's points-to set stayed
	 empty, i.e. it is not a pointer.  */
      if (graph->pointer_label[node] == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "%s is a non-pointer variable, eliminating edges.\n",
		     get_varinfo (node)->name);
	  stats.nonpointer_vars++;
	  clear_edges_for_node (graph, node);
	}
    }

  return si;
}
2154
/* Free information that was only necessary for variable
   substitution.  */

static void
free_var_substitution_info (struct scc_info *si)
{
  free_scc_info (si);
  /* The per-node label and bookkeeping arrays are only consulted
     during substitution; release them all here.  */
  free (graph->pointer_label);
  free (graph->loc_label);
  free (graph->pointed_by);
  free (graph->points_to);
  free (graph->eq_rep);
  sbitmap_free (graph->direct_nodes);
  htab_delete (pointer_equiv_class_table);
  htab_delete (location_equiv_class_table);
  /* Bitmaps allocated during substitution live on iteration_obstack;
     releasing it frees them en masse.  */
  bitmap_obstack_release (&iteration_obstack);
}
2172
/* Return an existing node that is equivalent to NODE, which has
   equivalence class LABEL, if one exists.  Return NODE otherwise.  */

static unsigned int
find_equivalent_node (constraint_graph_t graph,
		      unsigned int node, unsigned int label)
{
  /* If the address version of this variable is unused, we can
     substitute it for anything else with the same label.
     Otherwise, we know the pointers are equivalent, but not the
     locations, and we can unite them later.  */

  if (!bitmap_bit_p (graph->address_taken, node))
    {
      gcc_assert (label < graph->size);

      if (graph->eq_rep[label] != -1)
	{
	  /* Unify the two variables since we know they are equivalent.  */
	  if (unite (graph->eq_rep[label], node))
	    unify_nodes (graph, graph->eq_rep[label], node, false);
	  return graph->eq_rep[label];
	}
      else
	{
	  /* First node seen with this label; it becomes the
	     representative for both equivalence kinds.  */
	  graph->eq_rep[label] = node;
	  graph->pe_rep[label] = node;
	}
    }
  else
    {
      /* Address-taken: only record the pointer equivalence here;
	 the uniting is deferred to unite_pointer_equivalences.  */
      gcc_assert (label < graph->size);
      graph->pe[node] = label;
      if (graph->pe_rep[label] == -1)
	graph->pe_rep[label] = node;
    }

  return node;
}
2212
2213 /* Unite pointer equivalent but not location equivalent nodes in
2214 GRAPH. This may only be performed once variable substitution is
2215 finished. */
2216
2217 static void
2218 unite_pointer_equivalences (constraint_graph_t graph)
2219 {
2220 unsigned int i;
2221
2222 /* Go through the pointer equivalences and unite them to their
2223 representative, if they aren't already. */
2224 for (i = 0; i < FIRST_REF_NODE; i++)
2225 {
2226 unsigned int label = graph->pe[i];
2227 if (label)
2228 {
2229 int label_rep = graph->pe_rep[label];
2230
2231 if (label_rep == -1)
2232 continue;
2233
2234 label_rep = find (label_rep);
2235 if (label_rep >= 0 && unite (label_rep, find (i)))
2236 unify_nodes (graph, label_rep, i, false);
2237 }
2238 }
2239 }
2240
2241 /* Move complex constraints to the GRAPH nodes they belong to. */
2242
2243 static void
2244 move_complex_constraints (constraint_graph_t graph)
2245 {
2246 int i;
2247 constraint_t c;
2248
2249 for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
2250 {
2251 if (c)
2252 {
2253 struct constraint_expr lhs = c->lhs;
2254 struct constraint_expr rhs = c->rhs;
2255
2256 if (lhs.type == DEREF)
2257 {
2258 insert_into_complex (graph, lhs.var, c);
2259 }
2260 else if (rhs.type == DEREF)
2261 {
2262 if (!(get_varinfo (lhs.var)->is_special_var))
2263 insert_into_complex (graph, rhs.var, c);
2264 }
2265 else if (rhs.type != ADDRESSOF && lhs.var > anything_id
2266 && (lhs.offset != 0 || rhs.offset != 0))
2267 {
2268 insert_into_complex (graph, rhs.var, c);
2269 }
2270 }
2271 }
2272 }
2273
2274
/* Optimize and rewrite complex constraints while performing
   collapsing of equivalent nodes.  SI is the SCC_INFO that is the
   result of perform_variable_substitution.  */

static void
rewrite_constraints (constraint_graph_t graph,
		     struct scc_info *si)
{
  int i;
  unsigned int j;
  constraint_t c;

  /* Sanity check: every node must currently be its own
     representative; uniting happens as a result of this pass.  */
  for (j = 0; j < graph->size; j++)
    gcc_assert (find (j) == j);

  for (i = 0; VEC_iterate (constraint_t, constraints, i, c); i++)
    {
      struct constraint_expr lhs = c->lhs;
      struct constraint_expr rhs = c->rhs;
      unsigned int lhsvar = find (get_varinfo_fc (lhs.var)->id);
      unsigned int rhsvar = find (get_varinfo_fc (rhs.var)->id);
      unsigned int lhsnode, rhsnode;
      unsigned int lhslabel, rhslabel;

      lhsnode = si->node_mapping[lhsvar];
      rhsnode = si->node_mapping[rhsvar];
      lhslabel = graph->pointer_label[lhsnode];
      rhslabel = graph->pointer_label[rhsnode];

      /* See if it is really a non-pointer variable, and if so, ignore
	 the constraint.  */
      if (lhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {

	      fprintf (dump_file, "%s is a non-pointer variable,"
		       "ignoring constraint:",
		       get_varinfo (lhs.var)->name);
	      dump_constraint (dump_file, c);
	    }
	  /* NULL out the slot; later passes skip NULL constraints.  */
	  VEC_replace (constraint_t, constraints, i, NULL);
	  continue;
	}

      if (rhslabel == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {

	      fprintf (dump_file, "%s is a non-pointer variable,"
		       "ignoring constraint:",
		       get_varinfo (rhs.var)->name);
	      dump_constraint (dump_file, c);
	    }
	  VEC_replace (constraint_t, constraints, i, NULL);
	  continue;
	}

      /* Redirect both sides to the representative of their pointer
	 equivalence class, uniting as a side effect.  */
      lhsvar = find_equivalent_node (graph, lhsvar, lhslabel);
      rhsvar = find_equivalent_node (graph, rhsvar, rhslabel);
      c->lhs.var = lhsvar;
      c->rhs.var = rhsvar;

    }
}
2341
/* Eliminate indirect cycles involving NODE.  Return true if NODE was
   part of an SCC, false otherwise.  */

static bool
eliminate_indirect_cycles (unsigned int node)
{
  if (graph->indirect_cycles[node] != -1
      && !bitmap_empty_p (get_varinfo (node)->solution))
    {
      unsigned int i;
      VEC(unsigned,heap) *queue = NULL;
      int queuepos;
      unsigned int to = find (graph->indirect_cycles[node]);
      bitmap_iterator bi;

      /* We can't touch the solution set and call unify_nodes
	 at the same time, because unify_nodes is going to do
	 bitmap unions into it. */

      /* Phase 1: record which representatives must be merged.  */
      EXECUTE_IF_SET_IN_BITMAP (get_varinfo (node)->solution, 0, i, bi)
	{
	  if (find (i) == i && i != to)
	    {
	      if (unite (to, i))
		VEC_safe_push (unsigned, heap, queue, i);
	    }
	}

      /* Phase 2: now it is safe to merge the solutions.  */
      for (queuepos = 0;
	   VEC_iterate (unsigned, queue, queuepos, i);
	   queuepos++)
	{
	  unify_nodes (graph, to, i, true);
	}
      VEC_free (unsigned, heap, queue);
      return true;
    }
  return false;
}
2381
/* Solve the constraint graph GRAPH using our worklist solver.
   This is based on the PW* family of solvers from the "Efficient Field
   Sensitive Pointer Analysis for C" paper.
   It works by iterating over all the graph nodes, processing the complex
   constraints and propagating the copy constraints, until everything stops
   changed.  This corresponds to steps 6-8 in the solving list given above.  */

static void
solve_graph (constraint_graph_t graph)
{
  unsigned int size = graph->size;
  unsigned int i;
  bitmap pts;

  /* CHANGED / CHANGED_COUNT form the worklist: a bit per node plus a
     population count so termination is a cheap test.  */
  changed_count = 0;
  changed = sbitmap_alloc (size);
  sbitmap_zero (changed);

  /* Mark all initial non-collapsed nodes as changed.  */
  for (i = 0; i < size; i++)
    {
      varinfo_t ivi = get_varinfo (i);
      /* Only representatives with a non-empty solution and something
	 to propagate to (successors or complex constraints) matter.  */
      if (find (i) == i && !bitmap_empty_p (ivi->solution)
	  && ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
	      || VEC_length (constraint_t, graph->complex[i]) > 0))
	{
	  SET_BIT (changed, i);
	  changed_count++;
	}
    }

  /* Allocate a bitmap to be used to store the changed bits.  */
  pts = BITMAP_ALLOC (&pta_obstack);

  while (changed_count > 0)
    {
      unsigned int i;
      struct topo_info *ti = init_topo_info ();
      stats.iterations++;

      bitmap_obstack_initialize (&iteration_obstack);

      /* Process nodes in (reverse-popped) topological order so
	 solutions flow forward in as few iterations as possible.  */
      compute_topo_order (graph, ti);

      while (VEC_length (unsigned, ti->topo_order) != 0)
	{

	  i = VEC_pop (unsigned, ti->topo_order);

	  /* If this variable is not a representative, skip it.  */
	  if (find (i) != i)
	    continue;

	  /* In certain indirect cycle cases, we may merge this
	     variable to another.  */
	  if (eliminate_indirect_cycles (i) && find (i) != i)
	    continue;

	  /* If the node has changed, we need to process the
	     complex constraints and outgoing edges again.  */
	  if (TEST_BIT (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      bitmap solution;
	      VEC(constraint_t,heap) *complex = graph->complex[i];
	      bool solution_empty;

	      RESET_BIT (changed, i);
	      changed_count--;

	      /* Compute the changed set of solution bits.  Only the
		 delta since the last visit needs propagating.  */
	      bitmap_and_compl (pts, get_varinfo (i)->solution,
				get_varinfo (i)->oldsolution);

	      if (bitmap_empty_p (pts))
		continue;

	      bitmap_ior_into (get_varinfo (i)->oldsolution, pts);

	      solution = get_varinfo (i)->solution;
	      solution_empty = bitmap_empty_p (solution);

	      /* Process the complex constraints */
	      for (j = 0; VEC_iterate (constraint_t, complex, j, c); j++)
		{
		  /* XXX: This is going to unsort the constraints in
		     some cases, which will occasionally add duplicate
		     constraints during unification.  This does not
		     affect correctness.  */
		  c->lhs.var = find (c->lhs.var);
		  c->rhs.var = find (c->rhs.var);

		  /* The only complex constraint that can change our
		     solution to non-empty, given an empty solution,
		     is a constraint where the lhs side is receiving
		     some set from elsewhere.  */
		  if (!solution_empty || c->lhs.type != DEREF)
		    do_complex_constraint (graph, c, pts);
		}

	      solution_empty = bitmap_empty_p (solution);

	      if (!solution_empty
		  /* Do not propagate the ESCAPED/CALLUSED solutions.  */
		  && i != escaped_id
		  && i != callused_id)
		{
		  bitmap_iterator bi;

		  /* Propagate solution to all successors.  */
		  EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i],
						0, j, bi)
		    {
		      bitmap tmp;
		      bool flag;

		      unsigned int to = find (j);
		      tmp = get_varinfo (to)->solution;
		      flag = false;

		      /* Don't try to propagate to ourselves.  */
		      if (to == i)
			continue;

		      /* Union only the delta (PTS) into the successor;
			 re-queue it if its solution grew.  */
		      flag = set_union_with_increment (tmp, pts, 0);

		      if (flag)
			{
			  get_varinfo (to)->solution = tmp;
			  if (!TEST_BIT (changed, to))
			    {
			      SET_BIT (changed, to);
			      changed_count++;
			    }
			}
		    }
		}
	    }
	}
      free_topo_info (ti);
      bitmap_obstack_release (&iteration_obstack);
    }

  BITMAP_FREE (pts);
  sbitmap_free (changed);
  bitmap_obstack_release (&oldpta_obstack);
}
2530
2531 /* Map from trees to variable infos. */
2532 static struct pointer_map_t *vi_for_tree;
2533
2534
2535 /* Insert ID as the variable id for tree T in the vi_for_tree map. */
2536
2537 static void
2538 insert_vi_for_tree (tree t, varinfo_t vi)
2539 {
2540 void **slot = pointer_map_insert (vi_for_tree, t);
2541 gcc_assert (vi);
2542 gcc_assert (*slot == NULL);
2543 *slot = vi;
2544 }
2545
2546 /* Find the variable info for tree T in VI_FOR_TREE. If T does not
2547 exist in the map, return NULL, otherwise, return the varinfo we found. */
2548
2549 static varinfo_t
2550 lookup_vi_for_tree (tree t)
2551 {
2552 void **slot = pointer_map_contains (vi_for_tree, t);
2553 if (slot == NULL)
2554 return NULL;
2555
2556 return (varinfo_t) *slot;
2557 }
2558
2559 /* Return a printable name for DECL */
2560
2561 static const char *
2562 alias_get_name (tree decl)
2563 {
2564 const char *res = get_name (decl);
2565 char *temp;
2566 int num_printed = 0;
2567
2568 if (res != NULL)
2569 return res;
2570
2571 res = "NULL";
2572 if (!dump_file)
2573 return res;
2574
2575 if (TREE_CODE (decl) == SSA_NAME)
2576 {
2577 num_printed = asprintf (&temp, "%s_%u",
2578 alias_get_name (SSA_NAME_VAR (decl)),
2579 SSA_NAME_VERSION (decl));
2580 }
2581 else if (DECL_P (decl))
2582 {
2583 num_printed = asprintf (&temp, "D.%u", DECL_UID (decl));
2584 }
2585 if (num_printed > 0)
2586 {
2587 res = ggc_strdup (temp);
2588 free (temp);
2589 }
2590 return res;
2591 }
2592
2593 /* Find the variable id for tree T in the map.
2594 If T doesn't exist in the map, create an entry for it and return it. */
2595
2596 static varinfo_t
2597 get_vi_for_tree (tree t)
2598 {
2599 void **slot = pointer_map_contains (vi_for_tree, t);
2600 if (slot == NULL)
2601 return get_varinfo (create_variable_info_for (t, alias_get_name (t)));
2602
2603 return (varinfo_t) *slot;
2604 }
2605
2606 /* Get a constraint expression for a new temporary variable. */
2607
2608 static struct constraint_expr
2609 get_constraint_exp_for_temp (tree t)
2610 {
2611 struct constraint_expr cexpr;
2612
2613 gcc_assert (SSA_VAR_P (t));
2614
2615 cexpr.type = SCALAR;
2616 cexpr.var = get_vi_for_tree (t)->id;
2617 cexpr.offset = 0;
2618
2619 return cexpr;
2620 }
2621
/* Get a constraint expression vector from an SSA_VAR_P node.
   If address_p is true, the result will be taken its address of.  */

static void
get_constraint_for_ssa_var (tree t, VEC(ce_s, heap) **results, bool address_p)
{
  struct constraint_expr cexpr;
  varinfo_t vi;

  /* We allow FUNCTION_DECLs here even though it doesn't make much sense.  */
  gcc_assert (SSA_VAR_P (t) || DECL_P (t));

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (t) == SSA_NAME
      && TREE_CODE (SSA_NAME_VAR (t)) == PARM_DECL
      && SSA_NAME_IS_DEFAULT_DEF (t))
    {
      get_constraint_for_ssa_var (SSA_NAME_VAR (t), results, address_p);
      return;
    }

  vi = get_vi_for_tree (t);
  cexpr.var = vi->id;
  cexpr.type = SCALAR;
  cexpr.offset = 0;
  /* If we determine the result is "anything", and we know this is readonly,
     say it points to readonly memory instead.  */
  if (cexpr.var == anything_id && TREE_READONLY (t))
    {
      /* NOTE(review): the gcc_unreachable asserts this case no longer
	 happens; the two assignments after it are dead code kept from
	 the previous behavior.  */
      gcc_unreachable ();
      cexpr.type = ADDRESSOF;
      cexpr.var = readonly_id;
    }

  /* If we are not taking the address of the constraint expr, add all
     sub-fields of the variable as well.  */
  if (!address_p)
    {
      /* Walk the field chain hanging off VI, pushing one SCALAR
	 expression per sub-field.  */
      for (; vi; vi = vi->next)
	{
	  cexpr.var = vi->id;
	  VEC_safe_push (ce_s, heap, *results, &cexpr);
	}
      return;
    }

  VEC_safe_push (ce_s, heap, *results, &cexpr);
}
2671
/* Process constraint T, performing various simplifications and then
   adding it to our list of overall constraints.  */

static void
process_constraint (constraint_t t)
{
  struct constraint_expr rhs = t->rhs;
  struct constraint_expr lhs = t->lhs;

  gcc_assert (rhs.var < VEC_length (varinfo_t, varmap));
  gcc_assert (lhs.var < VEC_length (varinfo_t, varmap));

  /* ANYTHING == ANYTHING is pointless.  */
  if (lhs.var == anything_id && rhs.var == anything_id)
    return;

  /* If we have &ANYTHING = something, convert to SOMETHING = &ANYTHING) */
  else if (lhs.var == anything_id && lhs.type == ADDRESSOF)
    {
      /* Swap the two sides and reprocess.  */
      rhs = t->lhs;
      t->lhs = t->rhs;
      t->rhs = rhs;
      process_constraint (t);
    }
  /* This can happen in our IR with things like n->a = *p */
  else if (rhs.type == DEREF && lhs.type == DEREF && rhs.var != anything_id)
    {
      /* Split into tmp = *rhs, *lhs = tmp */
      tree rhsdecl = get_varinfo (rhs.var)->decl;
      tree pointertype = TREE_TYPE (rhsdecl);
      tree pointedtotype = TREE_TYPE (pointertype);
      tree tmpvar = create_tmp_var_raw (pointedtotype, "doubledereftmp");
      struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);

      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else if (rhs.type == ADDRESSOF && lhs.type == DEREF)
    {
      /* Split into tmp = &rhs, *lhs = tmp */
      tree rhsdecl = get_varinfo (rhs.var)->decl;
      tree pointertype = TREE_TYPE (rhsdecl);
      tree tmpvar = create_tmp_var_raw (pointertype, "derefaddrtmp");
      struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);

      process_constraint (new_constraint (tmplhs, rhs));
      process_constraint (new_constraint (lhs, tmplhs));
    }
  else
    {
      /* After the above simplifications an ADDRESSOF rhs must have a
	 zero offset; anything else is ready to be recorded.  */
      gcc_assert (rhs.type != ADDRESSOF || rhs.offset == 0);
      VEC_safe_push (constraint_t, heap, constraints, t);
    }
}
2726
2727 /* Return true if T is a variable of a type that could contain
2728 pointers. */
2729
2730 static bool
2731 could_have_pointers (tree t)
2732 {
2733 tree type = TREE_TYPE (t);
2734
2735 if (POINTER_TYPE_P (type)
2736 || AGGREGATE_TYPE_P (type))
2737 return true;
2738
2739 return false;
2740 }
2741
2742 /* Return the position, in bits, of FIELD_DECL from the beginning of its
2743 structure. */
2744
2745 static HOST_WIDE_INT
2746 bitpos_of_field (const tree fdecl)
2747 {
2748
2749 if (!host_integerp (DECL_FIELD_OFFSET (fdecl), 0)
2750 || !host_integerp (DECL_FIELD_BIT_OFFSET (fdecl), 0))
2751 return -1;
2752
2753 return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * 8
2754 + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
2755 }
2756
2757
2758 /* Get constraint expressions for offsetting PTR by OFFSET. Stores the
2759 resulting constraint expressions in *RESULTS. */
2760
2761 static void
2762 get_constraint_for_ptr_offset (tree ptr, tree offset,
2763 VEC (ce_s, heap) **results)
2764 {
2765 struct constraint_expr *c;
2766 unsigned int j, n;
2767 unsigned HOST_WIDE_INT rhsunitoffset, rhsoffset;
2768
2769 /* If we do not do field-sensitive PTA adding offsets to pointers
2770 does not change the points-to solution. */
2771 if (!use_field_sensitive)
2772 {
2773 get_constraint_for (ptr, results);
2774 return;
2775 }
2776
2777 /* If the offset is not a non-negative integer constant that fits
2778 in a HOST_WIDE_INT, we have to fall back to a conservative
2779 solution which includes all sub-fields of all pointed-to
2780 variables of ptr.
2781 ??? As we do not have the ability to express this, fall back
2782 to anything. */
2783 if (!host_integerp (offset, 1))
2784 {
2785 struct constraint_expr temp;
2786 temp.var = anything_id;
2787 temp.type = SCALAR;
2788 temp.offset = 0;
2789 VEC_safe_push (ce_s, heap, *results, &temp);
2790 return;
2791 }
2792
2793 /* Make sure the bit-offset also fits. */
2794 rhsunitoffset = TREE_INT_CST_LOW (offset);
2795 rhsoffset = rhsunitoffset * BITS_PER_UNIT;
2796 if (rhsunitoffset != rhsoffset / BITS_PER_UNIT)
2797 {
2798 struct constraint_expr temp;
2799 temp.var = anything_id;
2800 temp.type = SCALAR;
2801 temp.offset = 0;
2802 VEC_safe_push (ce_s, heap, *results, &temp);
2803 return;
2804 }
2805
2806 get_constraint_for (ptr, results);
2807 if (rhsoffset == 0)
2808 return;
2809
2810 /* As we are eventually appending to the solution do not use
2811 VEC_iterate here. */
2812 n = VEC_length (ce_s, *results);
2813 for (j = 0; j < n; j++)
2814 {
2815 varinfo_t curr;
2816 c = VEC_index (ce_s, *results, j);
2817 curr = get_varinfo (c->var);
2818
2819 if (c->type == ADDRESSOF
2820 && !curr->is_full_var)
2821 {
2822 varinfo_t temp, curr = get_varinfo (c->var);
2823
2824 /* Search the sub-field which overlaps with the
2825 pointed-to offset. As we deal with positive offsets
2826 only, we can start the search from the current variable. */
2827 temp = first_vi_for_offset (curr, curr->offset + rhsoffset);
2828
2829 /* If the result is outside of the variable we have to provide
2830 a conservative result, as the variable is still reachable
2831 from the resulting pointer (even though it technically
2832 cannot point to anything). The last sub-field is such
2833 a conservative result.
2834 ??? If we always had a sub-field for &object + 1 then
2835 we could represent this in a more precise way. */
2836 if (temp == NULL)
2837 {
2838 temp = curr;
2839 while (temp->next != NULL)
2840 temp = temp->next;
2841 continue;
2842 }
2843
2844 /* If the found variable is not exactly at the pointed to
2845 result, we have to include the next variable in the
2846 solution as well. Otherwise two increments by offset / 2
2847 do not result in the same or a conservative superset
2848 solution. */
2849 if (temp->offset != curr->offset + rhsoffset
2850 && temp->next != NULL)
2851 {
2852 struct constraint_expr c2;
2853 c2.var = temp->next->id;
2854 c2.type = ADDRESSOF;
2855 c2.offset = 0;
2856 VEC_safe_push (ce_s, heap, *results, &c2);
2857 }
2858 c->var = temp->id;
2859 c->offset = 0;
2860 }
2861 else if (c->type == ADDRESSOF
2862 /* If this varinfo represents a full variable just use it. */
2863 && curr->is_full_var)
2864 c->offset = 0;
2865 else
2866 c->offset = rhsoffset;
2867 }
2868 }
2869
2870
/* Given a COMPONENT_REF T, return the constraint_expr vector for it.
   If address_p is true the result will be taken its address of.  */

static void
get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
				  bool address_p)
{
  tree orig_t = t;
  HOST_WIDE_INT bitsize = -1;
  HOST_WIDE_INT bitmaxsize = -1;
  HOST_WIDE_INT bitpos;
  tree forzero;
  struct constraint_expr *result;

  /* Some people like to do cute things like take the address of
     &0->a.b */
  forzero = t;
  while (!SSA_VAR_P (forzero) && !CONSTANT_CLASS_P (forzero))
    forzero = TREE_OPERAND (forzero, 0);

  if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
    {
      /* A reference rooted at literal zero: treat as the integer
	 variable.  */
      struct constraint_expr temp;

      temp.offset = 0;
      temp.var = integer_id;
      temp.type = SCALAR;
      VEC_safe_push (ce_s, heap, *results, &temp);
      return;
    }

  t = get_ref_base_and_extent (t, &bitpos, &bitsize, &bitmaxsize);

  /* Pretend to take the address of the base, we'll take care of
     adding the required subset of sub-fields below.  */
  get_constraint_for_1 (t, results, true);
  gcc_assert (VEC_length (ce_s, *results) == 1);
  result = VEC_last (ce_s, *results);

  /* This can also happen due to weird offsetof type macros.  */
  if (TREE_CODE (t) != ADDR_EXPR && result->type == ADDRESSOF)
    result->type = SCALAR;

  if (result->type == SCALAR
      && get_varinfo (result->var)->is_full_var)
    /* For single-field vars do not bother about the offset.  */
    result->offset = 0;
  else if (result->type == SCALAR)
    {
      /* In languages like C, you can access one past the end of an
	 array.  You aren't allowed to dereference it, so we can
	 ignore this constraint.  When we handle pointer subtraction,
	 we may have to do something cute here.  */

      if ((unsigned HOST_WIDE_INT)bitpos < get_varinfo (result->var)->fullsize
	  && bitmaxsize != 0)
	{
	  /* It's also not true that the constraint will actually start at the
	     right offset, it may start in some padding.  We only care about
	     setting the constraint to the first actual field it touches, so
	     walk to find it.  */
	  struct constraint_expr cexpr = *result;
	  varinfo_t curr;
	  VEC_pop (ce_s, *results);
	  cexpr.offset = 0;
	  /* Push one expression per sub-field overlapping the access;
	     when taking the address only the first one is needed.  */
	  for (curr = get_varinfo (cexpr.var); curr; curr = curr->next)
	    {
	      if (ranges_overlap_p (curr->offset, curr->size,
				    bitpos, bitmaxsize))
		{
		  cexpr.var = curr->id;
		  VEC_safe_push (ce_s, heap, *results, &cexpr);
		  if (address_p)
		    break;
		}
	    }
	  /* If we are going to take the address of this field then
	     to be able to compute reachability correctly add at least
	     the last field of the variable.  */
	  if (address_p
	      && VEC_length (ce_s, *results) == 0)
	    {
	      curr = get_varinfo (cexpr.var);
	      while (curr->next != NULL)
		curr = curr->next;
	      cexpr.var = curr->id;
	      VEC_safe_push (ce_s, heap, *results, &cexpr);
	    }
	  else
	    /* Assert that we found *some* field there.  The user couldn't be
	       accessing *only* padding.  */
	    /* Still the user could access one past the end of an array
	       embedded in a struct resulting in accessing *only* padding.  */
	    gcc_assert (VEC_length (ce_s, *results) >= 1
			|| ref_contains_array_ref (orig_t));
	}
      else if (bitmaxsize == 0)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Access to zero-sized part of variable,"
		     "ignoring\n");
	}
      else
	if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "Access to past the end of variable, ignoring\n");
    }
  else if (bitmaxsize == -1)
    {
      /* We can't handle DEREF constraints with unknown size, we'll
	 get the wrong answer.  Punt and return anything.  */
      result->var = anything_id;
      result->offset = 0;
    }
  else
    result->offset = bitpos;
}
2987
2988
2989 /* Dereference the constraint expression CONS, and return the result.
2990 DEREF (ADDRESSOF) = SCALAR
2991 DEREF (SCALAR) = DEREF
2992 DEREF (DEREF) = (temp = DEREF1; result = DEREF(temp))
2993 This is needed so that we can handle dereferencing DEREF constraints. */
2994
2995 static void
2996 do_deref (VEC (ce_s, heap) **constraints)
2997 {
2998 struct constraint_expr *c;
2999 unsigned int i = 0;
3000
3001 for (i = 0; VEC_iterate (ce_s, *constraints, i, c); i++)
3002 {
3003 if (c->type == SCALAR)
3004 c->type = DEREF;
3005 else if (c->type == ADDRESSOF)
3006 c->type = SCALAR;
3007 else if (c->type == DEREF)
3008 {
3009 tree tmpvar = create_tmp_var_raw (ptr_type_node, "dereftmp");
3010 struct constraint_expr tmplhs = get_constraint_exp_for_temp (tmpvar);
3011 process_constraint (new_constraint (tmplhs, *c));
3012 c->var = tmplhs.var;
3013 }
3014 else
3015 gcc_unreachable ();
3016 }
3017 }
3018
/* Given a tree T, return the constraint expression for it.  Pushes
   onto *RESULTS; ADDRESS_P says whether the caller will take the
   address of the result.  */

static void
get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p)
{
  struct constraint_expr temp;

  /* x = integer is all glommed to a single variable, which doesn't
     point to anything by itself.  That is, of course, unless it is an
     integer constant being treated as a pointer, in which case, we
     will return that this is really the addressof anything.  This
     happens below, since it will fall into the default case.  The only
     case we know something about an integer treated like a pointer is
     when it is the NULL pointer, and then we just say it points to
     NULL.  */
  if (TREE_CODE (t) == INTEGER_CST
      && integer_zerop (t))
    {
      temp.var = nothing_id;
      temp.type = ADDRESSOF;
      temp.offset = 0;
      VEC_safe_push (ce_s, heap, *results, &temp);
      return;
    }

  /* String constants are read-only.  */
  if (TREE_CODE (t) == STRING_CST)
    {
      temp.var = readonly_id;
      temp.type = SCALAR;
      temp.offset = 0;
      VEC_safe_push (ce_s, heap, *results, &temp);
      return;
    }

  /* Dispatch on the tree code class, falling through to the
     conservative "points to anything" default below.  */
  switch (TREE_CODE_CLASS (TREE_CODE (t)))
    {
    case tcc_expression:
      {
	switch (TREE_CODE (t))
	  {
	  case ADDR_EXPR:
	    {
	      struct constraint_expr *c;
	      unsigned int i;
	      tree exp = TREE_OPERAND (t, 0);

	      get_constraint_for_1 (exp, results, true);

	      /* Taking an address flips DEREF back to SCALAR and
		 everything else to ADDRESSOF.  */
	      for (i = 0; VEC_iterate (ce_s, *results, i, c); i++)
		{
		  if (c->type == DEREF)
		    c->type = SCALAR;
		  else
		    c->type = ADDRESSOF;
		}
	      return;
	    }
	    break;
	  default:;
	  }
	break;
      }
    case tcc_reference:
      {
	switch (TREE_CODE (t))
	  {
	  case INDIRECT_REF:
	    {
	      get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
	      do_deref (results);
	      return;
	    }
	  case ARRAY_REF:
	  case ARRAY_RANGE_REF:
	  case COMPONENT_REF:
	    get_constraint_for_component_ref (t, results, address_p);
	    return;
	  default:;
	  }
	break;
      }
    case tcc_exceptional:
      {
	switch (TREE_CODE (t))
	  {
	  case SSA_NAME:
	    {
	      get_constraint_for_ssa_var (t, results, address_p);
	      return;
	    }
	  default:;
	  }
	break;
      }
    case tcc_declaration:
      {
	get_constraint_for_ssa_var (t, results, address_p);
	return;
      }
    default:;
    }

  /* The default fallback is a constraint from anything.  */
  temp.type = ADDRESSOF;
  temp.var = anything_id;
  temp.offset = 0;
  VEC_safe_push (ce_s, heap, *results, &temp);
}
3128
/* Given a gimple tree T, return the constraint expression vector for it.
   *RESULTS must be empty on entry.  */

static void
get_constraint_for (tree t, VEC (ce_s, heap) **results)
{
  gcc_assert (VEC_length (ce_s, *results) == 0);

  /* Entry point: the address of T is not being taken.  */
  get_constraint_for_1 (t, results, false);
}
3138
/* Handle the structure copy case where we have a simple structure copy
   between LHS and RHS that is of SIZE (in bits)

   For each field of the lhs variable (lhsfield)
     For each field of the rhs variable at lhsfield.offset (rhsfield)
       add the constraint lhsfield = rhsfield

   If we fail due to some kind of type unsafety or other thing we
   can't handle, return false.  We expect the caller to collapse the
   variable in that case.  */

static bool
do_simple_structure_copy (const struct constraint_expr lhs,
			  const struct constraint_expr rhs,
			  const unsigned HOST_WIDE_INT size)
{
  varinfo_t p = get_varinfo (lhs.var);
  unsigned HOST_WIDE_INT pstart, last;
  pstart = p->offset;
  last = p->offset + size;
  /* Walk the lhs fields that fall inside the copied region.  */
  for (; p && p->offset < last; p = p->next)
    {
      varinfo_t q;
      struct constraint_expr templhs = lhs;
      struct constraint_expr temprhs = rhs;
      unsigned HOST_WIDE_INT fieldoffset;

      templhs.var = p->id;
      q = get_varinfo (temprhs.var);
      /* Find the rhs field at the same relative offset.  */
      fieldoffset = p->offset - pstart;
      q = first_vi_for_offset (q, q->offset + fieldoffset);
      if (!q)
	/* No matching rhs field: bail and let the caller collapse.  */
	return false;
      temprhs.var = q->id;
      process_constraint (new_constraint (templhs, temprhs));
    }
  return true;
}
3177
3178
3179 /* Handle the structure copy case where we have a structure copy between a
3180 aggregate on the LHS and a dereference of a pointer on the RHS
3181 that is of SIZE (in bits)
3182
3183 For each field of the lhs variable (lhsfield)
3184 rhs.offset = lhsfield->offset
3185 add the constraint lhsfield = rhs
3186 */
3187
3188 static void
3189 do_rhs_deref_structure_copy (const struct constraint_expr lhs,
3190 const struct constraint_expr rhs,
3191 const unsigned HOST_WIDE_INT size)
3192 {
3193 varinfo_t p = get_varinfo (lhs.var);
3194 unsigned HOST_WIDE_INT pstart,last;
3195 pstart = p->offset;
3196 last = p->offset + size;
3197
3198 for (; p && p->offset < last; p = p->next)
3199 {
3200 varinfo_t q;
3201 struct constraint_expr templhs = lhs;
3202 struct constraint_expr temprhs = rhs;
3203 unsigned HOST_WIDE_INT fieldoffset;
3204
3205
3206 if (templhs.type == SCALAR)
3207 templhs.var = p->id;
3208 else
3209 templhs.offset = p->offset;
3210
3211 q = get_varinfo (temprhs.var);
3212 fieldoffset = p->offset - pstart;
3213 temprhs.offset += fieldoffset;
3214 process_constraint (new_constraint (templhs, temprhs));
3215 }
3216 }
3217
3218 /* Handle the structure copy case where we have a structure copy
3219 between an aggregate on the RHS and a dereference of a pointer on
3220 the LHS that is of SIZE (in bits)
3221
3222 For each field of the rhs variable (rhsfield)
3223 lhs.offset = rhsfield->offset
3224 add the constraint lhs = rhsfield
3225 */
3226
3227 static void
3228 do_lhs_deref_structure_copy (const struct constraint_expr lhs,
3229 const struct constraint_expr rhs,
3230 const unsigned HOST_WIDE_INT size)
3231 {
3232 varinfo_t p = get_varinfo (rhs.var);
3233 unsigned HOST_WIDE_INT pstart,last;
3234 pstart = p->offset;
3235 last = p->offset + size;
3236
3237 for (; p && p->offset < last; p = p->next)
3238 {
3239 varinfo_t q;
3240 struct constraint_expr templhs = lhs;
3241 struct constraint_expr temprhs = rhs;
3242 unsigned HOST_WIDE_INT fieldoffset;
3243
3244
3245 if (temprhs.type == SCALAR)
3246 temprhs.var = p->id;
3247 else
3248 temprhs.offset = p->offset;
3249
3250 q = get_varinfo (templhs.var);
3251 fieldoffset = p->offset - pstart;
3252 templhs.offset += fieldoffset;
3253 process_constraint (new_constraint (templhs, temprhs));
3254 }
3255 }
3256
3257 /* Sometimes, frontends like to give us bad type information. This
3258 function will collapse all the fields from VAR to the end of VAR,
3259 into VAR, so that we treat those fields as a single variable.
3260 We return the variable they were collapsed into. */
3261
3262 static unsigned int
3263 collapse_rest_of_var (unsigned int var)
3264 {
3265 varinfo_t currvar = get_varinfo (var);
3266 varinfo_t field;
3267
3268 for (field = currvar->next; field; field = field->next)
3269 {
3270 if (dump_file)
3271 fprintf (dump_file, "Type safety: Collapsing var %s into %s\n",
3272 field->name, currvar->name);
3273
3274 gcc_assert (field->collapsed_to == 0);
3275 field->collapsed_to = currvar->id;
3276 }
3277
3278 currvar->next = NULL;
3279 currvar->size = currvar->fullsize - currvar->offset;
3280
3281 return currvar->id;
3282 }
3283
/* Handle aggregate copies by expanding into copies of the respective
   fields of the structures.  LHSOP and RHSOP are the gimple operands
   of the assignment.  */

static void
do_structure_copy (tree lhsop, tree rhsop)
{
  struct constraint_expr lhs, rhs, tmp;
  VEC (ce_s, heap) *lhsc = NULL, *rhsc = NULL;
  varinfo_t p;
  unsigned HOST_WIDE_INT lhssize;
  unsigned HOST_WIDE_INT rhssize;

  /* Pretend we are taking the address of the constraint exprs.
     We deal with walking the sub-fields ourselves.  */
  get_constraint_for_1 (lhsop, &lhsc, true);
  get_constraint_for_1 (rhsop, &rhsc, true);
  gcc_assert (VEC_length (ce_s, lhsc) == 1);
  gcc_assert (VEC_length (ce_s, rhsc) == 1);
  lhs = *(VEC_last (ce_s, lhsc));
  rhs = *(VEC_last (ce_s, rhsc));

  VEC_free (ce_s, heap, lhsc);
  VEC_free (ce_s, heap, rhsc);

  /* If we have special var = x, swap it around.  */
  if (lhs.var <= integer_id && !(get_varinfo (rhs.var)->is_special_var))
    {
      tmp = lhs;
      lhs = rhs;
      rhs = tmp;
    }

  /* This is fairly conservative for the RHS == ADDRESSOF case, in that it's
     possible it's something we could handle.  However, most cases falling
     into this are dealing with transparent unions, which are slightly
     weird.  */
  if (rhs.type == ADDRESSOF && !(get_varinfo (rhs.var)->is_special_var))
    {
      rhs.type = ADDRESSOF;
      rhs.var = anything_id;
    }

  /* If the RHS is a special var, or an addressof, set all the LHS fields to
     that special var.  */
  if (rhs.var <= integer_id)
    {
      for (p = get_varinfo (lhs.var); p; p = p->next)
	{
	  struct constraint_expr templhs = lhs;
	  struct constraint_expr temprhs = rhs;

	  if (templhs.type == SCALAR )
	    templhs.var = p->id;
	  else
	    templhs.offset += p->offset;
	  process_constraint (new_constraint (templhs, temprhs));
	}
    }
  else
    {
      tree rhstype = TREE_TYPE (rhsop);
      tree lhstype = TREE_TYPE (lhsop);
      tree rhstypesize;
      tree lhstypesize;

      /* For decls the decl size may be more precise than the type size.  */
      lhstypesize = DECL_P (lhsop) ? DECL_SIZE (lhsop) : TYPE_SIZE (lhstype);
      rhstypesize = DECL_P (rhsop) ? DECL_SIZE (rhsop) : TYPE_SIZE (rhstype);

      /* If we have a variably sized types on the rhs or lhs, and a deref
	 constraint, add the constraint, lhsconstraint = &ANYTHING.
	 This is conservatively correct because either the lhs is an unknown
	 sized var (if the constraint is SCALAR), or the lhs is a DEREF
	 constraint, and every variable it can point to must be unknown sized
	 anyway, so we don't need to worry about fields at all.  */
      if ((rhs.type == DEREF && TREE_CODE (rhstypesize) != INTEGER_CST)
	  || (lhs.type == DEREF && TREE_CODE (lhstypesize) != INTEGER_CST))
	{
	  rhs.var = anything_id;
	  rhs.type = ADDRESSOF;
	  rhs.offset = 0;
	  process_constraint (new_constraint (lhs, rhs));
	  return;
	}

      /* The size only really matters insofar as we don't set more or less of
	 the variable.  If we hit an unknown size var, the size should be the
	 whole darn thing.  */
      if (get_varinfo (rhs.var)->is_unknown_size_var)
	rhssize = ~0;
      else
	rhssize = TREE_INT_CST_LOW (rhstypesize);

      if (get_varinfo (lhs.var)->is_unknown_size_var)
	lhssize = ~0;
      else
	lhssize = TREE_INT_CST_LOW (lhstypesize);


      if (rhs.type == SCALAR && lhs.type == SCALAR)
	{
	  /* Plain var = var copy: try a field-by-field copy.  If the
	     fields cannot be matched up, collapse both variables and
	     emit a single constraint instead.  */
	  if (!do_simple_structure_copy (lhs, rhs, MIN (lhssize, rhssize)))
	    {
	      lhs.var = collapse_rest_of_var (lhs.var);
	      rhs.var = collapse_rest_of_var (rhs.var);
	      lhs.offset = 0;
	      rhs.offset = 0;
	      lhs.type = SCALAR;
	      rhs.type = SCALAR;
	      process_constraint (new_constraint (lhs, rhs));
	    }
	}
      else if (lhs.type != DEREF && rhs.type == DEREF)
	do_rhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
      else if (lhs.type == DEREF && rhs.type != DEREF)
	do_lhs_deref_structure_copy (lhs, rhs, MIN (lhssize, rhssize));
      else
	{
	  tree pointedtotype = lhstype;
	  tree tmpvar;

	  /* *lhs = *rhs: introduce a temporary and recurse, splitting
	     into tmp = *rhs followed by *lhs = tmp.  */
	  gcc_assert (rhs.type == DEREF && lhs.type == DEREF);
	  tmpvar = create_tmp_var_raw (pointedtotype, "structcopydereftmp");
	  do_structure_copy (tmpvar, rhsop);
	  do_structure_copy (lhsop, tmpvar);
	}
    }
}
3411
3412 /* Create a constraint ID = OP. */
3413
3414 static void
3415 make_constraint_to (unsigned id, tree op)
3416 {
3417 VEC(ce_s, heap) *rhsc = NULL;
3418 struct constraint_expr *c;
3419 struct constraint_expr includes;
3420 unsigned int j;
3421
3422 includes.var = id;
3423 includes.offset = 0;
3424 includes.type = SCALAR;
3425
3426 get_constraint_for (op, &rhsc);
3427 for (j = 0; VEC_iterate (ce_s, rhsc, j, c); j++)
3428 process_constraint (new_constraint (includes, *c));
3429 VEC_free (ce_s, heap, rhsc);
3430 }
3431
3432 /* Make constraints necessary to make OP escape. */
3433
3434 static void
3435 make_escape_constraint (tree op)
3436 {
3437 make_constraint_to (escaped_id, op);
3438 }
3439
3440 /* For non-IPA mode, generate constraints necessary for a call on the
3441 RHS. */
3442
3443 static void
3444 handle_rhs_call (gimple stmt)
3445 {
3446 unsigned i;
3447
3448 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3449 {
3450 tree arg = gimple_call_arg (stmt, i);
3451
3452 /* Find those pointers being passed, and make sure they end up
3453 pointing to anything. */
3454 if (could_have_pointers (arg))
3455 make_escape_constraint (arg);
3456 }
3457
3458 /* The static chain escapes as well. */
3459 if (gimple_call_chain (stmt))
3460 make_escape_constraint (gimple_call_chain (stmt));
3461 }
3462
/* For non-IPA mode, generate constraints necessary for a call
   that returns a pointer and assigns it to LHS.  This simply makes
   the LHS point to global and escaped variables.  FLAGS are the
   ECF_* flags of the called function.  */

static void
handle_lhs_call (tree lhs, int flags)
{
  VEC(ce_s, heap) *lhsc = NULL;
  struct constraint_expr rhsc;
  unsigned int j;
  struct constraint_expr *lhsp;

  get_constraint_for (lhs, &lhsc);

  if (flags & ECF_MALLOC)
    {
      /* Allocation functions return fresh heap memory; model it with
	 a heap variable associated with LHS.  */
      tree heapvar = heapvar_lookup (lhs);
      varinfo_t vi;

      if (heapvar == NULL)
	{
	  /* Create the heap variable lazily and cache it for LHS so
	     repeated scans reuse the same variable.  */
	  heapvar = create_tmp_var_raw (ptr_type_node, "HEAP");
	  DECL_EXTERNAL (heapvar) = 1;
	  get_var_ann (heapvar)->is_heapvar = 1;
	  if (gimple_referenced_vars (cfun))
	    add_referenced_var (heapvar);
	  heapvar_insert (lhs, heapvar);
	}

      rhsc.var = create_variable_info_for (heapvar,
					   alias_get_name (heapvar));
      vi = get_varinfo (rhsc.var);
      vi->is_artificial_var = 1;
      vi->is_heap_var = 1;
      rhsc.type = ADDRESSOF;
      rhsc.offset = 0;
    }
  else
    {
      /* Otherwise the returned pointer may point to anything that
	 has escaped.  */
      rhsc.var = escaped_id;
      rhsc.offset = 0;
      rhsc.type = ADDRESSOF;
    }
  for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
    process_constraint (new_constraint (*lhsp, rhsc));
  VEC_free (ce_s, heap, lhsc);
}
3510
3511 /* For non-IPA mode, generate constraints necessary for a call of a
3512 const function that returns a pointer in the statement STMT. */
3513
3514 static void
3515 handle_const_call (gimple stmt)
3516 {
3517 tree lhs = gimple_call_lhs (stmt);
3518 VEC(ce_s, heap) *lhsc = NULL;
3519 struct constraint_expr rhsc;
3520 unsigned int j, k;
3521 struct constraint_expr *lhsp;
3522 tree tmpvar;
3523 struct constraint_expr tmpc;
3524
3525 get_constraint_for (lhs, &lhsc);
3526
3527 /* If this is a nested function then it can return anything. */
3528 if (gimple_call_chain (stmt))
3529 {
3530 rhsc.var = anything_id;
3531 rhsc.offset = 0;
3532 rhsc.type = ADDRESSOF;
3533 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3534 process_constraint (new_constraint (*lhsp, rhsc));
3535 VEC_free (ce_s, heap, lhsc);
3536 return;
3537 }
3538
3539 /* We always use a temporary here, otherwise we end up with a quadratic
3540 amount of constraints for
3541 large_struct = const_call (large_struct);
3542 in field-sensitive PTA. */
3543 tmpvar = create_tmp_var_raw (ptr_type_node, "consttmp");
3544 tmpc = get_constraint_exp_for_temp (tmpvar);
3545
3546 /* May return addresses of globals. */
3547 rhsc.var = nonlocal_id;
3548 rhsc.offset = 0;
3549 rhsc.type = ADDRESSOF;
3550 process_constraint (new_constraint (tmpc, rhsc));
3551
3552 /* May return arguments. */
3553 for (k = 0; k < gimple_call_num_args (stmt); ++k)
3554 {
3555 tree arg = gimple_call_arg (stmt, k);
3556
3557 if (could_have_pointers (arg))
3558 {
3559 VEC(ce_s, heap) *argc = NULL;
3560 struct constraint_expr *argp;
3561 int i;
3562
3563 get_constraint_for (arg, &argc);
3564 for (i = 0; VEC_iterate (ce_s, argc, i, argp); i++)
3565 process_constraint (new_constraint (tmpc, *argp));
3566 VEC_free (ce_s, heap, argc);
3567 }
3568 }
3569
3570 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3571 process_constraint (new_constraint (*lhsp, tmpc));
3572
3573 VEC_free (ce_s, heap, lhsc);
3574 }
3575
3576 /* For non-IPA mode, generate constraints necessary for a call to a
3577 pure function in statement STMT. */
3578
3579 static void
3580 handle_pure_call (gimple stmt)
3581 {
3582 unsigned i;
3583
3584 /* Memory reached from pointer arguments is call-used. */
3585 for (i = 0; i < gimple_call_num_args (stmt); ++i)
3586 {
3587 tree arg = gimple_call_arg (stmt, i);
3588
3589 if (could_have_pointers (arg))
3590 make_constraint_to (callused_id, arg);
3591 }
3592
3593 /* The static chain is used as well. */
3594 if (gimple_call_chain (stmt))
3595 make_constraint_to (callused_id, gimple_call_chain (stmt));
3596
3597 /* If the call returns a pointer it may point to reachable memory
3598 from the arguments. Not so for malloc functions though. */
3599 if (gimple_call_lhs (stmt)
3600 && could_have_pointers (gimple_call_lhs (stmt))
3601 && !(gimple_call_flags (stmt) & ECF_MALLOC))
3602 {
3603 tree lhs = gimple_call_lhs (stmt);
3604 VEC(ce_s, heap) *lhsc = NULL;
3605 struct constraint_expr rhsc;
3606 struct constraint_expr *lhsp;
3607 unsigned j;
3608
3609 get_constraint_for (lhs, &lhsc);
3610
3611 /* If this is a nested function then it can return anything. */
3612 if (gimple_call_chain (stmt))
3613 {
3614 rhsc.var = anything_id;
3615 rhsc.offset = 0;
3616 rhsc.type = ADDRESSOF;
3617 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3618 process_constraint (new_constraint (*lhsp, rhsc));
3619 VEC_free (ce_s, heap, lhsc);
3620 return;
3621 }
3622
3623 /* Else just add the call-used memory here. Escaped variables
3624 and globals will be dealt with in handle_lhs_call. */
3625 rhsc.var = callused_id;
3626 rhsc.offset = 0;
3627 rhsc.type = ADDRESSOF;
3628 for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
3629 process_constraint (new_constraint (*lhsp, rhsc));
3630 VEC_free (ce_s, heap, lhsc);
3631 }
3632 }
3633
/* Walk statement T setting up aliasing constraints according to the
   references found in T.  This function is the main part of the
   constraint builder.  */

static void
find_func_aliases (gimple origt)
{
  gimple t = origt;
  VEC(ce_s, heap) *lhsc = NULL;
  VEC(ce_s, heap) *rhsc = NULL;
  struct constraint_expr *c;
  enum escape_type stmt_escape_type;

  /* Now build constraints expressions.  */
  if (gimple_code (t) == GIMPLE_PHI)
    {
      gcc_assert (!AGGREGATE_TYPE_P (TREE_TYPE (gimple_phi_result (t))));

      /* Only care about pointers and structures containing
	 pointers.  */
      if (could_have_pointers (gimple_phi_result (t)))
	{
	  size_t i;
	  unsigned int j;

	  /* For a phi node, assign all the arguments to
	     the result.  */
	  get_constraint_for (gimple_phi_result (t), &lhsc);
	  for (i = 0; i < gimple_phi_num_args (t); i++)
	    {
	      tree rhstype;
	      tree strippedrhs = PHI_ARG_DEF (t, i);

	      STRIP_NOPS (strippedrhs);
	      rhstype = TREE_TYPE (strippedrhs);
	      get_constraint_for (gimple_phi_arg_def (t, i), &rhsc);

	      /* Emit the cross product of lhs and rhs constraint
		 expressions, draining RHSC so the next phi argument
		 starts from an empty vector.  */
	      for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
		{
		  struct constraint_expr *c2;
		  while (VEC_length (ce_s, rhsc) > 0)
		    {
		      c2 = VEC_last (ce_s, rhsc);
		      process_constraint (new_constraint (*c, *c2));
		      VEC_pop (ce_s, rhsc);
		    }
		}
	    }
	}
    }
  /* In IPA mode, we need to generate constraints to pass call
     arguments through their calls.   There are two cases,
     either a GIMPLE_CALL returning a value, or just a plain
     GIMPLE_CALL when we are not.

     In non-ipa mode, we need to generate constraints for each
     pointer passed by address.  */
  else if (is_gimple_call (t))
    {
      if (!in_ipa_mode)
	{
	  int flags = gimple_call_flags (t);

	  /* Const functions can return their arguments and addresses
	     of global memory but not of escaped memory.  */
	  if (flags & ECF_CONST)
	    {
	      if (gimple_call_lhs (t)
		  && could_have_pointers (gimple_call_lhs (t)))
		handle_const_call (t);
	    }
	  /* Pure functions can return addresses in and of memory
	     reachable from their arguments, but they are not an escape
	     point for reachable memory of their arguments.  */
	  else if (flags & ECF_PURE)
	    {
	      handle_pure_call (t);
	      if (gimple_call_lhs (t)
		  && could_have_pointers (gimple_call_lhs (t)))
		handle_lhs_call (gimple_call_lhs (t), flags);
	    }
	  else
	    {
	      handle_rhs_call (t);
	      if (gimple_call_lhs (t)
		  && could_have_pointers (gimple_call_lhs (t)))
		handle_lhs_call (gimple_call_lhs (t), flags);
	    }
	}
      else
	{
	  tree lhsop;
	  varinfo_t fi;
	  int i = 1;
	  size_t j;
	  tree decl;

	  lhsop = gimple_call_lhs (t);
	  decl = gimple_call_fndecl (t);

	  /* If we can directly resolve the function being called, do so.
	     Otherwise, it must be some sort of indirect expression that
	     we should still be able to handle.  */
	  if (decl)
	    fi = get_vi_for_tree (decl);
	  else
	    {
	      decl = gimple_call_fn (t);
	      fi = get_vi_for_tree (decl);
	    }

	  /* Assign all the passed arguments to the appropriate incoming
	     parameters of the function.  Parameter I of a directly known
	     function is its sub-variable at offset I; for an indirect
	     call it is modeled as a DEREF at offset I.  */
	  for (j = 0; j < gimple_call_num_args (t); j++)
	    {
	      struct constraint_expr lhs ;
	      struct constraint_expr *rhsp;
	      tree arg = gimple_call_arg (t, j);

	      get_constraint_for (arg, &rhsc);
	      if (TREE_CODE (decl) != FUNCTION_DECL)
		{
		  lhs.type = DEREF;
		  lhs.var = fi->id;
		  lhs.offset = i;
		}
	      else
		{
		  lhs.type = SCALAR;
		  lhs.var = first_vi_for_offset (fi, i)->id;
		  lhs.offset = 0;
		}
	      while (VEC_length (ce_s, rhsc) != 0)
		{
		  rhsp = VEC_last (ce_s, rhsc);
		  process_constraint (new_constraint (lhs, *rhsp));
		  VEC_pop (ce_s, rhsc);
		}
	      i++;
	    }

	  /* If we are returning a value, assign it to the result.  At
	     this point I is one past the last argument, i.e. the offset
	     of the result sub-variable.  */
	  if (lhsop)
	    {
	      struct constraint_expr rhs;
	      struct constraint_expr *lhsp;
	      unsigned int j = 0;

	      get_constraint_for (lhsop, &lhsc);
	      if (TREE_CODE (decl) != FUNCTION_DECL)
		{
		  rhs.type = DEREF;
		  rhs.var = fi->id;
		  rhs.offset = i;
		}
	      else
		{
		  rhs.type = SCALAR;
		  rhs.var = first_vi_for_offset (fi, i)->id;
		  rhs.offset = 0;
		}
	      for (j = 0; VEC_iterate (ce_s, lhsc, j, lhsp); j++)
		process_constraint (new_constraint (*lhsp, rhs));
	    }
	}
    }
  /* Otherwise, just a regular assignment statement.  Only care about
     operations with pointer result, others are dealt with as escape
     points if they have pointer operands.  */
  else if (is_gimple_assign (t)
	   && could_have_pointers (gimple_assign_lhs (t)))
    {
      /* Otherwise, just a regular assignment statement.  */
      tree lhsop = gimple_assign_lhs (t);
      tree rhsop = (gimple_num_ops (t) == 2) ? gimple_assign_rhs1 (t) : NULL;

      if (rhsop && AGGREGATE_TYPE_P (TREE_TYPE (lhsop)))
	do_structure_copy (lhsop, rhsop);
      else
	{
	  unsigned int j;
	  struct constraint_expr temp;
	  get_constraint_for (lhsop, &lhsc);

	  if (gimple_assign_rhs_code (t) == POINTER_PLUS_EXPR)
	    get_constraint_for_ptr_offset (gimple_assign_rhs1 (t),
					   gimple_assign_rhs2 (t), &rhsc);
	  else if ((CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
		    && !(POINTER_TYPE_P (gimple_expr_type (t))
			 && !POINTER_TYPE_P (TREE_TYPE (rhsop))))
		   || gimple_assign_single_p (t))
	    get_constraint_for (rhsop, &rhsc);
	  else
	    {
	      /* All other operations are handled conservatively:
		 the result may point to anything.  */
	      temp.type = ADDRESSOF;
	      temp.var = anything_id;
	      temp.offset = 0;
	      VEC_safe_push (ce_s, heap, rhsc, &temp);
	    }
	  for (j = 0; VEC_iterate (ce_s, lhsc, j, c); j++)
	    {
	      struct constraint_expr *c2;
	      unsigned int k;

	      for (k = 0; VEC_iterate (ce_s, rhsc, k, c2); k++)
		process_constraint (new_constraint (*c, *c2));
	    }
	}
    }
  else if (gimple_code (t) == GIMPLE_CHANGE_DYNAMIC_TYPE)
    {
      unsigned int j;

      /* Variables whose dynamic type changes must not be pruned
	 based on type-based alias analysis.  */
      get_constraint_for (gimple_cdt_location (t), &lhsc);
      for (j = 0; VEC_iterate (ce_s, lhsc, j, c); ++j)
	get_varinfo (c->var)->no_tbaa_pruning = true;
    }

  stmt_escape_type = is_escape_site (t);
  if (stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
    {
      gcc_assert (is_gimple_assign (t));
      if (gimple_assign_rhs_code (t) == ADDR_EXPR)
	{
	  tree rhs = gimple_assign_rhs1 (t);
	  tree base = get_base_address (TREE_OPERAND (rhs, 0));
	  if (base
	      && (!DECL_P (base)
		  || !is_global_var (base)))
	    make_escape_constraint (rhs);
	}
      else if (get_gimple_rhs_class (gimple_assign_rhs_code (t))
	       == GIMPLE_SINGLE_RHS)
	{
	  if (could_have_pointers (gimple_assign_rhs1 (t)))
	    make_escape_constraint (gimple_assign_rhs1 (t));
	}
      else
	gcc_unreachable ();
    }
  else if (stmt_escape_type == ESCAPE_BAD_CAST)
    {
      gcc_assert (is_gimple_assign (t));
      gcc_assert (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (t))
		  || gimple_assign_rhs_code (t) == VIEW_CONVERT_EXPR);
      make_escape_constraint (gimple_assign_rhs1 (t));
    }
  else if (stmt_escape_type == ESCAPE_TO_ASM)
    {
      unsigned i;
      for (i = 0; i < gimple_asm_noutputs (t); ++i)
	{
	  tree op = TREE_VALUE (gimple_asm_output_op (t, i));
	  if (op && could_have_pointers (op))
	    /* Strictly we'd only need the constraints from ESCAPED and
	       NONLOCAL.  */
	    make_escape_constraint (op);
	}
      for (i = 0; i < gimple_asm_ninputs (t); ++i)
	{
	  tree op = TREE_VALUE (gimple_asm_input_op (t, i));
	  if (op && could_have_pointers (op))
	    /* Strictly we'd only need the constraint to ESCAPED.  */
	    make_escape_constraint (op);
	}
    }

  /* After promoting variables and computing aliasing we will
     need to re-scan most statements.  FIXME: Try to minimize the
     number of statements re-scanned.  It's not really necessary to
     re-scan *all* statements.  */
  if (!in_ipa_mode)
    gimple_set_modified (origt, true);
  VEC_free (ce_s, heap, rhsc);
  VEC_free (ce_s, heap, lhsc);
}
3911
3912
3913 /* Find the first varinfo in the same variable as START that overlaps with
3914 OFFSET.
3915 Effectively, walk the chain of fields for the variable START to find the
3916 first field that overlaps with OFFSET.
3917 Return NULL if we can't find one. */
3918
3919 static varinfo_t
3920 first_vi_for_offset (varinfo_t start, unsigned HOST_WIDE_INT offset)
3921 {
3922 varinfo_t curr = start;
3923 while (curr)
3924 {
3925 /* We may not find a variable in the field list with the actual
3926 offset when when we have glommed a structure to a variable.
3927 In that case, however, offset should still be within the size
3928 of the variable. */
3929 if (offset >= curr->offset && offset < (curr->offset + curr->size))
3930 return curr;
3931 curr = curr->next;
3932 }
3933 return NULL;
3934 }
3935
3936
3937 /* Insert the varinfo FIELD into the field list for BASE, at the front
3938 of the list. */
3939
3940 static void
3941 insert_into_field_list (varinfo_t base, varinfo_t field)
3942 {
3943 varinfo_t prev = base;
3944 varinfo_t curr = base->next;
3945
3946 field->next = curr;
3947 prev->next = field;
3948 }
3949
3950 /* Insert the varinfo FIELD into the field list for BASE, ordered by
3951 offset. */
3952
3953 static void
3954 insert_into_field_list_sorted (varinfo_t base, varinfo_t field)
3955 {
3956 varinfo_t prev = base;
3957 varinfo_t curr = base->next;
3958
3959 if (curr == NULL)
3960 {
3961 prev->next = field;
3962 field->next = NULL;
3963 }
3964 else
3965 {
3966 while (curr)
3967 {
3968 if (field->offset <= curr->offset)
3969 break;
3970 prev = curr;
3971 curr = curr->next;
3972 }
3973 field->next = prev->next;
3974 prev->next = field;
3975 }
3976 }
3977
/* This structure is used during pushing fields onto the fieldstack
   to track the offset of the field, since bitpos_of_field gives it
   relative to its immediate containing type, and we want it relative
   to the ultimate containing object.  */

struct fieldoff
{
  /* Offset from the base of the base containing object to this field.  */
  HOST_WIDE_INT offset;

  /* Size, in bits, of the field.  */
  unsigned HOST_WIDE_INT size;

  /* True if the field's size is not known at compile time, i.e. its
     DECL_SIZE is absent or not a host integer (see
     push_fields_onto_fieldstack).  */
  unsigned has_unknown_size : 1;

  /* True if the field may contain pointers, as determined by
     could_have_pointers.  */
  unsigned may_have_pointers : 1;
};
typedef struct fieldoff fieldoff_s;

DEF_VEC_O(fieldoff_s);
DEF_VEC_ALLOC_O(fieldoff_s,heap);
3999
4000 /* qsort comparison function for two fieldoff's PA and PB */
4001
4002 static int
4003 fieldoff_compare (const void *pa, const void *pb)
4004 {
4005 const fieldoff_s *foa = (const fieldoff_s *)pa;
4006 const fieldoff_s *fob = (const fieldoff_s *)pb;
4007 unsigned HOST_WIDE_INT foasize, fobsize;
4008
4009 if (foa->offset < fob->offset)
4010 return -1;
4011 else if (foa->offset > fob->offset)
4012 return 1;
4013
4014 foasize = foa->size;
4015 fobsize = fob->size;
4016 if (foasize < fobsize)
4017 return -1;
4018 else if (foasize > fobsize)
4019 return 1;
4020 return 0;
4021 }
4022
4023 /* Sort a fieldstack according to the field offset and sizes. */
4024 static void
4025 sort_fieldstack (VEC(fieldoff_s,heap) *fieldstack)
4026 {
4027 qsort (VEC_address (fieldoff_s, fieldstack),
4028 VEC_length (fieldoff_s, fieldstack),
4029 sizeof (fieldoff_s),
4030 fieldoff_compare);
4031 }
4032
4033 /* Return true if V is a tree that we can have subvars for.
4034 Normally, this is any aggregate type. Also complex
4035 types which are not gimple registers can have subvars. */
4036
4037 static inline bool
4038 var_can_have_subvars (const_tree v)
4039 {
4040 /* Volatile variables should never have subvars. */
4041 if (TREE_THIS_VOLATILE (v))
4042 return false;
4043
4044 /* Non decls or memory tags can never have subvars. */
4045 if (!DECL_P (v) || MTAG_P (v))
4046 return false;
4047
4048 /* Aggregates without overlapping fields can have subvars. */
4049 if (TREE_CODE (TREE_TYPE (v)) == RECORD_TYPE)
4050 return true;
4051
4052 return false;
4053 }
4054
4055 /* Given a TYPE, and a vector of field offsets FIELDSTACK, push all
4056 the fields of TYPE onto fieldstack, recording their offsets along
4057 the way.
4058
4059 OFFSET is used to keep track of the offset in this entire
4060 structure, rather than just the immediately containing structure.
4061 Returns the number of fields pushed. */
4062
4063 static int
4064 push_fields_onto_fieldstack (tree type, VEC(fieldoff_s,heap) **fieldstack,
4065 HOST_WIDE_INT offset)
4066 {
4067 tree field;
4068 int count = 0;
4069
4070 if (TREE_CODE (type) != RECORD_TYPE)
4071 return 0;
4072
4073 /* If the vector of fields is growing too big, bail out early.
4074 Callers check for VEC_length <= MAX_FIELDS_FOR_FIELD_SENSITIVE, make
4075 sure this fails. */
4076 if (VEC_length (fieldoff_s, *fieldstack) > MAX_FIELDS_FOR_FIELD_SENSITIVE)
4077 return 0;
4078
4079 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
4080 if (TREE_CODE (field) == FIELD_DECL)
4081 {
4082 bool push = false;
4083 int pushed = 0;
4084 HOST_WIDE_INT foff = bitpos_of_field (field);
4085
4086 if (!var_can_have_subvars (field)
4087 || TREE_CODE (TREE_TYPE (field)) == QUAL_UNION_TYPE
4088 || TREE_CODE (TREE_TYPE (field)) == UNION_TYPE)
4089 push = true;
4090 else if (!(pushed = push_fields_onto_fieldstack
4091 (TREE_TYPE (field), fieldstack, offset + foff))
4092 && (DECL_SIZE (field)
4093 && !integer_zerop (DECL_SIZE (field))))
4094 /* Empty structures may have actual size, like in C++. So
4095 see if we didn't push any subfields and the size is
4096 nonzero, push the field onto the stack. */
4097 push = true;
4098
4099 if (push)
4100 {
4101 fieldoff_s *pair = NULL;
4102 bool has_unknown_size = false;
4103
4104 if (!VEC_empty (fieldoff_s, *fieldstack))
4105 pair = VEC_last (fieldoff_s, *fieldstack);
4106
4107 if (!DECL_SIZE (field)
4108 || !host_integerp (DECL_SIZE (field), 1))
4109 has_unknown_size = true;
4110
4111 /* If adjacent fields do not contain pointers merge them. */
4112 if (pair
4113 && !pair->may_have_pointers
4114 && !could_have_pointers (field)
4115 && !pair->has_unknown_size
4116 && !has_unknown_size
4117 && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
4118 {
4119 pair = VEC_last (fieldoff_s, *fieldstack);
4120 pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
4121 }
4122 else
4123 {
4124 pair = VEC_safe_push (fieldoff_s, heap, *fieldstack, NULL);
4125 pair->offset = offset + foff;
4126 pair->has_unknown_size = has_unknown_size;
4127 if (!has_unknown_size)
4128 pair->size = TREE_INT_CST_LOW (DECL_SIZE (field));
4129 else
4130 pair->size = -1;
4131 pair->may_have_pointers = could_have_pointers (field);
4132 count++;
4133 }
4134 }
4135 else
4136 count += pushed;
4137 }
4138
4139 return count;
4140 }
4141
4142 /* Create a constraint ID = &FROM. */
4143
4144 static void
4145 make_constraint_from (varinfo_t vi, int from)
4146 {
4147 struct constraint_expr lhs, rhs;
4148
4149 lhs.var = vi->id;
4150 lhs.offset = 0;
4151 lhs.type = SCALAR;
4152
4153 rhs.var = from;
4154 rhs.offset = 0;
4155 rhs.type = ADDRESSOF;
4156 process_constraint (new_constraint (lhs, rhs));
4157 }
4158
4159 /* Count the number of arguments DECL has, and set IS_VARARGS to true
4160 if it is a varargs function. */
4161
4162 static unsigned int
4163 count_num_arguments (tree decl, bool *is_varargs)
4164 {
4165 unsigned int i = 0;
4166 tree t;
4167
4168 for (t = TYPE_ARG_TYPES (TREE_TYPE (decl));
4169 t;
4170 t = TREE_CHAIN (t))
4171 {
4172 if (TREE_VALUE (t) == void_type_node)
4173 break;
4174 i++;
4175 }
4176
4177 if (!t)
4178 *is_varargs = true;
4179 return i;
4180 }
4181
/* Create the variable info node for the function DECL, using NAME,
   and return the index of the variable we've created for the
   function.  The function variable's field chain gets one
   sub-variable per incoming argument plus one for the result.  */

static unsigned int
create_function_info_for (tree decl, const char *name)
{
  unsigned int index = VEC_length (varinfo_t, varmap);
  varinfo_t vi;
  tree arg;
  unsigned int i;
  bool is_varargs = false;

  /* Create the variable info.  */

  vi = new_var_info (decl, index, name);
  vi->decl = decl;
  vi->offset = 0;
  vi->size = 1;
  /* One slot per argument plus the slot for the function itself.  */
  vi->fullsize = count_num_arguments (decl, &is_varargs) + 1;
  insert_vi_for_tree (vi->decl, vi);
  VEC_safe_push (varinfo_t, heap, varmap, vi);

  stats.total_vars++;

  /* If it's varargs, we don't know how many arguments it has, so we
     can't do much.  */
  if (is_varargs)
    {
      vi->fullsize = ~0;
      vi->size = ~0;
      vi->is_unknown_size_var = true;
      return index;
    }


  arg = DECL_ARGUMENTS (decl);

  /* Set up variables for each argument.  Argument I lives at offset I
     in the function variable.  */
  for (i = 1; i < vi->fullsize; i++)
    {
      varinfo_t argvi;
      const char *newname;
      char *tempname;
      unsigned int newindex;
      tree argdecl = decl;

      if (arg)
	argdecl = arg;

      newindex = VEC_length (varinfo_t, varmap);
      asprintf (&tempname, "%s.arg%d", name, i-1);
      newname = ggc_strdup (tempname);
      free (tempname);

      argvi = new_var_info (argdecl, newindex, newname);
      argvi->decl = argdecl;
      VEC_safe_push (varinfo_t, heap, varmap, argvi);
      argvi->offset = i;
      argvi->size = 1;
      argvi->is_full_var = true;
      argvi->fullsize = vi->fullsize;
      insert_into_field_list_sorted (vi, argvi);
      stats.total_vars ++;
      if (arg)
	{
	  insert_vi_for_tree (arg, argvi);
	  arg = TREE_CHAIN (arg);
	}
    }

  /* Create a variable for the return var.  At this point I is one
     past the last argument offset.  */
  if (DECL_RESULT (decl) != NULL
      || !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (decl))))
    {
      varinfo_t resultvi;
      const char *newname;
      char *tempname;
      unsigned int newindex;
      tree resultdecl = decl;

      vi->fullsize ++;

      if (DECL_RESULT (decl))
	resultdecl = DECL_RESULT (decl);

      newindex = VEC_length (varinfo_t, varmap);
      asprintf (&tempname, "%s.result", name);
      newname = ggc_strdup (tempname);
      free (tempname);

      resultvi = new_var_info (resultdecl, newindex, newname);
      resultvi->decl = resultdecl;
      VEC_safe_push (varinfo_t, heap, varmap, resultvi);
      resultvi->offset = i;
      resultvi->size = 1;
      resultvi->fullsize = vi->fullsize;
      resultvi->is_full_var = true;
      insert_into_field_list_sorted (vi, resultvi);
      stats.total_vars ++;
      if (DECL_RESULT (decl))
	insert_vi_for_tree (DECL_RESULT (decl), resultvi);
    }
  return index;
}
4286
4287
4288 /* Return true if FIELDSTACK contains fields that overlap.
4289 FIELDSTACK is assumed to be sorted by offset. */
4290
4291 static bool
4292 check_for_overlaps (VEC (fieldoff_s,heap) *fieldstack)
4293 {
4294 fieldoff_s *fo = NULL;
4295 unsigned int i;
4296 HOST_WIDE_INT lastoffset = -1;
4297
4298 for (i = 0; VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
4299 {
4300 if (fo->offset == lastoffset)
4301 return true;
4302 lastoffset = fo->offset;
4303 }
4304 return false;
4305 }
4306
/* Create a varinfo structure for NAME and DECL, and add it to VARMAP.
   This will also create any varinfo structures necessary for fields
   of DECL.  Returns the index in VARMAP of the head variable.  */

static unsigned int
create_variable_info_for (tree decl, const char *name)
{
  unsigned int index = VEC_length (varinfo_t, varmap);
  varinfo_t vi;
  tree decl_type = TREE_TYPE (decl);
  tree declsize = DECL_P (decl) ? DECL_SIZE (decl) : TYPE_SIZE (decl_type);
  bool is_global = DECL_P (decl) ? is_global_var (decl) : false;
  VEC (fieldoff_s,heap) *fieldstack = NULL;

  /* In IPA mode, functions get their own dedicated variable layout.  */
  if (TREE_CODE (decl) == FUNCTION_DECL && in_ipa_mode)
    return create_function_info_for (decl, name);

  /* Collect the fields of DECL, but only when field-sensitivity is
     enabled and DECL carries no user noalias state and is not a heap
     variable.  */
  if (var_can_have_subvars (decl) && use_field_sensitive
      && (!var_ann (decl)
	  || var_ann (decl)->noalias_state == 0)
      && (!var_ann (decl)
	  || !var_ann (decl)->is_heapvar))
    push_fields_onto_fieldstack (decl_type, &fieldstack, 0);

  /* If the variable doesn't have subvars, we may end up needing to
     sort the field list and create fake variables for all the
     fields.  */
  vi = new_var_info (decl, index, name);
  vi->decl = decl;
  vi->offset = 0;
  /* A missing or non-constant DECL_SIZE makes this an
     unknown-size variable.  */
  if (!declsize
      || !host_integerp (declsize, 1))
    {
      vi->is_unknown_size_var = true;
      vi->fullsize = ~0;
      vi->size = ~0;
    }
  else
    {
      vi->fullsize = TREE_INT_CST_LOW (declsize);
      vi->size = vi->fullsize;
    }

  insert_vi_for_tree (vi->decl, vi);
  VEC_safe_push (varinfo_t, heap, varmap, vi);
  /* Globals that can contain pointers may point to escaped memory,
     unless the user declared them NO_ALIAS_ANYTHING, in which case
     they only point to themselves.  */
  if (is_global && (!flag_whole_program || !in_ipa_mode)
      && could_have_pointers (decl))
    {
      if (var_ann (decl)
	  && var_ann (decl)->noalias_state == NO_ALIAS_ANYTHING)
	make_constraint_from (vi, vi->id);
      else
	make_constraint_from (vi, escaped_id);
    }

  stats.total_vars++;
  if (use_field_sensitive
      && !vi->is_unknown_size_var
      && var_can_have_subvars (decl)
      && VEC_length (fieldoff_s, fieldstack) > 1
      && VEC_length (fieldoff_s, fieldstack) <= MAX_FIELDS_FOR_FIELD_SENSITIVE)
    {
      unsigned int newindex = VEC_length (varinfo_t, varmap);
      fieldoff_s *fo = NULL;
      bool notokay = false;
      unsigned int i;

      /* Reject the whole field decomposition if any field has an
	 unknown size or a negative offset.  */
      for (i = 0; !notokay && VEC_iterate (fieldoff_s, fieldstack, i, fo); i++)
	{
	  if (fo->has_unknown_size
	      || fo->offset < 0)
	    {
	      notokay = true;
	      break;
	    }
	}

      /* We can't sort them if we have a field with a variable sized type,
	 which will make notokay = true.  In that case, we are going to return
	 without creating varinfos for the fields anyway, so sorting them is a
	 waste to boot.  */
      if (!notokay)
	{
	  sort_fieldstack (fieldstack);
	  /* Due to some C++ FE issues, like PR 22488, we might end up
	     what appear to be overlapping fields even though they,
	     in reality, do not overlap.  Until the C++ FE is fixed,
	     we will simply disable field-sensitivity for these cases.  */
	  notokay = check_for_overlaps (fieldstack);
	}


      if (VEC_length (fieldoff_s, fieldstack) != 0)
	fo = VEC_index (fieldoff_s, fieldstack, 0);

      /* Fall back to a single unknown-size variable if field
	 decomposition was rejected.  */
      if (fo == NULL || notokay)
	{
	  vi->is_unknown_size_var = 1;
	  vi->fullsize = ~0;
	  vi->size = ~0;
	  vi->is_full_var = true;
	  VEC_free (fieldoff_s, heap, fieldstack);
	  return index;
	}

      /* The head variable VI represents the first field; create one
	 extra variable for each of the remaining fields, walking the
	 sorted stack from the last field down to the second.  */
      vi->size = fo->size;
      vi->offset = fo->offset;
      for (i = VEC_length (fieldoff_s, fieldstack) - 1;
	   i >= 1 && VEC_iterate (fieldoff_s, fieldstack, i, fo);
	   i--)
	{
	  varinfo_t newvi;
	  const char *newname = "NULL";
	  char *tempname;

	  newindex = VEC_length (varinfo_t, varmap);
	  /* Pretty names are only needed when dumping.  */
	  if (dump_file)
	    {
	      asprintf (&tempname, "%s." HOST_WIDE_INT_PRINT_DEC
			"+" HOST_WIDE_INT_PRINT_DEC,
			vi->name, fo->offset, fo->size);
	      newname = ggc_strdup (tempname);
	      free (tempname);
	    }
	  newvi = new_var_info (decl, newindex, newname);
	  newvi->offset = fo->offset;
	  newvi->size = fo->size;
	  newvi->fullsize = vi->fullsize;
	  insert_into_field_list (vi, newvi);
	  VEC_safe_push (varinfo_t, heap, varmap, newvi);
	  /* As for the head variable above, fields of globals that may
	     contain pointers may point to escaped memory.  */
	  if (is_global && (!flag_whole_program || !in_ipa_mode)
	      && fo->may_have_pointers)
	    make_constraint_from (newvi, escaped_id);

	  stats.total_vars++;
	}
    }
  else
    vi->is_full_var = true;

  VEC_free (fieldoff_s, heap, fieldstack);

  return index;
}
4451
4452 /* Print out the points-to solution for VAR to FILE. */
4453
4454 void
4455 dump_solution_for_var (FILE *file, unsigned int var)
4456 {
4457 varinfo_t vi = get_varinfo (var);
4458 unsigned int i;
4459 bitmap_iterator bi;
4460
4461 if (find (var) != var)
4462 {
4463 varinfo_t vipt = get_varinfo (find (var));
4464 fprintf (file, "%s = same as %s\n", vi->name, vipt->name);
4465 }
4466 else
4467 {
4468 fprintf (file, "%s = { ", vi->name);
4469 EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
4470 {
4471 fprintf (file, "%s ", get_varinfo (i)->name);
4472 }
4473 fprintf (file, "}");
4474 if (vi->no_tbaa_pruning)
4475 fprintf (file, " no-tbaa-pruning");
4476 fprintf (file, "\n");
4477 }
4478 }
4479
/* Print the points-to solution for VAR to stderr.  Uses stderr rather
   than stdout for consistency with debug_sa_points_to_info and the
   other debug_* entry points, so debugger output is not interleaved
   with normal compiler output.  */

void
debug_solution_for_var (unsigned int var)
{
  dump_solution_for_var (stderr, var);
}
4487
/* Create varinfo structures for all of the variables in the
   function for intraprocedural mode.  */

static void
intra_create_variable_infos (void)
{
  tree t;
  struct constraint_expr lhs, rhs;

  /* For each incoming pointer argument arg, create the constraint ARG
     = NONLOCAL or a dummy variable if flag_argument_noalias is set.  */
  for (t = DECL_ARGUMENTS (current_function_decl); t; t = TREE_CHAIN (t))
    {
      varinfo_t p;

      if (!could_have_pointers (t))
	continue;

      /* If flag_argument_noalias is set, then function pointer
	 arguments are guaranteed not to point to each other.  In that
	 case, create an artificial variable PARM_NOALIAS and the
	 constraint ARG = &PARM_NOALIAS.  */
      if (POINTER_TYPE_P (TREE_TYPE (t)) && flag_argument_noalias > 0)
	{
	  varinfo_t vi;
	  tree heapvar = heapvar_lookup (t);

	  lhs.offset = 0;
	  lhs.type = SCALAR;
	  lhs.var = get_vi_for_tree (t)->id;

	  /* Create (and remember) the PARM_NOALIAS heap variable the
	     first time this parameter is seen.  */
	  if (heapvar == NULL_TREE)
	    {
	      var_ann_t ann;
	      heapvar = create_tmp_var_raw (TREE_TYPE (TREE_TYPE (t)),
					    "PARM_NOALIAS");
	      DECL_EXTERNAL (heapvar) = 1;
	      if (gimple_referenced_vars (cfun))
		add_referenced_var (heapvar);

	      heapvar_insert (t, heapvar);

	      /* Translate the flag value into the matching noalias
		 state on the heap variable.  */
	      ann = get_var_ann (heapvar);
	      ann->is_heapvar = 1;
	      if (flag_argument_noalias == 1)
		ann->noalias_state = NO_ALIAS;
	      else if (flag_argument_noalias == 2)
		ann->noalias_state = NO_ALIAS_GLOBAL;
	      else if (flag_argument_noalias == 3)
		ann->noalias_state = NO_ALIAS_ANYTHING;
	      else
		gcc_unreachable ();
	    }

	  vi = get_vi_for_tree (heapvar);
	  vi->is_artificial_var = 1;
	  vi->is_heap_var = 1;
	  rhs.var = vi->id;
	  rhs.type = ADDRESSOF;
	  rhs.offset = 0;
	  /* Emit the constraint for every field of the argument.  */
	  for (p = get_varinfo (lhs.var); p; p = p->next)
	    {
	      struct constraint_expr temp = lhs;
	      temp.var = p->id;
	      process_constraint (new_constraint (temp, rhs));
	    }
	}
      else
	{
	  /* Default: each field of the argument may point to
	     nonlocal memory.  */
	  varinfo_t arg_vi = get_vi_for_tree (t);

	  for (p = arg_vi; p; p = p->next)
	    make_constraint_from (p, nonlocal_id);
	}
    }

  /* Add a constraint for the incoming static chain parameter.  */
  if (cfun->static_chain_decl != NULL_TREE)
    {
      varinfo_t p, chain_vi = get_vi_for_tree (cfun->static_chain_decl);

      for (p = chain_vi; p; p = p->next)
	make_constraint_from (p, nonlocal_id);
    }
}
4573
/* Structure used to put solution bitmaps in a hashtable so they can
   be shared among variables with the same points-to set.  */

typedef struct shared_bitmap_info
{
  bitmap pt_vars;	/* The shared points-to set.  */
  hashval_t hashcode;	/* Hash of PT_VARS, computed once at insertion.  */
} *shared_bitmap_info_t;
typedef const struct shared_bitmap_info *const_shared_bitmap_info_t;

/* Hashtable of shared_bitmap_info entries, hashed and compared by
   bitmap contents (see shared_bitmap_hash / shared_bitmap_eq).  */
static htab_t shared_bitmap_table;
4585
4586 /* Hash function for a shared_bitmap_info_t */
4587
4588 static hashval_t
4589 shared_bitmap_hash (const void *p)
4590 {
4591 const_shared_bitmap_info_t const bi = (const_shared_bitmap_info_t) p;
4592 return bi->hashcode;
4593 }
4594
4595 /* Equality function for two shared_bitmap_info_t's. */
4596
4597 static int
4598 shared_bitmap_eq (const void *p1, const void *p2)
4599 {
4600 const_shared_bitmap_info_t const sbi1 = (const_shared_bitmap_info_t) p1;
4601 const_shared_bitmap_info_t const sbi2 = (const_shared_bitmap_info_t) p2;
4602 return bitmap_equal_p (sbi1->pt_vars, sbi2->pt_vars);
4603 }
4604
4605 /* Lookup a bitmap in the shared bitmap hashtable, and return an already
4606 existing instance if there is one, NULL otherwise. */
4607
4608 static bitmap
4609 shared_bitmap_lookup (bitmap pt_vars)
4610 {
4611 void **slot;
4612 struct shared_bitmap_info sbi;
4613
4614 sbi.pt_vars = pt_vars;
4615 sbi.hashcode = bitmap_hash (pt_vars);
4616
4617 slot = htab_find_slot_with_hash (shared_bitmap_table, &sbi,
4618 sbi.hashcode, NO_INSERT);
4619 if (!slot)
4620 return NULL;
4621 else
4622 return ((shared_bitmap_info_t) *slot)->pt_vars;
4623 }
4624
4625
4626 /* Add a bitmap to the shared bitmap hashtable. */
4627
4628 static void
4629 shared_bitmap_add (bitmap pt_vars)
4630 {
4631 void **slot;
4632 shared_bitmap_info_t sbi = XNEW (struct shared_bitmap_info);
4633
4634 sbi->pt_vars = pt_vars;
4635 sbi->hashcode = bitmap_hash (pt_vars);
4636
4637 slot = htab_find_slot_with_hash (shared_bitmap_table, sbi,
4638 sbi->hashcode, INSERT);
4639 gcc_assert (!*slot);
4640 *slot = (void *) sbi;
4641 }
4642
4643
/* Set bits in INTO corresponding to the variable uids in solution set
   FROM, which came from variable PTR.
   For variables that are actually dereferenced, we also use type
   based alias analysis to prune the points-to sets.
   IS_DEREFED is true if PTR was directly dereferenced, which we use to
   help determine whether we are allowed to prune using TBAA.
   If NO_TBAA_PRUNING is true, we do not perform any TBAA pruning of
   the from set.  Returns the number of pruned variables.  */

static unsigned
set_uids_in_ptset (tree ptr, bitmap into, bitmap from, bool is_derefed,
		   bool no_tbaa_pruning)
{
  unsigned int i;
  bitmap_iterator bi;
  unsigned pruned = 0;

  gcc_assert (POINTER_TYPE_P (TREE_TYPE (ptr)));

  EXECUTE_IF_SET_IN_BITMAP (from, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      /* The only artificial variables that are allowed in a may-alias
	 set are heap variables.  */
      if (vi->is_artificial_var && !vi->is_heap_var)
	continue;

      if (TREE_CODE (vi->decl) == VAR_DECL
	  || TREE_CODE (vi->decl) == PARM_DECL
	  || TREE_CODE (vi->decl) == RESULT_DECL)
	{
	  /* Just add VI->DECL to the alias set.
	     Don't type prune artificial vars or points-to sets
	     for pointers that have not been dereferenced or with
	     type-based pruning disabled.  */
	  if (vi->is_artificial_var
	      || !is_derefed
	      || no_tbaa_pruning)
	    bitmap_set_bit (into, DECL_UID (vi->decl));
	  else
	    {
	      /* Keep the variable only if its alias set may conflict
		 with the alias set of what PTR dereferences to;
		 otherwise count it as pruned.  */
	      alias_set_type var_alias_set, mem_alias_set;
	      var_alias_set = get_alias_set (vi->decl);
	      mem_alias_set = get_alias_set (TREE_TYPE (TREE_TYPE (ptr)));
	      if (may_alias_p (SSA_NAME_VAR (ptr), mem_alias_set,
			       vi->decl, var_alias_set, true))
		bitmap_set_bit (into, DECL_UID (vi->decl));
	      else
		++pruned;
	    }
	}
    }

  return pruned;
}
4700
4701
4702 static bool have_alias_info = false;
4703
4704 /* Emit a note for the pointer initialization point DEF. */
4705
4706 static void
4707 emit_pointer_definition (gimple def)
4708 {
4709 if (gimple_code (def) == GIMPLE_PHI)
4710 {
4711 use_operand_p argp;
4712 ssa_op_iter oi;
4713
4714 FOR_EACH_PHI_ARG (argp, def, oi, SSA_OP_USE)
4715 {
4716 tree arg = USE_FROM_PTR (argp);
4717 if (TREE_CODE (arg) == SSA_NAME)
4718 emit_pointer_definition (SSA_NAME_DEF_STMT (arg));
4719 else
4720 inform (0, "initialized from %qE", arg);
4721 }
4722 }
4723 else if (!gimple_nop_p (def))
4724 inform (gimple_location (def), "initialized from here");
4725 }
4726
/* Emit a strict aliasing warning for dereferencing the pointer PTR.  */

static void
emit_alias_warning (tree ptr)
{
  gimple def = SSA_NAME_DEF_STMT (ptr);
  gimple use;
  imm_use_iterator ui;
  unsigned warned = 0;

  /* Look through every immediate use of PTR for a dereference of the
     form *PTR: as the base of the stored-to lhs, of the loaded rhs,
     or of a call argument.  */
  FOR_EACH_IMM_USE_STMT (use, ui, ptr)
    {
      tree deref = NULL_TREE;

      if (gimple_has_lhs (use))
	{
	  tree lhs = get_base_address (gimple_get_lhs (use));
	  if (lhs
	      && INDIRECT_REF_P (lhs)
	      && TREE_OPERAND (lhs, 0) == ptr)
	    deref = lhs;
	}
      if (gimple_assign_single_p (use))
	{
	  tree rhs = get_base_address (gimple_assign_rhs1 (use));
	  if (rhs
	      && INDIRECT_REF_P (rhs)
	      && TREE_OPERAND (rhs, 0) == ptr)
	    deref = rhs;
	}
      else if (is_gimple_call (use))
	{
	  unsigned i;
	  for (i = 0; i < gimple_call_num_args (use); ++i)
	    {
	      tree op = get_base_address (gimple_call_arg (use, i));
	      if (op
		  && INDIRECT_REF_P (op)
		  && TREE_OPERAND (op, 0) == ptr)
		deref = op;
	    }
	}
      /* TREE_NO_WARNING marks a dereference already warned about, so
	 each site is diagnosed at most once.  */
      if (deref
	  && !TREE_NO_WARNING (deref))
	{
	  TREE_NO_WARNING (deref) = 1;
	  warning_at (gimple_location (use), OPT_Wstrict_aliasing,
		      "dereferencing pointer %qD does break strict-aliasing "
		      "rules", SSA_NAME_VAR (ptr));
	  ++warned;
	}
    }
  /* If we warned, also point the user at where the pointer value was
     created.  */
  if (warned > 0)
    emit_pointer_definition (def);
}
4782
/* Given a pointer variable P, fill in its points-to set, or return
   false if we can't.
   Rather than return false for variables that point-to anything, we
   instead find the corresponding SMT, and merge in its aliases.  In
   addition to these aliases, we also set the bits for the SMT's
   themselves and their subsets, as SMT's are still in use by
   non-SSA_NAME's, and pruning may eliminate every one of their
   aliases.  In such a case, if we did not include the right set of
   SMT's in the points-to set of the variable, we'd end up with
   statements that do not conflict but should.  */

bool
find_what_p_points_to (tree p)
{
  tree lookup_p = p;
  varinfo_t vi;

  if (!have_alias_info)
    return false;

  /* For parameters, get at the points-to set for the actual parm
     decl.  */
  if (TREE_CODE (p) == SSA_NAME
      && TREE_CODE (SSA_NAME_VAR (p)) == PARM_DECL
      && SSA_NAME_IS_DEFAULT_DEF (p))
    lookup_p = SSA_NAME_VAR (p);

  vi = lookup_vi_for_tree (lookup_p);
  if (vi)
    {
      if (vi->is_artificial_var)
	return false;

      /* See if this is a field or a structure.  */
      if (vi->size != vi->fullsize)
	{
	  /* Nothing currently asks about structure fields directly,
	     but when they do, we need code here to hand back the
	     points-to set.  */
	  return false;
	}
      else
	{
	  struct ptr_info_def *pi = get_ptr_info (p);
	  unsigned int i, pruned;
	  bitmap_iterator bi;
	  bool was_pt_anything = false;
	  bitmap finished_solution;
	  bitmap result;

	  if (!pi->memory_tag_needed)
	    return false;

	  /* This variable may have been collapsed, let's get the real
	     variable.  */
	  vi = get_varinfo (find (vi->id));

	  /* Translate artificial variables into SSA_NAME_PTR_INFO
	     attributes.  */
	  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
	    {
	      varinfo_t vi = get_varinfo (i);

	      if (vi->is_artificial_var)
		{
		  /* FIXME.  READONLY should be handled better so that
		     flow insensitive aliasing can disregard writable
		     aliases.  */
		  if (vi->id == nothing_id)
		    pi->pt_null = 1;
		  else if (vi->id == anything_id
			   || vi->id == nonlocal_id
			   || vi->id == escaped_id
			   || vi->id == callused_id)
		    was_pt_anything = 1;
		  else if (vi->id == readonly_id)
		    was_pt_anything = 1;
		  else if (vi->id == integer_id)
		    was_pt_anything = 1;
		  else if (vi->is_heap_var)
		    pi->pt_global_mem = 1;
		}
	    }

	  /* Instead of doing extra work, simply do not create
	     points-to information for pt_anything pointers.  This
	     will cause the operand scanner to fall back to the
	     type-based SMT and its aliases.  Which is the best
	     we could do here for the points-to set as well.  */
	  if (was_pt_anything)
	    return false;

	  /* Share the final set of variables when possible.  */
	  finished_solution = BITMAP_GGC_ALLOC ();
	  stats.points_to_sets_created++;

	  pruned = set_uids_in_ptset (p, finished_solution, vi->solution,
				      pi->is_dereferenced,
				      vi->no_tbaa_pruning);
	  result = shared_bitmap_lookup (finished_solution);

	  /* Reuse an equal shared bitmap if one exists; otherwise
	     register the freshly built one.  */
	  if (!result)
	    {
	      shared_bitmap_add (finished_solution);
	      pi->pt_vars = finished_solution;
	    }
	  else
	    {
	      pi->pt_vars = result;
	      bitmap_clear (finished_solution);
	    }

	  if (bitmap_empty_p (pi->pt_vars))
	    {
	      pi->pt_vars = NULL;
	      /* An empty set after TBAA pruning of a dereferenced
		 pointer suggests a strict-aliasing violation; emit
		 the -Wstrict-aliasing diagnostic.  */
	      if (pruned > 0
		  && pi->is_dereferenced
		  && warn_strict_aliasing > 0
		  && !SSA_NAME_IS_DEFAULT_DEF (p))
		{
		  if (dump_file && dump_flags & TDF_DETAILS)
		    {
		      fprintf (dump_file, "alias warning for ");
		      print_generic_expr (dump_file, p, 0);
		      fprintf (dump_file, "\n");
		    }
		  emit_alias_warning (p);
		}
	    }

	  return true;
	}
    }

  return false;
}
4919
/* Mark the ESCAPED solution as call clobbered.  Returns false if
   pt_anything escaped which needs all locals that have their address
   taken marked call clobbered as well.  */

bool
clobber_what_escaped (void)
{
  varinfo_t vi;
  unsigned int i;
  bitmap_iterator bi;

  if (!have_alias_info)
    return false;

  /* This variable may have been collapsed, let's get the real
     variable for escaped_id.  */
  vi = get_varinfo (find (escaped_id));

  /* If call-used memory escapes we need to include it in the
     set of escaped variables.  This can happen if a pure
     function returns a pointer and this pointer escapes.  */
  if (bitmap_bit_p (vi->solution, callused_id))
    {
      varinfo_t cu_vi = get_varinfo (find (callused_id));
      bitmap_ior_into (vi->solution, cu_vi->solution);
    }

  /* Mark variables in the solution call-clobbered.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	{
	  /* nothing_id and readonly_id do not cause any
	     call clobber ops.  For anything_id and integer_id
	     we need to clobber all addressable vars.  */
	  if (vi->id == anything_id
	      || vi->id == integer_id)
	    return false;
	}

      /* Only artificial heap-vars are further interesting.  */
      if (vi->is_artificial_var && !vi->is_heap_var)
	continue;

      /* Clobber real decls unless they are known unmodifiable.  */
      if ((TREE_CODE (vi->decl) == VAR_DECL
	   || TREE_CODE (vi->decl) == PARM_DECL
	   || TREE_CODE (vi->decl) == RESULT_DECL)
	  && !unmodifiable_var_p (vi->decl))
	mark_call_clobbered (vi->decl, ESCAPE_TO_CALL);
    }

  return true;
}
4975
/* Compute the call-used variables from the CALLUSED solution and
   record them in cfun's call-used-vars bitmap.  */

void
compute_call_used_vars (void)
{
  varinfo_t vi;
  unsigned int i;
  bitmap_iterator bi;
  bool has_anything_id = false;

  if (!have_alias_info)
    return;

  /* This variable may have been collapsed, let's get the real
     variable for callused_id.  */
  vi = get_varinfo (find (callused_id));

  /* Mark variables in the solution call-used.  */
  EXECUTE_IF_SET_IN_BITMAP (vi->solution, 0, i, bi)
    {
      varinfo_t vi = get_varinfo (i);

      if (vi->is_artificial_var)
	{
	  /* For anything_id and integer_id we need to make
	     all local addressable vars call-used.  */
	  if (vi->id == anything_id
	      || vi->id == integer_id)
	    has_anything_id = true;
	}

      /* Only artificial heap-vars are further interesting.  */
      if (vi->is_artificial_var && !vi->is_heap_var)
	continue;

      if ((TREE_CODE (vi->decl) == VAR_DECL
	   || TREE_CODE (vi->decl) == PARM_DECL
	   || TREE_CODE (vi->decl) == RESULT_DECL)
	  && !unmodifiable_var_p (vi->decl))
	bitmap_set_bit (gimple_call_used_vars (cfun), DECL_UID (vi->decl));
    }

  /* If anything is call-used, add all addressable locals to the set.  */
  if (has_anything_id)
    bitmap_ior_into (gimple_call_used_vars (cfun),
		     gimple_addressable_vars (cfun));
}
5023
5024
5025 /* Dump points-to information to OUTFILE. */
5026
5027 void
5028 dump_sa_points_to_info (FILE *outfile)
5029 {
5030 unsigned int i;
5031
5032 fprintf (outfile, "\nPoints-to sets\n\n");
5033
5034 if (dump_flags & TDF_STATS)
5035 {
5036 fprintf (outfile, "Stats:\n");
5037 fprintf (outfile, "Total vars: %d\n", stats.total_vars);
5038 fprintf (outfile, "Non-pointer vars: %d\n",
5039 stats.nonpointer_vars);
5040 fprintf (outfile, "Statically unified vars: %d\n",
5041 stats.unified_vars_static);
5042 fprintf (outfile, "Dynamically unified vars: %d\n",
5043 stats.unified_vars_dynamic);
5044 fprintf (outfile, "Iterations: %d\n", stats.iterations);
5045 fprintf (outfile, "Number of edges: %d\n", stats.num_edges);
5046 fprintf (outfile, "Number of implicit edges: %d\n",
5047 stats.num_implicit_edges);
5048 }
5049
5050 for (i = 0; i < VEC_length (varinfo_t, varmap); i++)
5051 dump_solution_for_var (outfile, i);
5052 }
5053
5054
/* Like dump_sa_points_to_info, but prints to stderr; convenient for
   calling from the debugger.  */

void
debug_sa_points_to_info (void)
{
  dump_sa_points_to_info (stderr);
}
5062
5063
/* Initialize the always-existing constraint variables NULL, ANYTHING,
   READONLY, ESCAPED, NONLOCAL, CALLUSED and INTEGER, plus the initial
   constraints relating them.  */

static void
init_base_vars (void)
{
  struct constraint_expr lhs, rhs;

  /* Create the NULL variable, used to represent that a variable points
     to NULL.  */
  nothing_tree = create_tmp_var_raw (void_type_node, "NULL");
  var_nothing = new_var_info (nothing_tree, nothing_id, "NULL");
  insert_vi_for_tree (nothing_tree, var_nothing);
  var_nothing->is_artificial_var = 1;
  var_nothing->offset = 0;
  var_nothing->size = ~0;
  var_nothing->fullsize = ~0;
  var_nothing->is_special_var = 1;
  VEC_safe_push (varinfo_t, heap, varmap, var_nothing);

  /* Create the ANYTHING variable, used to represent that a variable
     points to some unknown piece of memory.  */
  anything_tree = create_tmp_var_raw (void_type_node, "ANYTHING");
  var_anything = new_var_info (anything_tree, anything_id, "ANYTHING");
  insert_vi_for_tree (anything_tree, var_anything);
  var_anything->is_artificial_var = 1;
  var_anything->size = ~0;
  var_anything->offset = 0;
  var_anything->next = NULL;
  var_anything->fullsize = ~0;
  var_anything->is_special_var = 1;

  /* Anything points to anything.  This makes deref constraints just
     work in the presence of linked list and other p = *p type loops,
     by saying that *ANYTHING = ANYTHING.  */
  VEC_safe_push (varinfo_t, heap, varmap, var_anything);
  lhs.type = SCALAR;
  lhs.var = anything_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;

  /* This specifically does not use process_constraint because
     process_constraint ignores all anything = anything constraints, since all
     but this one are redundant.  */
  VEC_safe_push (constraint_t, heap, constraints, new_constraint (lhs, rhs));

  /* Create the READONLY variable, used to represent that a variable
     points to readonly memory.  */
  readonly_tree = create_tmp_var_raw (void_type_node, "READONLY");
  var_readonly = new_var_info (readonly_tree, readonly_id, "READONLY");
  var_readonly->is_artificial_var = 1;
  var_readonly->offset = 0;
  var_readonly->size = ~0;
  var_readonly->fullsize = ~0;
  var_readonly->next = NULL;
  var_readonly->is_special_var = 1;
  insert_vi_for_tree (readonly_tree, var_readonly);
  VEC_safe_push (varinfo_t, heap, varmap, var_readonly);

  /* readonly memory points to anything, in order to make deref
     easier.  In reality, it points to anything the particular
     readonly variable can point to, but we don't track this
     separately.  */
  lhs.type = SCALAR;
  lhs.var = readonly_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = readonly_id;  /* FIXME */
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the ESCAPED variable, used to represent the set of escaped
     memory.  */
  escaped_tree = create_tmp_var_raw (void_type_node, "ESCAPED");
  var_escaped = new_var_info (escaped_tree, escaped_id, "ESCAPED");
  insert_vi_for_tree (escaped_tree, var_escaped);
  var_escaped->is_artificial_var = 1;
  var_escaped->offset = 0;
  var_escaped->size = ~0;
  var_escaped->fullsize = ~0;
  var_escaped->is_special_var = 0;
  VEC_safe_push (varinfo_t, heap, varmap, var_escaped);
  /* Sanity check: ESCAPED must be the fourth variable pushed, so that
     its index in VARMAP matches escaped_id.  */
  gcc_assert (VEC_index (varinfo_t, varmap, 3) == var_escaped);

  /* ESCAPED = *ESCAPED, because escaped is may-deref'd at calls, etc.  */
  lhs.type = SCALAR;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = DEREF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the NONLOCAL variable, used to represent the set of nonlocal
     memory.  */
  nonlocal_tree = create_tmp_var_raw (void_type_node, "NONLOCAL");
  var_nonlocal = new_var_info (nonlocal_tree, nonlocal_id, "NONLOCAL");
  insert_vi_for_tree (nonlocal_tree, var_nonlocal);
  var_nonlocal->is_artificial_var = 1;
  var_nonlocal->offset = 0;
  var_nonlocal->size = ~0;
  var_nonlocal->fullsize = ~0;
  var_nonlocal->is_special_var = 1;
  VEC_safe_push (varinfo_t, heap, varmap, var_nonlocal);

  /* Nonlocal memory points to escaped (which includes nonlocal),
     in order to make deref easier.  */
  lhs.type = SCALAR;
  lhs.var = nonlocal_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the CALLUSED variable, used to represent the set of call-used
     memory.  */
  callused_tree = create_tmp_var_raw (void_type_node, "CALLUSED");
  var_callused = new_var_info (callused_tree, callused_id, "CALLUSED");
  insert_vi_for_tree (callused_tree, var_callused);
  var_callused->is_artificial_var = 1;
  var_callused->offset = 0;
  var_callused->size = ~0;
  var_callused->fullsize = ~0;
  var_callused->is_special_var = 0;
  VEC_safe_push (varinfo_t, heap, varmap, var_callused);

  /* CALLUSED = *CALLUSED, because call-used is may-deref'd at calls, etc.  */
  lhs.type = SCALAR;
  lhs.var = callused_id;
  lhs.offset = 0;
  rhs.type = DEREF;
  rhs.var = callused_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* Create the INTEGER variable, used to represent that a variable points
     to an INTEGER.  */
  integer_tree = create_tmp_var_raw (void_type_node, "INTEGER");
  var_integer = new_var_info (integer_tree, integer_id, "INTEGER");
  insert_vi_for_tree (integer_tree, var_integer);
  var_integer->is_artificial_var = 1;
  var_integer->size = ~0;
  var_integer->fullsize = ~0;
  var_integer->offset = 0;
  var_integer->next = NULL;
  var_integer->is_special_var = 1;
  VEC_safe_push (varinfo_t, heap, varmap, var_integer);

  /* INTEGER = ANYTHING, because we don't know where a dereference of
     a random integer will point to.  */
  lhs.type = SCALAR;
  lhs.var = integer_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = anything_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* *ESCAPED = &ESCAPED.  This is true because we have to assume
     everything pointed to by escaped can also point to escaped.  */
  lhs.type = DEREF;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = escaped_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));

  /* *ESCAPED = &NONLOCAL.  This is true because we have to assume
     everything pointed to by escaped can also point to nonlocal.  */
  lhs.type = DEREF;
  lhs.var = escaped_id;
  lhs.offset = 0;
  rhs.type = ADDRESSOF;
  rhs.var = nonlocal_id;
  rhs.offset = 0;
  process_constraint (new_constraint (lhs, rhs));
}
5245
/* Initialize things necessary to perform PTA: obstacks, allocation
   pools, the variable map, and the always-existing base variables.  */

static void
init_alias_vars (void)
{
  /* Field-sensitivity only makes sense if a variable may be split
     into more than one field.  */
  use_field_sensitive = (MAX_FIELDS_FOR_FIELD_SENSITIVE > 1);

  /* Bitmap obstacks for the solver; predbitmap_obstack is released
     again in remove_preds_and_fake_succs.  */
  bitmap_obstack_initialize (&pta_obstack);
  bitmap_obstack_initialize (&oldpta_obstack);
  bitmap_obstack_initialize (&predbitmap_obstack);

  constraint_pool = create_alloc_pool ("Constraint pool",
				       sizeof (struct constraint), 30);
  variable_info_pool = create_alloc_pool ("Variable info pool",
					  sizeof (struct variable_info), 30);
  constraints = VEC_alloc (constraint_t, heap, 8);
  varmap = VEC_alloc (varinfo_t, heap, 8);
  vi_for_tree = pointer_map_create ();

  memset (&stats, 0, sizeof (stats));
  /* Table for sharing identical points-to bitmaps; entries are
     malloc'd, hence free as the delete function.  */
  shared_bitmap_table = htab_create (511, shared_bitmap_hash,
				     shared_bitmap_eq, free);
  init_base_vars ();
}
5270
/* Remove the REF and ADDRESS edges from GRAPH, as well as all the
   predecessor edges.  */

static void
remove_preds_and_fake_succs (constraint_graph_t graph)
{
  unsigned int i;

  /* Clear the implicit ref and address nodes from the successor
     lists.  */
  for (i = 0; i < FIRST_REF_NODE; i++)
    {
      if (graph->succs[i])
	bitmap_clear_range (graph->succs[i], FIRST_REF_NODE,
			    FIRST_REF_NODE * 2);
    }

  /* Free the successor list for the non-ref nodes.  */
  for (i = FIRST_REF_NODE; i < graph->size; i++)
    {
      if (graph->succs[i])
	BITMAP_FREE (graph->succs[i]);
    }

  /* Now reallocate the size of the successor list as, and blow away
     the predecessor bitmaps.  */
  graph->size = VEC_length (varinfo_t, varmap);
  graph->succs = XRESIZEVEC (bitmap, graph->succs, graph->size);

  free (graph->implicit_preds);
  graph->implicit_preds = NULL;
  free (graph->preds);
  graph->preds = NULL;
  /* The predecessor bitmaps all lived on this obstack; releasing it
     frees them in one go.  */
  bitmap_obstack_release (&predbitmap_obstack);
}
5306
/* Compute the set of variables we can't TBAA prune.  Starting from the
   variables initially marked no_tbaa_pruning, propagate the flag
   along copy constraints and graph successor edges to a fixed point,
   then mark the affected pointer decls for the RTL alias oracle.  */

static void
compute_tbaa_pruning (void)
{
  unsigned int size = VEC_length (varinfo_t, varmap);
  unsigned int i;
  bool any;

  changed_count = 0;
  changed = sbitmap_alloc (size);
  sbitmap_zero (changed);

  /* Mark all initial no_tbaa_pruning nodes as changed.  */
  any = false;
  for (i = 0; i < size; ++i)
    {
      varinfo_t ivi = get_varinfo (i);

      /* Only representatives with outgoing work (successor edges or
	 complex constraints) need to be queued.  */
      if (find (i) == i && ivi->no_tbaa_pruning)
	{
	  any = true;
	  if ((graph->succs[i] && !bitmap_empty_p (graph->succs[i]))
	      || VEC_length (constraint_t, graph->complex[i]) > 0)
	    {
	      SET_BIT (changed, i);
	      ++changed_count;
	    }
	}
    }

  /* Iterate in topological order until no node changes any more.  */
  while (changed_count > 0)
    {
      struct topo_info *ti = init_topo_info ();
      ++stats.iterations;

      compute_topo_order (graph, ti);

      while (VEC_length (unsigned, ti->topo_order) != 0)
	{
	  bitmap_iterator bi;

	  i = VEC_pop (unsigned, ti->topo_order);

	  /* If this variable is not a representative, skip it.  */
	  if (find (i) != i)
	    continue;

	  /* If the node has changed, we need to process the complex
	     constraints and outgoing edges again.  */
	  if (TEST_BIT (changed, i))
	    {
	      unsigned int j;
	      constraint_t c;
	      VEC(constraint_t,heap) *complex = graph->complex[i];

	      RESET_BIT (changed, i);
	      --changed_count;

	      /* Process the complex copy constraints.  */
	      for (j = 0; VEC_iterate (constraint_t, complex, j, c); ++j)
		{
		  if (c->lhs.type == SCALAR && c->rhs.type == SCALAR)
		    {
		      varinfo_t lhsvi = get_varinfo (find (c->lhs.var));

		      if (!lhsvi->no_tbaa_pruning)
			{
			  lhsvi->no_tbaa_pruning = true;
			  if (!TEST_BIT (changed, lhsvi->id))
			    {
			      SET_BIT (changed, lhsvi->id);
			      ++changed_count;
			    }
			}
		    }
		}

	      /* Propagate to all successors.  */
	      EXECUTE_IF_IN_NONNULL_BITMAP (graph->succs[i], 0, j, bi)
		{
		  unsigned int to = find (j);
		  varinfo_t tovi = get_varinfo (to);

		  /* Don't propagate to ourselves.  */
		  if (to == i)
		    continue;

		  if (!tovi->no_tbaa_pruning)
		    {
		      tovi->no_tbaa_pruning = true;
		      if (!TEST_BIT (changed, to))
			{
			  SET_BIT (changed, to);
			  ++changed_count;
			}
		    }
		}
	    }
	}

      free_topo_info (ti);
    }

  sbitmap_free (changed);

  /* If any variable ended up unprunable, flag its decl so the RTL
     alias machinery is equally conservative.  */
  if (any)
    {
      for (i = 0; i < size; ++i)
	{
	  varinfo_t ivi = get_varinfo (i);
	  varinfo_t ivip = get_varinfo (find (i));

	  if (ivip->no_tbaa_pruning)
	    {
	      tree var = ivi->decl;

	      if (TREE_CODE (var) == SSA_NAME)
		var = SSA_NAME_VAR (var);

	      if (POINTER_TYPE_P (TREE_TYPE (var)))
		{
		  DECL_NO_TBAA_P (var) = 1;

		  /* Tell the RTL layer that this pointer can alias
		     anything.  */
		  DECL_POINTER_ALIAS_SET (var) = 0;
		}
	    }
	}
    }
}
5439
/* Create points-to sets for the current function.  See the comments
   at the start of the file for an algorithmic overview.  The phases
   are: constraint generation from every PHI and statement, offline
   variable substitution, online solving, then TBAA-pruning
   computation.  Sets have_alias_info on completion.  */

void
compute_points_to_sets (void)
{
  struct scc_info *si;
  basic_block bb;

  timevar_push (TV_TREE_PTA);

  init_alias_vars ();
  init_alias_heapvars ();

  intra_create_variable_infos ();

  /* Now walk all statements and derive aliases.  */
  FOR_EACH_BB (bb)
    {
      gimple_stmt_iterator gsi;

      /* Only PHIs whose result is a GIMPLE register generate
	 constraints here.  */
      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gimple phi = gsi_stmt (gsi);

	  if (is_gimple_reg (gimple_phi_result (phi)))
	    find_func_aliases (phi);
	}

      /* No gsi_next in the for-header: gsi_remove below already
	 advances the iterator when a statement is deleted.  */
      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	{
	  gimple stmt = gsi_stmt (gsi);

	  find_func_aliases (stmt);

	  /* The information in GIMPLE_CHANGE_DYNAMIC_TYPE statements
	     has now been captured, and we can remove them.  */
	  if (gimple_code (stmt) == GIMPLE_CHANGE_DYNAMIC_TYPE)
	    gsi_remove (&gsi, true);
	  else
	    gsi_next (&gsi);
	}
    }


  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file);
    }

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution\n");

  /* Twice the number of variables: room is left for the implicit
     ref/address nodes (compare the FIRST_REF_NODE ranges used in
     remove_preds_and_fake_succs).  */
  init_graph (VEC_length (varinfo_t, varmap) * 2);

  if (dump_file)
    fprintf (dump_file, "Building predecessor graph\n");
  build_pred_graph ();

  if (dump_file)
    fprintf (dump_file, "Detecting pointer and location "
	     "equivalences\n");
  si = perform_var_substitution (graph);

  if (dump_file)
    fprintf (dump_file, "Rewriting constraints and unifying "
	     "variables\n");
  rewrite_constraints (graph, si);
  free_var_substitution_info (si);

  build_succ_graph ();

  if (dump_file && (dump_flags & TDF_GRAPH))
    dump_constraint_graph (dump_file);

  move_complex_constraints (graph);

  if (dump_file)
    fprintf (dump_file, "Uniting pointer but not location equivalent "
	     "variables\n");
  unite_pointer_equivalences (graph);

  if (dump_file)
    fprintf (dump_file, "Finding indirect cycles\n");
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file)
    fprintf (dump_file, "Solving graph\n");

  solve_graph (graph);

  /* Derive the set of pointers that must not be TBAA-pruned from the
     solved graph.  */
  compute_tbaa_pruning ();

  if (dump_file)
    dump_sa_points_to_info (dump_file);

  have_alias_info = true;

  timevar_pop (TV_TREE_PTA);
}
5547
5548
/* Delete created points-to sets.  Releases every data structure that
   compute_points_to_sets / ipa_pta_execute built (shared bitmap
   table, tree-to-varinfo map, constraint vector, the constraint
   graph and its arrays, the varinfo and constraint allocation pools)
   and clears have_alias_info.  */

void
delete_points_to_sets (void)
{
  unsigned int i;

  htab_delete (shared_bitmap_table);
  if (dump_file && (dump_flags & TDF_STATS))
    fprintf (dump_file, "Points to sets created:%d\n",
	     stats.points_to_sets_created);

  pointer_map_destroy (vi_for_tree);
  bitmap_obstack_release (&pta_obstack);
  VEC_free (constraint_t, heap, constraints);

  /* Free every per-node complex constraint vector before freeing the
     array that holds them.  */
  for (i = 0; i < graph->size; i++)
    VEC_free (constraint_t, heap, graph->complex[i]);
  free (graph->complex);

  free (graph->rep);
  free (graph->succs);
  free (graph->pe);
  free (graph->pe_rep);
  free (graph->indirect_cycles);
  free (graph);

  VEC_free (varinfo_t, heap, varmap);
  free_alloc_pool (variable_info_pool);
  free_alloc_pool (constraint_pool);
  have_alias_info = false;
}
5581
5582 /* Return true if we should execute IPA PTA. */
5583 static bool
5584 gate_ipa_pta (void)
5585 {
5586 return (flag_ipa_pta
5587 /* Don't bother doing anything if the program has errors. */
5588 && !(errorcount || sorrycount));
5589 }
5590
/* Execute the driver for IPA PTA.  Builds function infos for every
   callgraph node, generates constraints from the bodies of all
   analyzed master clones, then runs the same substitution/solving
   pipeline as compute_points_to_sets across the whole program.
   Always returns 0 (no extra TODO flags).  */
static unsigned int
ipa_pta_execute (void)
{
  struct cgraph_node *node;
  struct scc_info *si;

  in_ipa_mode = 1;
  init_alias_heapvars ();
  init_alias_vars ();

  /* First pass over the callgraph: create a function info for each
     node; for externally visible functions every field escapes, so
     constrain them all from ANYTHING.  */
  for (node = cgraph_nodes; node; node = node->next)
    {
      if (!node->analyzed || cgraph_is_master_clone (node))
	{
	  unsigned int varid;

	  varid = create_function_info_for (node->decl,
					    cgraph_node_name (node));
	  if (node->local.externally_visible)
	    {
	      varinfo_t fi = get_varinfo (varid);
	      /* Walk the whole chain of sub-variables of the
		 function info.  */
	      for (; fi; fi = fi->next)
		make_constraint_from (fi, anything_id);
	    }
	}
    }
  /* Second pass: generate constraints from the bodies of all
     analyzed master clones.  */
  for (node = cgraph_nodes; node; node = node->next)
    {
      if (node->analyzed && cgraph_is_master_clone (node))
	{
	  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
	  basic_block bb;
	  tree old_func_decl = current_function_decl;
	  if (dump_file)
	    fprintf (dump_file,
		     "Generating constraints for %s\n",
		     cgraph_node_name (node));
	  /* Temporarily switch cfun/current_function_decl to the
	     function being analyzed; restored below.  */
	  push_cfun (func);
	  current_function_decl = node->decl;

	  FOR_EACH_BB_FN (bb, func)
	    {
	      gimple_stmt_iterator gsi;

	      /* Only PHIs whose result is a GIMPLE register generate
		 constraints.  */
	      for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
		   gsi_next (&gsi))
		{
		  gimple phi = gsi_stmt (gsi);

		  if (is_gimple_reg (gimple_phi_result (phi)))
		    find_func_aliases (phi);
		}

	      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
		find_func_aliases (gsi_stmt (gsi));
	    }
	  current_function_decl = old_func_decl;
	  pop_cfun ();
	}
      else
	{
	  /* Make point to anything.  */
	}
    }

  if (dump_file)
    {
      fprintf (dump_file, "Points-to analysis\n\nConstraints:\n\n");
      dump_constraints (dump_file);
    }

  if (dump_file)
    fprintf (dump_file,
	     "\nCollapsing static cycles and doing variable "
	     "substitution:\n");

  /* Same solving pipeline as compute_points_to_sets: offline
     substitution, graph construction, cycle detection, solving.  */
  init_graph (VEC_length (varinfo_t, varmap) * 2);
  build_pred_graph ();
  si = perform_var_substitution (graph);
  rewrite_constraints (graph, si);
  free_var_substitution_info (si);

  build_succ_graph ();
  move_complex_constraints (graph);
  unite_pointer_equivalences (graph);
  find_indirect_cycles (graph);

  /* Implicit nodes and predecessors are no longer necessary at this
     point.  */
  remove_preds_and_fake_succs (graph);

  if (dump_file)
    fprintf (dump_file, "\nSolving graph\n");

  solve_graph (graph);

  if (dump_file)
    dump_sa_points_to_info (dump_file);

  /* Tear everything down again; IPA PTA results are consumed via the
     dumps/decl flags, not kept live.  */
  in_ipa_mode = 0;
  delete_alias_heapvars ();
  delete_points_to_sets ();
  return 0;
}
5696
/* Pass descriptor for the simple-IPA points-to analysis pass,
   gated on -fipa-pta by gate_ipa_pta and driven by
   ipa_pta_execute.  */

struct simple_ipa_opt_pass pass_ipa_pta =
{
 {
  SIMPLE_IPA_PASS,
  "pta",		                /* name */
  gate_ipa_pta,			/* gate */
  ipa_pta_execute,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_IPA_PTA,		        /* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_update_ssa                       /* todo_flags_finish */
 }
};
5715
5716 /* Initialize the heapvar for statement mapping. */
5717 void
5718 init_alias_heapvars (void)
5719 {
5720 if (!heapvar_for_stmt)
5721 heapvar_for_stmt = htab_create_ggc (11, tree_map_hash, tree_map_eq,
5722 NULL);
5723 }
5724
5725 void
5726 delete_alias_heapvars (void)
5727 {
5728 htab_delete (heapvar_for_stmt);
5729 heapvar_for_stmt = NULL;
5730 }
5731
5732 #include "gt-tree-ssa-structalias.h"