1 /* Type based alias analysis.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation,
3 Inc.
4 Contributed by Kenneth Zadeck <zadeck@naturalbridge.com>
5
6 This file is part of GCC.
7
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
12
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
17
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
21
22 /* This pass determines which types in the program contain only
23 instances that are completely encapsulated by the compilation unit.
24 Those types that are encapsulated must also pass the further
25 requirement that there be no bad operations on any instances of
26 those types.
27
28 A great deal of freedom in compilation is allowed for the instances
29 of those types that pass these conditions.
30 */
31
32 /* The code in this module is called by the ipa pass manager. It
33 should be one of the later passes since its information is used by
34 the rest of the compilation. */
35
36 #include "config.h"
37 #include "system.h"
38 #include "coretypes.h"
39 #include "tm.h"
40 #include "tree.h"
41 #include "tree-flow.h"
42 #include "tree-inline.h"
43 #include "tree-pass.h"
44 #include "langhooks.h"
45 #include "pointer-set.h"
46 #include "ggc.h"
47 #include "ipa-utils.h"
48 #include "ipa-type-escape.h"
49 #include "c-common.h"
50 #include "gimple.h"
51 #include "cgraph.h"
52 #include "output.h"
53 #include "flags.h"
54 #include "timevar.h"
55 #include "diagnostic.h"
56 #include "langhooks.h"
57
58 /* Some of the aliasing code is called very early, before this phase is
59    run.  To assure that this is not a problem, we keep track of whether
60    this phase has been run.  */
61 static bool initialized = false;
62
63 /* Scratch bitmap for avoiding work. */
64 static bitmap been_there_done_that;
65 static bitmap bitmap_tmp;
66
67 /* There are two levels of escape that types can undergo.
68
69 EXPOSED_PARAMETER - some instance of the variable is
70 passed by value into an externally visible function or some
71 instance of the variable is passed out of an externally visible
72 function as a return value. In this case any of the fields of the
73 variable that are pointer types end up having their types marked as
74 FULL_ESCAPE.
75
76 FULL_ESCAPE - when bad things happen to good types. One of the
77 following things happens to the type: (a) either an instance of the
78 variable has its address passed to an externally visible function,
79 (b) the address is taken and some bad cast happens to the address
80 or (c) explicit arithmetic is done to the address.
81 */
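/* An illustrative sketch of the two escape levels.  The types and
   functions below are hypothetical, not from this unit:

     struct t { int i; };
     struct s { struct t *p; int n; };
     extern void ext_by_value (struct s);
     extern void ext_by_ref (struct s *);

     void f (struct s x)
     {
       ext_by_value (x);
           "struct s" becomes EXPOSED_PARAMETER; its pointer field
           causes "struct t" to be marked FULL_ESCAPE.
       ext_by_ref (&x);
           the address is passed to an external function, so
           "struct s" is marked FULL_ESCAPE.
     }
*/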
82
83 enum escape_t
84 {
85 EXPOSED_PARAMETER,
86 FULL_ESCAPE
87 };
88
89 /* The following two bit vectors global_types_* correspond to
90 previous cases above. During the analysis phase, a bit is set in
91 one of these vectors if an operation of the offending class is
92 discovered to happen on the associated type. */
93
94 static bitmap global_types_exposed_parameter;
95 static bitmap global_types_full_escape;
96
97 /* All of the types seen in this compilation unit. */
98 static bitmap global_types_seen;
99 /* Reverse map to take a canon uid and map it to a canon type. Uid's
100 are never manipulated unless they are associated with a canon
101 type. */
102 static splay_tree uid_to_canon_type;
103
104 /* Internal structure of type mapping code. This maps a canon type
105 name to its canon type. */
106 static splay_tree all_canon_types;
107
108 /* Map from type clones to the single canon type. */
109 static splay_tree type_to_canon_type;
110
111 /* A splay tree of bitmaps. An element X in the splay tree has a bit
112 set in its bitmap at TYPE_UID (TYPE_MAIN_VARIANT (Y)) if there was
113 an operation in the program of the form "&X.Y". */
114 static splay_tree uid_to_addressof_down_map;
115
116 /* A splay tree of bitmaps. An element Y in the splay tree has a bit
117 set in its bitmap at TYPE_UID (TYPE_MAIN_VARIANT (X)) if there was
118 an operation in the program of the form "&X.Y". */
119 static splay_tree uid_to_addressof_up_map;
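/* For example (illustrative): an expression "&x.f", where "x" has the
   hypothetical type "struct outer" and the field "f" has type
   "struct inner", sets the bit for "struct inner" in the down map entry
   of "struct outer", and the bit for "struct outer" in the up map entry
   of "struct inner".  */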
120
121 /* Tree to hold the subtype maps used to mark subtypes of escaped
122 types. */
123 static splay_tree uid_to_subtype_map;
124
125 /* Records tree nodes seen in cgraph_create_edges. Simply using
126 walk_tree_without_duplicates doesn't guarantee each node is visited
127 once because it gets a new htab upon each recursive call from
128 scan_for_refs. */
129 static struct pointer_set_t *visited_nodes;
130
131 /* Stmts visited by the walk_use_def_chains function, which is called
132    recursively.  */
133 static struct pointer_set_t *visited_stmts;
134
135 static bitmap_obstack ipa_obstack;
136
137 /* Static functions from this file that are used
138 before being defined. */
139 static unsigned int look_for_casts (tree);
140 static bool is_cast_from_non_pointer (tree, gimple, void *);
141
142 /* Get the name of TYPE or return the string "<UNNAMED>". */
143 static const char*
144 get_name_of_type (tree type)
145 {
146 tree name = TYPE_NAME (type);
147
148 if (!name)
149 /* Unnamed type, do what you like here. */
150 return "<UNNAMED>";
151
152 /* It will be a TYPE_DECL in the case of a typedef, otherwise, an
153 identifier_node */
154 if (TREE_CODE (name) == TYPE_DECL)
155 {
156 /* Each DECL has a DECL_NAME field which contains an
157 IDENTIFIER_NODE. (Some decls, most often labels, may have
158 zero as the DECL_NAME). */
159 if (DECL_NAME (name))
160 return IDENTIFIER_POINTER (DECL_NAME (name));
161 else
162 /* Unnamed type, do what you like here. */
163 return "<UNNAMED>";
164 }
165 else if (TREE_CODE (name) == IDENTIFIER_NODE)
166 return IDENTIFIER_POINTER (name);
167 else
168 return "<UNNAMED>";
169 }
170
171 struct type_brand_s
172 {
173 const char* name;
174 int seq;
175 };
176
177 /* Splay tree comparison function on type_brand_s structures. */
178
179 static int
180 compare_type_brand (splay_tree_key sk1, splay_tree_key sk2)
181 {
182 struct type_brand_s * k1 = (struct type_brand_s *) sk1;
183 struct type_brand_s * k2 = (struct type_brand_s *) sk2;
184
185 int value = strcmp(k1->name, k2->name);
186 if (value == 0)
187 return k2->seq - k1->seq;
188 else
189 return value;
190 }
191
192 /* All of the "unique_type" code is a hack to get around the sleazy
193    implementation used to compile more than one file.  Currently gcc does
194    not get rid of multiple instances of the same type that have been
195    collected from different compilation units.  */
196 /* This is a trivial algorithm for removing duplicate types. This
197 would not work for any language that used structural equivalence as
198 the basis of its type system. */
199 /* Return TYPE if no type compatible with TYPE has been seen so far,
200 otherwise return a type compatible with TYPE that has already been
201 processed. */
202
203 static tree
204 discover_unique_type (tree type)
205 {
206 struct type_brand_s * brand = XNEW (struct type_brand_s);
207 int i = 0;
208 splay_tree_node result;
209
210 brand->name = get_name_of_type (type);
211
212 while (1)
213 {
214 brand->seq = i++;
215 result = splay_tree_lookup (all_canon_types, (splay_tree_key) brand);
216
217 if (result)
218 {
219 /* Create an alias since this is just the same as
220 other_type. */
221 tree other_type = (tree) result->value;
222 if (types_compatible_p (type, other_type))
223 {
224 free (brand);
225 /* Insert this new type as an alias for other_type. */
226 splay_tree_insert (type_to_canon_type,
227 (splay_tree_key) type,
228 (splay_tree_value) other_type);
229 return other_type;
230 }
231 /* Not compatible, look for next instance with same name. */
232 }
233 else
234 {
235 /* No more instances, create new one since this is the first
236 time we saw this type. */
237 brand->seq = i++;
238 /* Insert the new brand. */
239 splay_tree_insert (all_canon_types,
240 (splay_tree_key) brand,
241 (splay_tree_value) type);
242
243 /* Insert this new type as an alias for itself. */
244 splay_tree_insert (type_to_canon_type,
245 (splay_tree_key) type,
246 (splay_tree_value) type);
247
248 /* Insert the uid for reverse lookup; */
249 splay_tree_insert (uid_to_canon_type,
250 (splay_tree_key) TYPE_UID (type),
251 (splay_tree_value) type);
252
253 bitmap_set_bit (global_types_seen, TYPE_UID (type));
254 return type;
255 }
256 }
257 }
258
259 /* Return true if TYPE is one of the type classes that we are willing
260 to analyze. This skips the goofy types like arrays of pointers to
261 methods. */
262 static bool
263 type_to_consider (tree type)
264 {
265 /* Strip the *'s off. */
266 type = TYPE_MAIN_VARIANT (type);
267 while (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
268 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
269
270 switch (TREE_CODE (type))
271 {
272 case BOOLEAN_TYPE:
273 case COMPLEX_TYPE:
274 case ENUMERAL_TYPE:
275 case INTEGER_TYPE:
276 case QUAL_UNION_TYPE:
277 case REAL_TYPE:
278 case FIXED_POINT_TYPE:
279 case RECORD_TYPE:
280 case UNION_TYPE:
281 case VECTOR_TYPE:
282 case VOID_TYPE:
283 return true;
284
285 default:
286 return false;
287 }
288 }
289
290 /* Get the canon type of TYPE. If SEE_THRU_PTRS is true, remove all
291 the POINTER_TOs and if SEE_THRU_ARRAYS is true, remove all of the
292 ARRAY_OFs and POINTER_TOs. */
293
294 static tree
295 get_canon_type (tree type, bool see_thru_ptrs, bool see_thru_arrays)
296 {
297 splay_tree_node result;
298 /* Strip the *'s off. */
299 if (!type || !type_to_consider (type))
300 return NULL;
301
302 type = TYPE_MAIN_VARIANT (type);
303 if (see_thru_arrays)
304 while (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
305 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
306
307 else if (see_thru_ptrs)
308 while (POINTER_TYPE_P (type))
309 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
310
311 result = splay_tree_lookup (type_to_canon_type, (splay_tree_key) type);
312
313 if (result == NULL)
314 return discover_unique_type (type);
315 else return (tree) result->value;
316 }
317
318 /* Same as GET_CANON_TYPE, except return the TYPE_ID rather than the
319 TYPE. */
320
321 static int
322 get_canon_type_uid (tree type, bool see_thru_ptrs, bool see_thru_arrays)
323 {
324 type = get_canon_type (type, see_thru_ptrs, see_thru_arrays);
325 if (type)
326 return TYPE_UID(type);
327 else return 0;
328 }
329
330 /* Return 0 if TYPE is a record or union type. Return a positive
331 number if TYPE is a pointer to a record or union. The number is
332 the number of pointer types stripped to get to the record or union
333 type. Return -1 if TYPE is none of the above. */
334
335 int
336 ipa_type_escape_star_count_of_interesting_type (tree type)
337 {
338 int count = 0;
339 /* Strip the *'s off. */
340 if (!type)
341 return -1;
342 type = TYPE_MAIN_VARIANT (type);
343 while (POINTER_TYPE_P (type))
344 {
345 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
346 count++;
347 }
348
349   /* We are interested in records and unions only.  */
350 if (TREE_CODE (type) == RECORD_TYPE
351 || TREE_CODE (type) == QUAL_UNION_TYPE
352 || TREE_CODE (type) == UNION_TYPE)
353 return count;
354 else
355 return -1;
356 }
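/* Illustrative examples (assuming a hypothetical "struct s"):
   "struct s" yields 0, "struct s **" yields 2, and "int *" yields -1,
   since the stripped type is not a record or union.  */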
357
358
359 /* Return 0 if TYPE is a record or union type.  Return a positive
360    number if TYPE is a pointer to, or array of, a record or union.  The
361    number is the number of pointer and array types stripped to get to the
362    record or union type.  Return -1 if TYPE is none of the above.  */
363
364 int
365 ipa_type_escape_star_count_of_interesting_or_array_type (tree type)
366 {
367 int count = 0;
368 /* Strip the *'s off. */
369 if (!type)
370 return -1;
371 type = TYPE_MAIN_VARIANT (type);
372 while (POINTER_TYPE_P (type) || TREE_CODE (type) == ARRAY_TYPE)
373 {
374 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
375 count++;
376 }
377
378   /* We are interested in records and unions only.  */
379 if (TREE_CODE (type) == RECORD_TYPE
380 || TREE_CODE (type) == QUAL_UNION_TYPE
381 || TREE_CODE (type) == UNION_TYPE)
382 return count;
383 else
384 return -1;
385 }
386
387
388 /* Return true if the record or union TYPE passed in does not escape
389    this compilation unit.  Note that all of the pointer-to's are removed
390    before testing since these may not be correct.  */
391
392 bool
393 ipa_type_escape_type_contained_p (tree type)
394 {
395 if (!initialized)
396 return false;
397 return !bitmap_bit_p (global_types_full_escape,
398 get_canon_type_uid (type, true, false));
399 }
400
401 /* Return true if a modification to a field of type FIELD_TYPE cannot
402 clobber a record of RECORD_TYPE. */
403
404 bool
405 ipa_type_escape_field_does_not_clobber_p (tree record_type, tree field_type)
406 {
407 splay_tree_node result;
408 int uid;
409
410 if (!initialized)
411 return false;
412
413 /* Strip off all of the pointer tos on the record type. Strip the
414 same number of pointer tos from the field type. If the field
415 type has fewer, it could not have been aliased. */
416 record_type = TYPE_MAIN_VARIANT (record_type);
417 field_type = TYPE_MAIN_VARIANT (field_type);
418 while (POINTER_TYPE_P (record_type))
419 {
420 record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_type));
421 if (POINTER_TYPE_P (field_type))
422 field_type = TYPE_MAIN_VARIANT (TREE_TYPE (field_type));
423 else
424 /* However, if field_type is a union, this quick test is not
425 correct since one of the variants of the union may be a
426 pointer to type and we cannot see across that here. So we
427 just strip the remaining pointer tos off the record type
428 and fall thru to the more precise code. */
429 if (TREE_CODE (field_type) == QUAL_UNION_TYPE
430 || TREE_CODE (field_type) == UNION_TYPE)
431 {
432 while (POINTER_TYPE_P (record_type))
433 record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_type));
434 break;
435 }
436 else
437 return true;
438 }
439
440 record_type = get_canon_type (record_type, true, true);
441 /* The record type must be contained. The field type may
442 escape. */
443 if (!ipa_type_escape_type_contained_p (record_type))
444 return false;
445
446 uid = TYPE_UID (record_type);
447 result = splay_tree_lookup (uid_to_addressof_down_map, (splay_tree_key) uid);
448
449 if (result)
450 {
451 bitmap field_type_map = (bitmap) result->value;
452 uid = get_canon_type_uid (field_type, true, true);
453 /* If the bit is there, the address was taken. If not, it
454 wasn't. */
455 return !bitmap_bit_p (field_type_map, uid);
456 }
457 else
458 /* No bitmap means no addresses were taken. */
459 return true;
460 }
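/* A sketch of how an aliasing client might consult this predicate; the
   variables "record_var" and "field_var" are hypothetical, not code from
   this file:

     if (ipa_type_escape_field_does_not_clobber_p (TREE_TYPE (record_var),
                                                    TREE_TYPE (field_var)))
       ... a store through "field_var" cannot clobber "record_var" ...
*/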
461
462
463 /* Add TYPE to the suspect type set indicated by ESCAPE_STATUS.  Return
464    the canonical version of TYPE, or NULL if TYPE is not interesting.  */
465
466 static tree
467 mark_type (tree type, enum escape_t escape_status)
468 {
469 bitmap map = NULL;
470 int uid;
471
472 type = get_canon_type (type, true, true);
473 if (!type)
474 return NULL;
475
476 switch (escape_status)
477 {
478 case EXPOSED_PARAMETER:
479 map = global_types_exposed_parameter;
480 break;
481 case FULL_ESCAPE:
482 map = global_types_full_escape;
483 break;
484 }
485
486 uid = TYPE_UID (type);
487 if (bitmap_bit_p (map, uid))
488 return type;
489 else
490 {
491 bitmap_set_bit (map, uid);
492 if (escape_status == FULL_ESCAPE)
493 {
494 /* Efficiency hack. When things are bad, do not mess around
495 with this type anymore. */
496 bitmap_set_bit (global_types_exposed_parameter, uid);
497 }
498 }
499 return type;
500 }
501
502 /* Add interesting TYPE to the suspect type set. If the set is
503 EXPOSED_PARAMETER and the TYPE is a pointer type, the set is
504 changed to FULL_ESCAPE. */
505
506 static void
507 mark_interesting_type (tree type, enum escape_t escape_status)
508 {
509 if (!type) return;
510 if (ipa_type_escape_star_count_of_interesting_type (type) >= 0)
511 {
512 if ((escape_status == EXPOSED_PARAMETER)
513 && POINTER_TYPE_P (type))
514 	/* EXPOSED_PARAMETERs are only structs or unions that are passed by
515 	   value.  Anything passed by reference to an external
516 	   function fully exposes the type.  */
517 mark_type (type, FULL_ESCAPE);
518 else
519 mark_type (type, escape_status);
520 }
521 }
522
523 /* Return true if PARENT is supertype of CHILD. Both types must be
524 known to be structures or unions. */
525
526 static bool
527 parent_type_p (tree parent, tree child)
528 {
529 int i;
530 tree binfo, base_binfo;
531 if (TYPE_BINFO (parent))
532 for (binfo = TYPE_BINFO (parent), i = 0;
533 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
534 {
535 tree binfotype = BINFO_TYPE (base_binfo);
536 if (binfotype == child)
537 return true;
538 else if (parent_type_p (binfotype, child))
539 return true;
540 }
541 if (TREE_CODE (parent) == UNION_TYPE
542 || TREE_CODE (parent) == QUAL_UNION_TYPE)
543 {
544 tree field;
545 /* Search all of the variants in the union to see if one of them
546 is the child. */
547 for (field = TYPE_FIELDS (parent);
548 field;
549 field = TREE_CHAIN (field))
550 {
551 tree field_type;
552 if (TREE_CODE (field) != FIELD_DECL)
553 continue;
554
555 field_type = TREE_TYPE (field);
556 if (field_type == child)
557 return true;
558 }
559
560 /* If we did not find it, recursively ask the variants if one of
561 their children is the child type. */
562 for (field = TYPE_FIELDS (parent);
563 field;
564 field = TREE_CHAIN (field))
565 {
566 tree field_type;
567 if (TREE_CODE (field) != FIELD_DECL)
568 continue;
569
570 field_type = TREE_TYPE (field);
571 if (TREE_CODE (field_type) == RECORD_TYPE
572 || TREE_CODE (field_type) == QUAL_UNION_TYPE
573 || TREE_CODE (field_type) == UNION_TYPE)
574 if (parent_type_p (field_type, child))
575 return true;
576 }
577 }
578
579 if (TREE_CODE (parent) == RECORD_TYPE)
580 {
581 tree field;
582 for (field = TYPE_FIELDS (parent);
583 field;
584 field = TREE_CHAIN (field))
585 {
586 tree field_type;
587 if (TREE_CODE (field) != FIELD_DECL)
588 continue;
589
590 field_type = TREE_TYPE (field);
591 if (field_type == child)
592 return true;
593 /* You can only cast to the first field so if it does not
594 match, quit. */
595 if (TREE_CODE (field_type) == RECORD_TYPE
596 || TREE_CODE (field_type) == QUAL_UNION_TYPE
597 || TREE_CODE (field_type) == UNION_TYPE)
598 {
599 if (parent_type_p (field_type, child))
600 return true;
601 else
602 break;
603 }
604 }
605 }
606 return false;
607 }
608
609 /* Return the number of pointer tos for TYPE and return TYPE with all
610 of these stripped off. */
611
612 static int
613 count_stars (tree* type_ptr)
614 {
615 tree type = *type_ptr;
616 int i = 0;
617 type = TYPE_MAIN_VARIANT (type);
618 while (POINTER_TYPE_P (type))
619 {
620 type = TYPE_MAIN_VARIANT (TREE_TYPE (type));
621 i++;
622 }
623
624 *type_ptr = type;
625 return i;
626 }
627
628 enum cast_type {
629 CT_UP = 0x1,
630 CT_DOWN = 0x2,
631 CT_SIDEWAYS = 0x4,
632 CT_USELESS = 0x8,
633 CT_FROM_P_BAD = 0x10,
634 CT_FROM_NON_P = 0x20,
635 CT_TO_NON_INTER = 0x40,
636 CT_FROM_MALLOC = 0x80,
637 CT_NO_CAST = 0x100
638 };
639
640 /* Check the cast FROM_TYPE to TO_TYPE. This function requires that
641 the two types have already passed the
642 ipa_type_escape_star_count_of_interesting_type test. */
643
644 static enum cast_type
645 check_cast_type (tree to_type, tree from_type)
646 {
647 int to_stars = count_stars (&to_type);
648 int from_stars = count_stars (&from_type);
649 if (to_stars != from_stars)
650 return CT_SIDEWAYS;
651
652 if (to_type == from_type)
653 return CT_USELESS;
654
655 if (parent_type_p (to_type, from_type)) return CT_UP;
656 if (parent_type_p (from_type, to_type)) return CT_DOWN;
657 return CT_SIDEWAYS;
658 }
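/* Illustrative classification (hypothetical types, not from this unit):
   with "struct a { int i; };" and "struct b { double d; };", a cast
   between "struct a *" and "struct b *" is CT_SIDEWAYS and causes both
   record types to be marked FULL_ESCAPE in check_cast; a cast between
   "struct a *" and "struct a **" is also CT_SIDEWAYS because the pointer
   depths differ.  Casts between a record and the record embedded as its
   first field are classified as CT_UP or CT_DOWN and are left unmarked.  */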
659
660 /* Return true if VAR is the result of a call to a malloc-like
661    function (one whose flags include ECF_MALLOC).  */
662
663 static bool
664 is_malloc_result (tree var)
665 {
666 gimple def_stmt;
667
668 if (!var)
669 return false;
670
671 if (SSA_NAME_IS_DEFAULT_DEF (var))
672 return false;
673
674 def_stmt = SSA_NAME_DEF_STMT (var);
675
676 if (!is_gimple_call (def_stmt))
677 return false;
678
679 if (var != gimple_call_lhs (def_stmt))
680 return false;
681
682 return ((gimple_call_flags (def_stmt) & ECF_MALLOC) != 0);
683
684 }
685
686 /* Check a cast FROM this variable, TO_TYPE. Mark the escaping types
687 if appropriate. Returns cast_type as detected. */
688
689 static enum cast_type
690 check_cast (tree to_type, tree from)
691 {
692 tree from_type = get_canon_type (TREE_TYPE (from), false, false);
693 bool to_interesting_type, from_interesting_type;
694 enum cast_type cast = CT_NO_CAST;
695
696 to_type = get_canon_type (to_type, false, false);
697 if (!from_type || !to_type || from_type == to_type)
698 return cast;
699
700 to_interesting_type =
701 ipa_type_escape_star_count_of_interesting_type (to_type) >= 0;
702 from_interesting_type =
703 ipa_type_escape_star_count_of_interesting_type (from_type) >= 0;
704
705 if (to_interesting_type)
706 if (from_interesting_type)
707 {
708 /* Both types are interesting. This can be one of four types
709 of cast: useless, up, down, or sideways. We do not care
710 about up or useless. Sideways casts are always bad and
711 both sides get marked as escaping. Downcasts are not
712 interesting here because if type is marked as escaping, all
713 of its subtypes escape. */
714 cast = check_cast_type (to_type, from_type);
715 switch (cast)
716 {
717 case CT_UP:
718 case CT_USELESS:
719 case CT_DOWN:
720 break;
721
722 case CT_SIDEWAYS:
723 mark_type (to_type, FULL_ESCAPE);
724 mark_type (from_type, FULL_ESCAPE);
725 break;
726
727 default:
728 break;
729 }
730 }
731 else
732 {
733       /* This code excludes two cases from marking as escaped:
734 
735       1. if this is a cast of an index of an array of structures/unions
736       that happens before accessing the array element, we should not
737       mark it as escaped.
738       2. if this is a cast from a local that is the result of a
739       call to malloc, do not mark the cast as bad.
740 
741       */
742
743 if (POINTER_TYPE_P (to_type) && !POINTER_TYPE_P (from_type))
744 cast = CT_FROM_NON_P;
745 else if (TREE_CODE (from) == SSA_NAME
746 && is_malloc_result (from))
747 cast = CT_FROM_MALLOC;
748 else
749 {
750 cast = CT_FROM_P_BAD;
751 mark_type (to_type, FULL_ESCAPE);
752 }
753 }
754 else if (from_interesting_type)
755 {
756 mark_type (from_type, FULL_ESCAPE);
757 cast = CT_TO_NON_INTER;
758 }
759
760 return cast;
761 }
762
763
764 /* Scan assignment statement S to see if there are any casts within it. */
765
766 static unsigned int
767 look_for_casts_stmt (gimple s)
768 {
769 unsigned int cast = 0;
770
771 gcc_assert (is_gimple_assign (s));
772
773 if (gimple_assign_cast_p (s))
774 {
775 tree castfromvar = gimple_assign_rhs1 (s);
776 cast |= check_cast (TREE_TYPE (gimple_assign_lhs (s)), castfromvar);
777 }
778 else
779 {
780 size_t i;
781 for (i = 0; i < gimple_num_ops (s); i++)
782 cast |= look_for_casts (gimple_op (s, i));
783 }
784
785 if (!cast)
786 cast = CT_NO_CAST;
787
788 return cast;
789 }
790
791
792 typedef struct cast
793 {
794 int type;
795 gimple stmt;
796 } cast_t;
797
798 /* This function is a callback for walk_use_def_chains function called
799 from is_array_access_through_pointer_and_index. */
800
801 static bool
802 is_cast_from_non_pointer (tree var, gimple def_stmt, void *data)
803 {
804 if (!def_stmt || !var)
805 return false;
806
807 if (gimple_code (def_stmt) == GIMPLE_PHI)
808 return false;
809
810 if (SSA_NAME_IS_DEFAULT_DEF (var))
811 return false;
812
813 if (is_gimple_assign (def_stmt))
814 {
815 use_operand_p use_p;
816 ssa_op_iter iter;
817 unsigned int cast = look_for_casts_stmt (def_stmt);
818
819 /* Check that only one cast happened, and it's of non-pointer
820 type. */
821 if ((cast & CT_FROM_NON_P) == (CT_FROM_NON_P)
822 && (cast & ~(CT_FROM_NON_P)) == 0)
823 {
824 ((cast_t *)data)->stmt = def_stmt;
825 ((cast_t *)data)->type++;
826
827 FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_ALL_USES)
828 {
829 walk_use_def_chains (USE_FROM_PTR (use_p),
830 is_cast_from_non_pointer, data, false);
831 if (((cast_t*)data)->type == -1)
832 break;
833 }
834 }
835 /* Check that there is no cast, or cast is not harmful. */
836 else if ((cast & CT_NO_CAST) == (CT_NO_CAST)
837 || (cast & CT_DOWN) == (CT_DOWN)
838 || (cast & CT_UP) == (CT_UP)
839 || (cast & CT_USELESS) == (CT_USELESS)
840 || (cast & CT_FROM_MALLOC) == (CT_FROM_MALLOC))
841 {
842 FOR_EACH_SSA_USE_OPERAND (use_p, def_stmt, iter, SSA_OP_ALL_USES)
843 {
844 walk_use_def_chains (USE_FROM_PTR (use_p),
845 is_cast_from_non_pointer, data, false);
846 if (((cast_t*)data)->type == -1)
847 break;
848 }
849 }
850 /* The cast is harmful. */
851 else
852 ((cast_t *)data)->type = -1;
853 }
854
855 if (((cast_t*)data)->type == -1)
856 return true;
857
858 return false;
859 }
860
861 /* When array element a_p[i] is accessed through the pointer a_p
862 and index i, it's translated into the following sequence
863 in gimple:
864
865 i.1_5 = (unsigned int) i_1;
866 D.1605_6 = i.1_5 * 16;
867 D.1606_7 = (struct str_t *) D.1605_6;
868 a_p.2_8 = a_p;
869 D.1608_9 = D.1606_7 + a_p.2_8;
870
871    OP0 and OP1 are of the same pointer types and stand for
872    D.1606_7 and a_p.2_8 or vice versa.
873 
874    This function checks that:
875 
876    1. one of OP0 and OP1 (D.1606_7) has undergone only one cast, from a
877    non-pointer type (D.1606_7 = (struct str_t *) D.1605_6;).
878 
879    2. the one of OP0 and OP1 that has undergone the cast from the
880    non-pointer type (D.1606_7) is actually generated by multiplying the
881    index by the size of the type to which both OP0 and OP1 point
882    (in this case D.1605_6 = i.1_5 * 16;).
883 
884    3. the address of the def of the var that was cast (D.1605_6)
885    has not been taken.  (How could that happen?)
886 
887    The following items are checked implicitly by the end of the algorithm:
888 
889    4. the other of OP0 and OP1 (a_p.2_8) has never been cast
890    (because if it had been cast to a pointer type, its type, which is also
891    the type of OP0 and OP1, would have been marked as escaped during
892    analysis of the casting stmt (when check_cast() is called
893    from scan_for_refs for that stmt)).
894 
895    5. the defs of OP0 and OP1 are not passed into an externally visible
896    function (because if they were, their type, which is also the type of OP0
897    and OP1, would have been marked as escaped by check_call, called from
898    scan_for_refs on the call stmt).
899 
900    Together, 1-5 guarantee that this is an access to an array element
901    through a pointer and an index.
902 */
903
904 bool
905 is_array_access_through_pointer_and_index (enum tree_code code, tree op0,
906 tree op1, tree *base, tree *offset,
907 gimple *offset_cast_stmt)
908 {
909 tree before_cast;
910 gimple before_cast_def_stmt;
911 cast_t op0_cast, op1_cast;
912
913 *base = NULL;
914 *offset = NULL;
915 *offset_cast_stmt = NULL;
916
917 /* Check 1. */
918 if (code == POINTER_PLUS_EXPR)
919 {
920 tree op0type = TYPE_MAIN_VARIANT (TREE_TYPE (op0));
921 tree op1type = TYPE_MAIN_VARIANT (TREE_TYPE (op1));
922
923 /* One of op0 and op1 is of pointer type and the other is numerical. */
924 if (POINTER_TYPE_P (op0type) && NUMERICAL_TYPE_CHECK (op1type))
925 {
926 *base = op0;
927 *offset = op1;
928 }
929 else if (POINTER_TYPE_P (op1type) && NUMERICAL_TYPE_CHECK (op0type))
930 {
931 *base = op1;
932 *offset = op0;
933 }
934 else
935 return false;
936 }
937 else
938 {
939 /* Init data for walk_use_def_chains function. */
940 op0_cast.type = op1_cast.type = 0;
941 op0_cast.stmt = op1_cast.stmt = NULL;
942
943 visited_stmts = pointer_set_create ();
944 walk_use_def_chains (op0, is_cast_from_non_pointer,(void *)(&op0_cast),
945 false);
946 pointer_set_destroy (visited_stmts);
947
948 visited_stmts = pointer_set_create ();
949 walk_use_def_chains (op1, is_cast_from_non_pointer,(void *)(&op1_cast),
950 false);
951 pointer_set_destroy (visited_stmts);
952
953 if (op0_cast.type == 1 && op1_cast.type == 0)
954 {
955 *base = op1;
956 *offset = op0;
957 *offset_cast_stmt = op0_cast.stmt;
958 }
959 else if (op0_cast.type == 0 && op1_cast.type == 1)
960 {
961 *base = op0;
962 *offset = op1;
963 *offset_cast_stmt = op1_cast.stmt;
964 }
965 else
966 return false;
967 }
968
969 /* Check 2.
970 offset_cast_stmt is of the form:
971 D.1606_7 = (struct str_t *) D.1605_6; */
972
973 if (*offset_cast_stmt)
974 {
975 before_cast = SINGLE_SSA_TREE_OPERAND (*offset_cast_stmt, SSA_OP_USE);
976 if (!before_cast)
977 return false;
978
979 if (SSA_NAME_IS_DEFAULT_DEF (before_cast))
980 return false;
981
982 before_cast_def_stmt = SSA_NAME_DEF_STMT (before_cast);
983 if (!before_cast_def_stmt)
984 return false;
985 }
986 else
987 before_cast_def_stmt = SSA_NAME_DEF_STMT (*offset);
988
989 /* before_cast_def_stmt should be of the form:
990 D.1605_6 = i.1_5 * 16; */
991
992 if (is_gimple_assign (before_cast_def_stmt))
993 {
994 /* We expect temporary here. */
995 if (!is_gimple_reg (gimple_assign_lhs (before_cast_def_stmt)))
996 return false;
997
998 if (gimple_assign_rhs_code (before_cast_def_stmt) == MULT_EXPR)
999 {
1000 tree arg0 = gimple_assign_rhs1 (before_cast_def_stmt);
1001 tree arg1 = gimple_assign_rhs2 (before_cast_def_stmt);
1002 tree unit_size =
1003 TYPE_SIZE_UNIT (TREE_TYPE (TYPE_MAIN_VARIANT (TREE_TYPE (op0))));
1004
1005 if (!(CONSTANT_CLASS_P (arg0)
1006 && simple_cst_equal (arg0, unit_size))
1007 && !(CONSTANT_CLASS_P (arg1)
1008 && simple_cst_equal (arg1, unit_size)))
1009 return false;
1010 }
1011 else
1012 return false;
1013 }
1014 else
1015 return false;
1016
1017   /* Check 3.
1018      Check that the address of D.1605_6 was not taken.
1019      FIXME: if D.1605_6 is a gimple reg then it cannot be addressable.  */
1020
1021 return true;
1022 }
1023
1024 /* Register the parameter and return types of function FN. The type
1025 ESCAPES if the function is visible outside of the compilation
1026 unit. */
1027 static void
1028 check_function_parameter_and_return_types (tree fn, bool escapes)
1029 {
1030 tree arg;
1031
1032 if (TYPE_ARG_TYPES (TREE_TYPE (fn)))
1033 {
1034 for (arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1035 arg && TREE_VALUE (arg) != void_type_node;
1036 arg = TREE_CHAIN (arg))
1037 {
1038 tree type = get_canon_type (TREE_VALUE (arg), false, false);
1039 if (escapes)
1040 mark_interesting_type (type, EXPOSED_PARAMETER);
1041 }
1042 }
1043 else
1044 {
1045 /* FIXME - According to Geoff Keating, we should never have to
1046 do this; the front ends should always process the arg list
1047 from the TYPE_ARG_LIST. However, Geoff is wrong, this code
1048 does seem to be live. */
1049
1050 for (arg = DECL_ARGUMENTS (fn); arg; arg = TREE_CHAIN (arg))
1051 {
1052 tree type = get_canon_type (TREE_TYPE (arg), false, false);
1053 if (escapes)
1054 mark_interesting_type (type, EXPOSED_PARAMETER);
1055 }
1056 }
1057 if (escapes)
1058 {
1059 tree type = get_canon_type (TREE_TYPE (TREE_TYPE (fn)), false, false);
1060 mark_interesting_type (type, EXPOSED_PARAMETER);
1061 }
1062 }
1063
1064 /* Check whether the variable T is the right kind of static variable for
1065    compilation unit scope escape analysis, and mark its type accordingly.  */
1066
1067 static inline void
1068 has_proper_scope_for_analysis (tree t)
1069 {
1070   /* If the variable has the "used" attribute, treat it as if it had
1071      been touched by the devil.  */
1072 tree type = get_canon_type (TREE_TYPE (t), false, false);
1073 if (!type) return;
1074
1075 if (lookup_attribute ("used", DECL_ATTRIBUTES (t)))
1076 {
1077 mark_interesting_type (type, FULL_ESCAPE);
1078 return;
1079 }
1080
1081 /* Do not want to do anything with volatile except mark any
1082 function that uses one to be not const or pure. */
1083 if (TREE_THIS_VOLATILE (t))
1084 return;
1085
1086 /* Do not care about a local automatic that is not static. */
1087 if (!TREE_STATIC (t) && !DECL_EXTERNAL (t))
1088 return;
1089
1090 if (DECL_EXTERNAL (t) || TREE_PUBLIC (t))
1091 {
1092 /* If the front end set the variable to be READONLY and
1093 constant, we can allow this variable in pure or const
1094 functions but the scope is too large for our analysis to set
1095 these bits ourselves. */
1096
1097 if (TREE_READONLY (t)
1098 && DECL_INITIAL (t)
1099 && is_gimple_min_invariant (DECL_INITIAL (t)))
1100 ; /* Read of a constant, do not change the function state. */
1101 else
1102 {
1103 /* The type escapes for all public and externs. */
1104 mark_interesting_type (type, FULL_ESCAPE);
1105 }
1106 }
1107 }
1108
1109 /* If T is a VAR_DECL for a static that we are interested in, add the
1110 uid to the bitmap. */
1111
1112 static void
1113 check_operand (tree t)
1114 {
1115 if (!t) return;
1116
1117   /* If this is an assignment from a function, register the types as
1118      escaping.  */
1119 if (TREE_CODE (t) == FUNCTION_DECL)
1120 check_function_parameter_and_return_types (t, true);
1121
1122 else if (TREE_CODE (t) == VAR_DECL)
1123 has_proper_scope_for_analysis (t);
1124 }
1125
1126 /* Examine tree T for references. */
1127
1128 static void
1129 check_tree (tree t)
1130 {
1131 if ((TREE_CODE (t) == EXC_PTR_EXPR) || (TREE_CODE (t) == FILTER_EXPR))
1132 return;
1133
1134   /* We also want to catch REALPART_EXPR and IMAGPART_EXPR here,
1135      but they are already included in handled_component_p.  */
1136 while (handled_component_p (t))
1137 {
1138 if (TREE_CODE (t) == ARRAY_REF)
1139 check_operand (TREE_OPERAND (t, 1));
1140 t = TREE_OPERAND (t, 0);
1141 }
1142
1143 if (INDIRECT_REF_P (t))
1144 /* || TREE_CODE (t) == MEM_REF) */
1145 check_tree (TREE_OPERAND (t, 0));
1146
1147 if (SSA_VAR_P (t) || (TREE_CODE (t) == FUNCTION_DECL))
1148 {
1149 check_operand (t);
1150 if (DECL_P (t) && DECL_INITIAL (t))
1151 check_tree (DECL_INITIAL (t));
1152 }
1153 }
1154
1155 /* Create an address_of edge FROM_TYPE.TO_TYPE. */
1156 static void
1157 mark_interesting_addressof (tree to_type, tree from_type)
1158 {
1159 int from_uid;
1160 int to_uid;
1161 bitmap type_map;
1162 splay_tree_node result;
1163
1164 from_type = get_canon_type (from_type, false, false);
1165 to_type = get_canon_type (to_type, false, false);
1166
1167 if (!from_type || !to_type)
1168 return;
1169
1170 from_uid = TYPE_UID (from_type);
1171 to_uid = TYPE_UID (to_type);
1172
1173 gcc_assert (ipa_type_escape_star_count_of_interesting_type (from_type) == 0);
1174
1175 /* Process the Y into X map pointer. */
1176 result = splay_tree_lookup (uid_to_addressof_down_map,
1177 (splay_tree_key) from_uid);
1178
1179 if (result)
1180 type_map = (bitmap) result->value;
1181 else
1182 {
1183 type_map = BITMAP_ALLOC (&ipa_obstack);
1184 splay_tree_insert (uid_to_addressof_down_map,
1185 from_uid,
1186 (splay_tree_value)type_map);
1187 }
1188 bitmap_set_bit (type_map, TYPE_UID (to_type));
1189
1190 /* Process the X into Y reverse map pointer. */
1191 result =
1192 splay_tree_lookup (uid_to_addressof_up_map, (splay_tree_key) to_uid);
1193
1194 if (result)
1195 type_map = (bitmap) result->value;
1196 else
1197 {
1198 type_map = BITMAP_ALLOC (&ipa_obstack);
1199 splay_tree_insert (uid_to_addressof_up_map,
1200 to_uid,
1201 (splay_tree_value)type_map);
1202 }
1203 bitmap_set_bit (type_map, TYPE_UID (from_type));
1204 }
1205
1206 /* Scan tree T to see if there are any addresses taken within T.  */
1207
1208 static void
1209 look_for_address_of (tree t)
1210 {
1211 if (TREE_CODE (t) == ADDR_EXPR)
1212 {
1213 tree x = get_base_var (t);
1214 tree cref = TREE_OPERAND (t, 0);
1215
1216       /* If we have an expression of the form "&a.b.c.d", mark a.b,
1217 	 b.c and c.d as having their addresses taken.  */
1218 tree fielddecl = NULL_TREE;
1219 while (cref!= x)
1220 {
1221 if (TREE_CODE (cref) == COMPONENT_REF)
1222 {
1223 fielddecl = TREE_OPERAND (cref, 1);
1224 mark_interesting_addressof (TREE_TYPE (fielddecl),
1225 DECL_FIELD_CONTEXT (fielddecl));
1226 }
1227 else if (TREE_CODE (cref) == ARRAY_REF)
1228 get_canon_type (TREE_TYPE (cref), false, false);
1229
1230 cref = TREE_OPERAND (cref, 0);
1231 }
1232
1233 if (TREE_CODE (x) == VAR_DECL)
1234 has_proper_scope_for_analysis (x);
1235 }
1236 }
1237
1238
1239 /* Scan tree T to see if there are any casts within it. */
1240
1241 static unsigned int
1242 look_for_casts (tree t)
1243 {
1244 unsigned int cast = 0;
1245
1246 if (is_gimple_cast (t) || TREE_CODE (t) == VIEW_CONVERT_EXPR)
1247 {
1248 tree castfromvar = TREE_OPERAND (t, 0);
1249 cast = cast | check_cast (TREE_TYPE (t), castfromvar);
1250 }
1251 else
1252 while (handled_component_p (t))
1253 {
1254 t = TREE_OPERAND (t, 0);
1255 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
1256 {
1257 /* This may be some part of a component ref.
1258 IE it may be a.b.VIEW_CONVERT_EXPR<weird_type>(c).d, AFAIK.
1259 castfromref will give you a.b.c, not a. */
1260 tree castfromref = TREE_OPERAND (t, 0);
1261 cast = cast | check_cast (TREE_TYPE (t), castfromref);
1262 }
1263 else if (TREE_CODE (t) == COMPONENT_REF)
1264 get_canon_type (TREE_TYPE (TREE_OPERAND (t, 1)), false, false);
1265 }
1266
1267 if (!cast)
1268 cast = CT_NO_CAST;
1269 return cast;
1270 }
1271
1272 /* Check to see if T is a read or address of operation on a static var
1273 we are interested in analyzing. */
1274
1275 static void
1276 check_rhs_var (tree t)
1277 {
1278 look_for_address_of (t);
1279 check_tree (t);
1280 }
1281
1282 /* Check to see if T is an assignment to a static var we are
1283 interested in analyzing. */
1284
1285 static void
1286 check_lhs_var (tree t)
1287 {
1288 check_tree (t);
1289 }
1290
1291 /* This is a scaled down version of get_asm_expr_operands from
1292    tree-ssa-operands.c.  The version there runs much later and assumes
1293    that aliasing information is already available.  Here we are just
1294    trying to find whether the set of inputs and outputs contains any
1295    references to, or address-of operations on, locals.  STMT is the
1296    asm statement being analyzed.  */
1297
1298 static void
1299 check_asm (gimple stmt)
1300 {
1301 size_t i;
1302
1303 for (i = 0; i < gimple_asm_noutputs (stmt); i++)
1304 check_lhs_var (gimple_asm_output_op (stmt, i));
1305
1306 for (i = 0; i < gimple_asm_ninputs (stmt); i++)
1307 check_rhs_var (gimple_asm_input_op (stmt, i));
1308
1309 /* There is no code here to check for asm memory clobbers. The
1310 casual maintainer might think that such code would be necessary,
1311 but that appears to be wrong. In other parts of the compiler,
1312 the asm memory clobbers are assumed to only clobber variables
1313 that are addressable. All types with addressable instances are
1314 assumed to already escape. So, we are protected here. */
1315 }
1316
1317
1318 /* Check the parameters of the function call CALL to mark the
1319    types that pass across the function boundary.  Also check to see if
1320    this is either an indirect call or a call outside the compilation
1321    unit.  */
1322
1323 static void
1324 check_call (gimple call)
1325 {
1326 tree callee_t = gimple_call_fndecl (call);
1327 struct cgraph_node* callee;
1328 enum availability avail = AVAIL_NOT_AVAILABLE;
1329 size_t i;
1330
1331 for (i = 0; i < gimple_call_num_args (call); i++)
1332 check_rhs_var (gimple_call_arg (call, i));
1333
1334 if (callee_t)
1335 {
1336 tree arg_type;
1337 tree last_arg_type = NULL;
1338 callee = cgraph_node(callee_t);
1339 avail = cgraph_function_body_availability (callee);
1340
1341 /* Check that there are no implicit casts in the passing of
1342 parameters. */
1343 if (TYPE_ARG_TYPES (TREE_TYPE (callee_t)))
1344 {
1345 for (arg_type = TYPE_ARG_TYPES (TREE_TYPE (callee_t)), i = 0;
1346 arg_type && TREE_VALUE (arg_type) != void_type_node;
1347 arg_type = TREE_CHAIN (arg_type), i++)
1348 {
1349 tree operand = gimple_call_arg (call, i);
1350 if (operand)
1351 {
1352 last_arg_type = TREE_VALUE(arg_type);
1353 check_cast (last_arg_type, operand);
1354 }
1355 else
1356 /* The code reaches here for some unfortunate
1357 builtin functions that do not have a list of
1358 argument types. */
1359 break;
1360 }
1361 }
1362 else
1363 {
1364 /* FIXME - According to Geoff Keating, we should never
1365 have to do this; the front ends should always process
1366 the arg list from the TYPE_ARG_LIST. */
1367 for (arg_type = DECL_ARGUMENTS (callee_t), i = 0;
1368 arg_type;
1369 arg_type = TREE_CHAIN (arg_type), i++)
1370 {
1371 tree operand = gimple_call_arg (call, i);
1372 if (operand)
1373 {
1374 last_arg_type = TREE_TYPE (arg_type);
1375 check_cast (last_arg_type, operand);
1376 }
1377 else
1378 /* The code reaches here for some unfortunate
1379 builtin functions that do not have a list of
1380 argument types. */
1381 break;
1382 }
1383 }
1384
1385 /* In the case where we have a var_args function, we need to
1386 check the remaining parameters against the last argument. */
1387 arg_type = last_arg_type;
1388 for ( ; i < gimple_call_num_args (call); i++)
1389 {
1390 tree operand = gimple_call_arg (call, i);
1391 if (arg_type)
1392 check_cast (arg_type, operand);
1393 else
1394 {
1395 /* The code reaches here for some unfortunate
1396 builtin functions that do not have a list of
1397 argument types. Most of these functions have
1398 been marked as having their parameters not
1399 escape, but for the rest, the type is doomed. */
1400 tree type = get_canon_type (TREE_TYPE (operand), false, false);
1401 mark_interesting_type (type, FULL_ESCAPE);
1402 }
1403 }
1404 }
1405
1406 /* The callee is either unknown (indirect call) or there is just no
1407      scannable code for it (external call).  We look to see if there
1408 are any bits available for the callee (such as by declaration or
1409 because it is builtin) and process solely on the basis of those
1410 bits. */
1411 if (avail == AVAIL_NOT_AVAILABLE || avail == AVAIL_OVERWRITABLE)
1412 {
1413 /* If this is a direct call to an external function, mark all of
1414 the parameter and return types. */
1415 for (i = 0; i < gimple_call_num_args (call); i++)
1416 {
1417 tree operand = gimple_call_arg (call, i);
1418 tree type = get_canon_type (TREE_TYPE (operand), false, false);
1419 mark_interesting_type (type, EXPOSED_PARAMETER);
1420 }
1421
1422 if (callee_t)
1423 {
1424 tree type =
1425 get_canon_type (TREE_TYPE (TREE_TYPE (callee_t)), false, false);
1426 mark_interesting_type (type, EXPOSED_PARAMETER);
1427 }
1428 }
1429 }
1430
1431 /* CODE is the operation on OP0 and OP1. OP0 is the operand that we
1432 *know* is a pointer type. OP1 may be a pointer type. */
1433 static bool
1434 okay_pointer_operation (enum tree_code code, tree op0, tree op1)
1435 {
1436 tree op0type = TYPE_MAIN_VARIANT (TREE_TYPE (op0));
1437
1438 switch (code)
1439 {
1440 case MULT_EXPR:
1441 /* Multiplication does not change alignment. */
1442 return true;
1443 break;
1444 case MINUS_EXPR:
1445 case PLUS_EXPR:
1446 case POINTER_PLUS_EXPR:
1447 {
1448 tree base, offset;
1449 gimple offset_cast_stmt;
1450
1451 if (POINTER_TYPE_P (op0type)
1452 && TREE_CODE (op0) == SSA_NAME
1453 && TREE_CODE (op1) == SSA_NAME
1454 && is_array_access_through_pointer_and_index (code, op0, op1,
1455 &base,
1456 &offset,
1457 &offset_cast_stmt))
1458 return true;
1459 else
1460 {
1461 tree size_of_op0_points_to = TYPE_SIZE_UNIT (TREE_TYPE (op0type));
1462
1463 if (CONSTANT_CLASS_P (op1)
1464 && size_of_op0_points_to
1465 && multiple_of_p (TREE_TYPE (size_of_op0_points_to),
1466 op1, size_of_op0_points_to))
1467 return true;
1468
1469 if (CONSTANT_CLASS_P (op0)
1470 && size_of_op0_points_to
1471 && multiple_of_p (TREE_TYPE (size_of_op0_points_to),
1472 op0, size_of_op0_points_to))
1473 return true;
1474 }
1475 }
1476 break;
1477 default:
1478 return false;
1479 }
1480 return false;
1481 }
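/* Illustrative behavior (assuming a hypothetical 16-byte "struct s" and
   a pointer "p" of type "struct s *"): a POINTER_PLUS_EXPR whose constant
   offset is a multiple of 16 (e.g. the gimple generated for "p + 2") is
   accepted; an offset such as 7, which is not a multiple of the element
   size, is rejected, and check_assign then marks "struct s" as
   FULL_ESCAPE.  */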
1482
1483
1484
1485 /* Helper for scan_for_refs. Check the operands of an assignment to
1486 mark types that may escape. */
1487
1488 static void
1489 check_assign (gimple t)
1490 {
1491 /* First look on the lhs and see what variable is stored to */
1492 check_lhs_var (gimple_assign_lhs (t));
1493
1494 /* For the purposes of figuring out what the cast affects */
1495
1496 /* Next check the operands on the rhs to see if they are ok. */
1497 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (t)))
1498 {
1499 case tcc_binary:
1500 {
1501 tree op0 = gimple_assign_rhs1 (t);
1502 tree type0 = get_canon_type (TREE_TYPE (op0), false, false);
1503 tree op1 = gimple_assign_rhs2 (t);
1504 tree type1 = get_canon_type (TREE_TYPE (op1), false, false);
1505
1506 /* If this is pointer arithmetic of any bad sort, then
1507 we need to mark the types as bad. For binary
1508 operations, no binary operator we currently support
1509 is always "safe" in regard to what it would do to
1510 pointers for purposes of determining which types
1511 escape, except operations of the size of the type.
1512 It is possible that min and max under the right set
1513 of circumstances and if the moon is in the correct
1514 place could be safe, but it is hard to see how this
1515 is worth the effort. */
1516 if (type0 && POINTER_TYPE_P (type0)
1517 && !okay_pointer_operation (gimple_assign_rhs_code (t), op0, op1))
1518 mark_interesting_type (type0, FULL_ESCAPE);
1519
1520 if (type1 && POINTER_TYPE_P (type1)
1521 && !okay_pointer_operation (gimple_assign_rhs_code (t), op1, op0))
1522 mark_interesting_type (type1, FULL_ESCAPE);
1523
1524 look_for_casts (op0);
1525 look_for_casts (op1);
1526 check_rhs_var (op0);
1527 check_rhs_var (op1);
1528 }
1529 break;
1530
1531 case tcc_unary:
1532 {
1533 tree op0 = gimple_assign_rhs1 (t);
1534 tree type0 = get_canon_type (TREE_TYPE (op0), false, false);
1535
1536 /* For unary operations, if the operation is NEGATE or ABS on
1537 a pointer, this is also considered pointer arithmetic and
1538 thus, bad for business. */
1539 if (type0
1540 && POINTER_TYPE_P (type0)
1541 	    && (gimple_assign_rhs_code (t) == NEGATE_EXPR
1542 		|| gimple_assign_rhs_code (t) == ABS_EXPR))
1543 mark_interesting_type (type0, FULL_ESCAPE);
1544
1545 check_rhs_var (op0);
1546 look_for_casts (op0);
1547 }
1548 break;
1549
1550 case tcc_reference:
1551 look_for_casts (gimple_assign_rhs1 (t));
1552 check_rhs_var (gimple_assign_rhs1 (t));
1553 break;
1554
1555 case tcc_declaration:
1556 check_rhs_var (gimple_assign_rhs1 (t));
1557 break;
1558
1559 case tcc_expression:
1560 if (gimple_assign_rhs_code (t) == ADDR_EXPR)
1561 {
1562 tree rhs = gimple_assign_rhs1 (t);
1563 look_for_casts (TREE_OPERAND (rhs, 0));
1564 check_rhs_var (rhs);
1565 }
1566 break;
1567
1568 default:
1569 break;
1570 }
1571 }
1572
1573
1574 /* Scan statement T for references to types and mark anything
1575 interesting. */
1576
1577 static void
1578 scan_for_refs (gimple t)
1579 {
1580 switch (gimple_code (t))
1581 {
1582 case GIMPLE_ASSIGN:
1583 check_assign (t);
1584 break;
1585
1586 case GIMPLE_CALL:
1587       /* If this is a call to malloc, squirrel away the result so we
1588 	 do not mark the resulting cast as being bad.  */
1589 check_call (t);
1590 break;
1591
1592 case GIMPLE_ASM:
1593 check_asm (t);
1594 break;
1595
1596 default:
1597 break;
1598 }
1599
1600 return;
1601 }
1602
1603
1604 /* The init routine for analyzing global static variable usage. See
1605 comments at top for description. */
1606 static void
1607 ipa_init (void)
1608 {
1609 bitmap_obstack_initialize (&ipa_obstack);
1610 global_types_exposed_parameter = BITMAP_ALLOC (&ipa_obstack);
1611 global_types_full_escape = BITMAP_ALLOC (&ipa_obstack);
1612 global_types_seen = BITMAP_ALLOC (&ipa_obstack);
1613
1614 uid_to_canon_type = splay_tree_new (splay_tree_compare_ints, 0, 0);
1615 all_canon_types = splay_tree_new (compare_type_brand, 0, 0);
1616 type_to_canon_type = splay_tree_new (splay_tree_compare_pointers, 0, 0);
1617 uid_to_subtype_map = splay_tree_new (splay_tree_compare_ints, 0, 0);
1618 uid_to_addressof_down_map = splay_tree_new (splay_tree_compare_ints, 0, 0);
1619 uid_to_addressof_up_map = splay_tree_new (splay_tree_compare_ints, 0, 0);
1620
1621 /* There are some shared nodes, in particular the initializers on
1622 static declarations. We do not need to scan them more than once
1623 since all we would be interested in are the addressof
1624 operations. */
1625 visited_nodes = pointer_set_create ();
1626 initialized = true;
1627 }
1628
1629 /* Check out the rhs of the static or global initialization VNODE to see
1630    if it contains any addressof operations.  Note that some of
1631 these variables may not even be referenced in the code in this
1632 compilation unit but their right hand sides may contain references
1633 to variables defined within this unit. */
1634
1635 static void
1636 analyze_variable (struct varpool_node *vnode)
1637 {
1638 tree global = vnode->decl;
1639 tree type = get_canon_type (TREE_TYPE (global), false, false);
1640
1641 /* If this variable has exposure beyond the compilation unit, add
1642 its type to the global types. */
1643
1644 if (vnode->externally_visible)
1645 mark_interesting_type (type, FULL_ESCAPE);
1646
1647 gcc_assert (TREE_CODE (global) == VAR_DECL);
1648
1649 if (DECL_INITIAL (global))
1650 check_tree (DECL_INITIAL (global));
1651 }
1652
1653 /* This is the main routine for finding the reference patterns for
1654 global variables within a function FN. */
1655
1656 static void
1657 analyze_function (struct cgraph_node *fn)
1658 {
1659 tree decl = fn->decl;
1660 check_function_parameter_and_return_types (decl,
1661 fn->local.externally_visible);
1662 if (dump_file)
1663 fprintf (dump_file, "\n local analysis of %s", cgraph_node_name (fn));
1664
1665 {
1666 struct function *this_cfun = DECL_STRUCT_FUNCTION (decl);
1667 basic_block this_block;
1668
1669 FOR_EACH_BB_FN (this_block, this_cfun)
1670 {
1671 gimple_stmt_iterator gsi;
1672 for (gsi = gsi_start_bb (this_block); !gsi_end_p (gsi); gsi_next (&gsi))
1673 scan_for_refs (gsi_stmt (gsi));
1674 }
1675 }
1676
1677 /* There may be const decls with interesting right hand sides. */
1678 if (DECL_STRUCT_FUNCTION (decl))
1679 {
1680 tree step;
1681 for (step = DECL_STRUCT_FUNCTION (decl)->local_decls;
1682 step;
1683 step = TREE_CHAIN (step))
1684 {
1685 tree var = TREE_VALUE (step);
1686 if (TREE_CODE (var) == VAR_DECL
1687 && DECL_INITIAL (var)
1688 && !TREE_STATIC (var))
1689 check_tree (DECL_INITIAL (var));
1690 get_canon_type (TREE_TYPE (var), false, false);
1691 }
1692 }
1693 }
1694
1695 \f
1696
1697 /* Convert a type_UID into a type. */
1698 static tree
1699 type_for_uid (int uid)
1700 {
1701 splay_tree_node result =
1702 splay_tree_lookup (uid_to_canon_type, (splay_tree_key) uid);
1703
1704 if (result)
1705 return (tree) result->value;
1706 else return NULL;
1707 }
1708
1709 /* Return a bitmap with the subtypes of the type for UID. If it
1710 does not exist, return either NULL or a new bitmap depending on the
1711 value of CREATE. */
1712
1713 static bitmap
1714 subtype_map_for_uid (int uid, bool create)
1715 {
1716 splay_tree_node result = splay_tree_lookup (uid_to_subtype_map,
1717 (splay_tree_key) uid);
1718
1719 if (result)
1720 return (bitmap) result->value;
1721 else if (create)
1722 {
1723 bitmap subtype_map = BITMAP_ALLOC (&ipa_obstack);
1724 splay_tree_insert (uid_to_subtype_map,
1725 uid,
1726 (splay_tree_value)subtype_map);
1727 return subtype_map;
1728 }
1729 else return NULL;
1730 }
1731
1732 /* Mark all of the supertypes and field types of TYPE as being seen.
1733 Also accumulate the subtypes for each type so that
1734    close_type_full_escape can mark a subtype as escaping if the
1735 supertype escapes. */
1736
1737 static void
1738 close_type_seen (tree type)
1739 {
1740 tree field;
1741 int i, uid;
1742 tree binfo, base_binfo;
1743
1744 /* See thru all pointer tos and array ofs. */
1745 type = get_canon_type (type, true, true);
1746 if (!type)
1747 return;
1748
1749 uid = TYPE_UID (type);
1750
1751 if (bitmap_bit_p (been_there_done_that, uid))
1752 return;
1753 bitmap_set_bit (been_there_done_that, uid);
1754
1755 /* If we are doing a language with a type hierarchy, mark all of
1756 the superclasses. */
1757 if (TYPE_BINFO (type))
1758 for (binfo = TYPE_BINFO (type), i = 0;
1759 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1760 {
1761 tree binfo_type = BINFO_TYPE (base_binfo);
1762 bitmap subtype_map = subtype_map_for_uid
1763 (TYPE_UID (TYPE_MAIN_VARIANT (binfo_type)), true);
1764 bitmap_set_bit (subtype_map, uid);
1765 close_type_seen (get_canon_type (binfo_type, true, true));
1766 }
1767
1768 /* If the field is a struct or union type, mark all of the
1769 subfields. */
1770 for (field = TYPE_FIELDS (type);
1771 field;
1772 field = TREE_CHAIN (field))
1773 {
1774 tree field_type;
1775 if (TREE_CODE (field) != FIELD_DECL)
1776 continue;
1777
1778 field_type = TREE_TYPE (field);
1779 if (ipa_type_escape_star_count_of_interesting_or_array_type (field_type) >= 0)
1780 close_type_seen (get_canon_type (field_type, true, true));
1781 }
1782 }
1783
1784 /* Take a TYPE that has been passed by value to an external function
1785 and mark all of the fields that have pointer types as escaping. For
1786 any of the non pointer types that are structures or unions,
1787 recurse. TYPE is never a pointer type. */
1788
1789 static void
1790 close_type_exposed_parameter (tree type)
1791 {
1792 tree field;
1793 int uid;
1794
1795 type = get_canon_type (type, false, false);
1796 if (!type)
1797 return;
1798 uid = TYPE_UID (type);
1799 gcc_assert (!POINTER_TYPE_P (type));
1800
1801 if (bitmap_bit_p (been_there_done_that, uid))
1802 return;
1803 bitmap_set_bit (been_there_done_that, uid);
1804
1805 /* If the field is a struct or union type, mark all of the
1806 subfields. */
1807 for (field = TYPE_FIELDS (type);
1808 field;
1809 field = TREE_CHAIN (field))
1810 {
1811 tree field_type;
1812
1813 if (TREE_CODE (field) != FIELD_DECL)
1814 continue;
1815
1816 field_type = get_canon_type (TREE_TYPE (field), false, false);
1817 mark_interesting_type (field_type, EXPOSED_PARAMETER);
1818
1819 /* Only recurse for non pointer types of structures and unions. */
1820 if (ipa_type_escape_star_count_of_interesting_type (field_type) == 0)
1821 close_type_exposed_parameter (field_type);
1822 }
1823 }
1824
1825 /* The next function handles the case where a type fully escapes.
1826    This means that not only does the type itself escape, but also:
1827 
1828    a) the type of every field escapes recursively, and
1829    b) the type of every subtype escapes, as does the supertype, as well
1830    as all of the pointed-to types of each field.
1831 
1832    Note that pointer-to types themselves are not marked as escaping.  If
1833    the pointed-to type escapes, the pointer-to type is considered escaped too.
1834
1835 Take a TYPE that has had the address taken for an instance of it
1836 and mark all of the types for its fields as having their addresses
1837 taken. */
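/* For example (illustrative): if a hypothetical "struct outer
   { struct inner in; struct other *q; }" fully escapes, then
   "struct inner" and "struct other" are marked as fully escaping as
   well, as is any type recorded in the subtype map for "struct outer".  */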
1838
1839 static void
1840 close_type_full_escape (tree type)
1841 {
1842 tree field;
1843 unsigned int i;
1844 int uid;
1845 tree binfo, base_binfo;
1846 bitmap_iterator bi;
1847 bitmap subtype_map;
1848 splay_tree_node address_result;
1849
1850 /* Strip off any pointer or array types. */
1851 type = get_canon_type (type, true, true);
1852 if (!type)
1853 return;
1854 uid = TYPE_UID (type);
1855
1856 if (bitmap_bit_p (been_there_done_that, uid))
1857 return;
1858 bitmap_set_bit (been_there_done_that, uid);
1859
1860 subtype_map = subtype_map_for_uid (uid, false);
1861
1862 /* If we are doing a language with a type hierarchy, mark all of
1863 the superclasses. */
1864 if (TYPE_BINFO (type))
1865 for (binfo = TYPE_BINFO (type), i = 0;
1866 BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
1867 {
1868 tree binfotype = BINFO_TYPE (base_binfo);
1869 binfotype = mark_type (binfotype, FULL_ESCAPE);
1870 close_type_full_escape (binfotype);
1871 }
1872
1873 /* Mark as escaped any types that have been down casted to
1874 this type. */
1875 if (subtype_map)
1876 EXECUTE_IF_SET_IN_BITMAP (subtype_map, 0, i, bi)
1877 {
1878 tree subtype = type_for_uid (i);
1879 subtype = mark_type (subtype, FULL_ESCAPE);
1880 close_type_full_escape (subtype);
1881 }
1882
1883 /* If the field is a struct or union type, mark all of the
1884 subfields. */
1885 for (field = TYPE_FIELDS (type);
1886 field;
1887 field = TREE_CHAIN (field))
1888 {
1889 tree field_type;
1890 if (TREE_CODE (field) != FIELD_DECL)
1891 continue;
1892
1893 field_type = TREE_TYPE (field);
1894 if (ipa_type_escape_star_count_of_interesting_or_array_type (field_type) >= 0)
1895 {
1896 field_type = mark_type (field_type, FULL_ESCAPE);
1897 close_type_full_escape (field_type);
1898 }
1899 }
1900
1901 /* For all of the types A that contain this type B and were part of
1902 an expression like "&...A.B...", mark the A's as escaping. */
1903 address_result = splay_tree_lookup (uid_to_addressof_up_map,
1904 (splay_tree_key) uid);
1905 if (address_result)
1906 {
1907 bitmap containing_classes = (bitmap) address_result->value;
1908 EXECUTE_IF_SET_IN_BITMAP (containing_classes, 0, i, bi)
1909 {
1910 close_type_full_escape (type_for_uid (i));
1911 }
1912 }
1913 }
1914
1915 /* Transitively close the addressof bitmap for the type with UID.
1916 This means that if we had a.b and b.c, a would have both b and c in
1917 its maps. */
1918
1919 static bitmap
1920 close_addressof_down (int uid)
1921 {
1922 bitmap_iterator bi;
1923 splay_tree_node result =
1924 splay_tree_lookup (uid_to_addressof_down_map, (splay_tree_key) uid);
1925 bitmap map = NULL;
1926 bitmap new_map;
1927 unsigned int i;
1928
1929 if (result)
1930 map = (bitmap) result->value;
1931 else
1932 return NULL;
1933
1934 if (bitmap_bit_p (been_there_done_that, uid))
1935 return map;
1936 bitmap_set_bit (been_there_done_that, uid);
1937
1938 /* If the type escapes, get rid of the addressof map; it will not be
1939 needed. */
1940 if (bitmap_bit_p (global_types_full_escape, uid))
1941 {
1942 BITMAP_FREE (map);
1943 splay_tree_remove (uid_to_addressof_down_map, (splay_tree_key) uid);
1944 return NULL;
1945 }
1946
1947 /* The new map will contain all of the bits of the old map plus the
1948 bits for every transitively enclosed field. */
1949 new_map = BITMAP_ALLOC (&ipa_obstack);
1950
1951 EXECUTE_IF_SET_IN_BITMAP (map, 0, i, bi)
1952 {
1953 bitmap submap = close_addressof_down (i);
1954 bitmap_set_bit (new_map, i);
1955 if (submap)
1956 bitmap_ior_into (new_map, submap);
1957 }
1958 result->value = (splay_tree_value) new_map;
1959
1960 BITMAP_FREE (map);
1961 return new_map;
1962 }
1963
1964 \f
1965 /* The main entry point for type escape analysis. */
1966
1967 static unsigned int
1968 type_escape_execute (void)
1969 {
1970 struct cgraph_node *node;
1971 struct varpool_node *vnode;
1972 unsigned int i;
1973 bitmap_iterator bi;
1974 splay_tree_node result;
1975
1976 ipa_init ();
1977
1978 /* Process all of the variables first. */
1979 FOR_EACH_STATIC_VARIABLE (vnode)
1980 analyze_variable (vnode);
1981
1982 /* Process all of the functions next.
1983
1984 We do not want to process any of the clones, so we check that this
1985 is a master clone. However, we do need to process any
1986 AVAIL_OVERWRITABLE functions (these are never clones) because
1987 they may cause a type to escape.
1988 */
1989 for (node = cgraph_nodes; node; node = node->next)
1990 if (node->analyzed
1991 && (cgraph_is_master_clone (node)
1992 || (cgraph_function_body_availability (node) == AVAIL_OVERWRITABLE)))
1993 analyze_function (node);
1994
1995
1996 pointer_set_destroy (visited_nodes);
1997 visited_nodes = NULL;
1998
1999 /* Do all of the closures to discover which types escape the
2000 compilation unit. */
2001
2002 been_there_done_that = BITMAP_ALLOC (&ipa_obstack);
2003 bitmap_tmp = BITMAP_ALLOC (&ipa_obstack);
2004
2005 /* Examine the types that we have directly seen in scanning the code
2006 and add to that any contained types or superclasses. */
2007
2008 bitmap_copy (bitmap_tmp, global_types_seen);
2009 EXECUTE_IF_SET_IN_BITMAP (bitmap_tmp, 0, i, bi)
2010 {
2011 tree type = type_for_uid (i);
2012 /* Only look at records and unions, and pointers to them. */
2013 if (ipa_type_escape_star_count_of_interesting_or_array_type (type) >= 0)
2014 close_type_seen (type);
2015 }
2016 bitmap_clear (been_there_done_that);
2017
2018 /* Examine all of the types passed by value and mark any enclosed
2019 pointer types as escaping. */
2020 bitmap_copy (bitmap_tmp, global_types_exposed_parameter);
2021 EXECUTE_IF_SET_IN_BITMAP (bitmap_tmp, 0, i, bi)
2022 {
2023 close_type_exposed_parameter (type_for_uid (i));
2024 }
2025 bitmap_clear (been_there_done_that);
2026
2027 /* Close the types for escape. If something escapes, then any
2028 enclosed types escape as well as any subtypes. */
2029 bitmap_copy (bitmap_tmp, global_types_full_escape);
2030 EXECUTE_IF_SET_IN_BITMAP (bitmap_tmp, 0, i, bi)
2031 {
2032 close_type_full_escape (type_for_uid (i));
2033 }
2034 bitmap_clear (been_there_done_that);
2035
2036 /* Before this pass, the uid_to_addressof_down_map for type X
2037 contained an entry for Y if there had been an operation of the
2038 form &X.Y. This step adds all of the fields contained within Y
2039 (recursively) to X's map. */
2040
2041 result = splay_tree_min (uid_to_addressof_down_map);
2042 while (result)
2043 {
2044 int uid = (int) result->key;
2045 /* Close the addressof map, i.e. copy all of the transitive
2046 substructures up to this level. */
2047 close_addressof_down (uid);
2048 result = splay_tree_successor (uid_to_addressof_down_map, uid);
2049 }
2050
2051 /* The array types and pointer types are not needed in the persistent
2052 data structures. */
2053 result = splay_tree_min (all_canon_types);
2054 while (result)
2055 {
2056 tree type = (tree) result->value;
2057 tree key = (tree) result->key;
2058 if (POINTER_TYPE_P (type)
2059 || TREE_CODE (type) == ARRAY_TYPE)
2060 {
2061 splay_tree_remove (all_canon_types, (splay_tree_key) result->key);
2062 splay_tree_remove (type_to_canon_type, (splay_tree_key) type);
2063 splay_tree_remove (uid_to_canon_type, (splay_tree_key) TYPE_UID (type));
2064 bitmap_clear_bit (global_types_seen, TYPE_UID (type));
2065 }
2066 result = splay_tree_successor (all_canon_types, (splay_tree_key) key);
2067 }
2068
2069 if (dump_file)
2070 {
2071 EXECUTE_IF_SET_IN_BITMAP (global_types_seen, 0, i, bi)
2072 {
2073 /* The pointer types are in the global_types_full_escape
2074 bitmap but not in the backwards map. They also contain
2075 no useful information since they are not marked. */
2076 tree type = type_for_uid (i);
2077 fprintf (dump_file, "type %d ", i);
2078 print_generic_expr (dump_file, type, 0);
2079 if (bitmap_bit_p (global_types_full_escape, i))
2080 fprintf (dump_file, " escaped\n");
2081 else
2082 fprintf (dump_file, " contained\n");
2083 }
2084 }
2085
2086 /* Get rid of uid_to_addressof_up_map and its bitmaps. */
2087 result = splay_tree_min (uid_to_addressof_up_map);
2088 while (result)
2089 {
2090 int uid = (int) result->key;
2091 bitmap bm = (bitmap) result->value;
2092
2093 BITMAP_FREE (bm);
2094 splay_tree_remove (uid_to_addressof_up_map, (splay_tree_key) uid);
2095 result = splay_tree_successor (uid_to_addressof_up_map, uid);
2096 }
2097
2098 /* Get rid of the subtype map. */
2099 result = splay_tree_min (uid_to_subtype_map);
2100 while (result)
2101 {
2102 bitmap b = (bitmap) result->value;
2103 BITMAP_FREE (b);
2104 splay_tree_remove (uid_to_subtype_map, result->key);
2105 result = splay_tree_min (uid_to_subtype_map);
2106 }
2107 splay_tree_delete (uid_to_subtype_map);
2108 uid_to_subtype_map = NULL;
2109
2110 BITMAP_FREE (global_types_exposed_parameter);
2111 BITMAP_FREE (been_there_done_that);
2112 BITMAP_FREE (bitmap_tmp);
2113 return 0;
2114 }
2115
2116 static bool
2117 gate_type_escape_vars (void)
2118 {
2119 return (flag_ipa_type_escape
2120 /* Don't bother doing anything if the program has errors. */
2121 && !(errorcount || sorrycount));
2122 }
2123
2124 struct simple_ipa_opt_pass pass_ipa_type_escape =
2125 {
2126 {
2127 SIMPLE_IPA_PASS,
2128 "type-escape-var", /* name */
2129 gate_type_escape_vars, /* gate */
2130 type_escape_execute, /* execute */
2131 NULL, /* sub */
2132 NULL, /* next */
2133 0, /* static_pass_number */
2134 TV_IPA_TYPE_ESCAPE, /* tv_id */
2135 0, /* properties_required */
2136 0, /* properties_provided */
2137 0, /* properties_destroyed */
2138 0, /* todo_flags_start */
2139 0 /* todo_flags_finish */
2140 }
2141 };