1/* Basic IPA utilities for type inheritance graph construction and
2 devirtualization.
3 Copyright (C) 2013-2014 Free Software Foundation, Inc.
4 Contributed by Jan Hubicka
5
6This file is part of GCC.
7
8GCC is free software; you can redistribute it and/or modify it under
9the terms of the GNU General Public License as published by the Free
10Software Foundation; either version 3, or (at your option) any later
11version.
12
13GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14WARRANTY; without even the implied warranty of MERCHANTABILITY or
15FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16for more details.
17
18You should have received a copy of the GNU General Public License
19along with GCC; see the file COPYING3. If not see
20<http://www.gnu.org/licenses/>. */
21
22/* Brief vocabulary:
23 ODR = One Definition Rule
24 In short, the ODR states that:
25 1 In any translation unit, a template, type, function, or object can
26 have no more than one definition. Some of these can have any number
27 of declarations. A definition provides an instance.
28 2 In the entire program, an object or non-inline function cannot have
29 more than one definition; if an object or function is used, it must
30 have exactly one definition. You can declare an object or function
31 that is never used, in which case you don't have to provide
32 a definition. In no event can there be more than one definition.
33 3 Some things, like types, templates, and extern inline functions, can
34 be defined in more than one translation unit. For a given entity,
35 each definition must be the same. Non-extern objects and functions
36 in different translation units are different entities, even if their
37 names and types are the same.
38
39 OTR = OBJ_TYPE_REF
40 This is the GIMPLE representation of the type information of a polymorphic call.
41 It contains two parameters:
42 otr_type is the type of the class whose method is called.
43 otr_token is the index into the virtual table where the address is taken.
44
45 BINFO
46 This is the type inheritance information attached to each tree
47 RECORD_TYPE by the C++ frontend. It provides information about base
48 types and virtual tables.
49
50 BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
51 BINFO also links to its type by BINFO_TYPE and to the virtual table by
52 BINFO_VTABLE.
53
54 Base types of a given type are enumerated by BINFO_BASE_BINFO
55 vector. Members of this vector are not BINFOs associated
56 with a base type. Rather they are new copies of BINFOs
57 (base BINFOs). Their virtual tables may differ from
58 virtual table of the base type. Also BINFO_OFFSET specifies
59 offset of the base within the type.
60
61 In the case of single inheritance, the virtual table is shared
62 and BINFO_VTABLE of base BINFO is NULL. In the case of multiple
63 inheritance the individual virtual tables are pointed to by
64 BINFO_VTABLE of the base binfos (which differ from the BINFO_VTABLE of the
65 binfo associated with the base type).
66
67 BINFO lookup for a given base type and offset can be done by
68 get_binfo_at_offset. It returns proper BINFO whose virtual table
69 can be used for lookup of virtual methods associated with the
70 base type.
71
72 token
73 This is the index of a virtual method in the virtual table associated
74 with the type defining it. The token can be looked up from OBJ_TYPE_REF
75 or from DECL_VINDEX of a given virtual method.
76
77 polymorphic (indirect) call
78 This is the callgraph representation of a virtual method call. Every
79 polymorphic call contains otr_type and otr_token taken from
80 original OBJ_TYPE_REF at callgraph construction time.
81
82 What we do here:
83
84 build_type_inheritance_graph triggers a construction of the type inheritance
85 graph.
86
87 We reconstruct it based on types of methods we see in the unit.
88 This means that the graph is not complete. Types with no methods are not
89 inserted into the graph. Also types without virtual methods are not
90 represented at all, though it may be easy to add this.
91
92 The inheritance graph is represented as follows:
93
94 Vertices are structures odr_type. Every odr_type may correspond
95 to one or more tree type nodes that are equivalent by ODR rule.
96 (multiple type nodes appear only with link-time optimization)
97
98 Edges are represented by odr_type->bases and odr_type->derived_types.
99 At the moment we do not track offsets of types for multiple inheritance.
100 Adding this is easy.
101
102 possible_polymorphic_call_targets returns, given the parameters found in
103 an indirect polymorphic edge, all possible polymorphic call targets of the call.
104
105 pass_ipa_devirt performs simple speculative devirtualization.
106*/
107
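/* Illustration (an assumed example, not part of this file): for

     struct A { virtual int f (); virtual int g (); };
     int call (A *a) { return a->g (); }

   the call "a->g ()" is represented by an OBJ_TYPE_REF whose otr_type is
   the class A and whose otr_token is the index of g in A's virtual table
   (1 in the usual layout, with f at index 0).  An ODR violation that the
   machinery below may diagnose during LTO would be a second translation
   unit defining "struct A" with a different layout or different bases.  */
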
108#include "config.h"
109#include "system.h"
110#include "coretypes.h"
111#include "tm.h"
112#include "tree.h"
113#include "print-tree.h"
114#include "calls.h"
115#include "cgraph.h"
116#include "expr.h"
117#include "tree-pass.h"
118#include "pointer-set.h"
119#include "target.h"
120#include "hash-table.h"
121#include "tree-pretty-print.h"
122#include "ipa-utils.h"
2fb9a547
AM
123#include "tree-ssa-alias.h"
124#include "internal-fn.h"
125#include "gimple-fold.h"
126#include "gimple-expr.h"
127#include "gimple.h"
128#include "ipa-inline.h"
129#include "diagnostic.h"
130#include "tree-dfa.h"
131
132/* Dummy polymorphic call context. */
133
134const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
135 = {0, NULL, false, true};
136
137/* Pointer set of all call targets appearing in the cache. */
138static pointer_set_t *cached_polymorphic_call_targets;
139
140/* The node of type inheritance graph. For each type unique in
141 One Definition Rule (ODR) sense, we produce one node linking all
142 main variants of types equivalent to it, bases and derived types. */
143
144struct GTY(()) odr_type_d
145{
146 /* leader type. */
147 tree type;
148 /* All bases. */
149 vec<odr_type> GTY((skip)) bases;
150 /* All derived types with virtual methods seen in unit. */
151 vec<odr_type> GTY((skip)) derived_types;
152
153 /* All equivalent types, if more than one. */
154 vec<tree, va_gc> *types;
155 /* Set of all equivalent types, if NON-NULL. */
156 pointer_set_t * GTY((skip)) types_set;
157
158 /* Unique ID indexing the type in odr_types array. */
159 int id;
160 /* Is it in anonymous namespace? */
161 bool anonymous_namespace;
162};
163
164
165/* Return true if BINFO corresponds to a type with virtual methods.
166
167 Every type has several BINFOs. One is the BINFO associated with the type
168 while the others represent bases of derived types. The BINFOs representing
169 bases do not have the BINFO_VTABLE pointer set in the case of single
170 inheritance (because vtables are shared). Look up the BINFO of the type
171 and check the presence of its vtable. */
172
173static inline bool
174polymorphic_type_binfo_p (tree binfo)
175{
176 /* See if BINFO's type has a virtual table associated with it. */
177 return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
178}
179
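/* A small illustration (assumed example, not from GCC sources): given

     struct A { virtual void f (); };	// TYPE_BINFO (A) has BINFO_VTABLE set
     struct P { int x; };		// no virtual table at all

   polymorphic_type_binfo_p returns true for any BINFO whose BINFO_TYPE is A,
   including the base-BINFO copies representing A inside derived classes
   (whose own BINFO_VTABLE may be NULL), and false for BINFOs of P.  */
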
180/* One Definition Rule hashtable helpers. */
181
182struct odr_hasher
183{
184 typedef odr_type_d value_type;
185 typedef union tree_node compare_type;
186 static inline hashval_t hash (const value_type *);
187 static inline bool equal (const value_type *, const compare_type *);
188 static inline void remove (value_type *);
189};
190
191/* Produce hash based on type name. */
192
193hashval_t
194hash_type_name (tree t)
195{
196 gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);
197
198 /* If not in LTO, all main variants are unique, so we can do
199 pointer hash. */
200 if (!in_lto_p)
201 return htab_hash_pointer (t);
202
203 /* Anonymous types are unique. */
204 if (type_in_anonymous_namespace_p (t))
205 return htab_hash_pointer (t);
206
207 /* For polymorphic types, we can simply hash the virtual table. */
208 if (TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
209 {
210 tree v = BINFO_VTABLE (TYPE_BINFO (t));
211 hashval_t hash = 0;
212
213 if (TREE_CODE (v) == POINTER_PLUS_EXPR)
214 {
215 hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
216 v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
217 }
218
219 v = DECL_ASSEMBLER_NAME (v);
220 hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
221 return hash;
222 }
223
224 /* Rest is not implemented yet. */
225 gcc_unreachable ();
226}
227
228/* Return the computed hashcode for ODR_TYPE. */
229
230inline hashval_t
231odr_hasher::hash (const value_type *odr_type)
232{
233 return hash_type_name (odr_type->type);
234}
235
236/* Compare types T1 and T2 and return true if they are
237 equivalent. */
238
239inline bool
240odr_hasher::equal (const value_type *t1, const compare_type *ct2)
241{
242 tree t2 = const_cast <tree> (ct2);
243
244 gcc_checking_assert (TYPE_MAIN_VARIANT (ct2) == ct2);
245 if (t1->type == t2)
246 return true;
247 if (!in_lto_p)
248 return false;
249 return types_same_for_odr (t1->type, t2);
250}
251
252/* Free ODR type V. */
253
254inline void
255odr_hasher::remove (value_type *v)
256{
257 v->bases.release ();
258 v->derived_types.release ();
259 if (v->types_set)
260 pointer_set_destroy (v->types_set);
261 ggc_free (v);
262}
263
264/* ODR type hash used to lookup ODR type based on tree type node. */
265
266typedef hash_table <odr_hasher> odr_hash_type;
267static odr_hash_type odr_hash;
268
269/* ODR types are also stored into ODR_TYPE vector to allow consistent
270 walking. Bases appear before derived types. Vector is garbage collected
271 so we won't end up visiting empty types. */
272
273static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
274#define odr_types (*odr_types_ptr)
275
276/* TYPE is equivalent to VAL by ODR, but its tree representation differs
277 from VAL->type. This may happen in LTO where tree merging did not merge
278 all variants of the same type. It may or may not mean an ODR violation.
279 Add it to the list of duplicates and warn on some violations. */
280
281static void
282add_type_duplicate (odr_type val, tree type)
283{
284 if (!val->types_set)
285 val->types_set = pointer_set_create ();
286
287 /* See if this duplicate is new. */
288 if (!pointer_set_insert (val->types_set, type))
289 {
290 bool merge = true;
291 bool base_mismatch = false;
292 gcc_assert (in_lto_p);
293 vec_safe_push (val->types, type);
294 unsigned int i,j;
295
296 /* First we compare memory layout. */
297 if (!types_compatible_p (val->type, type))
298 {
299 merge = false;
300 if (BINFO_VTABLE (TYPE_BINFO (val->type))
301 && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
302 "type %qD violates one definition rule ",
303 type))
304 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
305 "a type with the same name but different layout is "
306 "defined in another translation unit");
307 if (cgraph_dump_file)
308 {
309 fprintf (cgraph_dump_file, "ODR violation or merging or ODR type bug?\n");
310
311 print_node (cgraph_dump_file, "", val->type, 0);
312 putc ('\n',cgraph_dump_file);
313 print_node (cgraph_dump_file, "", type, 0);
314 putc ('\n',cgraph_dump_file);
315 }
316 }
317
318 /* Next sanity check that bases are the same. If not, we will end
319 up producing wrong answers. */
320 for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
321 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
322 {
323 odr_type base = get_odr_type
324 (BINFO_TYPE
325 (BINFO_BASE_BINFO (TYPE_BINFO (type),
326 i)),
327 true);
328 if (val->bases.length () <= j || val->bases[j] != base)
329 base_mismatch = true;
330 j++;
331 }
332 if (base_mismatch)
333 {
334 merge = false;
335
336 if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
337 "type %qD violates one definition rule ",
338 type))
339 inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
340 "a type with the same name but different bases is "
341 "defined in another translation unit");
342 if (cgraph_dump_file)
343 {
344 fprintf (cgraph_dump_file, "ODR base violation or merging bug?\n");
345
346 print_node (cgraph_dump_file, "", val->type, 0);
347 putc ('\n',cgraph_dump_file);
348 print_node (cgraph_dump_file, "", type, 0);
349 putc ('\n',cgraph_dump_file);
350 }
351 }
352
353 /* Regularize things a little. During LTO same types may come with
354 different BINFOs. Either because their virtual table was
355 not merged by tree merging and only later at decl merging or
356 because one type comes with external vtable, while other
357 with internal. We want to merge equivalent binfos to conserve
358 memory and streaming overhead.
359
360 The external vtables are more harmful: they contain references
361 to external declarations of methods that may be defined in the
362 merged LTO unit. For this reason we absolutely need to remove
363 them and replace by internal variants. Not doing so will lead
364 to incomplete answers from possible_polymorphic_call_targets. */
365 if (!flag_ltrans && merge)
366 {
367 tree master_binfo = TYPE_BINFO (val->type);
368 tree v1 = BINFO_VTABLE (master_binfo);
369 tree v2 = BINFO_VTABLE (TYPE_BINFO (type));
370
371 if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
372 {
373 gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
374 && operand_equal_p (TREE_OPERAND (v1, 1),
375 TREE_OPERAND (v2, 1), 0));
376 v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
377 v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
378 }
379 gcc_assert (DECL_ASSEMBLER_NAME (v1)
380 == DECL_ASSEMBLER_NAME (v2));
381
382 if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
383 {
384 unsigned int i;
385
386 TYPE_BINFO (val->type) = TYPE_BINFO (type);
387 for (i = 0; i < val->types->length (); i++)
388 {
389 if (TYPE_BINFO ((*val->types)[i])
390 == master_binfo)
391 TYPE_BINFO ((*val->types)[i]) = TYPE_BINFO (type);
392 }
393 }
394 else
395 TYPE_BINFO (type) = master_binfo;
396 }
397 }
398}
399
400/* Get ODR type hash entry for TYPE. If INSERT is true, create
401 possibly new entry. */
402
403odr_type
404get_odr_type (tree type, bool insert)
405{
406 odr_type_d **slot;
407 odr_type val;
408 hashval_t hash;
409
410 type = TYPE_MAIN_VARIANT (type);
411 gcc_checking_assert (TYPE_MAIN_VARIANT (type) == type);
412 hash = hash_type_name (type);
413 slot = odr_hash.find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
414 if (!slot)
415 return NULL;
416
417 /* See if we already have entry for type. */
418 if (*slot)
419 {
420 val = *slot;
421
422 /* With LTO we need to support multiple tree representation of
423 the same ODR type. */
424 if (val->type != type)
425 add_type_duplicate (val, type);
426 }
427 else
428 {
429 tree binfo = TYPE_BINFO (type);
430 unsigned int i;
431
432 val = ggc_alloc_cleared_odr_type_d ();
433 val->type = type;
434 val->bases = vNULL;
435 val->derived_types = vNULL;
436 val->anonymous_namespace = type_in_anonymous_namespace_p (type);
437 *slot = val;
438 for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
439 /* For now record only polymorphic types. Others are
440 pointless for devirtualization and we cannot precisely
441 determine ODR equivalency of these during LTO. */
442 if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
443 {
444 odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
445 i)),
446 true);
447 base->derived_types.safe_push (val);
448 val->bases.safe_push (base);
449 }
450 /* First record bases, then add into array so ids are increasing. */
451 if (odr_types_ptr)
452 val->id = odr_types.length ();
453 vec_safe_push (odr_types_ptr, val);
454 }
455 return val;
456}
457
458/* Dump ODR type T and all its derived types. INDENT specifies the indentation for
459 recursive printing. */
460
461static void
462dump_odr_type (FILE *f, odr_type t, int indent=0)
463{
464 unsigned int i;
465 fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
466 print_generic_expr (f, t->type, TDF_SLIM);
467 fprintf (f, "%s\n", t->anonymous_namespace ? " (anonymous namespace)":"");
468 if (TYPE_NAME (t->type))
469 {
470 fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
471 DECL_SOURCE_FILE (TYPE_NAME (t->type)),
472 DECL_SOURCE_LINE (TYPE_NAME (t->type)));
473 }
474 if (t->bases.length ())
475 {
476 fprintf (f, "%*s base odr type ids: ", indent * 2, "");
477 for (i = 0; i < t->bases.length (); i++)
478 fprintf (f, " %i", t->bases[i]->id);
479 fprintf (f, "\n");
480 }
481 if (t->derived_types.length ())
482 {
483 fprintf (f, "%*s derived types:\n", indent * 2, "");
484 for (i = 0; i < t->derived_types.length (); i++)
485 dump_odr_type (f, t->derived_types[i], indent + 1);
486 }
487 fprintf (f, "\n");
488}
489
490/* Dump the type inheritance graph. */
491
492static void
493dump_type_inheritance_graph (FILE *f)
494{
495 unsigned int i;
496 if (!odr_types_ptr)
497 return;
498 fprintf (f, "\n\nType inheritance graph:\n");
499 for (i = 0; i < odr_types.length (); i++)
500 {
501 if (odr_types[i]->bases.length () == 0)
502 dump_odr_type (f, odr_types[i]);
503 }
504 for (i = 0; i < odr_types.length (); i++)
505 {
506 if (odr_types[i]->types && odr_types[i]->types->length ())
507 {
508 unsigned int j;
509 fprintf (f, "Duplicate tree types for odr type %i\n", i);
510 print_node (f, "", odr_types[i]->type, 0);
511 for (j = 0; j < odr_types[i]->types->length (); j++)
512 {
513 tree t;
514 fprintf (f, "duplicate #%i\n", j);
515 print_node (f, "", (*odr_types[i]->types)[j], 0);
516 t = (*odr_types[i]->types)[j];
517 while (TYPE_P (t) && TYPE_CONTEXT (t))
518 {
519 t = TYPE_CONTEXT (t);
520 print_node (f, "", t, 0);
521 }
522 putc ('\n',f);
523 }
524 }
525 }
526}
527
528/* Given method type T, return the type of the class it belongs to.
529 Look up the type of the 'this' pointer (the first argument) and return it. */
530
531tree
532method_class_type (tree t)
533{
534 tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));
535 gcc_assert (TREE_CODE (t) == METHOD_TYPE);
536
537 return TREE_TYPE (first_parm_type);
538}
539
540/* Initialize IPA devirt and build inheritance tree graph. */
541
542void
543build_type_inheritance_graph (void)
544{
545 struct symtab_node *n;
546 FILE *inheritance_dump_file;
547 int flags;
548
549 if (odr_hash.is_created ())
550 return;
551 timevar_push (TV_IPA_INHERITANCE);
552 inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
553 odr_hash.create (23);
554
555 /* We reconstruct the graph starting from types of all methods seen in
556 the unit. */
557 FOR_EACH_SYMBOL (n)
558 if (is_a <cgraph_node> (n)
559 && DECL_VIRTUAL_P (n->decl)
560 && symtab_real_symbol_p (n))
561 get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
562
563 /* Look also for virtual tables of types that do not define any methods.
564
565 We need it in a case where class B has virtual base of class A
566 re-defining its virtual method and there is class C with no virtual
567 methods with B as virtual base.
568
569 Here we output B's virtual method in two variants - for non-virtual
570 and virtual inheritance. B's virtual table has non-virtual version,
571 while C's has virtual.
572
573 For this reason we need to know about C in order to include both
574 variants of B. More correctly, record_target_from_binfo should
575 add both variants of the method when walking B, but we have no
576 link in between them.
577
578 We rely on fact that either the method is exported and thus we
579 assume it is called externally or C is in anonymous namespace and
580 thus we will see the vtable. */
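  /* A minimal sketch of that situation (assumed example, not a GCC testcase):

	struct A { virtual void foo (); };
	struct B : virtual A { void foo (); };
	struct C : virtual B { };

     C defines no methods of its own, yet its virtual table is what makes
     the virtual-inheritance variant of B::foo visible to us.  */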
581
582 else if (is_a <varpool_node> (n)
583 && DECL_VIRTUAL_P (n->decl)
584 && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
585 && TYPE_BINFO (DECL_CONTEXT (n->decl))
586 && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
587 get_odr_type (DECL_CONTEXT (n->decl), true);
588 if (inheritance_dump_file)
589 {
590 dump_type_inheritance_graph (inheritance_dump_file);
591 dump_end (TDI_inheritance, inheritance_dump_file);
592 }
593 timevar_pop (TV_IPA_INHERITANCE);
594}
595
596/* If TARGET has an associated node, record it in the NODES array.
597 If TARGET cannot be inserted (for example because its body was
598 already removed and there is no way to refer to it), clear COMPLETEP. */
599
600static void
601maybe_record_node (vec <cgraph_node *> &nodes,
602 tree target, pointer_set_t *inserted,
603 bool *completep)
604{
605 struct cgraph_node *target_node;
606 enum built_in_function fcode;
607
608 if (!target
eefe9a99 609 /* Those are used to mark impossible scenarios. */
610 || (fcode = DECL_FUNCTION_CODE (target))
611 == BUILT_IN_UNREACHABLE
612 || fcode == BUILT_IN_TRAP)
613 return;
614
615 target_node = cgraph_get_node (target);
616
617 if (target_node != NULL
618 && (TREE_PUBLIC (target)
619 || target_node->definition)
620 && symtab_real_symbol_p (target_node))
621 {
622 gcc_assert (!target_node->global.inlined_to);
623 gcc_assert (symtab_real_symbol_p (target_node));
624 if (!pointer_set_insert (inserted, target))
625 {
626 pointer_set_insert (cached_polymorphic_call_targets,
627 target_node);
628 nodes.safe_push (target_node);
629 }
630 }
631 else if (completep
632 && !type_in_anonymous_namespace_p
633 (method_class_type (TREE_TYPE (target))))
634 *completep = true;
635}
636
637/* See if BINFO's type matches OUTER_TYPE. If so, lookup
638 BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
639 method in vtable and insert method to NODES array.
640 Otherwise recurse to base BINFOs.
641 This matches what get_binfo_at_offset does, but with offset
642 being unknown.
643
644 TYPE_BINFOS is a stack of BINFOS of types with defined
645 virtual table seen on way from class type to BINFO.
eefe9a99
JH
646
647 MATCHED_VTABLES tracks virtual tables we already did lookup
68377e53
JH
648 for virtual function in. INSERTED tracks nodes we already
649 inserted.
650
651 ANONYMOUS is true if BINFO is part of anonymous namespace.
652 */
653
654static void
655record_target_from_binfo (vec <cgraph_node *> &nodes,
656 tree binfo,
657 tree otr_type,
658 vec <tree> &type_binfos,
659 HOST_WIDE_INT otr_token,
660 tree outer_type,
661 HOST_WIDE_INT offset,
662 pointer_set_t *inserted,
663 pointer_set_t *matched_vtables,
664 bool anonymous)
665{
666 tree type = BINFO_TYPE (binfo);
667 int i;
668 tree base_binfo;
669
670
671 if (BINFO_VTABLE (binfo))
672 type_binfos.safe_push (binfo);
673 if (types_same_for_odr (type, outer_type))
674 {
675 int i;
676 tree type_binfo = NULL;
677
678 /* Lookup BINFO with virtual table. For normal types it is always last
679 binfo on stack. */
680 for (i = type_binfos.length () - 1; i >= 0; i--)
681 if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
682 {
683 type_binfo = type_binfos[i];
684 break;
685 }
686 if (BINFO_VTABLE (binfo))
687 type_binfos.pop ();
688 /* If this is duplicated BINFO for base shared by virtual inheritance,
689 we may not have its associated vtable. This is not a problem, since
690 we will walk it on the other path. */
691 if (!type_binfo)
692 {
693 gcc_assert (BINFO_VIRTUAL_P (binfo));
694 return;
695 }
696 tree inner_binfo = get_binfo_at_offset (type_binfo,
697 offset, otr_type);
698 /* For types in anonymous namespace first check if the respective vtable
699 is alive. If not, we know the type can't be called. */
700 if (!flag_ltrans && anonymous)
701 {
702 tree vtable = BINFO_VTABLE (inner_binfo);
703 varpool_node *vnode;
704
705 if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
706 vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
707 vnode = varpool_get_node (vtable);
708 if (!vnode || !vnode->definition)
709 return;
710 }
711 gcc_assert (inner_binfo);
712 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
713 {
714 tree target = gimple_get_virt_method_for_binfo (otr_token, inner_binfo);
715 if (target)
716 maybe_record_node (nodes, target, inserted, NULL);
717 }
eefe9a99
JH
718 return;
719 }
720
721 /* Walk bases. */
722 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
723 /* Walking bases that have no virtual method is a pointless exercise. */
724 if (polymorphic_type_binfo_p (base_binfo))
725 record_target_from_binfo (nodes, base_binfo, otr_type,
726 type_binfos,
727 otr_token, outer_type, offset, inserted,
728 matched_vtables, anonymous);
729 if (BINFO_VTABLE (binfo))
730 type_binfos.pop ();
731}
732
733/* Lookup virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
734 of TYPE, insert them to NODES, recurse into derived nodes.
735 INSERTED is used to avoid duplicate insertions of methods into NODES.
736 MATCHED_VTABLES are used to avoid duplicate walking vtables. */
737
738static void
739possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
740 pointer_set_t *inserted,
741 pointer_set_t *matched_vtables,
742 tree otr_type,
743 odr_type type,
744 HOST_WIDE_INT otr_token,
745 tree outer_type,
746 HOST_WIDE_INT offset)
747{
748 tree binfo = TYPE_BINFO (type->type);
749 unsigned int i;
750 vec <tree> type_binfos = vNULL;
751
752 record_target_from_binfo (nodes, binfo, otr_type, type_binfos, otr_token,
753 outer_type, offset,
754 inserted, matched_vtables,
755 type->anonymous_namespace);
756 type_binfos.release ();
757 for (i = 0; i < type->derived_types.length (); i++)
758 possible_polymorphic_call_targets_1 (nodes, inserted,
759 matched_vtables,
760 otr_type,
761 type->derived_types[i],
762 otr_token, outer_type, offset);
763}
764
765/* Cache of queries for polymorphic call targets.
766
767 Enumerating all call targets may get expensive when there are many
768 polymorphic calls in the program, so we memoize all the previous
769 queries and avoid duplicated work. */
770
771struct polymorphic_call_target_d
772{
773 HOST_WIDE_INT otr_token;
774 ipa_polymorphic_call_context context;
775 odr_type type;
776 vec <cgraph_node *> targets;
777 bool final;
778};
779
780/* Polymorphic call target cache helpers. */
781
782struct polymorphic_call_target_hasher
783{
784 typedef polymorphic_call_target_d value_type;
785 typedef polymorphic_call_target_d compare_type;
786 static inline hashval_t hash (const value_type *);
787 static inline bool equal (const value_type *, const compare_type *);
788 static inline void remove (value_type *);
789};
790
791/* Return the computed hashcode for ODR_QUERY. */
792
793inline hashval_t
794polymorphic_call_target_hasher::hash (const value_type *odr_query)
795{
796 hashval_t hash;
797
798 hash = iterative_hash_host_wide_int
799 (odr_query->otr_token,
800 odr_query->type->id);
801 hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
802 hash);
803 hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
804 return iterative_hash_hashval_t
805 (((int)odr_query->context.maybe_in_construction << 1)
806 | (int)odr_query->context.maybe_derived_type, hash);
807}
808
809/* Compare cache entries T1 and T2. */
810
811inline bool
812polymorphic_call_target_hasher::equal (const value_type *t1,
813 const compare_type *t2)
814{
815 return (t1->type == t2->type && t1->otr_token == t2->otr_token
816 && t1->context.offset == t2->context.offset
817 && t1->context.outer_type == t2->context.outer_type
818 && t1->context.maybe_in_construction
819 == t2->context.maybe_in_construction
820 && t1->context.maybe_derived_type == t2->context.maybe_derived_type);
821}
822
823/* Remove entry in polymorphic call target cache hash. */
824
825inline void
826polymorphic_call_target_hasher::remove (value_type *v)
827{
828 v->targets.release ();
829 free (v);
830}
831
832/* Polymorphic call target query cache. */
833
834typedef hash_table <polymorphic_call_target_hasher>
835 polymorphic_call_target_hash_type;
836static polymorphic_call_target_hash_type polymorphic_call_target_hash;
837
838/* Destroy polymorphic call target query cache. */
839
840static void
841free_polymorphic_call_targets_hash ()
842{
843 if (cached_polymorphic_call_targets)
844 {
845 polymorphic_call_target_hash.dispose ();
846 pointer_set_destroy (cached_polymorphic_call_targets);
847 cached_polymorphic_call_targets = NULL;
848 }
849}
850
851/* When virtual function is removed, we may need to flush the cache. */
852
853static void
854devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
855{
856 if (cached_polymorphic_call_targets
857 && pointer_set_contains (cached_polymorphic_call_targets, n))
858 free_polymorphic_call_targets_hash ();
859}
860
861/* CONTEXT->OUTER_TYPE is a type of memory object where object of EXPECTED_TYPE
862 is contained at CONTEXT->OFFSET. Walk the memory representation of
863 CONTEXT->OUTER_TYPE and find the outermost class type that matches
864 EXPECTED_TYPE or contain EXPECTED_TYPE as a base. Update CONTEXT
865 to represent it.
866
867 For example when CONTEXT represents type
868 class A
869 {
870 int a;
871 class B b;
872 }
873 and we look for type at offset sizeof(int), we end up with B and offset 0.
874 If the same is produced by multiple inheritance, we end up with A and offset
875 sizeof(int).
876
877 If we cannot find the corresponding class, give up by setting
878 CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to NULL.
879 Return true when the lookup was successful. */
880
881static bool
882get_class_context (ipa_polymorphic_call_context *context,
883 tree expected_type)
884{
885 tree type = context->outer_type;
886 HOST_WIDE_INT offset = context->offset;
887
888 /* Find the sub-object the constant actually refers to and mark whether it is
889 an artificial one (as opposed to a user-defined one). */
890 while (true)
891 {
892 HOST_WIDE_INT pos, size;
893 tree fld;
894
895 /* On a match, just return what we found. */
896 if (TREE_CODE (type) == TREE_CODE (expected_type)
897 && types_same_for_odr (type, expected_type))
898 {
899 /* A type cannot contain itself at a non-zero offset. In that case
900 just give up. */
901 if (offset != 0)
902 goto give_up;
903 gcc_assert (offset == 0);
904 return true;
905 }
906
907 /* Walk fields and find the corresponding one at OFFSET. */
908 if (TREE_CODE (type) == RECORD_TYPE)
909 {
910 for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
911 {
912 if (TREE_CODE (fld) != FIELD_DECL)
913 continue;
914
915 pos = int_bit_position (fld);
916 size = tree_to_uhwi (DECL_SIZE (fld));
917 if (pos <= offset && (pos + size) > offset)
918 break;
919 }
920
921 if (!fld)
922 goto give_up;
923
924 type = TREE_TYPE (fld);
925 offset -= pos;
926 /* DECL_ARTIFICIAL represents a basetype. */
927 if (!DECL_ARTIFICIAL (fld))
928 {
929 context->outer_type = type;
930 context->offset = offset;
931 /* As soon as we see a field containing the type,
932 we know we are not looking for derivations. */
933 context->maybe_derived_type = false;
934 }
935 }
936 else if (TREE_CODE (type) == ARRAY_TYPE)
937 {
938 tree subtype = TREE_TYPE (type);
939
940 /* Give up if we don't know array size. */
941 if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
942 || !tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
943 goto give_up;
944 offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
945 type = subtype;
946 context->outer_type = type;
947 context->offset = offset;
948 context->maybe_derived_type = false;
949 }
950 /* Give up on anything else. */
951 else
952 goto give_up;
953 }
954
955 /* If we failed to find subtype we look for, give up and fall back to the
956 most generic query. */
957give_up:
958 context->outer_type = expected_type;
959 context->offset = 0;
960 context->maybe_derived_type = true;
961 return false;
962}
963
964/* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET. */
965
966static bool
967contains_type_p (tree outer_type, HOST_WIDE_INT offset,
968 tree otr_type)
969{
970 ipa_polymorphic_call_context context = {offset, outer_type,
971 false, true};
972 return get_class_context (&context, otr_type);
973}
974
975/* Lookup base of BINFO that has virtual table VTABLE with OFFSET. */
976
977static tree
978subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
979 tree vtable)
980{
981 tree v = BINFO_VTABLE (binfo);
982 int i;
983 tree base_binfo;
984 unsigned HOST_WIDE_INT this_offset;
985
986 if (v)
987 {
988 if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
989 gcc_unreachable ();
990
991 if (offset == this_offset
992 && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
993 return binfo;
994 }
995
996 for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
997 if (polymorphic_type_binfo_p (base_binfo))
998 {
999 base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
1000 if (base_binfo)
1001 return base_binfo;
1002 }
1003 return NULL;
1004}
1005
1006/* T is known constant value of virtual table pointer.
1007 Store virtual table to V and its offset to OFFSET.
1008 Return false if T does not look like virtual table reference. */
1009
1010bool
1011vtable_pointer_value_to_vtable (tree t, tree *v, unsigned HOST_WIDE_INT *offset)
1012{
1013 /* We expect &MEM[(void *)&virtual_table + 16B].
1014 We obtain object's BINFO from the context of the virtual table.
1015 This one contains pointer to virtual table represented via
1016 POINTER_PLUS_EXPR. Verify that this pointer match to what
1017 we propagated through.
1018
1019 In the case of virtual inheritance, the virtual tables may
1020 be nested, i.e. the offset may be different from 16 and we may
1021 need to dive into the type representation. */
1022 if (TREE_CODE (t) == ADDR_EXPR
1023 && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
1024 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
1025 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
1026 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
1027 == VAR_DECL)
1028 && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
1029 (TREE_OPERAND (t, 0), 0), 0)))
1030 {
1031 *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
1032 *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
1033 return true;
1034 }
1035
1036 /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
1037 We need to handle it when T comes from static variable initializer or
1038 BINFO. */
1039 if (TREE_CODE (t) == POINTER_PLUS_EXPR)
1040 {
1041 *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
1042 t = TREE_OPERAND (t, 0);
1043 }
1044 else
1045 *offset = 0;
1046
1047 if (TREE_CODE (t) != ADDR_EXPR)
1048 return false;
1049 *v = TREE_OPERAND (t, 0);
1050 return true;
1051}
1052
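/* For illustration (assumed values): vtable_pointer_value_to_vtable above
   accepts both the ADDR_EXPR form &MEM[(void *)&_ZTV1A + 16B] and the
   POINTER_PLUS_EXPR form (&_ZTV1A plus the constant 16); in either case it
   stores the VAR_DECL of _ZTV1A to *V and 16 to *OFFSET, where _ZTV1A
   stands for the virtual table of some class A.  */
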
1053/* T is known constant value of virtual table pointer. Return BINFO of the
1054 instance type. */
1055
1056tree
1057vtable_pointer_value_to_binfo (tree t)
1058{
1059 tree vtable;
1060 unsigned HOST_WIDE_INT offset;
1061
1062 if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
1063 return NULL_TREE;
1064
1065 /* FIXME: for stores of construction vtables we return NULL,
1066 because we do not have BINFO for those. Eventually we should fix
1067 our representation to allow this case to be handled, too.
1068 In the case we see store of BINFO we however may assume
1069 that standard folding will be able to cope with it. */
1070 return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
1071 offset, vtable);
1072}
1073
1074/* Produce a polymorphic call context for a call to a method of an instance
1075 that is located within BASE (that is assumed to be a decl) at OFFSET. */
1076
1077static void
1078get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
1079 tree base, HOST_WIDE_INT offset)
1080{
1081 gcc_assert (DECL_P (base));
1082
1083 context->outer_type = TREE_TYPE (base);
1084 context->offset = offset;
1085 /* Make very conservative assumption that all objects
1086 may be in construction.
1087 TODO: ipa-prop already contains code to tell better.
1088 merge it later. */
1089 context->maybe_in_construction = true;
1090 context->maybe_derived_type = false;
1091}
1092
1093/* CST is an invariant (address of decl), try to get meaningful
1094 polymorphic call context for a polymorphic call of a method
1095 of an instance of OTR_TYPE that is located at OFFSET of this invariant.
1096 Return FALSE if nothing meaningful can be found. */
1097
1098bool
1099get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
1100 tree cst,
1101 tree otr_type,
1102 HOST_WIDE_INT offset)
1103{
1104 HOST_WIDE_INT offset2, size, max_size;
1105 tree base;
1106
1107 if (TREE_CODE (cst) != ADDR_EXPR)
1108 return NULL_TREE;
1109
1110 cst = TREE_OPERAND (cst, 0);
1111 base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
1112 if (!DECL_P (base)
1113 || max_size == -1
1114 || max_size != size)
1115 return NULL_TREE;
1116
1117 /* Only type inconsistent programs can have otr_type that is
1118 not part of outer type. */
1119 if (!contains_type_p (TREE_TYPE (base),
1120 offset, otr_type))
1121 return NULL_TREE;
1122
1123 get_polymorphic_call_info_for_decl (context,
1124 base, offset);
1125 return true;
1126}
1127
1128/* Given REF call in FNDECL, determine class of the polymorphic
1129 call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
1130 Return pointer to object described by the context */
1131
1132tree
1133get_polymorphic_call_info (tree fndecl,
1134 tree ref,
1135 tree *otr_type,
1136 HOST_WIDE_INT *otr_token,
1137 ipa_polymorphic_call_context *context)
1138{
1139 tree base_pointer;
1140 *otr_type = obj_type_ref_class (ref);
1141 *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));
1142
1143 /* Set up basic info in case we find nothing interesting in the analysis. */
1144 context->outer_type = *otr_type;
1145 context->offset = 0;
1146 base_pointer = OBJ_TYPE_REF_OBJECT (ref);
1147 context->maybe_derived_type = true;
1148 context->maybe_in_construction = false;
1149
1150 /* Walk SSA for outer object. */
1151 do
1152 {
1153 if (TREE_CODE (base_pointer) == SSA_NAME
1154 && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1155 && SSA_NAME_DEF_STMT (base_pointer)
1156 && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
1157 {
1158 base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
1159 STRIP_NOPS (base_pointer);
1160 }
1161 else if (TREE_CODE (base_pointer) == ADDR_EXPR)
1162 {
1163 HOST_WIDE_INT size, max_size;
1164 HOST_WIDE_INT offset2;
1165 tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
1166 &offset2, &size, &max_size);
1167
1168 /* If this is a varying address, punt. */
1169 if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
1170 && max_size != -1
1171 && max_size == size)
1172 {
1173 /* We found dereference of a pointer. Type of the pointer
1174 and MEM_REF is meaningless, but we can look further. */
1175 if (TREE_CODE (base) == MEM_REF)
1176 {
1177 base_pointer = TREE_OPERAND (base, 0);
1178 context->offset
1179 += offset2 + mem_ref_offset (base).low * BITS_PER_UNIT;
1180 context->outer_type = NULL;
1181 }
1182 /* We found base object. In this case the outer_type
1183 is known. */
1184 else if (DECL_P (base))
1185 {
1186 gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));
1187
1188 /* Only type inconsistent programs can have otr_type that is
1189 not part of outer type. */
1190 if (!contains_type_p (TREE_TYPE (base),
1191 context->offset + offset2, *otr_type))
1192 return base_pointer;
1193 get_polymorphic_call_info_for_decl (context, base,
1194 context->offset + offset2);
1195 return NULL;
1196 }
1197 else
1198 break;
1199 }
1200 else
1201 break;
1202 }
1203 else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
1204 && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
1205 {
1206 context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
1207 * BITS_PER_UNIT;
1208 base_pointer = TREE_OPERAND (base_pointer, 0);
1209 }
1210 else
1211 break;
1212 }
1213 while (true);
1214
1215 /* Try to determine type of the outer object. */
1216 if (TREE_CODE (base_pointer) == SSA_NAME
1217 && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
1218 && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
1219 {
1220 /* See if parameter is THIS pointer of a method. */
1221 if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
1222 && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
1223 {
1224 context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
1225 gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);
1226
1227 /* Dynamic casting has possibly upcasted the type
1228 in the hierarchy. In this case the outer type is less
1229 informative than inner type and we should forget
1230 about it. */
1231 if (!contains_type_p (context->outer_type, context->offset,
1232 *otr_type))
1233 {
1234 context->outer_type = NULL;
1235 return base_pointer;
1236 }
1237
1238 /* If the function is constructor or destructor, then
1239 the type is possibly in construction, but we know
1240 it is not derived type. */
1241 if (DECL_CXX_CONSTRUCTOR_P (fndecl)
1242 || DECL_CXX_DESTRUCTOR_P (fndecl))
1243 {
1244 context->maybe_in_construction = true;
1245 context->maybe_derived_type = false;
1246 }
1247 else
1248 {
1249 context->maybe_derived_type = true;
1250 context->maybe_in_construction = false;
1251 }
1252 return base_pointer;
1253 }
1254 /* Non-PODs passed by value are really passed by invisible
1255 reference. In this case we also know the type of the
1256 object. */
1257 if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
1258 {
1259 context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
1260 gcc_assert (!POINTER_TYPE_P (context->outer_type));
1261 /* Only type inconsistent programs can have otr_type that is
1262 not part of outer type. */
1263 if (!contains_type_p (context->outer_type, context->offset,
1264 *otr_type))
1265 {
1266 context->outer_type = NULL;
1267 gcc_unreachable ();
1268 return base_pointer;
1269 }
1270 context->maybe_derived_type = false;
1271 context->maybe_in_construction = false;
1272 return base_pointer;
1273 }
1274 }
1275 /* TODO: There are multiple ways to derive a type. For instance
1276 if BASE_POINTER is passed to a constructor call prior to our reference.
1277 We do not make this type of flow sensitive analysis yet. */
1278 return base_pointer;
1279}
1280
1281/* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
1282 Lookup their respective virtual methods for OTR_TOKEN and OTR_TYPE
1283 and insert them to NODES.
1284
1285 MATCHED_VTABLES and INSERTED is used to avoid duplicated work. */
1286
1287static void
1288record_targets_from_bases (tree otr_type,
1289 HOST_WIDE_INT otr_token,
1290 tree outer_type,
1291 HOST_WIDE_INT offset,
1292 vec <cgraph_node *> nodes,
1293 pointer_set_t *inserted,
1294 pointer_set_t *matched_vtables,
1295 bool *completep)
1296{
1297 while (true)
1298 {
1299 HOST_WIDE_INT pos, size;
1300 tree base_binfo;
1301 tree fld;
1302
1303 if (types_same_for_odr (outer_type, otr_type))
1304 return;
1305
1306 for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
1307 {
1308 if (TREE_CODE (fld) != FIELD_DECL)
1309 continue;
1310
1311 pos = int_bit_position (fld);
1312 size = tree_to_shwi (DECL_SIZE (fld));
1313 if (pos <= offset && (pos + size) > offset)
1314 break;
1315 }
1316 /* Within a class type we should always find corresponding fields. */
1317 gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);
1318
1319 /* Nonbasetypes should have been stripped by outer_class_type. */
1320 gcc_assert (DECL_ARTIFICIAL (fld));
1321
1322 outer_type = TREE_TYPE (fld);
1323 offset -= pos;
1324
1325 base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
1326 offset, otr_type);
1327 gcc_assert (base_binfo);
1328 if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
1329 {
1330 tree target = gimple_get_virt_method_for_binfo (otr_token, base_binfo);
1331 if (target)
1332 maybe_record_node (nodes, target, inserted, completep);
1333 /* The only way method in anonymous namespace can become unreferable
1334 is that it has been fully optimized out. */
1335 else if (flag_ltrans || !type_in_anonymous_namespace_p (outer_type))
1336 *completep = false;
1337 pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo));
1338 }
1339 }
1340}
1341
1342/* When virtual table is removed, we may need to flush the cache. */
1343
1344static void
1345devirt_variable_node_removal_hook (varpool_node *n,
1346 void *d ATTRIBUTE_UNUSED)
1347{
1348 if (cached_polymorphic_call_targets
1349 && DECL_VIRTUAL_P (n->decl)
1350 && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
1351 free_polymorphic_call_targets_hash ();
1352}
1353
1354/* Return vector containing possible targets of polymorphic call of type
1355 OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
1356 If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
1357 OTR_TYPE and include their virtual method. This is useful for types
1358 possibly in construction or destruction where the virtual table may
1359 temporarily change to one of base types. INCLUDE_DERIVED_TYPES makes
1360 us walk the inheritance graph for all derivations.
1361
1362 If COMPLETEP is non-NULL, store true if the list is complete.
1363 CACHE_TOKEN (if non-NULL) will get stored to an unique ID of entry
1364 in the target cache. If user needs to visit every target list
1365 just once, it can memoize them.
1366
1367 Returned vector is placed into cache. It is NOT caller's responsibility
1368 to free it. The vector can be freed on cgraph_remove_node call if
1369 the particular node is a virtual function present in the cache. */
1370
1371vec <cgraph_node *>
1372possible_polymorphic_call_targets (tree otr_type,
1373 HOST_WIDE_INT otr_token,
1374 ipa_polymorphic_call_context context,
1375 bool *completep,
1376 void **cache_token)
1377{
1378 static struct cgraph_node_hook_list *node_removal_hook_holder;
1379 pointer_set_t *inserted;
1380 pointer_set_t *matched_vtables;
1381 vec <cgraph_node *> nodes=vNULL;
1382 odr_type type, outer_type;
1383 polymorphic_call_target_d key;
1384 polymorphic_call_target_d **slot;
1385 unsigned int i;
1386 tree binfo, target;
1387 bool final;
1388
1389 type = get_odr_type (otr_type, true);
1390
1391 /* Lookup the outer class type we want to walk. */
1392 if (context.outer_type)
1393 get_class_context (&context, otr_type);
1394
1395 /* We now canonicalize our query, so we do not need extra hashtable entries. */
1396
1397 /* Without outer type, we have no use for offset. Just do the
1398 basic search from inner type. */
1399 if (!context.outer_type)
1400 {
1401 context.outer_type = otr_type;
1402 context.offset = 0;
1403 }
1404 /* We need to update our hierarchy if the type does not exist. */
1405 outer_type = get_odr_type (context.outer_type, true);
1406 /* If outer and inner type match, there are no bases to see. */
1407 if (type == outer_type)
1408 context.maybe_in_construction = false;
1409 /* If the type is final, there are no derivations. */
1410 if (TYPE_FINAL_P (outer_type->type))
1411 context.maybe_derived_type = false;
1412
1413 /* Initialize query cache. */
1414 if (!cached_polymorphic_call_targets)
1415 {
1416 cached_polymorphic_call_targets = pointer_set_create ();
1417 polymorphic_call_target_hash.create (23);
1418 if (!node_removal_hook_holder)
1419 {
1420 node_removal_hook_holder =
1421 cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
1422 varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
1423 NULL);
1424 }
1425 }
1426
1427 /* Lookup cached answer. */
1428 key.type = type;
1429 key.otr_token = otr_token;
1430 key.context = context;
1431 slot = polymorphic_call_target_hash.find_slot (&key, INSERT);
1432 if (cache_token)
1433 *cache_token = (void *)*slot;
1434 if (*slot)
1435 {
1436 if (completep)
1437 *completep = (*slot)->final;
1438 return (*slot)->targets;
1439 }
1440
1441 final = true;
1442
1443 /* Do actual search. */
1444 timevar_push (TV_IPA_VIRTUAL_CALL);
1445 *slot = XCNEW (polymorphic_call_target_d);
1446 if (cache_token)
1447 *cache_token = (void *)*slot;
1448 (*slot)->type = type;
1449 (*slot)->otr_token = otr_token;
1450 (*slot)->context = context;
1451
1452 inserted = pointer_set_create ();
1453 matched_vtables = pointer_set_create ();
1454
1455 /* First see virtual method of type itself. */
1456
1457 binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
1458 context.offset, otr_type);
1459 target = gimple_get_virt_method_for_binfo (otr_token, binfo);
1460 if (target)
1461 {
1462 maybe_record_node (nodes, target, inserted, &final);
1463
1464 /* In the case we get final method, we don't need
1465 to walk derivations. */
1466 if (DECL_FINAL_P (target))
1467 context.maybe_derived_type = false;
1468 }
1469 /* The only way method in anonymous namespace can become unreferable
1470 is that it has been fully optimized out. */
1471 else if (flag_ltrans || !type->anonymous_namespace)
1472 final = false;
1473 pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));
1474
1475 /* Next walk bases, if asked to. */
1476 if (context.maybe_in_construction)
1477 record_targets_from_bases (otr_type, otr_token, outer_type->type,
1478 context.offset, nodes, inserted,
1479 matched_vtables, &final);
1480
1481 /* Finally walk recursively all derived types. */
1482 if (context.maybe_derived_type)
1483 {
1484 /* For anonymous namespace types we can attempt to build full type.
1485 All derivations must be in this unit (unless we see partial unit). */
1486 if (!type->anonymous_namespace || flag_ltrans)
1487 final = false;
1488 for (i = 0; i < outer_type->derived_types.length(); i++)
1489 possible_polymorphic_call_targets_1 (nodes, inserted,
1490 matched_vtables,
1491 otr_type, outer_type->derived_types[i],
1492 otr_token, outer_type->type,
1493 context.offset);
1494 }
1495 (*slot)->targets = nodes;
1496 (*slot)->final = final;
1497 if (completep)
1498 *completep = final;
1499
1500 pointer_set_destroy (inserted);
1501 pointer_set_destroy (matched_vtables);
1502 timevar_pop (TV_IPA_VIRTUAL_CALL);
1503 return nodes;
1504}
1505
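/* Example use of the query above (a sketch only; the ipa-devirt pass below
   uses an equivalent overload taking the call graph edge):

     bool final;
     void *cache_token;
     vec <cgraph_node *> targets
       = possible_polymorphic_call_targets (otr_type, otr_token,
					    context, &final, &cache_token);
     if (final && targets.length () == 1)
       ... the call can only reach targets[0] ...

   The returned vector lives in the cache and must not be freed by the
   caller.  */
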
1506/* Dump all possible targets of a polymorphic call. */
1507
1508void
1509dump_possible_polymorphic_call_targets (FILE *f,
1510 tree otr_type,
1511 HOST_WIDE_INT otr_token,
1512 const ipa_polymorphic_call_context &ctx)
1513{
1514 vec <cgraph_node *> targets;
1515 bool final;
1516 odr_type type = get_odr_type (otr_type, false);
1517 unsigned int i;
1518
1519 if (!type)
1520 return;
1521 targets = possible_polymorphic_call_targets (otr_type, otr_token,
1522 ctx,
1523 &final);
1524 fprintf (f, " Targets of polymorphic call of type %i:", type->id);
1525 print_generic_expr (f, type->type, TDF_SLIM);
1526 fprintf (f, " token %i\n"
1527 " Contained in type:",
1528 (int)otr_token);
1529 print_generic_expr (f, ctx.outer_type, TDF_SLIM);
1530 fprintf (f, " at offset "HOST_WIDE_INT_PRINT_DEC"\n"
1531 " %s%s%s\n ",
1532 ctx.offset,
1533 final ? "This is full list." :
1534 "This is partial list; extra targets may be defined in other units.",
1535 ctx.maybe_in_construction ? " (base types included)" : "",
1536 ctx.maybe_derived_type ? " (derived types included)" : "");
1537 for (i = 0; i < targets.length (); i++)
1538 fprintf (f, " %s/%i", targets[i]->name (),
1539 targets[i]->order);
1540 fprintf (f, "\n\n");
1541}
1542
1543
1544/* Return true if N can be possibly target of a polymorphic call of
1545 OTR_TYPE/OTR_TOKEN. */
1546
1547bool
1548possible_polymorphic_call_target_p (tree otr_type,
1549 HOST_WIDE_INT otr_token,
1550 const ipa_polymorphic_call_context &ctx,
1551 struct cgraph_node *n)
1552{
1553 vec <cgraph_node *> targets;
1554 unsigned int i;
1555 enum built_in_function fcode;
1556 bool final;
1557
1558 if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
1559 && ((fcode = DECL_FUNCTION_CODE (n->decl))
1560 == BUILT_IN_UNREACHABLE
1561 || fcode == BUILT_IN_TRAP))
1562 return true;
1563
1564 if (!odr_hash.is_created ())
1565 return true;
1566 targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
1567 for (i = 0; i < targets.length (); i++)
1568 if (symtab_semantically_equivalent_p (n, targets[i]))
1569 return true;
1570
1571 /* At the moment we allow the middle end to dig out new external declarations
1572 as targets of polymorphic calls. */
1573 if (!final && !n->definition)
1574 return true;
1575 return false;
1576}
1577
1578
1579/* After callgraph construction new external nodes may appear.
1580 Add them into the graph. */
1581
1582void
1583update_type_inheritance_graph (void)
1584{
1585 struct cgraph_node *n;
1586
1587 if (!odr_hash.is_created ())
1588 return;
1589 free_polymorphic_call_targets_hash ();
1590 timevar_push (TV_IPA_INHERITANCE);
1591 /* We reconstruct the graph starting from types of all methods seen in
1592 the unit. */
1593 FOR_EACH_FUNCTION (n)
1594 if (DECL_VIRTUAL_P (n->decl)
1595 && !n->definition
1596 && symtab_real_symbol_p (n))
1597 get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
1598 timevar_pop (TV_IPA_INHERITANCE);
1599}
1600
1601
1602/* Return true if N looks like likely target of a polymorphic call.
1603 Rule out cxa_pure_virtual, noreturns, functions declared cold and
1604 other obvious cases. */
1605
1606bool
1607likely_target_p (struct cgraph_node *n)
1608{
1609 int flags;
1610 /* cxa_pure_virtual and similar things are not likely. */
1611 if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
1612 return false;
1613 flags = flags_from_decl_or_type (n->decl);
1614 if (flags & ECF_NORETURN)
1615 return false;
1616 if (lookup_attribute ("cold",
1617 DECL_ATTRIBUTES (n->decl)))
1618 return false;
1619 if (n->frequency < NODE_FREQUENCY_NORMAL)
1620 return false;
1621 return true;
1622}
1623
1624/* The ipa-devirt pass.
1625 When a polymorphic call has only one likely target in the unit,
1626 turn it into a speculative call. */
1627
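/* Conceptually (a rough sketch, not the exact GIMPLE produced), a call

     obj->foo ();		   // one likely target A::foo

   is turned by cgraph_turn_edge_to_speculative into the equivalent of

     if (obj->vptr[token] == &A::foo)
       A::foo (obj);		   // direct call, visible to the inliner
     else
       obj->vptr[token] (obj);	   // original indirect call is kept

   so the transformation stays correct even if the guess is wrong.  */
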
1628static unsigned int
1629ipa_devirt (void)
1630{
1631 struct cgraph_node *n;
1632 struct pointer_set_t *bad_call_targets = pointer_set_create ();
1633 struct cgraph_edge *e;
1634
1635 int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
1636 int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
1637 int nwrong = 0, nok = 0, nexternal = 0;;
1638
1639 FOR_EACH_DEFINED_FUNCTION (n)
1640 {
1641 bool update = false;
1642 if (dump_file && n->indirect_calls)
1643 fprintf (dump_file, "\n\nProcessing function %s/%i\n",
1644 n->name (), n->order);
1645 for (e = n->indirect_calls; e; e = e->next_callee)
1646 if (e->indirect_info->polymorphic)
1647 {
1648 struct cgraph_node *likely_target = NULL;
1649 void *cache_token;
1650 bool final;
1651 vec <cgraph_node *>targets
1652 = possible_polymorphic_call_targets
1653 (e, &final, &cache_token);
1654 unsigned int i;
1655
1656 if (dump_file)
1657 dump_possible_polymorphic_call_targets
1658 (dump_file, e);
1659
1660 npolymorphic++;
1661
1662 if (!cgraph_maybe_hot_edge_p (e))
1663 {
1664 if (dump_file)
1665 fprintf (dump_file, "Call is cold\n");
1666 ncold++;
1667 continue;
1668 }
1669 if (e->speculative)
1670 {
1671 if (dump_file)
1672 fprintf (dump_file, "Call is already speculated\n");
1673 nspeculated++;
1674
1675 /* When dumping see if we agree with speculation. */
1676 if (!dump_file)
1677 continue;
1678 }
1679 if (pointer_set_contains (bad_call_targets,
1680 cache_token))
1681 {
1682 if (dump_file)
1683 fprintf (dump_file, "Target list is known to be useless\n");
1684 nmultiple++;
1685 continue;
1686 }
1687 for (i = 0; i < targets.length (); i++)
1688 if (likely_target_p (targets[i]))
1689 {
1690 if (likely_target)
1691 {
1692 likely_target = NULL;
1693 if (dump_file)
1694 fprintf (dump_file, "More than one likely target\n");
1695 nmultiple++;
1696 break;
1697 }
1698 likely_target = targets[i];
1699 }
1700 if (!likely_target)
1701 {
1702 pointer_set_insert (bad_call_targets, cache_token);
1703 continue;
1704 }
1705 /* This is reached only when dumping; check if we agree or disagree
1706 with the speculation. */
1707 if (e->speculative)
1708 {
1709 struct cgraph_edge *e2;
1710 struct ipa_ref *ref;
1711 cgraph_speculative_call_info (e, e2, e, ref);
1712 if (cgraph_function_or_thunk_node (e2->callee, NULL)
1713 == cgraph_function_or_thunk_node (likely_target, NULL))
1714 {
1715 fprintf (dump_file, "We agree with speculation\n");
1716 nok++;
1717 }
1718 else
1719 {
1720 fprintf (dump_file, "We disagree with speculation\n");
1721 nwrong++;
1722 }
1723 continue;
1724 }
1725 if (!likely_target->definition)
1726 {
1727 if (dump_file)
1728 fprintf (dump_file, "Target is not a definition\n");
1729 nnotdefined++;
1730 continue;
1731 }
1732 /* Do not introduce new references to external symbols. While we
1733 can handle these just well, it is common for programs to
1734 incorrectly include headers defining methods they are never
1735 linked with. */
1736 if (DECL_EXTERNAL (likely_target->decl))
1737 {
1738 if (dump_file)
1739 fprintf (dump_file, "Target is external\n");
1740 nexternal++;
1741 continue;
1742 }
1743 if (cgraph_function_body_availability (likely_target)
1744 <= AVAIL_OVERWRITABLE
1745 && symtab_can_be_discarded (likely_target))
1746 {
1747 if (dump_file)
1748 fprintf (dump_file, "Target is overwritable\n");
1749 noverwritable++;
1750 continue;
1751 }
1752 else
1753 {
1754 if (dump_file)
1755 fprintf (dump_file,
1756 "Speculatively devirtualizing call in %s/%i to %s/%i\n",
1757 n->name (), n->order,
1758 likely_target->name (),
1759 likely_target->order);
1760 if (!symtab_can_be_discarded (likely_target))
1761 {
1762 cgraph_node *alias;
1763 alias = cgraph (symtab_nonoverwritable_alias
1764 (likely_target));
1765 if (alias)
1766 likely_target = alias;
1767 }
1768 nconverted++;
1769 update = true;
1770 cgraph_turn_edge_to_speculative
1771 (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
1772 }
1773 }
1774 if (update)
1775 inline_update_overall_summary (n);
1776 }
1777 pointer_set_destroy (bad_call_targets);
1778
1779 if (dump_file)
1780 fprintf (dump_file,
1781 "%i polymorphic calls, %i devirtualized,"
1782 " %i speculatively devirtualized, %i cold\n"
1783 "%i have multiple targets, %i overwritable,"
1784 " %i already speculated (%i agree, %i disagree),"
1785 " %i external, %i not defined\n",
1786 npolymorphic, ndevirtualized, nconverted, ncold,
1787 nmultiple, noverwritable, nspeculated, nok, nwrong,
1788 nexternal, nnotdefined);
1789 return ndevirtualized ? TODO_remove_functions : 0;
1790}
1791
1792/* Gate for speculative IPA devirtualization optimization. */
1793
1794static bool
1795gate_ipa_devirt (void)
1796{
1797 return (flag_devirtualize
1798 && flag_devirtualize_speculatively
1799 && optimize);
1800}
1801
1802namespace {
1803
1804const pass_data pass_data_ipa_devirt =
1805{
1806 IPA_PASS, /* type */
1807 "devirt", /* name */
1808 OPTGROUP_NONE, /* optinfo_flags */
1809 true, /* has_gate */
1810 true, /* has_execute */
1811 TV_IPA_DEVIRT, /* tv_id */
1812 0, /* properties_required */
1813 0, /* properties_provided */
1814 0, /* properties_destroyed */
1815 0, /* todo_flags_start */
1816 ( TODO_dump_symtab ), /* todo_flags_finish */
1817};
1818
1819class pass_ipa_devirt : public ipa_opt_pass_d
1820{
1821public:
1822 pass_ipa_devirt (gcc::context *ctxt)
1823 : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
1824 NULL, /* generate_summary */
1825 NULL, /* write_summary */
1826 NULL, /* read_summary */
1827 NULL, /* write_optimization_summary */
1828 NULL, /* read_optimization_summary */
1829 NULL, /* stmt_fixup */
1830 0, /* function_transform_todo_flags_start */
1831 NULL, /* function_transform */
1832 NULL) /* variable_transform */
1833 {}
1834
1835 /* opt_pass methods: */
1836 bool gate () { return gate_ipa_devirt (); }
1837 unsigned int execute () { return ipa_devirt (); }
1838
1839}; // class pass_ipa_devirt
1840
1841} // anon namespace
1842
1843ipa_opt_pass_d *
1844make_pass_ipa_devirt (gcc::context *ctxt)
1845{
1846 return new pass_ipa_devirt (ctxt);
1847}
1848
1849#include "gt-ipa-devirt.h"