/* Basic IPA utilities for type inheritance graph construction and
   devirtualization.
   Copyright (C) 2013-2014 Free Software Foundation, Inc.
   Contributed by Jan Hubicka

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Brief vocabulary:
     ODR = One Definition Rule
       In short, the ODR states that:
       1 In any translation unit, a template, type, function, or object can
	 have no more than one definition.  Some of these can have any number
	 of declarations.  A definition provides an instance.
       2 In the entire program, an object or non-inline function cannot have
	 more than one definition; if an object or function is used, it must
	 have exactly one definition.  You can declare an object or function
	 that is never used, in which case you don't have to provide
	 a definition.  In no event can there be more than one definition.
       3 Some things, like types, templates, and extern inline functions, can
	 be defined in more than one translation unit.  For a given entity,
	 each definition must be the same.  Non-extern objects and functions
	 in different translation units are different entities, even if their
	 names and types are the same.
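
       For example (an illustration, not part of the standard's text),
       rule 3 is violated when two translation units define a type of
       the same name differently:

	 // tu1.cc
	 struct A { virtual int f () { return 1; } };
	 // tu2.cc
	 struct A { virtual int f () { return 2; } };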

     OTR = OBJ_TYPE_REF
       This is the Gimple representation of type information of a polymorphic call.
       It contains two parameters:
	 otr_type is the type of class whose method is called.
	 otr_token is the index into virtual table where address is taken.
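
       For example (a sketch; the exact dump syntax varies), for

	 struct A { virtual void f (); };
	 void g (A *a) { a->f (); }

       the call in g is an OBJ_TYPE_REF whose otr_type is A and whose
       otr_token is 0, because f occupies the first slot of A's virtual
       table.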

     BINFO
       This is the type inheritance information attached to each tree
       RECORD_TYPE by the C++ frontend.  It provides information about base
       types and virtual tables.

       BINFO is linked to the RECORD_TYPE by TYPE_BINFO.
       BINFO also links to its type by BINFO_TYPE and to the virtual table by
       BINFO_VTABLE.

       Base types of a given type are enumerated by BINFO_BASE_BINFO
       vector.  Members of this vector are not BINFOs associated
       with a base type.  Rather they are new copies of BINFOs
       (base BINFOs).  Their virtual tables may differ from
       virtual table of the base type.  Also BINFO_OFFSET specifies
       offset of the base within the type.

       In the case of single inheritance, the virtual table is shared
       and BINFO_VTABLE of base BINFO is NULL.  In the case of multiple
       inheritance the individual virtual tables are pointed to by
       BINFO_VTABLE of base binfos (which differs from BINFO_VTABLE of
       the binfo associated with the base type).

       BINFO lookup for a given base type and offset can be done by
       get_binfo_at_offset.  It returns proper BINFO whose virtual table
       can be used for lookup of virtual methods associated with the
       base type.
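
       For instance (a sketch), given

	 struct A { virtual void f (); };
	 struct B : A { };		    // single inheritance
	 struct C { virtual void g (); };
	 struct D : A, C { };		    // multiple inheritance

       the base BINFO for A inside TYPE_BINFO (B) has NULL BINFO_VTABLE
       (the virtual table is shared), while the base BINFO for C inside
       TYPE_BINFO (D) has its own BINFO_VTABLE and a non-zero
       BINFO_OFFSET.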

     token
       This is an index of virtual method in virtual table associated
       with the type defining it.  Token can be looked up from OBJ_TYPE_REF
       or from DECL_VINDEX of a given virtual table.

     polymorphic (indirect) call
       This is the callgraph representation of a virtual method call.  Every
       polymorphic call contains otr_type and otr_token taken from the
       original OBJ_TYPE_REF at callgraph construction time.

   What we do here:

   build_type_inheritance_graph triggers a construction of the type inheritance
   graph.

     We reconstruct it based on types of methods we see in the unit.
     This means that the graph is not complete.  Types with no methods are not
     inserted into the graph.  Also types without virtual methods are not
     represented at all, though it may be easy to add this.

     The inheritance graph is represented as follows:

       Vertices are structures odr_type.  Every odr_type may correspond
       to one or more tree type nodes that are equivalent by ODR rule.
       (the multiple type nodes appear only with link-time optimization)

       Edges are represented by odr_type->bases and odr_type->derived_types.
       At the moment we do not track offsets of types for multiple inheritance.
       Adding this is easy.
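
       For instance (a sketch of the resulting graph), for

	 struct A { virtual void f (); };
	 struct B : A { void f (); };

       we create two odr_type vertices; the vertex for B records the
       vertex for A in its bases vector, and A's vertex lists B's in
       derived_types.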

   possible_polymorphic_call_targets returns, given the parameters found in
   an indirect polymorphic edge, all possible polymorphic call targets of the
   call.

   pass_ipa_devirt performs simple speculative devirtualization.
*/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "print-tree.h"
#include "calls.h"
#include "cgraph.h"
#include "expr.h"
#include "tree-pass.h"
#include "pointer-set.h"
#include "target.h"
#include "hash-table.h"
#include "tree-pretty-print.h"
#include "ipa-utils.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "gimple-expr.h"
#include "gimple.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "tree-dfa.h"

/* Dummy polymorphic call context.  */

const ipa_polymorphic_call_context ipa_dummy_polymorphic_call_context
   = {0, NULL, false, true};

/* Pointer set of all call targets appearing in the cache.  */
static pointer_set_t *cached_polymorphic_call_targets;

/* The node of type inheritance graph.  For each type unique in
   One Definition Rule (ODR) sense, we produce one node linking all
   main variants of types equivalent to it, bases and derived types.  */

struct GTY(()) odr_type_d
{
  /* Leader type.  */
  tree type;
  /* All bases.  */
  vec<odr_type> GTY((skip)) bases;
  /* All derived types with virtual methods seen in unit.  */
  vec<odr_type> GTY((skip)) derived_types;

  /* All equivalent types, if more than one.  */
  vec<tree, va_gc> *types;
  /* Set of all equivalent types, if NON-NULL.  */
  pointer_set_t * GTY((skip)) types_set;

  /* Unique ID indexing the type in odr_types array.  */
  int id;
  /* Is it in anonymous namespace?  */
  bool anonymous_namespace;
};


/* Return true if BINFO corresponds to a type with virtual methods.

   Every type has several BINFOs.  One is the BINFO associated with the type
   while others represent bases of derived types.  The BINFOs representing
   bases do not have BINFO_VTABLE pointer set when this is the single
   inheritance (because vtables are shared).  Look up the BINFO of type
   and check presence of its vtable.  */

static inline bool
polymorphic_type_binfo_p (tree binfo)
{
  /* See if BINFO's type has a virtual table associated with it.  */
  return BINFO_VTABLE (TYPE_BINFO (BINFO_TYPE (binfo)));
}

/* One Definition Rule hashtable helpers.  */

struct odr_hasher
{
  typedef odr_type_d value_type;
  typedef union tree_node compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Produce hash based on type name.  */

hashval_t
hash_type_name (tree t)
{
  gcc_checking_assert (TYPE_MAIN_VARIANT (t) == t);

  /* If not in LTO, all main variants are unique, so we can do
     pointer hash.  */
  if (!in_lto_p)
    return htab_hash_pointer (t);

  /* Anonymous types are unique.  */
  if (type_in_anonymous_namespace_p (t))
    return htab_hash_pointer (t);

  /* For polymorphic types, we can simply hash the virtual table.  */
  if (TYPE_BINFO (t) && BINFO_VTABLE (TYPE_BINFO (t)))
    {
      tree v = BINFO_VTABLE (TYPE_BINFO (t));
      hashval_t hash = 0;

      if (TREE_CODE (v) == POINTER_PLUS_EXPR)
	{
	  hash = TREE_INT_CST_LOW (TREE_OPERAND (v, 1));
	  v = TREE_OPERAND (TREE_OPERAND (v, 0), 0);
	}

      v = DECL_ASSEMBLER_NAME (v);
      hash = iterative_hash_hashval_t (hash, htab_hash_pointer (v));
      return hash;
    }

  /* Rest is not implemented yet.  */
  gcc_unreachable ();
}

/* Return the computed hashcode for ODR_TYPE.  */

inline hashval_t
odr_hasher::hash (const value_type *odr_type)
{
  return hash_type_name (odr_type->type);
}

/* Compare types T1 and T2 and return true if they are
   equivalent.  */

inline bool
odr_hasher::equal (const value_type *t1, const compare_type *ct2)
{
  tree t2 = const_cast <tree> (ct2);

  gcc_checking_assert (TYPE_MAIN_VARIANT (t2) == t2);
  if (t1->type == t2)
    return true;
  if (!in_lto_p)
    return false;
  return types_same_for_odr (t1->type, t2);
}

/* Free ODR type V.  */

inline void
odr_hasher::remove (value_type *v)
{
  v->bases.release ();
  v->derived_types.release ();
  if (v->types_set)
    pointer_set_destroy (v->types_set);
  ggc_free (v);
}

/* ODR type hash used to lookup ODR type based on tree type node.  */

typedef hash_table <odr_hasher> odr_hash_type;
static odr_hash_type odr_hash;

/* ODR types are also stored into ODR_TYPE vector to allow consistent
   walking.  Bases appear before derived types.  Vector is garbage collected
   so we won't end up visiting empty types.  */

static GTY(()) vec <odr_type, va_gc> *odr_types_ptr;
#define odr_types (*odr_types_ptr)

/* TYPE is equivalent to VAL by ODR, but its tree representation differs
   from VAL->type.  This may happen in LTO where tree merging did not merge
   all variants of the same type.  It may or may not mean an ODR violation.
   Add it to the list of duplicates and warn on some violations.  */

static void
add_type_duplicate (odr_type val, tree type)
{
  if (!val->types_set)
    val->types_set = pointer_set_create ();

  /* See if this duplicate is new.  */
  if (!pointer_set_insert (val->types_set, type))
    {
      bool merge = true;
      bool base_mismatch = false;
      gcc_assert (in_lto_p);
      vec_safe_push (val->types, type);
      unsigned int i, j;

      /* First we compare memory layout.  */
      if (!types_compatible_p (val->type, type))
	{
	  merge = false;
	  if (BINFO_VTABLE (TYPE_BINFO (val->type))
	      && warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
			     "type %qD violates one definition rule",
			     type))
	    inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
		    "a type with the same name but different layout is "
		    "defined in another translation unit");
	  if (cgraph_dump_file)
	    {
	      fprintf (cgraph_dump_file, "ODR violation or merging or ODR type bug?\n");

	      print_node (cgraph_dump_file, "", val->type, 0);
	      putc ('\n',cgraph_dump_file);
	      print_node (cgraph_dump_file, "", type, 0);
	      putc ('\n',cgraph_dump_file);
	    }
	}

      /* Next sanity check that bases are the same.  If not, we will end
	 up producing wrong answers.  */
      for (j = 0, i = 0; i < BINFO_N_BASE_BINFOS (TYPE_BINFO (type)); i++)
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (TYPE_BINFO (type), i)))
	  {
	    odr_type base = get_odr_type
			       (BINFO_TYPE
				  (BINFO_BASE_BINFO (TYPE_BINFO (type),
						     i)),
				true);
	    if (val->bases.length () <= j || val->bases[j] != base)
	      base_mismatch = true;
	    j++;
	  }
      if (base_mismatch)
	{
	  merge = false;

	  if (warning_at (DECL_SOURCE_LOCATION (TYPE_NAME (type)), 0,
			  "type %qD violates one definition rule",
			  type))
	    inform (DECL_SOURCE_LOCATION (TYPE_NAME (val->type)),
		    "a type with the same name but different bases is "
		    "defined in another translation unit");
	  if (cgraph_dump_file)
	    {
	      fprintf (cgraph_dump_file, "ODR base violation or merging bug?\n");

	      print_node (cgraph_dump_file, "", val->type, 0);
	      putc ('\n',cgraph_dump_file);
	      print_node (cgraph_dump_file, "", type, 0);
	      putc ('\n',cgraph_dump_file);
	    }
	}

      /* Regularize things a little.  During LTO same types may come with
	 different BINFOs.  Either because their virtual table was
	 not merged by tree merging and only later at decl merging or
	 because one type comes with external vtable, while the other
	 with an internal one.  We want to merge equivalent binfos to
	 conserve memory and streaming overhead.

	 The external vtables are more harmful: they contain references
	 to external declarations of methods that may be defined in the
	 merged LTO unit.  For this reason we absolutely need to remove
	 them and replace by internal variants.  Not doing so will lead
	 to incomplete answers from possible_polymorphic_call_targets.  */
      if (!flag_ltrans && merge)
	{
	  tree master_binfo = TYPE_BINFO (val->type);
	  tree v1 = BINFO_VTABLE (master_binfo);
	  tree v2 = BINFO_VTABLE (TYPE_BINFO (type));

	  if (TREE_CODE (v1) == POINTER_PLUS_EXPR)
	    {
	      gcc_assert (TREE_CODE (v2) == POINTER_PLUS_EXPR
			  && operand_equal_p (TREE_OPERAND (v1, 1),
					      TREE_OPERAND (v2, 1), 0));
	      v1 = TREE_OPERAND (TREE_OPERAND (v1, 0), 0);
	      v2 = TREE_OPERAND (TREE_OPERAND (v2, 0), 0);
	    }
	  gcc_assert (DECL_ASSEMBLER_NAME (v1)
		      == DECL_ASSEMBLER_NAME (v2));

	  if (DECL_EXTERNAL (v1) && !DECL_EXTERNAL (v2))
	    {
	      unsigned int i;

	      TYPE_BINFO (val->type) = TYPE_BINFO (type);
	      for (i = 0; i < val->types->length (); i++)
		{
		  if (TYPE_BINFO ((*val->types)[i])
		      == master_binfo)
		    TYPE_BINFO ((*val->types)[i]) = TYPE_BINFO (type);
		}
	    }
	  else
	    TYPE_BINFO (type) = master_binfo;
	}
    }
}

/* Get ODR type hash entry for TYPE.  If INSERT is true, create
   possibly new entry.  */

odr_type
get_odr_type (tree type, bool insert)
{
  odr_type_d **slot;
  odr_type val;
  hashval_t hash;

  type = TYPE_MAIN_VARIANT (type);
  gcc_checking_assert (TYPE_MAIN_VARIANT (type) == type);
  hash = hash_type_name (type);
  slot = odr_hash.find_slot_with_hash (type, hash, insert ? INSERT : NO_INSERT);
  if (!slot)
    return NULL;

  /* See if we already have entry for type.  */
  if (*slot)
    {
      val = *slot;

      /* With LTO we need to support multiple tree representation of
	 the same ODR type.  */
      if (val->type != type)
	add_type_duplicate (val, type);
    }
  else
    {
      tree binfo = TYPE_BINFO (type);
      unsigned int i;

      val = ggc_alloc_cleared_odr_type_d ();
      val->type = type;
      val->bases = vNULL;
      val->derived_types = vNULL;
      val->anonymous_namespace = type_in_anonymous_namespace_p (type);
      *slot = val;
      for (i = 0; i < BINFO_N_BASE_BINFOS (binfo); i++)
	/* For now record only polymorphic types.  Others are
	   pointless for devirtualization and we can not precisely
	   determine ODR equivalency of these during LTO.  */
	if (polymorphic_type_binfo_p (BINFO_BASE_BINFO (binfo, i)))
	  {
	    odr_type base = get_odr_type (BINFO_TYPE (BINFO_BASE_BINFO (binfo,
									i)),
					  true);
	    base->derived_types.safe_push (val);
	    val->bases.safe_push (base);
	  }
      /* First record bases, then add into array so ids are increasing.  */
      if (odr_types_ptr)
	val->id = odr_types.length ();
      vec_safe_push (odr_types_ptr, val);
    }
  return val;
}

/* Dump ODR type T and all its derived types.  INDENT specifies indentation for
   recursive printing.  */

static void
dump_odr_type (FILE *f, odr_type t, int indent=0)
{
  unsigned int i;
  fprintf (f, "%*s type %i: ", indent * 2, "", t->id);
  print_generic_expr (f, t->type, TDF_SLIM);
  fprintf (f, "%s\n", t->anonymous_namespace ? " (anonymous namespace)":"");
  if (TYPE_NAME (t->type))
    {
      fprintf (f, "%*s defined at: %s:%i\n", indent * 2, "",
	       DECL_SOURCE_FILE (TYPE_NAME (t->type)),
	       DECL_SOURCE_LINE (TYPE_NAME (t->type)));
    }
  if (t->bases.length ())
    {
      fprintf (f, "%*s base odr type ids: ", indent * 2, "");
      for (i = 0; i < t->bases.length (); i++)
	fprintf (f, " %i", t->bases[i]->id);
      fprintf (f, "\n");
    }
  if (t->derived_types.length ())
    {
      fprintf (f, "%*s derived types:\n", indent * 2, "");
      for (i = 0; i < t->derived_types.length (); i++)
	dump_odr_type (f, t->derived_types[i], indent + 1);
    }
  fprintf (f, "\n");
}

/* Dump the type inheritance graph.  */

static void
dump_type_inheritance_graph (FILE *f)
{
  unsigned int i;
  if (!odr_types_ptr)
    return;
  fprintf (f, "\n\nType inheritance graph:\n");
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i]->bases.length () == 0)
	dump_odr_type (f, odr_types[i]);
    }
  for (i = 0; i < odr_types.length (); i++)
    {
      if (odr_types[i]->types && odr_types[i]->types->length ())
	{
	  unsigned int j;
	  fprintf (f, "Duplicate tree types for odr type %i\n", i);
	  print_node (f, "", odr_types[i]->type, 0);
	  for (j = 0; j < odr_types[i]->types->length (); j++)
	    {
	      tree t;
	      fprintf (f, "duplicate #%i\n", j);
	      print_node (f, "", (*odr_types[i]->types)[j], 0);
	      t = (*odr_types[i]->types)[j];
	      while (TYPE_P (t) && TYPE_CONTEXT (t))
		{
		  t = TYPE_CONTEXT (t);
		  print_node (f, "", t, 0);
		}
	      putc ('\n',f);
	    }
	}
    }
}

/* Given method type T, return type of class it belongs to.
   Look up the this pointer and get its type.  */

tree
method_class_type (tree t)
{
  gcc_assert (TREE_CODE (t) == METHOD_TYPE);
  tree first_parm_type = TREE_VALUE (TYPE_ARG_TYPES (t));

  return TREE_TYPE (first_parm_type);
}

/* Initialize IPA devirt and build inheritance tree graph.  */

void
build_type_inheritance_graph (void)
{
  struct symtab_node *n;
  FILE *inheritance_dump_file;
  int flags;

  if (odr_hash.is_created ())
    return;
  timevar_push (TV_IPA_INHERITANCE);
  inheritance_dump_file = dump_begin (TDI_inheritance, &flags);
  odr_hash.create (23);

  /* We reconstruct the graph starting from types of all methods seen in the
     unit.  */
  FOR_EACH_SYMBOL (n)
    if (is_a <cgraph_node> (n)
	&& DECL_VIRTUAL_P (n->decl)
	&& symtab_real_symbol_p (n))
      get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);

    /* Look also for virtual tables of types that do not define any methods.

       We need it in a case where class B has virtual base of class A
       re-defining its virtual method and there is class C with no virtual
       methods with B as virtual base.
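
       A sketch of that situation (C defines no virtual methods of its
       own):

	 struct A { virtual void f (); };
	 struct B : virtual A { void f (); };
	 struct C : virtual B { };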

       Here we output B's virtual method in two variants - for non-virtual
       and virtual inheritance.  B's virtual table has non-virtual version,
       while C's has virtual.

       For this reason we need to know about C in order to include both
       variants of B.  More correctly, record_target_from_binfo should
       add both variants of the method when walking B, but we have no
       link in between them.

       We rely on the fact that either the method is exported and thus we
       assume it is called externally or C is in anonymous namespace and
       thus we will see the vtable.  */

    else if (is_a <varpool_node> (n)
	     && DECL_VIRTUAL_P (n->decl)
	     && TREE_CODE (DECL_CONTEXT (n->decl)) == RECORD_TYPE
	     && TYPE_BINFO (DECL_CONTEXT (n->decl))
	     && polymorphic_type_binfo_p (TYPE_BINFO (DECL_CONTEXT (n->decl))))
      get_odr_type (DECL_CONTEXT (n->decl), true);
  if (inheritance_dump_file)
    {
      dump_type_inheritance_graph (inheritance_dump_file);
      dump_end (TDI_inheritance, inheritance_dump_file);
    }
  timevar_pop (TV_IPA_INHERITANCE);
}

/* If TARGET has an associated node, record it in the NODES array.
   If TARGET can not be inserted (for example because its body was
   already removed and there is no way to refer to it), clear COMPLETEP.  */

static void
maybe_record_node (vec <cgraph_node *> &nodes,
		   tree target, pointer_set_t *inserted,
		   bool *completep)
{
  struct cgraph_node *target_node;
  enum built_in_function fcode;

  if (!target
      /* Those are used to mark impossible scenarios.  */
      || (fcode = DECL_FUNCTION_CODE (target))
	 == BUILT_IN_UNREACHABLE
      || fcode == BUILT_IN_TRAP)
    return;

  target_node = cgraph_get_node (target);

  if (target_node != NULL
      && (TREE_PUBLIC (target)
	  || target_node->definition)
      && symtab_real_symbol_p (target_node))
    {
      gcc_assert (!target_node->global.inlined_to);
      gcc_assert (symtab_real_symbol_p (target_node));
      if (!pointer_set_insert (inserted, target))
	{
	  pointer_set_insert (cached_polymorphic_call_targets,
			      target_node);
	  nodes.safe_push (target_node);
	}
    }
  else if (completep
	   && !type_in_anonymous_namespace_p
		 (method_class_type (TREE_TYPE (target))))
    *completep = false;
}

/* See if BINFO's type matches OUTER_TYPE.  If so, look up
   BINFO of subtype of OTR_TYPE at OFFSET and in that BINFO find
   method in vtable and insert method to NODES array.
   Otherwise recurse to base BINFOs.
   This matches what get_binfo_at_offset does, but with offset
   being unknown.

   TYPE_BINFOS is a stack of BINFOs of types with defined
   virtual table seen on the way from class type to BINFO.

   MATCHED_VTABLES tracks virtual tables we have already looked
   the virtual function up in.  INSERTED tracks nodes we already
   inserted.

   ANONYMOUS is true if BINFO is part of anonymous namespace.
*/

static void
record_target_from_binfo (vec <cgraph_node *> &nodes,
			  tree binfo,
			  tree otr_type,
			  vec <tree> &type_binfos,
			  HOST_WIDE_INT otr_token,
			  tree outer_type,
			  HOST_WIDE_INT offset,
			  pointer_set_t *inserted,
			  pointer_set_t *matched_vtables,
			  bool anonymous)
{
  tree type = BINFO_TYPE (binfo);
  int i;
  tree base_binfo;


  if (BINFO_VTABLE (binfo))
    type_binfos.safe_push (binfo);
  if (types_same_for_odr (type, outer_type))
    {
      int i;
      tree type_binfo = NULL;

      /* Look up BINFO with virtual table.  For normal types it is always last
	 binfo on stack.  */
      for (i = type_binfos.length () - 1; i >= 0; i--)
	if (BINFO_OFFSET (type_binfos[i]) == BINFO_OFFSET (binfo))
	  {
	    type_binfo = type_binfos[i];
	    break;
	  }
      if (BINFO_VTABLE (binfo))
	type_binfos.pop ();
      /* If this is duplicated BINFO for base shared by virtual inheritance,
	 we may not have its associated vtable.  This is not a problem, since
	 we will walk it on the other path.  */
      if (!type_binfo)
	return;
      tree inner_binfo = get_binfo_at_offset (type_binfo,
					      offset, otr_type);
      /* For types in anonymous namespace first check if the respective vtable
	 is alive.  If not, we know the type can't be called.  */
      if (!flag_ltrans && anonymous)
	{
	  tree vtable = BINFO_VTABLE (inner_binfo);
	  varpool_node *vnode;

	  if (TREE_CODE (vtable) == POINTER_PLUS_EXPR)
	    vtable = TREE_OPERAND (TREE_OPERAND (vtable, 0), 0);
	  vnode = varpool_get_node (vtable);
	  if (!vnode || !vnode->definition)
	    return;
	}
      gcc_assert (inner_binfo);
      if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (inner_binfo)))
	{
	  tree target = gimple_get_virt_method_for_binfo (otr_token, inner_binfo);
	  if (target)
	    maybe_record_node (nodes, target, inserted, NULL);
	}
      return;
    }

  /* Walk bases.  */
  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    /* Walking bases that have no virtual method is a pointless exercise.  */
    if (polymorphic_type_binfo_p (base_binfo))
      record_target_from_binfo (nodes, base_binfo, otr_type,
				type_binfos,
				otr_token, outer_type, offset, inserted,
				matched_vtables, anonymous);
  if (BINFO_VTABLE (binfo))
    type_binfos.pop ();
}

/* Look up virtual methods matching OTR_TYPE (with OFFSET and OTR_TOKEN)
   of TYPE, insert them to NODES, recurse into derived nodes.
   INSERTED is used to avoid duplicate insertions of methods into NODES.
   MATCHED_VTABLES are used to avoid duplicate walking of vtables.  */

static void
possible_polymorphic_call_targets_1 (vec <cgraph_node *> &nodes,
				     pointer_set_t *inserted,
				     pointer_set_t *matched_vtables,
				     tree otr_type,
				     odr_type type,
				     HOST_WIDE_INT otr_token,
				     tree outer_type,
				     HOST_WIDE_INT offset)
{
  tree binfo = TYPE_BINFO (type->type);
  unsigned int i;
  vec <tree> type_binfos = vNULL;

  record_target_from_binfo (nodes, binfo, otr_type, type_binfos, otr_token,
			    outer_type, offset,
			    inserted, matched_vtables,
			    type->anonymous_namespace);
  type_binfos.release ();
  for (i = 0; i < type->derived_types.length (); i++)
    possible_polymorphic_call_targets_1 (nodes, inserted,
					 matched_vtables,
					 otr_type,
					 type->derived_types[i],
					 otr_token, outer_type, offset);
}

/* Cache of queries for polymorphic call targets.

   Enumerating all call targets may get expensive when there are many
   polymorphic calls in the program, so we memoize all the previous
   queries and avoid duplicated work.  */

struct polymorphic_call_target_d
{
  HOST_WIDE_INT otr_token;
  ipa_polymorphic_call_context context;
  odr_type type;
  vec <cgraph_node *> targets;
  bool final;
};

/* Polymorphic call target cache helpers.  */

struct polymorphic_call_target_hasher
{
  typedef polymorphic_call_target_d value_type;
  typedef polymorphic_call_target_d compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
  static inline void remove (value_type *);
};

/* Return the computed hashcode for ODR_QUERY.  */

inline hashval_t
polymorphic_call_target_hasher::hash (const value_type *odr_query)
{
  hashval_t hash;

  hash = iterative_hash_host_wide_int
	  (odr_query->otr_token,
	   odr_query->type->id);
  hash = iterative_hash_hashval_t (TYPE_UID (odr_query->context.outer_type),
				   hash);
  hash = iterative_hash_host_wide_int (odr_query->context.offset, hash);
  return iterative_hash_hashval_t
	  (((int)odr_query->context.maybe_in_construction << 1)
	   | (int)odr_query->context.maybe_derived_type, hash);
}

/* Compare cache entries T1 and T2.  */

inline bool
polymorphic_call_target_hasher::equal (const value_type *t1,
				       const compare_type *t2)
{
  return (t1->type == t2->type && t1->otr_token == t2->otr_token
	  && t1->context.offset == t2->context.offset
	  && t1->context.outer_type == t2->context.outer_type
	  && t1->context.maybe_in_construction
	      == t2->context.maybe_in_construction
	  && t1->context.maybe_derived_type == t2->context.maybe_derived_type);
}

/* Remove entry in polymorphic call target cache hash.  */

inline void
polymorphic_call_target_hasher::remove (value_type *v)
{
  v->targets.release ();
  free (v);
}

/* Polymorphic call target query cache.  */

typedef hash_table <polymorphic_call_target_hasher>
   polymorphic_call_target_hash_type;
static polymorphic_call_target_hash_type polymorphic_call_target_hash;

/* Destroy polymorphic call target query cache.  */

static void
free_polymorphic_call_targets_hash ()
{
  if (cached_polymorphic_call_targets)
    {
      polymorphic_call_target_hash.dispose ();
      pointer_set_destroy (cached_polymorphic_call_targets);
      cached_polymorphic_call_targets = NULL;
    }
}

/* When virtual function is removed, we may need to flush the cache.  */

static void
devirt_node_removal_hook (struct cgraph_node *n, void *d ATTRIBUTE_UNUSED)
{
  if (cached_polymorphic_call_targets
      && pointer_set_contains (cached_polymorphic_call_targets, n))
    free_polymorphic_call_targets_hash ();
}

/* CONTEXT->OUTER_TYPE is a type of memory object where object of EXPECTED_TYPE
   is contained at CONTEXT->OFFSET.  Walk the memory representation of
   CONTEXT->OUTER_TYPE and find the outermost class type that matches
   EXPECTED_TYPE or contains EXPECTED_TYPE as a base.  Update CONTEXT
   to represent it.

   For example when CONTEXT represents type
   class A
     {
       int a;
       class B b;
     }
   and we look for type at offset sizeof(int), we end up with B and offset 0.
   If the same is produced by multiple inheritance, we end up with A and offset
   sizeof(int).

   If we can not find corresponding class, give up by setting
   CONTEXT->OUTER_TYPE to EXPECTED_TYPE and CONTEXT->OFFSET to 0.
   Return true when lookup was successful.  */

static bool
get_class_context (ipa_polymorphic_call_context *context,
		   tree expected_type)
{
  tree type = context->outer_type;
  HOST_WIDE_INT offset = context->offset;

  /* Find the sub-object the constant actually refers to and mark whether it is
     an artificial one (as opposed to a user-defined one).  */
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree fld;

      /* On a match, just return what we found.  */
      if (TREE_CODE (type) == TREE_CODE (expected_type)
	  && types_same_for_odr (type, expected_type))
	{
	  /* Type can not contain itself on a non-zero offset.  In that case
	     just give up.  */
	  if (offset != 0)
	    goto give_up;
	  gcc_assert (offset == 0);
	  return true;
	}

      /* Walk fields and find the corresponding one at OFFSET.  */
      if (TREE_CODE (type) == RECORD_TYPE)
	{
	  for (fld = TYPE_FIELDS (type); fld; fld = DECL_CHAIN (fld))
	    {
	      if (TREE_CODE (fld) != FIELD_DECL)
		continue;

	      pos = int_bit_position (fld);
	      size = tree_to_uhwi (DECL_SIZE (fld));
	      if (pos <= offset && (pos + size) > offset)
		break;
	    }

	  if (!fld)
	    goto give_up;

	  type = TREE_TYPE (fld);
	  offset -= pos;
	  /* DECL_ARTIFICIAL represents a basetype.  */
	  if (!DECL_ARTIFICIAL (fld))
	    {
	      context->outer_type = type;
	      context->offset = offset;
	      /* As soon as we see a field containing the type,
		 we know we are not looking for derivations.  */
	      context->maybe_derived_type = false;
	    }
	}
      else if (TREE_CODE (type) == ARRAY_TYPE)
	{
	  tree subtype = TREE_TYPE (type);

	  /* Give up if we don't know array size.  */
	  if (!tree_fits_shwi_p (TYPE_SIZE (subtype))
	      || tree_to_shwi (TYPE_SIZE (subtype)) <= 0)
	    goto give_up;
	  offset = offset % tree_to_shwi (TYPE_SIZE (subtype));
	  type = subtype;
	  context->outer_type = type;
	  context->offset = offset;
	  context->maybe_derived_type = false;
	}
      /* Give up on anything else.  */
      else
	goto give_up;
    }

  /* If we failed to find subtype we look for, give up and fall back to the
     most generic query.  */
give_up:
  context->outer_type = expected_type;
  context->offset = 0;
  context->maybe_derived_type = true;
  return false;
}

/* Return true if OUTER_TYPE contains OTR_TYPE at OFFSET.  */

static bool
contains_type_p (tree outer_type, HOST_WIDE_INT offset,
		 tree otr_type)
{
  ipa_polymorphic_call_context context = {offset, outer_type,
					  false, true};
  return get_class_context (&context, otr_type);
}

/* Look up the base of BINFO that has virtual table VTABLE with OFFSET.  */

static tree
subbinfo_with_vtable_at_offset (tree binfo, unsigned HOST_WIDE_INT offset,
				tree vtable)
{
  tree v = BINFO_VTABLE (binfo);
  int i;
  tree base_binfo;
  unsigned HOST_WIDE_INT this_offset;

  if (v)
    {
      if (!vtable_pointer_value_to_vtable (v, &v, &this_offset))
	gcc_unreachable ();

      if (offset == this_offset
	  && DECL_ASSEMBLER_NAME (v) == DECL_ASSEMBLER_NAME (vtable))
	return binfo;
    }

  for (i = 0; BINFO_BASE_ITERATE (binfo, i, base_binfo); i++)
    if (polymorphic_type_binfo_p (base_binfo))
      {
	base_binfo = subbinfo_with_vtable_at_offset (base_binfo, offset, vtable);
	if (base_binfo)
	  return base_binfo;
      }
  return NULL;
}

/* T is known constant value of virtual table pointer.
   Store virtual table to V and its offset to OFFSET.
   Return false if T does not look like virtual table reference.  */

bool
vtable_pointer_value_to_vtable (tree t, tree *v, unsigned HOST_WIDE_INT *offset)
{
  /* We expect &MEM[(void *)&virtual_table + 16B].
     We obtain object's BINFO from the context of the virtual table.
     This one contains pointer to virtual table represented via
     POINTER_PLUS_EXPR.  Verify that this pointer matches what
     we propagated through.

     In the case of virtual inheritance, the virtual tables may
     be nested, i.e. the offset may be different from 16 and we may
     need to dive into the type representation.  */
  if (TREE_CODE (t) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (t, 0)) == MEM_REF
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (TREE_OPERAND (t, 0), 1)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0))
	  == VAR_DECL)
      && DECL_VIRTUAL_P (TREE_OPERAND (TREE_OPERAND
				       (TREE_OPERAND (t, 0), 0), 0)))
    {
      *v = TREE_OPERAND (TREE_OPERAND (TREE_OPERAND (t, 0), 0), 0);
      *offset = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (t, 0), 1));
      return true;
    }

  /* Alternative representation, used by C++ frontend is POINTER_PLUS_EXPR.
     We need to handle it when T comes from static variable initializer or
     BINFO.  */
  if (TREE_CODE (t) == POINTER_PLUS_EXPR)
    {
      *offset = tree_to_uhwi (TREE_OPERAND (t, 1));
      t = TREE_OPERAND (t, 0);
    }
  else
    *offset = 0;

  if (TREE_CODE (t) != ADDR_EXPR)
    return false;
  *v = TREE_OPERAND (t, 0);
  return true;
}

/* T is known constant value of virtual table pointer.  Return BINFO of the
   instance type.  */

tree
vtable_pointer_value_to_binfo (tree t)
{
  tree vtable;
  unsigned HOST_WIDE_INT offset;

  if (!vtable_pointer_value_to_vtable (t, &vtable, &offset))
    return NULL_TREE;

  /* FIXME: for stores of construction vtables we return NULL,
     because we do not have BINFO for those.  Eventually we should fix
     our representation to allow this case to be handled, too.
     In the case we see store of BINFO we however may assume
     that standard folding will be able to cope with it.  */
  return subbinfo_with_vtable_at_offset (TYPE_BINFO (DECL_CONTEXT (vtable)),
					 offset, vtable);
}

/* Produce polymorphic call context for call of method of instance
   that is located within BASE (that is assumed to be a decl) at OFFSET.  */

static void
get_polymorphic_call_info_for_decl (ipa_polymorphic_call_context *context,
				    tree base, HOST_WIDE_INT offset)
{
  gcc_assert (DECL_P (base));

  context->outer_type = TREE_TYPE (base);
  context->offset = offset;
  /* Make very conservative assumption that all objects
     may be in construction.
     TODO: ipa-prop already contains code to tell better.
     Merge it later.  */
  context->maybe_in_construction = true;
  context->maybe_derived_type = false;
}

/* CST is an invariant (address of decl), try to get meaningful
   polymorphic call context for polymorphic call of a method
   of an instance of OTR_TYPE that is located at OFFSET of this invariant.
   Return FALSE if nothing meaningful can be found.  */

bool
get_polymorphic_call_info_from_invariant (ipa_polymorphic_call_context *context,
					  tree cst,
					  tree otr_type,
					  HOST_WIDE_INT offset)
{
  HOST_WIDE_INT offset2, size, max_size;
  tree base;

  if (TREE_CODE (cst) != ADDR_EXPR)
    return false;

  cst = TREE_OPERAND (cst, 0);
  base = get_ref_base_and_extent (cst, &offset2, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size)
    return false;

  /* Only type inconsistent programs can have otr_type that is
     not part of outer type.  */
  if (!contains_type_p (TREE_TYPE (base),
			offset, otr_type))
    return false;

  get_polymorphic_call_info_for_decl (context,
				      base, offset);
  return true;
}

/* Given REF call in FNDECL, determine class of the polymorphic
   call (OTR_TYPE), its token (OTR_TOKEN) and CONTEXT.
   Return pointer to object described by the context.  */

tree
get_polymorphic_call_info (tree fndecl,
			   tree ref,
			   tree *otr_type,
			   HOST_WIDE_INT *otr_token,
			   ipa_polymorphic_call_context *context)
{
  tree base_pointer;
  *otr_type = obj_type_ref_class (ref);
  *otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (ref));

  /* Set up basic info in case we find nothing interesting in the analysis.  */
  context->outer_type = *otr_type;
  context->offset = 0;
  base_pointer = OBJ_TYPE_REF_OBJECT (ref);
  context->maybe_derived_type = true;
  context->maybe_in_construction = false;

  /* Walk SSA for outer object.  */
  do
    {
      if (TREE_CODE (base_pointer) == SSA_NAME
	  && !SSA_NAME_IS_DEFAULT_DEF (base_pointer)
	  && SSA_NAME_DEF_STMT (base_pointer)
	  && gimple_assign_single_p (SSA_NAME_DEF_STMT (base_pointer)))
	{
	  base_pointer = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (base_pointer));
	  STRIP_NOPS (base_pointer);
	}
      else if (TREE_CODE (base_pointer) == ADDR_EXPR)
	{
	  HOST_WIDE_INT size, max_size;
	  HOST_WIDE_INT offset2;
	  tree base = get_ref_base_and_extent (TREE_OPERAND (base_pointer, 0),
					       &offset2, &size, &max_size);

	  /* If this is a varying address, punt.  */
	  if ((TREE_CODE (base) == MEM_REF || DECL_P (base))
	      && max_size != -1
	      && max_size == size)
	    {
	      /* We found dereference of a pointer.  Type of the pointer
		 and MEM_REF is meaningless, but we can look further.  */
	      if (TREE_CODE (base) == MEM_REF)
		{
		  base_pointer = TREE_OPERAND (base, 0);
		  context->offset
		     += offset2 + mem_ref_offset (base).low * BITS_PER_UNIT;
		  context->outer_type = NULL;
		}
	      /* We found base object.  In this case the outer_type
		 is known.  */
	      else if (DECL_P (base))
		{
		  gcc_assert (!POINTER_TYPE_P (TREE_TYPE (base)));

		  /* Only type inconsistent programs can have otr_type that is
		     not part of outer type.  */
		  if (!contains_type_p (TREE_TYPE (base),
					context->offset + offset2, *otr_type))
		    return base_pointer;
		  get_polymorphic_call_info_for_decl (context, base,
						      context->offset + offset2);
		  return NULL;
		}
	      else
		break;
	    }
	  else
	    break;
	}
      else if (TREE_CODE (base_pointer) == POINTER_PLUS_EXPR
	       && tree_fits_uhwi_p (TREE_OPERAND (base_pointer, 1)))
	{
	  context->offset += tree_to_shwi (TREE_OPERAND (base_pointer, 1))
			      * BITS_PER_UNIT;
	  base_pointer = TREE_OPERAND (base_pointer, 0);
	}
      else
	break;
    }
  while (true);

  /* Try to determine type of the outer object.  */
  if (TREE_CODE (base_pointer) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (base_pointer)
      && TREE_CODE (SSA_NAME_VAR (base_pointer)) == PARM_DECL)
    {
      /* See if parameter is THIS pointer of a method.  */
      if (TREE_CODE (TREE_TYPE (fndecl)) == METHOD_TYPE
	  && SSA_NAME_VAR (base_pointer) == DECL_ARGUMENTS (fndecl))
	{
	  context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
	  gcc_assert (TREE_CODE (context->outer_type) == RECORD_TYPE);

	  /* Dynamic casting has possibly upcasted the type
	     in the hierarchy.  In this case outer type is less
	     informative than inner type and we should forget
	     about it.  */
	  if (!contains_type_p (context->outer_type, context->offset,
				*otr_type))
	    {
	      context->outer_type = NULL;
	      return base_pointer;
	    }

	  /* If the function is constructor or destructor, then
	     the type is possibly in construction, but we know
	     it is not derived type.  */
	  if (DECL_CXX_CONSTRUCTOR_P (fndecl)
	      || DECL_CXX_DESTRUCTOR_P (fndecl))
	    {
	      context->maybe_in_construction = true;
	      context->maybe_derived_type = false;
	    }
	  else
	    {
	      context->maybe_derived_type = true;
	      context->maybe_in_construction = false;
	    }
	  return base_pointer;
	}
      /* Non-PODs passed by value are really passed by invisible
	 reference.  In this case we also know the type of the
	 object.  */
      if (DECL_BY_REFERENCE (SSA_NAME_VAR (base_pointer)))
	{
	  context->outer_type = TREE_TYPE (TREE_TYPE (base_pointer));
	  gcc_assert (!POINTER_TYPE_P (context->outer_type));
	  /* Only type inconsistent programs can have otr_type that is
	     not part of outer type.  */
	  if (!contains_type_p (context->outer_type, context->offset,
				*otr_type))
	    {
	      context->outer_type = NULL;
	      gcc_unreachable ();
	      return base_pointer;
	    }
	  context->maybe_derived_type = false;
	  context->maybe_in_construction = false;
	  return base_pointer;
	}
    }
  /* TODO: There are multiple ways to derive a type.  For instance
     if BASE_POINTER is passed to a constructor call prior to our reference.
     We do not make this type of flow sensitive analysis yet.  */
  return base_pointer;
}

/* Walk bases of OUTER_TYPE that contain OTR_TYPE at OFFSET.
   Look up their respective virtual methods for OTR_TOKEN and OTR_TYPE
   and insert them to NODES.

   MATCHED_VTABLES and INSERTED are used to avoid duplicated work.  */
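
/* A sketch of why bases matter for objects in construction:

     struct A { virtual void f (); };
     struct B : A { void f (); };

   While A's constructor runs for an object whose final type is B, the
   object still uses A's virtual table, so a virtual call to f made at
   that point dispatches to A::f rather than B::f.  */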

static void
record_targets_from_bases (tree otr_type,
			   HOST_WIDE_INT otr_token,
			   tree outer_type,
			   HOST_WIDE_INT offset,
			   vec <cgraph_node *> nodes,
			   pointer_set_t *inserted,
			   pointer_set_t *matched_vtables,
			   bool *completep)
{
  while (true)
    {
      HOST_WIDE_INT pos, size;
      tree base_binfo;
      tree fld;

      if (types_same_for_odr (outer_type, otr_type))
	return;

      for (fld = TYPE_FIELDS (outer_type); fld; fld = DECL_CHAIN (fld))
	{
	  if (TREE_CODE (fld) != FIELD_DECL)
	    continue;

	  pos = int_bit_position (fld);
	  size = tree_to_shwi (DECL_SIZE (fld));
	  if (pos <= offset && (pos + size) > offset)
	    break;
	}
      /* Within a class type we should always find corresponding fields.  */
      gcc_assert (fld && TREE_CODE (TREE_TYPE (fld)) == RECORD_TYPE);

      /* Nonbasetypes should have been stripped by outer_class_type.  */
      gcc_assert (DECL_ARTIFICIAL (fld));

      outer_type = TREE_TYPE (fld);
      offset -= pos;

      base_binfo = get_binfo_at_offset (TYPE_BINFO (outer_type),
					offset, otr_type);
      gcc_assert (base_binfo);
      if (!pointer_set_insert (matched_vtables, BINFO_VTABLE (base_binfo)))
	{
	  tree target = gimple_get_virt_method_for_binfo (otr_token, base_binfo);
	  if (target)
	    maybe_record_node (nodes, target, inserted, completep);
	  /* The only way method in anonymous namespace can become unreferable
	     is that it has been fully optimized out.  */
	  else if (flag_ltrans || !type_in_anonymous_namespace_p (outer_type))
	    *completep = false;
	}
    }
}

/* When virtual table is removed, we may need to flush the cache.  */

static void
devirt_variable_node_removal_hook (varpool_node *n,
				   void *d ATTRIBUTE_UNUSED)
{
  if (cached_polymorphic_call_targets
      && DECL_VIRTUAL_P (n->decl)
      && type_in_anonymous_namespace_p (DECL_CONTEXT (n->decl)))
    free_polymorphic_call_targets_hash ();
}

/* Return vector containing possible targets of polymorphic call of type
   OTR_TYPE calling method OTR_TOKEN within type of OTR_OUTER_TYPE and OFFSET.
   If INCLUDE_BASES is true, walk also base types of OUTER_TYPES containing
   OTR_TYPE and include their virtual methods.  This is useful for types
   possibly in construction or destruction where the virtual table may
   temporarily change to one of base types.  INCLUDE_DERIVED_TYPES makes
   us walk the inheritance graph for all derivations.

   If COMPLETEP is non-NULL, store true if the list is complete.
   CACHE_TOKEN (if non-NULL) will get stored to a unique ID of entry
   in the target cache.  If user needs to visit every target list
   just once, it can memoize them.

   Returned vector is placed into cache.  It is NOT caller's responsibility
   to free it.  The vector can be freed on cgraph_remove_node call if
   the particular node is a virtual function present in the cache.  */
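
/* A minimal usage sketch (not a definitive reference; the dummy context
   can be passed when nothing is known about the object):

     bool final;
     vec <cgraph_node *> targets
       = possible_polymorphic_call_targets (otr_type, otr_token,
					    ipa_dummy_polymorphic_call_context,
					    &final, NULL);
     if (final && targets.length () == 1)
       ... the call may be devirtualized to targets[0]->decl ...  */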
| 1367 |
|
| 1368 |
vec <cgraph_node *> |
| 1369 |
possible_polymorphic_call_targets (tree otr_type, |
| 1370 |
HOST_WIDE_INT otr_token, |
| 1371 |
ipa_polymorphic_call_context context, |
| 1372 |
bool *completep, |
| 1373 |
void **cache_token) |
| 1374 |
{ |
| 1375 |
static struct cgraph_node_hook_list *node_removal_hook_holder; |
| 1376 |
pointer_set_t *inserted; |
| 1377 |
pointer_set_t *matched_vtables; |
| 1378 |
vec <cgraph_node *> nodes = vNULL; |
| 1379 |
odr_type type, outer_type; |
| 1380 |
polymorphic_call_target_d key; |
| 1381 |
polymorphic_call_target_d **slot; |
| 1382 |
unsigned int i; |
| 1383 |
tree binfo, target; |
| 1384 |
bool final; |
| 1385 |
|
| 1386 |
if (!odr_hash.is_created ()) |
| 1387 |
{ |
| 1388 |
if (completep) |
| 1389 |
*completep = false; |
| 1390 |
return nodes; |
| 1391 |
} |
| 1392 |
|
| 1393 |
type = get_odr_type (otr_type, true); |
| 1394 |
|
| 1395 |
/* Lookup the outer class type we want to walk. */ |
| 1396 |
if (context.outer_type) |
| 1397 |
get_class_context (&context, otr_type); |
| 1398 |
|
| 1399 |
/* We now canonicalize our query, so we do not need extra hashtable entries. */ |
| 1400 |
|
| 1401 |
/* Without outer type, we have no use for offset. Just do the |
| 1402 |
basic search from innter type */ |
| 1403 |
if (!context.outer_type) |
| 1404 |
{ |
| 1405 |
context.outer_type = otr_type; |
| 1406 |
context.offset = 0; |
| 1407 |
} |
| 1408 |
/* We need to update our hiearchy if the type does not exist. */ |
| 1409 |
outer_type = get_odr_type (context.outer_type, true); |
| 1410 |
/* If outer and inner type match, there are no bases to see. */ |
| 1411 |
if (type == outer_type) |
| 1412 |
context.maybe_in_construction = false; |
| 1413 |
/* If the type is final, there are no derivations. */ |
| 1414 |
if (TYPE_FINAL_P (outer_type->type)) |
| 1415 |
context.maybe_derived_type = false; |

  /* Initialize query cache.  */
  if (!cached_polymorphic_call_targets)
    {
      cached_polymorphic_call_targets = pointer_set_create ();
      polymorphic_call_target_hash.create (23);
      if (!node_removal_hook_holder)
	{
	  node_removal_hook_holder =
	    cgraph_add_node_removal_hook (&devirt_node_removal_hook, NULL);
	  varpool_add_node_removal_hook (&devirt_variable_node_removal_hook,
					 NULL);
	}
    }

  /* Look up the cached answer.  */
  key.type = type;
  key.otr_token = otr_token;
  key.context = context;
  slot = polymorphic_call_target_hash.find_slot (&key, INSERT);
  if (cache_token)
    *cache_token = (void *)*slot;
  if (*slot)
    {
      if (completep)
	*completep = (*slot)->final;
      return (*slot)->targets;
    }

  final = true;

  /* Do the actual search.  */
  timevar_push (TV_IPA_VIRTUAL_CALL);
  *slot = XCNEW (polymorphic_call_target_d);
  if (cache_token)
    *cache_token = (void *)*slot;
  (*slot)->type = type;
  (*slot)->otr_token = otr_token;
  (*slot)->context = context;

  inserted = pointer_set_create ();
  matched_vtables = pointer_set_create ();

  /* First check the virtual method of the type itself.  */
  binfo = get_binfo_at_offset (TYPE_BINFO (outer_type->type),
			       context.offset, otr_type);
  target = gimple_get_virt_method_for_binfo (otr_token, binfo);
  if (target)
    {
      maybe_record_node (nodes, target, inserted, &final);

      /* If we got a final method, we don't need to walk derivations.  */
      if (DECL_FINAL_P (target))
	context.maybe_derived_type = false;
    }
  /* The only way a method in an anonymous namespace can become
     unreferable is when it has been fully optimized out.  */
  else if (flag_ltrans || !type->anonymous_namespace)
    final = false;
  pointer_set_insert (matched_vtables, BINFO_VTABLE (binfo));

  /* Next walk the bases, if asked to.  */
  if (context.maybe_in_construction)
    record_targets_from_bases (otr_type, otr_token, outer_type->type,
			       context.offset, nodes, inserted,
			       matched_vtables, &final);

  /* Finally walk all derived types recursively.  */
  if (context.maybe_derived_type)
    {
      /* For anonymous namespace types we can attempt to build the full
	 list of targets; all derivations must be in this unit (unless
	 we see a partial unit).  */
      if (!type->anonymous_namespace || flag_ltrans)
	final = false;
      for (i = 0; i < outer_type->derived_types.length (); i++)
	possible_polymorphic_call_targets_1 (nodes, inserted,
					     matched_vtables,
					     otr_type,
					     outer_type->derived_types[i],
					     otr_token, outer_type->type,
					     context.offset);
    }
  (*slot)->targets = nodes;
  (*slot)->final = final;
  if (completep)
    *completep = final;

  pointer_set_destroy (inserted);
  pointer_set_destroy (matched_vtables);
  timevar_pop (TV_IPA_VIRTUAL_CALL);
  return nodes;
}
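
/* A minimal sketch of how the lookup above is typically consumed
   (hypothetical caller; ipa_devirt below is a real one):

     bool final;
     void *cache_token;
     vec <cgraph_node *> targets
       = possible_polymorphic_call_targets (otr_type, otr_token,
					    context, &final, &cache_token);
     if (final && targets.length () == 1)
       ... the call can only reach targets[0] ...

   CACHE_TOKEN identifies the cache entry, so repeated queries yielding
   the same answer can be recognized cheaply by the caller.  */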

/* Dump all possible targets of a polymorphic call.  */

void
dump_possible_polymorphic_call_targets (FILE *f,
					tree otr_type,
					HOST_WIDE_INT otr_token,
					const ipa_polymorphic_call_context &ctx)
{
  vec <cgraph_node *> targets;
  bool final;
  odr_type type = get_odr_type (otr_type, false);
  unsigned int i;

  if (!type)
    return;
  targets = possible_polymorphic_call_targets (otr_type, otr_token,
					       ctx,
					       &final);
  fprintf (f, "  Targets of polymorphic call of type %i:", type->id);
  print_generic_expr (f, type->type, TDF_SLIM);
  fprintf (f, " token %i\n"
	   "    Contained in type:",
	   (int)otr_token);
  print_generic_expr (f, ctx.outer_type, TDF_SLIM);
  fprintf (f, " at offset " HOST_WIDE_INT_PRINT_DEC "\n"
	   "    %s%s%s\n      ",
	   ctx.offset,
	   final ? "This is a full list." :
	   "This is a partial list; extra targets may be defined in other units.",
	   ctx.maybe_in_construction ? " (base types included)" : "",
	   ctx.maybe_derived_type ? " (derived types included)" : "");
  for (i = 0; i < targets.length (); i++)
    fprintf (f, " %s/%i", targets[i]->name (),
	     targets[i]->order);
  fprintf (f, "\n\n");
}
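
/* With the format strings above, a dump entry has roughly this shape
   (illustrative values only):

     Targets of polymorphic call of type 2:struct A token 0
       Contained in type:struct A at offset 0
       This is a full list.
	foo/5 foo/7  */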


/* Return true if N can possibly be a target of a polymorphic call to
   OTR_TYPE/OTR_TOKEN.  */

bool
possible_polymorphic_call_target_p (tree otr_type,
				    HOST_WIDE_INT otr_token,
				    const ipa_polymorphic_call_context &ctx,
				    struct cgraph_node *n)
{
  vec <cgraph_node *> targets;
  unsigned int i;
  enum built_in_function fcode;
  bool final;

  if (TREE_CODE (TREE_TYPE (n->decl)) == FUNCTION_TYPE
      && ((fcode = DECL_FUNCTION_CODE (n->decl))
	  == BUILT_IN_UNREACHABLE
	  || fcode == BUILT_IN_TRAP))
    return true;

  if (!odr_hash.is_created ())
    return true;
  targets = possible_polymorphic_call_targets (otr_type, otr_token, ctx, &final);
  for (i = 0; i < targets.length (); i++)
    if (symtab_semantically_equivalent_p (n, targets[i]))
      return true;

  /* At the moment we allow the middle end to dig out new external
     declarations as possible targets of polymorphic calls.  */
  if (!final && !n->definition)
    return true;
  return false;
}


/* After callgraph construction new external nodes may appear.
   Add them into the graph.  */

void
update_type_inheritance_graph (void)
{
  struct cgraph_node *n;

  if (!odr_hash.is_created ())
    return;
  free_polymorphic_call_targets_hash ();
  timevar_push (TV_IPA_INHERITANCE);
  /* We reconstruct the graph starting from the types of all methods seen
     in the unit.  */
  FOR_EACH_FUNCTION (n)
    if (DECL_VIRTUAL_P (n->decl)
	&& !n->definition
	&& symtab_real_symbol_p (n))
      get_odr_type (method_class_type (TREE_TYPE (n->decl)), true);
  timevar_pop (TV_IPA_INHERITANCE);
}


/* Return true if N looks like a likely target of a polymorphic call.
   Rule out cxa_pure_virtual, noreturns, functions declared cold and
   other obvious cases.  */

bool
likely_target_p (struct cgraph_node *n)
{
  int flags;
  /* cxa_pure_virtual and similar things are not likely.  */
  if (TREE_CODE (TREE_TYPE (n->decl)) != METHOD_TYPE)
    return false;
  flags = flags_from_decl_or_type (n->decl);
  if (flags & ECF_NORETURN)
    return false;
  if (lookup_attribute ("cold",
			DECL_ATTRIBUTES (n->decl)))
    return false;
  if (n->frequency < NODE_FREQUENCY_NORMAL)
    return false;
  return true;
}
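
/* For example (hypothetical declarations), neither method below would
   pass likely_target_p:

     struct A
     {
       virtual void f () __attribute__ ((cold));	// "cold" attribute
       virtual void g () __attribute__ ((noreturn));	// ECF_NORETURN
     };  */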

/* The ipa-devirt pass.
   When a polymorphic call has only one likely target in the unit,
   turn it into a speculative call.  */
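
/* Conceptually (a sketch using a hypothetical class A with virtual
   method foo), the transformation rewrites

     ptr->foo ();			// indirect, not inlinable

   into the equivalent of

     if (fn == A::foo)			// test the loaded vtable slot
       A::foo (ptr);			// direct call, can be inlined
     else
       fn (ptr);			// fallback indirect call

   The direct edge is created with 8/10 of the original count and
   frequency; see cgraph_turn_edge_to_speculative below.  */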

static unsigned int
ipa_devirt (void)
{
  struct cgraph_node *n;
  struct pointer_set_t *bad_call_targets = pointer_set_create ();
  struct cgraph_edge *e;

  int npolymorphic = 0, nspeculated = 0, nconverted = 0, ncold = 0;
  int nmultiple = 0, noverwritable = 0, ndevirtualized = 0, nnotdefined = 0;
  int nwrong = 0, nok = 0, nexternal = 0;

  FOR_EACH_DEFINED_FUNCTION (n)
    {
      bool update = false;
      if (dump_file && n->indirect_calls)
	fprintf (dump_file, "\n\nProcessing function %s/%i\n",
		 n->name (), n->order);
      for (e = n->indirect_calls; e; e = e->next_callee)
	if (e->indirect_info->polymorphic)
	  {
	    struct cgraph_node *likely_target = NULL;
	    void *cache_token;
	    bool final;
	    vec <cgraph_node *> targets
	      = possible_polymorphic_call_targets
		  (e, &final, &cache_token);
	    unsigned int i;

	    if (dump_file)
	      dump_possible_polymorphic_call_targets
		(dump_file, e);

	    npolymorphic++;

	    if (!cgraph_maybe_hot_edge_p (e))
	      {
		if (dump_file)
		  fprintf (dump_file, "Call is cold\n");
		ncold++;
		continue;
	      }
	    if (e->speculative)
	      {
		if (dump_file)
		  fprintf (dump_file, "Call is already speculated\n");
		nspeculated++;

		/* When dumping, see if we agree with the speculation.  */
		if (!dump_file)
		  continue;
	      }
	    if (pointer_set_contains (bad_call_targets,
				      cache_token))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target list is known to be useless\n");
		nmultiple++;
		continue;
	      }
	    for (i = 0; i < targets.length (); i++)
	      if (likely_target_p (targets[i]))
		{
		  if (likely_target)
		    {
		      likely_target = NULL;
		      if (dump_file)
			fprintf (dump_file, "More than one likely target\n");
		      nmultiple++;
		      break;
		    }
		  likely_target = targets[i];
		}
	    if (!likely_target)
	      {
		pointer_set_insert (bad_call_targets, cache_token);
		continue;
	      }
	    /* This is reached only when dumping; check if we agree or
	       disagree with the speculation.  */
	    if (e->speculative)
	      {
		struct cgraph_edge *e2;
		struct ipa_ref *ref;
		cgraph_speculative_call_info (e, e2, e, ref);
		if (cgraph_function_or_thunk_node (e2->callee, NULL)
		    == cgraph_function_or_thunk_node (likely_target, NULL))
		  {
		    fprintf (dump_file, "We agree with speculation\n");
		    nok++;
		  }
		else
		  {
		    fprintf (dump_file, "We disagree with speculation\n");
		    nwrong++;
		  }
		continue;
	      }
	    if (!likely_target->definition)
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is not a definition\n");
		nnotdefined++;
		continue;
	      }
	    /* Do not introduce new references to external symbols.  While
	       we can handle these just fine, it is common for programs to
	       be built with headers declaring methods that are never
	       actually linked in.  */
	    if (DECL_EXTERNAL (likely_target->decl))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is external\n");
		nexternal++;
		continue;
	      }
	    if (cgraph_function_body_availability (likely_target)
		  <= AVAIL_OVERWRITABLE
		&& symtab_can_be_discarded (likely_target))
	      {
		if (dump_file)
		  fprintf (dump_file, "Target is overwritable\n");
		noverwritable++;
		continue;
	      }
	    else
	      {
		if (dump_file)
		  fprintf (dump_file,
			   "Speculatively devirtualizing call in %s/%i to %s/%i\n",
			   n->name (), n->order,
			   likely_target->name (),
			   likely_target->order);
		if (!symtab_can_be_discarded (likely_target))
		  {
		    cgraph_node *alias;
		    alias = cgraph (symtab_nonoverwritable_alias
				      (likely_target));
		    if (alias)
		      likely_target = alias;
		  }
		nconverted++;
		update = true;
		cgraph_turn_edge_to_speculative
		  (e, likely_target, e->count * 8 / 10, e->frequency * 8 / 10);
	      }
	  }
      if (update)
	inline_update_overall_summary (n);
    }
  pointer_set_destroy (bad_call_targets);

  if (dump_file)
    fprintf (dump_file,
	     "%i polymorphic calls, %i devirtualized,"
	     " %i speculatively devirtualized, %i cold\n"
	     "%i have multiple targets, %i overwritable,"
	     " %i already speculated (%i agree, %i disagree),"
	     " %i external, %i not defined\n",
	     npolymorphic, ndevirtualized, nconverted, ncold,
	     nmultiple, noverwritable, nspeculated, nok, nwrong,
	     nexternal, nnotdefined);
  return ndevirtualized ? TODO_remove_functions : 0;
}

/* Gate for the speculative IPA devirtualization optimization.  */

static bool
gate_ipa_devirt (void)
{
  return (flag_devirtualize
	  && flag_devirtualize_speculatively
	  && optimize);
}
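
/* Note: flag_devirtualize and flag_devirtualize_speculatively correspond
   to -fdevirtualize and -fdevirtualize-speculatively; both are enabled
   by default at -O2 and above, so the pass normally runs whenever we
   optimize.  */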

namespace {

const pass_data pass_data_ipa_devirt =
{
  IPA_PASS, /* type */
  "devirt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  true, /* has_gate */
  true, /* has_execute */
  TV_IPA_DEVIRT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_dump_symtab ), /* todo_flags_finish */
};

class pass_ipa_devirt : public ipa_opt_pass_d
{
public:
  pass_ipa_devirt (gcc::context *ctxt)
    : ipa_opt_pass_d (pass_data_ipa_devirt, ctxt,
		      NULL, /* generate_summary */
		      NULL, /* write_summary */
		      NULL, /* read_summary */
		      NULL, /* write_optimization_summary */
		      NULL, /* read_optimization_summary */
		      NULL, /* stmt_fixup */
		      0, /* function_transform_todo_flags_start */
		      NULL, /* function_transform */
		      NULL) /* variable_transform */
  {}

  /* opt_pass methods: */
  bool gate () { return gate_ipa_devirt (); }
  unsigned int execute () { return ipa_devirt (); }

}; // class pass_ipa_devirt

} // anon namespace

ipa_opt_pass_d *
make_pass_ipa_devirt (gcc::context *ctxt)
{
  return new pass_ipa_devirt (ctxt);
}

#include "gt-ipa-devirt.h"