]> gcc.gnu.org Git - gcc.git/blame - gcc/ipa-prop.c
New syntax for -fsanitize-recover.
[gcc.git] / gcc / ipa-prop.c
CommitLineData
518dc859 1/* Interprocedural analyses.
23a5b65a 2 Copyright (C) 2005-2014 Free Software Foundation, Inc.
518dc859
RL
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
518dc859
RL
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
518dc859
RL
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tree.h"
2fb9a547
AM
24#include "basic-block.h"
25#include "tree-ssa-alias.h"
26#include "internal-fn.h"
27#include "gimple-fold.h"
28#include "tree-eh.h"
29#include "gimple-expr.h"
30#include "is-a.h"
18f429e2 31#include "gimple.h"
d8a2d370
DN
32#include "expr.h"
33#include "stor-layout.h"
34#include "print-tree.h"
45b0be94 35#include "gimplify.h"
5be5c238 36#include "gimple-iterator.h"
18f429e2 37#include "gimplify-me.h"
5be5c238 38#include "gimple-walk.h"
518dc859 39#include "langhooks.h"
518dc859 40#include "target.h"
518dc859 41#include "ipa-prop.h"
442b4905
AM
42#include "bitmap.h"
43#include "gimple-ssa.h"
44#include "tree-cfg.h"
45#include "tree-phinodes.h"
46#include "ssa-iterators.h"
47#include "tree-into-ssa.h"
48#include "tree-dfa.h"
518dc859 49#include "tree-pass.h"
771578a0 50#include "tree-inline.h"
0f378cb5 51#include "ipa-inline.h"
518dc859 52#include "flags.h"
3e293154 53#include "diagnostic.h"
cf835838 54#include "gimple-pretty-print.h"
fb3f88cc 55#include "lto-streamer.h"
f0efc7aa
DN
56#include "data-streamer.h"
57#include "tree-streamer.h"
dfea20f1 58#include "params.h"
450ad0cd 59#include "ipa-utils.h"
4df65a85
RB
60#include "stringpool.h"
61#include "tree-ssanames.h"
2b5f0895 62#include "dbgcnt.h"
8aab5218 63#include "domwalk.h"
9b2b7279 64#include "builtins.h"
058d0a90 65#include "calls.h"
771578a0 66
8aab5218
MJ
/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};
85
/* Information related to a given BB that is used only when looking at the
   function body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;
  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};
96
/* Structure with global information that is only used when looking at a
   function body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs, indexed by bb->index.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements already walked when analyzing this function.  Used
     to bound the amount of alias-analysis work done per function.  */
  unsigned int aa_walked;
};
117
/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks, used to unregister the callbacks again when
   IPA structures are freed:  */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;
518dc859 131
4502fe8d
MJ
/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  Created lazily in
   ipa_set_jf_constant.  */

static alloc_pool ipa_refdesc_pool;
5fe8e757
MJ
148/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
149 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
150
151static bool
152ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
153{
67348ccc 154 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
5fe8e757
MJ
155 struct cl_optimization *os;
156
157 if (!fs_opts)
158 return false;
159 os = TREE_OPTIMIZATION (fs_opts);
160 return !os->x_optimize || !os->x_flag_ipa_cp;
161}
162
be95e2b9
MJ
163/* Return index of the formal whose tree is PTREE in function which corresponds
164 to INFO. */
165
d044dd17 166static int
84562394 167ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
518dc859
RL
168{
169 int i, count;
170
9771b263 171 count = descriptors.length ();
518dc859 172 for (i = 0; i < count; i++)
9771b263 173 if (descriptors[i].decl == ptree)
518dc859
RL
174 return i;
175
176 return -1;
177}
178
d044dd17
MJ
/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  Returns -1 when PTREE is not a parameter of that function.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  /* Simply a lookup over INFO's parameter descriptor vector.  */
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}
187
188/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
189 NODE. */
be95e2b9 190
f8e2a1ed
MJ
191static void
192ipa_populate_param_decls (struct cgraph_node *node,
84562394 193 vec<ipa_param_descriptor> &descriptors)
518dc859
RL
194{
195 tree fndecl;
196 tree fnargs;
197 tree parm;
198 int param_num;
3e293154 199
67348ccc 200 fndecl = node->decl;
0e8853ee 201 gcc_assert (gimple_has_body_p (fndecl));
518dc859
RL
202 fnargs = DECL_ARGUMENTS (fndecl);
203 param_num = 0;
910ad8de 204 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
518dc859 205 {
9771b263 206 descriptors[param_num].decl = parm;
b4c9af96
RB
207 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
208 true);
518dc859
RL
209 param_num++;
210 }
211}
212
3f84bf08
MJ
213/* Return how many formal parameters FNDECL has. */
214
fd29c024 215int
310bc633 216count_formal_params (tree fndecl)
3f84bf08
MJ
217{
218 tree parm;
219 int count = 0;
0e8853ee 220 gcc_assert (gimple_has_body_p (fndecl));
3f84bf08 221
910ad8de 222 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3f84bf08
MJ
223 count++;
224
225 return count;
226}
227
0e8853ee
JH
/* Dump the Ith formal parameter of the function corresponding to INFO to
   FILE.  Prints "param #i" followed by the pretty-printed declaration when
   it is known.  (The previous comment here described a getter and did not
   match what this function does.)  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}
242
243/* Initialize the ipa_node_params structure associated with NODE
244 to hold PARAM_COUNT parameters. */
245
246void
247ipa_alloc_node_params (struct cgraph_node *node, int param_count)
248{
249 struct ipa_node_params *info = IPA_NODE_REF (node);
250
251 if (!info->descriptors.exists () && param_count)
252 info->descriptors.safe_grow_cleared (param_count);
253}
254
f8e2a1ed
MJ
255/* Initialize the ipa_node_params structure associated with NODE by counting
256 the function parameters, creating the descriptors and populating their
257 param_decls. */
be95e2b9 258
f8e2a1ed
MJ
259void
260ipa_initialize_node_params (struct cgraph_node *node)
261{
262 struct ipa_node_params *info = IPA_NODE_REF (node);
263
9771b263 264 if (!info->descriptors.exists ())
f8e2a1ed 265 {
67348ccc 266 ipa_alloc_node_params (node, count_formal_params (node->decl));
0e8853ee 267 ipa_populate_param_decls (node, info->descriptors);
f8e2a1ed 268 }
518dc859
RL
269}
270
749aa96d
MJ
/* Print the jump functions associated with call graph edge CS to file F.
   One line is emitted per argument, describing the kind of jump function
   (UNKNOWN, KNOWN TYPE, CONST, PASS THROUGH or ANCESTOR) followed by any
   aggregate values passed in memory and the polymorphic call context.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, " param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
	{
	  fprintf (f, "KNOWN TYPE: base ");
	  print_generic_expr (f, jump_func->value.known_type.base_type, 0);
	  fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
		   jump_func->value.known_type.offset);
	  print_generic_expr (f, jump_func->value.known_type.component_type, 0);
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  /* For addresses of constants also show what they point to.  */
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
				  0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name(jump_func->value.pass_through.operation));
	  /* A non-NOP operation has a second operand worth printing.  */
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.pass_through.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  print_generic_expr (f, jump_func->value.ancestor.type, 0);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  if (jump_func->value.ancestor.type_preserved)
	    fprintf (f, ", type_preserved");
	  fprintf (f, "\n");
	}

      /* Independently of the scalar jump function, dump any known aggregate
	 contents passed for this argument.  */
      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, " Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      /* A type instead of a value means a clobber of that size.  */
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}
      if (IPA_EDGE_REF (cs)->polymorphic_call_contexts)
	ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i)->dump (f);
    }
}
371
372
be95e2b9
MJ
373/* Print the jump functions of all arguments on all call graph edges going from
374 NODE to file F. */
375
518dc859 376void
3e293154 377ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
518dc859 378{
3e293154 379 struct cgraph_edge *cs;
518dc859 380
fec39fa6 381 fprintf (f, " Jump functions of caller %s/%i:\n", node->name (),
67348ccc 382 node->order);
3e293154
MJ
383 for (cs = node->callees; cs; cs = cs->next_callee)
384 {
385 if (!ipa_edge_args_info_available_for_edge_p (cs))
386 continue;
387
749aa96d 388 fprintf (f, " callsite %s/%i -> %s/%i : \n",
fec39fa6
TS
389 xstrdup (node->name ()), node->order,
390 xstrdup (cs->callee->name ()),
67348ccc 391 cs->callee->order);
749aa96d
MJ
392 ipa_print_node_jump_functions_for_edge (f, cs);
393 }
518dc859 394
9de04252 395 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
749aa96d 396 {
9de04252 397 struct cgraph_indirect_call_info *ii;
749aa96d
MJ
398 if (!ipa_edge_args_info_available_for_edge_p (cs))
399 continue;
3e293154 400
9de04252
MJ
401 ii = cs->indirect_info;
402 if (ii->agg_contents)
c13bc3d9 403 fprintf (f, " indirect %s callsite, calling param %i, "
9de04252 404 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
c13bc3d9 405 ii->member_ptr ? "member ptr" : "aggregate",
9de04252
MJ
406 ii->param_index, ii->offset,
407 ii->by_ref ? "by reference" : "by_value");
408 else
85942f45
JH
409 fprintf (f, " indirect %s callsite, calling param %i, "
410 "offset " HOST_WIDE_INT_PRINT_DEC,
411 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
412 ii->offset);
9de04252 413
749aa96d
MJ
414 if (cs->call_stmt)
415 {
9de04252 416 fprintf (f, ", for stmt ");
749aa96d 417 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
3e293154 418 }
749aa96d 419 else
9de04252 420 fprintf (f, "\n");
ba392339
JH
421 if (ii->polymorphic)
422 ii->context.dump (f);
749aa96d 423 ipa_print_node_jump_functions_for_edge (f, cs);
3e293154
MJ
424 }
425}
426
427/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
be95e2b9 428
3e293154
MJ
429void
430ipa_print_all_jump_functions (FILE *f)
431{
432 struct cgraph_node *node;
433
ca30a539 434 fprintf (f, "\nJump functions:\n");
65c70e6b 435 FOR_EACH_FUNCTION (node)
3e293154
MJ
436 {
437 ipa_print_node_jump_functions (f, node);
438 }
439}
440
7b872d9e
MJ
441/* Set JFUNC to be a known type jump function. */
442
443static void
444ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
445 tree base_type, tree component_type)
446{
b49407f8
JH
447 /* Recording and propagating main variants increases change that types
448 will match. */
449 base_type = TYPE_MAIN_VARIANT (base_type);
450 component_type = TYPE_MAIN_VARIANT (component_type);
451
d570d364
JH
452 gcc_assert (contains_polymorphic_type_p (base_type)
453 && contains_polymorphic_type_p (component_type));
0a2550e7
JH
454 if (!flag_devirtualize)
455 return;
7b872d9e
MJ
456 jfunc->type = IPA_JF_KNOWN_TYPE;
457 jfunc->value.known_type.offset = offset,
458 jfunc->value.known_type.base_type = base_type;
459 jfunc->value.known_type.component_type = component_type;
68377e53 460 gcc_assert (component_type);
7b872d9e
MJ
461}
462
b8f6e610
MJ
/* Set JFUNC to be a copy of another jmp (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)

{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  /* Shallow copy: the rdesc pointer is shared, not duplicated, so its
     refcount accounts for both copies.  */
  dst->value.constant = src->value.constant;
}
475
7b872d9e
MJ
/* Set JFUNC to be a constant jmp function.  CS is the edge whose call
   statement took the reference; it is recorded in the reference description
   when CONSTANT is the address of a function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  /* NOTE(review): this unshare + location reset looks redundant with the
     unshare_expr_without_location call below -- confirm before simplifying.  */
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  /* Function addresses get a reference description so duplicates can be
     tracked when call graph edges are cloned (see ipa_cst_ref_desc).  */
  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      /* The allocation pool is created lazily on first use.  */
      if (!ipa_refdesc_pool)
	ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
					      sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
505
506/* Set JFUNC to be a simple pass-through jump function. */
507static void
8b7773a4 508ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
b8f6e610 509 bool agg_preserved, bool type_preserved)
7b872d9e
MJ
510{
511 jfunc->type = IPA_JF_PASS_THROUGH;
512 jfunc->value.pass_through.operand = NULL_TREE;
513 jfunc->value.pass_through.formal_id = formal_id;
514 jfunc->value.pass_through.operation = NOP_EXPR;
8b7773a4 515 jfunc->value.pass_through.agg_preserved = agg_preserved;
b8f6e610 516 jfunc->value.pass_through.type_preserved = type_preserved;
7b872d9e
MJ
517}
518
519/* Set JFUNC to be an arithmetic pass through jump function. */
520
521static void
522ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
523 tree operand, enum tree_code operation)
524{
525 jfunc->type = IPA_JF_PASS_THROUGH;
d1f98542 526 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
7b872d9e
MJ
527 jfunc->value.pass_through.formal_id = formal_id;
528 jfunc->value.pass_through.operation = operation;
8b7773a4 529 jfunc->value.pass_through.agg_preserved = false;
b8f6e610 530 jfunc->value.pass_through.type_preserved = false;
7b872d9e
MJ
531}
532
/* Set JFUNC to be an ancestor jump function: the value is the caller's
   argument FORMAL_ID adjusted by OFFSET, viewed as TYPE.  TYPE is only kept
   when TYPE_PRESERVED holds and devirtualization is enabled.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     tree type, int formal_id, bool agg_preserved,
		     bool type_preserved)
{
  if (!flag_devirtualize)
    type_preserved = false;
  if (!type_preserved)
    type = NULL_TREE;
  if (type)
    type = TYPE_MAIN_VARIANT (type);
  gcc_assert (!type_preserved || contains_polymorphic_type_p (type));
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  /* NOTE(review): the ternary is redundant -- TYPE is already NULL_TREE
     whenever !type_preserved at this point.  Kept for safety.  */
  jfunc->value.ancestor.type = type_preserved ? type : NULL;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}
554
e248d83f
MJ
/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  Return NULL_TREE when no usable BINFO can be derived.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  if (!RECORD_OR_UNION_TYPE_P (jfunc->value.known_type.base_type))
    return NULL_TREE;

  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);

  if (!base_binfo)
    return NULL_TREE;
  /* FIXME: At LTO we can't propagate to non-polymorphic type, because
     we have no ODR equivalency on those.  This should be fixed by
     propagating on types rather than binfos that would make type
     matching here unnecessary.  */
  if (in_lto_p
      && (TREE_CODE (jfunc->value.known_type.component_type) != RECORD_TYPE
	  || !TYPE_BINFO (jfunc->value.known_type.component_type)
	  || !BINFO_VTABLE (TYPE_BINFO (jfunc->value.known_type.component_type))))
    {
      if (!jfunc->value.known_type.offset)
	return base_binfo;
      return NULL;
    }
  return get_binfo_at_offset (base_binfo,
			      jfunc->value.known_type.offset,
			      jfunc->value.known_type.component_type);
}
585
8aab5218
MJ
/* Get IPA BB information about the given BB.  FBI is the context of analyzis
   of this function body.  The returned pointer aliases FBI's bb_infos
   vector, indexed by bb->index.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}
595
f65cf2b7
MJ
/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change (via the walk_aliased_vdefs DATA pointer).  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};
616
/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors is set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.
   */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  /* Calls are ignored entirely -- see the big comment above.  */
  if (is_gimple_call (stmt))
    return false;
  /* Clobbers end a lifetime; they do not store a vtable pointer.  */
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      /* Aggregate stores conservatively count as possible VMT stores;
	 scalar stores can be filtered further.  */
      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}
681
290ebcb7
MJ
682/* If STMT can be proved to be an assignment to the virtual method table
683 pointer of ANALYZED_OBJ and the type associated with the new table
684 identified, return the type. Otherwise return NULL_TREE. */
685
686static tree
11478306 687extr_type_from_vtbl_ptr_store (gimple stmt, struct prop_type_change_info *tci)
290ebcb7
MJ
688{
689 HOST_WIDE_INT offset, size, max_size;
390675c8 690 tree lhs, rhs, base, binfo;
290ebcb7
MJ
691
692 if (!gimple_assign_single_p (stmt))
693 return NULL_TREE;
694
695 lhs = gimple_assign_lhs (stmt);
696 rhs = gimple_assign_rhs1 (stmt);
697 if (TREE_CODE (lhs) != COMPONENT_REF
390675c8 698 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
290ebcb7
MJ
699 return NULL_TREE;
700
701 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
702 if (offset != tci->offset
703 || size != POINTER_SIZE
704 || max_size != POINTER_SIZE)
705 return NULL_TREE;
706 if (TREE_CODE (base) == MEM_REF)
707 {
708 if (TREE_CODE (tci->object) != MEM_REF
709 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
710 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
711 TREE_OPERAND (base, 1)))
712 return NULL_TREE;
713 }
714 else if (tci->object != base)
715 return NULL_TREE;
716
390675c8
JH
717 binfo = vtable_pointer_value_to_binfo (rhs);
718
719 /* FIXME: vtable_pointer_value_to_binfo may return BINFO of a
720 base of outer type. In this case we would need to either
721 work on binfos or translate it back to outer type and offset.
722 KNOWN_TYPE jump functions are not ready for that, yet. */
723 if (!binfo || TYPE_BINFO (BINFO_TYPE (binfo)) != binfo)
724 return NULL;
725
726 return BINFO_TYPE (binfo);
290ebcb7
MJ
727}
728
/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  Returns true to stop the walk at this
   VDEF, false to continue past it.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;

      /* TYPE is NULL_TREE when the store could not be decoded.  */
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      gcc_assert (!type || TYPE_MAIN_VARIANT (type) == type);
      /* Distinct types along different walks invalidate
	 known_current_type.  */
      if (tci->type_maybe_changed
	  && type != tci->known_current_type)
	tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}
757
058d0a90
JH
/* See if ARG is PARAM_DECl describing instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between beginning of the function until CALL is invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods can not destroy the THIS
   pointer.  Also as a special cases, constructor and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions can not do any changes on the dynamic type;
     that require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within inlined constructor
     or destructor (ideally we would have way to check that the
     inline cdtor is actually working on ARG, but we don't have
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call); block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (BLOCK_ABSTRACT_ORIGIN (block)
		&& TREE_CODE (BLOCK_ABSTRACT_ORIGIN (block)) == FUNCTION_DECL)
	      {
		tree fn = BLOCK_ABSTRACT_ORIGIN (block);

		/* Inlined pure/const functions cannot have changed the
		   type either.  */
		if (flags_from_decl_or_type (fn) & (ECF_PURE | ECF_CONST))
		  continue;
		if (TREE_CODE (TREE_TYPE (fn)) == METHOD_TYPE
		    && (DECL_CXX_CONSTRUCTOR_P (fn)
			|| DECL_CXX_DESTRUCTOR_P (fn)))
		  return true;
	      }
	  return false;
	}
    }
  return true;
}
290ebcb7 821
06d65050
JH
/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gimple call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  /* Build an ao_ref describing just the vtable-pointer-sized slot at
     OFFSET within BASE.  */
  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  /* Walk the virtual definitions backwards from CALL looking for stores
     into the vtable pointer; results accumulate in TCI.  */
  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0
      /* When the walk reached function entry, it means that type
	 is set along some paths but not along others.  */
      || entry_reached)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}
886
/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  /* If the object is accessed through an SSA pointer whose pointed-to type
     provably cannot change before CALL, skip the expensive memory walk.  */
  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						call, jfunc, offset);
}
908
/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  /* Build an artificial dereference of ARG to serve as both the access
     expression and its base for the memory-write walk.  */
  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						call, jfunc, 0);
}
931
fdb0e1b4
MJ
932/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
933 boolean variable pointed to by DATA. */
934
935static bool
936mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
937 void *data)
938{
939 bool *b = (bool *) data;
940 *b = true;
941 return true;
942}
943
8aab5218
MJ
944/* Return true if we have already walked so many statements in AA that we
945 should really just start giving up. */
946
947static bool
948aa_overwalked (struct func_body_info *fbi)
949{
950 gcc_checking_assert (fbi);
951 return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
952}
953
954/* Find the nearest valid aa status for parameter specified by INDEX that
955 dominates BB. */
956
957static struct param_aa_status *
958find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
959 int index)
960{
961 while (true)
962 {
963 bb = get_immediate_dominator (CDI_DOMINATORS, bb);
964 if (!bb)
965 return NULL;
966 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
967 if (!bi->param_aa_statuses.is_empty ()
968 && bi->param_aa_statuses[index].valid)
969 return &bi->param_aa_statuses[index];
970 }
971}
972
/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      /* Seed this BB's status from the nearest dominating one, if any;
	 otherwise mark it as freshly valid (all-clear).  */
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}
1001
/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
			      gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      /* Use (and later update) the cached per-BB answer when available.  */
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}
1041
/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
f65cf2b7 1068
/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
			   int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      /* Use (and later update) the cached per-BB answer when available.  */
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}
1107
/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (call),
							  index);
  if (paa->pt_modified)
    return false;

  /* Size is unknown here, hence NULL_TREE: treat the whole pointed-to area
     as potentially clobbered.  */
  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}
1140
1141/* Return true if we can prove that OP is a memory reference loading unmodified
1142 data from an aggregate passed as a parameter and if the aggregate is passed
1143 by reference, that the alias type of the load corresponds to the type of the
1144 formal parameter (so that we can rely on this type for TBAA in callers).
1145 INFO and PARMS_AINFO describe parameters of the current function (but the
1146 latter can be NULL), STMT is the load statement. If function returns true,
1147 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
1148 within the aggregate and whether it is a load from a value passed by
1149 reference respectively. */
1150
1151static bool
8aab5218
MJ
1152ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
1153 vec<ipa_param_descriptor> descriptors,
1154 gimple stmt, tree op, int *index_p,
1155 HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
1156 bool *by_ref_p)
8b7773a4
MJ
1157{
1158 int index;
1159 HOST_WIDE_INT size, max_size;
1160 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
1161
1162 if (max_size == -1 || max_size != size || *offset_p < 0)
1163 return false;
1164
1165 if (DECL_P (base))
1166 {
d044dd17 1167 int index = ipa_get_param_decl_index_1 (descriptors, base);
8b7773a4 1168 if (index >= 0
8aab5218 1169 && parm_preserved_before_stmt_p (fbi, index, stmt, op))
8b7773a4
MJ
1170 {
1171 *index_p = index;
1172 *by_ref_p = false;
3ff2ca23
JJ
1173 if (size_p)
1174 *size_p = size;
8b7773a4
MJ
1175 return true;
1176 }
1177 return false;
1178 }
1179
1180 if (TREE_CODE (base) != MEM_REF
1181 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
1182 || !integer_zerop (TREE_OPERAND (base, 1)))
1183 return false;
1184
1185 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
1186 {
1187 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
d044dd17 1188 index = ipa_get_param_decl_index_1 (descriptors, parm);
8b7773a4
MJ
1189 }
1190 else
1191 {
1192 /* This branch catches situations where a pointer parameter is not a
1193 gimple register, for example:
1194
1195 void hip7(S*) (struct S * p)
1196 {
1197 void (*<T2e4>) (struct S *) D.1867;
1198 struct S * p.1;
1199
1200 <bb 2>:
1201 p.1_1 = p;
1202 D.1867_2 = p.1_1->f;
1203 D.1867_2 ();
1204 gdp = &p;
1205 */
1206
1207 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
8aab5218 1208 index = load_from_unmodified_param (fbi, descriptors, def);
8b7773a4
MJ
1209 }
1210
1211 if (index >= 0
8aab5218 1212 && parm_ref_data_preserved_p (fbi, index, stmt, op))
8b7773a4
MJ
1213 {
1214 *index_p = index;
1215 *by_ref_p = true;
3ff2ca23
JJ
1216 if (size_p)
1217 *size_p = size;
8b7773a4
MJ
1218 return true;
1219 }
1220 return false;
1221}
1222
/* Just like ipa_load_from_parm_agg_1, but without the func_body_info pointer
   (it is passed as NULL) and without the size output, for users outside of
   this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
				   offset_p, NULL, by_ref_p);
}
1234
/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function. E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   FBI holds per-function-body analysis state; INFO is the structure
   describing individual parameters across different stages of IPA
   optimizations.  */

static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gimple call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  /* Case 2: arithmetic pass-through; the second operand must be an
	     interprocedural invariant and, for non-comparisons, types must
	     match so the operation is meaningful at the caller.  */
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  /* Case 1: simple pass-through.  */
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  bool type_p = false;

	  if (param_type && POINTER_TYPE_P (param_type))
	    type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
					      call, jfunc);
	  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	    ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
	}
      return;
    }

  /* Case 3: taking the address of a component of a parameter.  */
  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = (contains_polymorphic_type_p (TREE_TYPE (param_type))
		     && !detect_type_change (op1, base, TREE_TYPE (param_type),
					     call, jfunc, offset));
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
	ipa_set_ancestor_jf (jfunc, offset,
			     type_p ? TREE_TYPE (param_type) : NULL, index,
			     parm_ref_data_pass_through_p (fbi, index,
							   call, ssa), type_p);
    }
}
1377
/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  /* Fold the MEM_REF's own constant offset into the component offset.  */
  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}
1421
685b0d13 1422
/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  /* One PHI argument must be the NULL constant, the other the adjusted
     pointer.  */
  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  /* The controlling condition must be exactly "parm != 0".  */
  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  /* Both PHI predecessors must come straight from the diamond above.  */
  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type)
      && contains_polymorphic_type_p (TREE_TYPE (param_type)))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
				  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, type_p ? TREE_TYPE (param_type) : NULL,
			 index,
			 parm_ref_data_pass_through_p (fbi, index, call, parm),
			 type_p);
}
1511
/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
			      gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || !contains_polymorphic_type_p (TREE_TYPE (TREE_TYPE (op)))
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || !contains_polymorphic_type_p (expected_type))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || !contains_polymorphic_type_p (TREE_TYPE (base)))
    return;

  if (decl_maybe_in_construction_p (base, TREE_TYPE (base),
				    call, current_function_decl)
      /* Even if the var seems to be in construction by inline call stack,
	 we may work out the actual type by walking memory writes.  */
      && (is_global_var (base)
	  || detect_type_change (op, base, expected_type, call, jfunc, offset)))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
			 expected_type);
}
1551
be95e2b9
MJ
1552/* Inspect the given TYPE and return true iff it has the same structure (the
1553 same number of fields of the same types) as a C++ member pointer. If
1554 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1555 corresponding fields there. */
1556
3e293154
MJ
1557static bool
1558type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1559{
1560 tree fld;
1561
1562 if (TREE_CODE (type) != RECORD_TYPE)
1563 return false;
1564
1565 fld = TYPE_FIELDS (type);
1566 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
8b7773a4 1567 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
cc269bb6 1568 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1569 return false;
1570
1571 if (method_ptr)
1572 *method_ptr = fld;
1573
910ad8de 1574 fld = DECL_CHAIN (fld);
8b7773a4 1575 if (!fld || INTEGRAL_TYPE_P (fld)
cc269bb6 1576 || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
3e293154
MJ
1577 return false;
1578 if (delta)
1579 *delta = fld;
1580
910ad8de 1581 if (DECL_CHAIN (fld))
3e293154
MJ
1582 return false;
1583
1584 return true;
1585}
1586
61502ca8 1587/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
8b7773a4
MJ
1588 return the rhs of its defining statement. Otherwise return RHS as it
1589 is. */
7ec49257
MJ
1590
1591static inline tree
1592get_ssa_def_if_simple_copy (tree rhs)
1593{
1594 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1595 {
1596 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1597
1598 if (gimple_assign_single_p (def_stmt))
1599 rhs = gimple_assign_rhs1 (def_stmt);
9961eb45
MJ
1600 else
1601 break;
7ec49257
MJ
1602 }
1603 return rhs;
1604}
1605
/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};
3e293154 1618
/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  /* The list is sorted by offset; advance past entries entirely below
     LHS_OFFSET, bailing out on partial overlap.  */
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
1651
/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  /* Offsets are recorded relative to the start of the argument.  */
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}
1675
8b7773a4
MJ
1676/* Traverse statements from CALL backwards, scanning whether an aggregate given
1677 in ARG is filled in with constant values. ARG can either be an aggregate
0d48ee34
MJ
1678 expression or a pointer to an aggregate. ARG_TYPE is the type of the
1679 aggregate. JFUNC is the jump function into which the constants are
1680 subsequently stored. */
be95e2b9 1681
3e293154 1682static void
0d48ee34
MJ
1683determine_locally_known_aggregate_parts (gimple call, tree arg, tree arg_type,
1684 struct ipa_jump_func *jfunc)
3e293154 1685{
8b7773a4
MJ
1686 struct ipa_known_agg_contents_list *list = NULL;
1687 int item_count = 0, const_count = 0;
1688 HOST_WIDE_INT arg_offset, arg_size;
726a989a 1689 gimple_stmt_iterator gsi;
8b7773a4
MJ
1690 tree arg_base;
1691 bool check_ref, by_ref;
1692 ao_ref r;
3e293154 1693
8b7773a4
MJ
1694 /* The function operates in three stages. First, we prepare check_ref, r,
1695 arg_base and arg_offset based on what is actually passed as an actual
1696 argument. */
3e293154 1697
85942f45 1698 if (POINTER_TYPE_P (arg_type))
8b7773a4
MJ
1699 {
1700 by_ref = true;
1701 if (TREE_CODE (arg) == SSA_NAME)
1702 {
1703 tree type_size;
85942f45 1704 if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
8b7773a4
MJ
1705 return;
1706 check_ref = true;
1707 arg_base = arg;
1708 arg_offset = 0;
85942f45 1709 type_size = TYPE_SIZE (TREE_TYPE (arg_type));
ae7e9ddd 1710 arg_size = tree_to_uhwi (type_size);
8b7773a4
MJ
1711 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1712 }
1713 else if (TREE_CODE (arg) == ADDR_EXPR)
1714 {
1715 HOST_WIDE_INT arg_max_size;
1716
1717 arg = TREE_OPERAND (arg, 0);
1718 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1719 &arg_max_size);
1720 if (arg_max_size == -1
1721 || arg_max_size != arg_size
1722 || arg_offset < 0)
1723 return;
1724 if (DECL_P (arg_base))
1725 {
8b7773a4 1726 check_ref = false;
0d48ee34 1727 ao_ref_init (&r, arg_base);
8b7773a4
MJ
1728 }
1729 else
1730 return;
1731 }
1732 else
1733 return;
1734 }
1735 else
1736 {
1737 HOST_WIDE_INT arg_max_size;
1738
1739 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1740
1741 by_ref = false;
1742 check_ref = false;
1743 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1744 &arg_max_size);
1745 if (arg_max_size == -1
1746 || arg_max_size != arg_size
1747 || arg_offset < 0)
1748 return;
1749
1750 ao_ref_init (&r, arg);
1751 }
1752
1753 /* Second stage walks back the BB, looks at individual statements and as long
1754 as it is confident of how the statements affect contents of the
1755 aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
1756 describing it. */
1757 gsi = gsi_for_stmt (call);
726a989a
RB
1758 gsi_prev (&gsi);
1759 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
3e293154 1760 {
8b7773a4 1761 struct ipa_known_agg_contents_list *n, **p;
726a989a 1762 gimple stmt = gsi_stmt (gsi);
8b7773a4
MJ
1763 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1764 tree lhs, rhs, lhs_base;
3e293154 1765
8b7773a4 1766 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
8aa29647 1767 continue;
8b75fc9b 1768 if (!gimple_assign_single_p (stmt))
8b7773a4 1769 break;
3e293154 1770
726a989a
RB
1771 lhs = gimple_assign_lhs (stmt);
1772 rhs = gimple_assign_rhs1 (stmt);
0c6b087c 1773 if (!is_gimple_reg_type (TREE_TYPE (rhs))
7d2fb524
MJ
1774 || TREE_CODE (lhs) == BIT_FIELD_REF
1775 || contains_bitfld_component_ref_p (lhs))
8b7773a4 1776 break;
3e293154 1777
8b7773a4
MJ
1778 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1779 &lhs_max_size);
1780 if (lhs_max_size == -1
0d48ee34 1781 || lhs_max_size != lhs_size)
8b7773a4 1782 break;
3e293154 1783
8b7773a4 1784 if (check_ref)
518dc859 1785 {
8b7773a4
MJ
1786 if (TREE_CODE (lhs_base) != MEM_REF
1787 || TREE_OPERAND (lhs_base, 0) != arg_base
1788 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1789 break;
3e293154 1790 }
8b7773a4 1791 else if (lhs_base != arg_base)
774b8a55
MJ
1792 {
1793 if (DECL_P (lhs_base))
1794 continue;
1795 else
1796 break;
1797 }
3e293154 1798
0d48ee34
MJ
1799 bool already_there = false;
1800 p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
1801 &already_there);
1802 if (!p)
8b7773a4 1803 break;
0d48ee34
MJ
1804 if (already_there)
1805 continue;
3e293154 1806
8b7773a4
MJ
1807 rhs = get_ssa_def_if_simple_copy (rhs);
1808 n = XALLOCA (struct ipa_known_agg_contents_list);
1809 n->size = lhs_size;
1810 n->offset = lhs_offset;
1811 if (is_gimple_ip_invariant (rhs))
1812 {
1813 n->constant = rhs;
1814 const_count++;
1815 }
1816 else
1817 n->constant = NULL_TREE;
1818 n->next = *p;
1819 *p = n;
3e293154 1820
8b7773a4 1821 item_count++;
dfea20f1
MJ
1822 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1823 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
8b7773a4
MJ
1824 break;
1825 }
be95e2b9 1826
8b7773a4
MJ
1827 /* Third stage just goes over the list and creates an appropriate vector of
 1828 ipa_agg_jf_item structures out of it, of course only if there are
1829 any known constants to begin with. */
3e293154 1830
8b7773a4 1831 if (const_count)
3e293154 1832 {
8b7773a4 1833 jfunc->agg.by_ref = by_ref;
0d48ee34 1834 build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
3e293154
MJ
1835 }
1836}
1837
06d65050
JH
1838static tree
1839ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1840{
1841 int n;
1842 tree type = (e->callee
67348ccc 1843 ? TREE_TYPE (e->callee->decl)
06d65050
JH
1844 : gimple_call_fntype (e->call_stmt));
1845 tree t = TYPE_ARG_TYPES (type);
1846
1847 for (n = 0; n < i; n++)
1848 {
1849 if (!t)
1850 break;
1851 t = TREE_CHAIN (t);
1852 }
1853 if (t)
1854 return TREE_VALUE (t);
1855 if (!e->callee)
1856 return NULL;
67348ccc 1857 t = DECL_ARGUMENTS (e->callee->decl);
06d65050
JH
1858 for (n = 0; n < i; n++)
1859 {
1860 if (!t)
1861 return NULL;
1862 t = TREE_CHAIN (t);
1863 }
1864 if (t)
1865 return TREE_TYPE (t);
1866 return NULL;
1867}
1868
/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  FBI holds the per-function analysis state of the
   caller.  */

static void
ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gimple call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  /* Nothing to do for argument-less calls or if the jump functions have
     already been computed for this edge.  */
  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);

      /* For pointer arguments, also try to derive a polymorphic call
	 context usable for devirtualization.  */
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >=0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      /* The SSA name is the unmodified value of a parameter.  */
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p, type_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  if (param_type && POINTER_TYPE_P (param_type))
		    type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
						      call, jfunc);
		  else
		    type_p = false;
		  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
		    ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
						    type_p);
		}
	    }
	  else
	    {
	      /* The SSA name is defined by a statement; try to describe it
		 as an arithmetic pass-through or an ancestor function.  */
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call, stmt, param_type);
	    }
	}
      else
	compute_known_type_jump_func (arg, jfunc, call,
				      param_type
				      && POINTER_TYPE_P (param_type)
				      ? TREE_TYPE (param_type)
				      : NULL);

      /* If ARG is pointer, we can not use its type to determine the type of aggregate
	 passed (because type conversions are ignored in gimple).  Usually we can
	 safely get type from function declaration, but in case of K&R prototypes or
	 variadic functions we can try our luck with type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we may better
	 work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      /* Unless the jump function already guarantees the aggregate contents
	 are preserved, try to record the locally known parts of the
	 aggregate passed by value or reference.  */
      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}
1985
749aa96d 1986/* Compute jump functions for all edges - both direct and indirect - outgoing
8aab5218 1987 from BB. */
749aa96d 1988
062c604f 1989static void
8aab5218 1990ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
749aa96d 1991{
8aab5218
MJ
1992 struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
1993 int i;
749aa96d
MJ
1994 struct cgraph_edge *cs;
1995
8aab5218 1996 FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
749aa96d 1997 {
8aab5218 1998 struct cgraph_node *callee = cs->callee;
749aa96d 1999
8aab5218
MJ
2000 if (callee)
2001 {
d52f5295 2002 callee->ultimate_alias_target ();
8aab5218
MJ
2003 /* We do not need to bother analyzing calls to unknown functions
2004 unless they may become known during lto/whopr. */
2005 if (!callee->definition && !flag_lto)
2006 continue;
2007 }
2008 ipa_compute_jump_functions_for_edge (fbi, cs);
2009 }
749aa96d
MJ
2010}
2011
/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  /* The load is either a COMPONENT_REF of the member-pointer record
     (f.__pfn / f.__delta) or a MEM_REF at a constant offset from it.  */
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  /* The loaded record must be a PARM_DECL whose type has the structure of a
     member pointer (a pfn field and a delta field).  */
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      /* COMPONENT_REF form: the MEM_REF offset must be zero and the
	 referenced field must be the one we are looking for.  */
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    /* Plain MEM_REF form: the constant offset must match the byte position
       of the field.  */
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
								: NULL_TREE;
}
2063
2064/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 2065
3e293154
MJ
2066static bool
2067ipa_is_ssa_with_stmt_def (tree t)
2068{
2069 if (TREE_CODE (t) == SSA_NAME
2070 && !SSA_NAME_IS_DEFAULT_DEF (t))
2071 return true;
2072 else
2073 return false;
2074}
2075
40591473
MJ
2076/* Find the indirect call graph edge corresponding to STMT and mark it as a
2077 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
2078 indirect call graph edge. */
be95e2b9 2079
40591473
MJ
2080static struct cgraph_edge *
2081ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
3e293154 2082{
e33c6cd6 2083 struct cgraph_edge *cs;
3e293154 2084
d52f5295 2085 cs = node->get_edge (stmt);
b258210c 2086 cs->indirect_info->param_index = param_index;
8b7773a4 2087 cs->indirect_info->agg_contents = 0;
c13bc3d9 2088 cs->indirect_info->member_ptr = 0;
40591473 2089 return cs;
3e293154
MJ
2090}
2091
/* Analyze the CALL and examine uses of formal parameters of the caller
   FBI->node (described by FBI->info).  Currently it checks whether the call
   calls a pointer that is a formal parameter and if so, the parameter is
   marked with the called flag and an indirect call graph edge describing the
   call is created.  This is very simple for ordinary pointers represented in
   SSA but not-so-nice when it comes to member pointers.  The ugly part of
   this function does nothing more than trying to match the pattern of such a
   call.  An example of such a pattern is the gimple dump below, the call is
   on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       if (D.2497_4 != 0)
	 goto <bb 3>;
       else
	 goto <bb 4>;

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");

       return (S.*f)(4);
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gimple call,
				tree target)
{
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  /* The simple case: the called pointer is an unmodified parameter.  */
  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);
      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call);
      return;
    }

  /* The called pointer may be loaded from an aggregate passed by value or
     reference in a parameter.  */
  int index;
  gimple def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
				   gimple_assign_rhs1 (def), &index, &offset,
				   NULL, &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer. */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function. */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple d1 = SSA_NAME_DEF_STMT (n1);
  gimple d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      /* Exactly one of the PHI arguments may be the pfn load; the other is
	 the virtual-dispatch result computed in VIRT_BB.  */
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern. */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn. */

  gimple branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  /* The condition must be a BIT_AND_EXPR with 1 (the virtual-bit test).  */
  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  /* Skip over an optional integer conversion of the pfn.  */
  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  /* The tested value must be loaded from the same member pointer parameter
     as the called pfn.  Which field holds the virtual bit depends on the
     target ABI.  */
  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
    }

  return;
}
2283
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
			       gimple call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  struct ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      /* OBJ is the unmodified value of a parameter.  */
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      /* Give up if the dynamic type may have changed since the function
	 was entered.  */
      if (detect_type_change_ssa (obj, obj_type_ref_class (target),
				  call, &jfunc))
	return;
    }
  else
    {
      /* OBJ may be the address of an ancestor (base subobject) of a
	 parameter.  */
      struct ipa_jump_func jfunc;
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))
	return;
    }

  /* Record a polymorphic indirect call through the parameter.  */
  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
2341
2342/* Analyze a call statement CALL whether and how it utilizes formal parameters
c419671c 2343 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
062c604f 2344 containing intermediate information about each formal parameter. */
b258210c
MJ
2345
2346static void
8aab5218 2347ipa_analyze_call_uses (struct func_body_info *fbi, gimple call)
b258210c
MJ
2348{
2349 tree target = gimple_call_fn (call);
b786d31f
JH
2350
2351 if (!target
2352 || (TREE_CODE (target) != SSA_NAME
2353 && !virtual_method_call_p (target)))
2354 return;
b258210c 2355
7d0aa05b 2356 struct cgraph_edge *cs = fbi->node->get_edge (call);
b786d31f
JH
2357 /* If we previously turned the call into a direct call, there is
2358 no need to analyze. */
b786d31f 2359 if (cs && !cs->indirect_unknown_callee)
25583c4f 2360 return;
7d0aa05b
JH
2361
2362 if (cs->indirect_info->polymorphic)
2363 {
7d0aa05b
JH
2364 tree instance;
2365 tree target = gimple_call_fn (call);
6f8091fc
JH
2366 ipa_polymorphic_call_context context (current_function_decl,
2367 target, call, &instance);
7d0aa05b 2368
ba392339
JH
2369 gcc_checking_assert (cs->indirect_info->otr_type
2370 == obj_type_ref_class (target));
2371 gcc_checking_assert (cs->indirect_info->otr_token
2372 == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));
7d0aa05b 2373
29c43c83
JH
2374 cs->indirect_info->vptr_changed
2375 = !context.get_dynamic_type (instance,
2376 OBJ_TYPE_REF_OBJECT (target),
2377 obj_type_ref_class (target), call);
0127c169 2378 cs->indirect_info->context = context;
7d0aa05b
JH
2379 }
2380
b258210c 2381 if (TREE_CODE (target) == SSA_NAME)
8aab5218 2382 ipa_analyze_indirect_call_uses (fbi, call, target);
1d5755ef 2383 else if (virtual_method_call_p (target))
8aab5218 2384 ipa_analyze_virtual_call_uses (fbi, call, target);
b258210c
MJ
2385}
2386
2387
e33c6cd6 2388/* Analyze the call statement STMT with respect to formal parameters (described
8aab5218
MJ
2389 in INFO) of caller given by FBI->NODE. Currently it only checks whether
2390 formal parameters are called. */
be95e2b9 2391
3e293154 2392static void
8aab5218 2393ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
3e293154 2394{
726a989a 2395 if (is_gimple_call (stmt))
8aab5218 2396 ipa_analyze_call_uses (fbi, stmt);
062c604f
MJ
2397}
2398
2399/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2400 If OP is a parameter declaration, mark it as used in the info structure
2401 passed in DATA. */
2402
2403static bool
9f1363cd 2404visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
062c604f
MJ
2405{
2406 struct ipa_node_params *info = (struct ipa_node_params *) data;
2407
2408 op = get_base_address (op);
2409 if (op
2410 && TREE_CODE (op) == PARM_DECL)
2411 {
2412 int index = ipa_get_param_decl_index (info, op);
2413 gcc_assert (index >= 0);
310bc633 2414 ipa_set_param_used (info, index, true);
062c604f
MJ
2415 }
2416
2417 return false;
3e293154
MJ
2418}
2419
8aab5218
MJ
2420/* Scan the statements in BB and inspect the uses of formal parameters. Store
2421 the findings in various structures of the associated ipa_node_params
2422 structure, such as parameter flags, notes etc. FBI holds various data about
2423 the function being analyzed. */
be95e2b9 2424
062c604f 2425static void
8aab5218 2426ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
3e293154 2427{
726a989a 2428 gimple_stmt_iterator gsi;
8aab5218
MJ
2429 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2430 {
2431 gimple stmt = gsi_stmt (gsi);
3e293154 2432
8aab5218
MJ
2433 if (is_gimple_debug (stmt))
2434 continue;
3e293154 2435
8aab5218
MJ
2436 ipa_analyze_stmt_uses (fbi, stmt);
2437 walk_stmt_load_store_addr_ops (stmt, fbi->info,
2438 visit_ref_for_mod_analysis,
2439 visit_ref_for_mod_analysis,
2440 visit_ref_for_mod_analysis);
5fe8e757 2441 }
8aab5218
MJ
2442 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2443 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
2444 visit_ref_for_mod_analysis,
2445 visit_ref_for_mod_analysis,
2446 visit_ref_for_mod_analysis);
2447}
2448
/* Calculate controlled uses of parameters of NODE.  A parameter's use count
   is "controlled" when every non-debug use of its default-definition SSA
   name is an argument of a call; any other use makes the count
   IPA_UNDESCRIBED_USE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    /* Debug uses do not disqualify the parameter, any other
		       non-call use does.  */
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
062c604f 2493
8aab5218 2494/* Free stuff in BI. */
062c604f 2495
8aab5218
MJ
2496static void
2497free_ipa_bb_info (struct ipa_bb_info *bi)
2498{
2499 bi->cg_edges.release ();
2500 bi->param_aa_statuses.release ();
3e293154
MJ
2501}
2502
/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual void before_dom_children (basic_block);

private:
  /* Per-function analysis state shared by all visited blocks.  */
  struct func_body_info *m_fbi;
};

/* Analyze parameter uses and compute jump functions in BB; invoked once per
   basic block in dominator order.  */

void
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
}
2523
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  /* Analyze each function body only once.  */
  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      /* Analysis is forbidden; conservatively mark all parameters as used
	 with undescribed uses.  */
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  /* Set up the per-function analysis context.  */
  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walked = 0;

  /* Distribute all outgoing edges (both direct and indirect) to the per-BB
     info of the block containing their call statement.  */
  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  /* Walk the CFG in dominator order, analyzing parameter uses and computing
     jump functions in every block.  */
  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Release the per-BB scratch data.  */
  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
062c604f 2587
/* Update the jump function DST when the call graph edge corresponding to SRC
   is being inlined, knowing that DST is of type ancestor and src of known
   type.  */

static void
combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
				     struct ipa_jump_func *dst)
{
  HOST_WIDE_INT combined_offset;
  tree combined_type;

  /* If walking the ancestor chain does not preserve the dynamic type, the
     combination yields no usable known type.  */
  if (!ipa_get_jf_ancestor_type_preserved (dst))
    {
      dst->type = IPA_JF_UNKNOWN;
      return;
    }

  /* The combined offset is the known-type offset within the original object
     plus the offset added by the ancestor jump function.  */
  combined_offset = ipa_get_jf_known_type_offset (src)
    + ipa_get_jf_ancestor_offset (dst);
  combined_type = ipa_get_jf_ancestor_type (dst);

  ipa_set_jf_known_type (dst, combined_offset,
			 ipa_get_jf_known_type_base_type (src),
			 combined_type);
}
2613
be95e2b9 2614/* Update the jump functions associated with call graph edge E when the call
3e293154 2615 graph edge CS is being inlined, assuming that E->caller is already (possibly
b258210c 2616 indirectly) inlined into CS->callee and that E has not been inlined. */
be95e2b9 2617
3e293154
MJ
2618static void
2619update_jump_functions_after_inlining (struct cgraph_edge *cs,
2620 struct cgraph_edge *e)
2621{
2622 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2623 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2624 int count = ipa_get_cs_argument_count (args);
2625 int i;
2626
2627 for (i = 0; i < count; i++)
2628 {
b258210c 2629 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
5ce97055
JH
2630 struct ipa_polymorphic_call_context *dst_ctx
2631 = ipa_get_ith_polymorhic_call_context (args, i);
3e293154 2632
685b0d13
MJ
2633 if (dst->type == IPA_JF_ANCESTOR)
2634 {
b258210c 2635 struct ipa_jump_func *src;
8b7773a4 2636 int dst_fid = dst->value.ancestor.formal_id;
5ce97055
JH
2637 struct ipa_polymorphic_call_context *src_ctx
2638 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
685b0d13 2639
b258210c
MJ
2640 /* Variable number of arguments can cause havoc if we try to access
2641 one that does not exist in the inlined edge. So make sure we
2642 don't. */
8b7773a4 2643 if (dst_fid >= ipa_get_cs_argument_count (top))
b258210c
MJ
2644 {
2645 dst->type = IPA_JF_UNKNOWN;
2646 continue;
2647 }
2648
8b7773a4
MJ
2649 src = ipa_get_ith_jump_func (top, dst_fid);
2650
5ce97055
JH
2651 if (src_ctx && !src_ctx->useless_p ())
2652 {
2653 struct ipa_polymorphic_call_context ctx = *src_ctx;
2654
2655 /* TODO: Make type preserved safe WRT contexts. */
2656 if (!dst->value.ancestor.agg_preserved)
f9bb202b 2657 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
2658 ctx.offset_by (dst->value.ancestor.offset);
2659 if (!ctx.useless_p ())
2660 {
2661 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2662 count);
2663 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2664 }
2665 }
2666
8b7773a4
MJ
2667 if (src->agg.items
2668 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2669 {
2670 struct ipa_agg_jf_item *item;
2671 int j;
2672
2673 /* Currently we do not produce clobber aggregate jump functions,
2674 replace with merging when we do. */
2675 gcc_assert (!dst->agg.items);
2676
9771b263 2677 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2678 dst->agg.by_ref = src->agg.by_ref;
9771b263 2679 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
8b7773a4
MJ
2680 item->offset -= dst->value.ancestor.offset;
2681 }
2682
b258210c
MJ
2683 if (src->type == IPA_JF_KNOWN_TYPE)
2684 combine_known_type_and_ancestor_jfs (src, dst);
b258210c
MJ
2685 else if (src->type == IPA_JF_PASS_THROUGH
2686 && src->value.pass_through.operation == NOP_EXPR)
8b7773a4
MJ
2687 {
2688 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2689 dst->value.ancestor.agg_preserved &=
2690 src->value.pass_through.agg_preserved;
b8f6e610
MJ
2691 dst->value.ancestor.type_preserved &=
2692 src->value.pass_through.type_preserved;
8b7773a4 2693 }
b258210c
MJ
2694 else if (src->type == IPA_JF_ANCESTOR)
2695 {
2696 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2697 dst->value.ancestor.offset += src->value.ancestor.offset;
8b7773a4
MJ
2698 dst->value.ancestor.agg_preserved &=
2699 src->value.ancestor.agg_preserved;
b8f6e610
MJ
2700 dst->value.ancestor.type_preserved &=
2701 src->value.ancestor.type_preserved;
b258210c
MJ
2702 }
2703 else
2704 dst->type = IPA_JF_UNKNOWN;
2705 }
2706 else if (dst->type == IPA_JF_PASS_THROUGH)
3e293154 2707 {
b258210c
MJ
2708 struct ipa_jump_func *src;
2709 /* We must check range due to calls with variable number of arguments
2710 and we cannot combine jump functions with operations. */
2711 if (dst->value.pass_through.operation == NOP_EXPR
2712 && (dst->value.pass_through.formal_id
2713 < ipa_get_cs_argument_count (top)))
2714 {
8b7773a4
MJ
2715 int dst_fid = dst->value.pass_through.formal_id;
2716 src = ipa_get_ith_jump_func (top, dst_fid);
b8f6e610 2717 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
5ce97055
JH
2718 struct ipa_polymorphic_call_context *src_ctx
2719 = ipa_get_ith_polymorhic_call_context (top, dst_fid);
8b7773a4 2720
5ce97055
JH
2721 if (src_ctx && !src_ctx->useless_p ())
2722 {
2723 struct ipa_polymorphic_call_context ctx = *src_ctx;
2724
2725 /* TODO: Make type preserved safe WRT contexts. */
2726 if (!dst->value.ancestor.agg_preserved)
f9bb202b 2727 ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
5ce97055
JH
2728 if (!ctx.useless_p ())
2729 {
2730 if (!dst_ctx)
2731 {
2732 vec_safe_grow_cleared (args->polymorphic_call_contexts,
2733 count);
2734 dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);
2735 }
2736 dst_ctx->combine_with (ctx);
2737 }
2738 }
b8f6e610
MJ
2739 switch (src->type)
2740 {
2741 case IPA_JF_UNKNOWN:
2742 dst->type = IPA_JF_UNKNOWN;
2743 break;
2744 case IPA_JF_KNOWN_TYPE:
2ace77c2
JH
2745 if (ipa_get_jf_pass_through_type_preserved (dst))
2746 ipa_set_jf_known_type (dst,
2747 ipa_get_jf_known_type_offset (src),
2748 ipa_get_jf_known_type_base_type (src),
0a2550e7 2749 ipa_get_jf_known_type_component_type (src));
2ace77c2
JH
2750 else
2751 dst->type = IPA_JF_UNKNOWN;
b8f6e610
MJ
2752 break;
2753 case IPA_JF_CONST:
2754 ipa_set_jf_cst_copy (dst, src);
2755 break;
2756
2757 case IPA_JF_PASS_THROUGH:
2758 {
2759 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2760 enum tree_code operation;
2761 operation = ipa_get_jf_pass_through_operation (src);
2762
2763 if (operation == NOP_EXPR)
2764 {
2765 bool agg_p, type_p;
2766 agg_p = dst_agg_p
2767 && ipa_get_jf_pass_through_agg_preserved (src);
2768 type_p = ipa_get_jf_pass_through_type_preserved (src)
2769 && ipa_get_jf_pass_through_type_preserved (dst);
2770 ipa_set_jf_simple_pass_through (dst, formal_id,
2771 agg_p, type_p);
2772 }
2773 else
2774 {
2775 tree operand = ipa_get_jf_pass_through_operand (src);
2776 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2777 operation);
2778 }
2779 break;
2780 }
2781 case IPA_JF_ANCESTOR:
2782 {
2783 bool agg_p, type_p;
2784 agg_p = dst_agg_p
2785 && ipa_get_jf_ancestor_agg_preserved (src);
2786 type_p = ipa_get_jf_ancestor_type_preserved (src)
2787 && ipa_get_jf_pass_through_type_preserved (dst);
2788 ipa_set_ancestor_jf (dst,
2789 ipa_get_jf_ancestor_offset (src),
2790 ipa_get_jf_ancestor_type (src),
2791 ipa_get_jf_ancestor_formal_id (src),
2792 agg_p, type_p);
2793 break;
2794 }
2795 default:
2796 gcc_unreachable ();
2797 }
8b7773a4
MJ
2798
2799 if (src->agg.items
b8f6e610 2800 && (dst_agg_p || !src->agg.by_ref))
8b7773a4
MJ
2801 {
2802 /* Currently we do not produce clobber aggregate jump
2803 functions, replace with merging when we do. */
2804 gcc_assert (!dst->agg.items);
2805
2806 dst->agg.by_ref = src->agg.by_ref;
9771b263 2807 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2808 }
b258210c
MJ
2809 }
2810 else
2811 dst->type = IPA_JF_UNKNOWN;
3e293154 2812 }
b258210c
MJ
2813 }
2814}
2815
5ce97055
JH
/* If TARGET is an addr_expr of a function declaration, make it the
   (SPECULATIVE) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  The new edge, if any, has already had its
   inline summary costs adjusted from indirect-call to direct-call cost.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
				bool speculative)
{
  struct cgraph_node *callee;
  /* NOTE(review): this initial assignment is dead -- ES is unconditionally
     reassigned after the edge is redirected below.  Kept as-is here.  */
  struct inline_edge_summary *es = inline_edge_summary (ie);
  bool unreachable = false;

  /* Strip the address-of wrapper to get at the function declaration.  */
  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  if (ie->indirect_info->member_ptr)
	    /* Member pointer call that goes through a VMT lookup.  */
	    return NULL;

	  if (dump_enabled_p ())
	    {
	      location_t loc = gimple_location_safe (ie->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
			       "discovered direct call to non-function in %s/%i, "
			       "making it __builtin_unreachable\n",
			       ie->caller->name (), ie->caller->order);
	    }

	  /* The call provably never happens; redirect it to
	     __builtin_unreachable so the body can be optimized away.  */
	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_node::get_create (target);
	  unreachable = true;
	}
      else
	callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and vtable may be
     external, we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {

      /* We are better to ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
		     xstrdup (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup (ie->callee->name ()),
		     ie->callee->order);
	  return NULL;
	}
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      struct cgraph_edge *e2;
      struct ipa_ref *ref;
      ie->speculative_call_info (e2, ie, ref);
      /* Compare ultimate alias targets so that speculation to an alias of
	 the same function is not considered a conflict.  */
      if (e2->callee->ultimate_alias_target ()
	  != callee->ultimate_alias_target ())
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
		     "(%s/%i -> %s/%i) but the call is already speculated to %s/%i. Giving up.\n",
		     xstrdup (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup (callee->name ()),
		     callee->order,
		     xstrdup (e2->callee->name ()),
		     e2->callee->order);
	}
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative target "
		     "(%s/%i -> %s/%i) this agree with previous speculation.\n",
		     xstrdup (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup (callee->name ()),
		     callee->order);
	}
      /* Either way, an already-speculated edge is left alone.  */
      return NULL;
    }

  /* Debug counter allows bisecting devirtualization decisions.  */
  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We can not make edges to inline clones.  It is bug that someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       speculative ? "speculative" : "known",
	       xstrdup (ie->caller->name ()),
	       ie->caller->order,
	       xstrdup (callee->name ()),
	       callee->order);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      location_t loc = gimple_location_safe (ie->call_stmt);

      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
		       "converting indirect call in %s to direct call to %s\n",
		       ie->caller->name (), callee->name ());
    }
  if (!speculative)
    ie = ie->make_direct (callee);
  else
    {
      /* Speculative edges must point at a node that cannot be discarded,
	 otherwise prefer a noninterposable alias.  */
      if (!callee->can_be_discarded_p ())
	{
	  cgraph_node *alias;
	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
	  if (alias)
	    callee = alias;
	}
      /* 80% of the count/frequency is attributed to the speculative
	 direct edge.  */
      ie = ie->make_speculative
	 (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
    }
  /* Re-fetch the summary for the (possibly new) edge and account for the
     difference between indirect-call and direct-call costs.  */
  es = inline_edge_summary (ie);
  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
			 - eni_size_weights.call_cost);
  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
			 - eni_time_weights.call_cost);

  return ie;
}
2968
8b7773a4
MJ
2969/* Retrieve value from aggregate jump function AGG for the given OFFSET or
2970 return NULL if there is not any. BY_REF specifies whether the value has to
2971 be passed by reference or by value. */
2972
2973tree
2974ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2975 HOST_WIDE_INT offset, bool by_ref)
2976{
2977 struct ipa_agg_jf_item *item;
2978 int i;
2979
2980 if (by_ref != agg->by_ref)
2981 return NULL;
2982
9771b263 2983 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2c9561b5
MJ
2984 if (item->offset == offset)
2985 {
2986 /* Currently we do not have clobber values, return NULL for them once
2987 we do. */
2988 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2989 return item->value;
2990 }
8b7773a4
MJ
2991 return NULL;
2992}
2993
4502fe8d 2994/* Remove a reference to SYMBOL from the list of references of a node given by
568cda29
MJ
2995 reference description RDESC. Return true if the reference has been
2996 successfully found and removed. */
4502fe8d 2997
568cda29 2998static bool
5e20cdc9 2999remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
4502fe8d
MJ
3000{
3001 struct ipa_ref *to_del;
3002 struct cgraph_edge *origin;
3003
3004 origin = rdesc->cs;
a854f856
MJ
3005 if (!origin)
3006 return false;
d122681a
ML
3007 to_del = origin->caller->find_reference (symbol, origin->call_stmt,
3008 origin->lto_stmt_uid);
568cda29
MJ
3009 if (!to_del)
3010 return false;
3011
d122681a 3012 to_del->remove_reference ();
4502fe8d
MJ
3013 if (dump_file)
3014 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
fec39fa6
TS
3015 xstrdup (origin->caller->name ()),
3016 origin->caller->order, xstrdup (symbol->name ()));
568cda29 3017 return true;
4502fe8d
MJ
3018}
3019
3020/* If JFUNC has a reference description with refcount different from
3021 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
3022 NULL. JFUNC must be a constant jump function. */
3023
3024static struct ipa_cst_ref_desc *
3025jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
3026{
3027 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
3028 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
3029 return rdesc;
3030 else
3031 return NULL;
3032}
3033
568cda29
MJ
3034/* If the value of constant jump function JFUNC is an address of a function
3035 declaration, return the associated call graph node. Otherwise return
3036 NULL. */
3037
3038static cgraph_node *
3039cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
3040{
3041 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
3042 tree cst = ipa_get_jf_constant (jfunc);
3043 if (TREE_CODE (cst) != ADDR_EXPR
3044 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
3045 return NULL;
3046
d52f5295 3047 return cgraph_node::get (TREE_OPERAND (cst, 0));
568cda29
MJ
3048}
3049
3050
3051/* If JFUNC is a constant jump function with a usable rdesc, decrement its
3052 refcount and if it hits zero, remove reference to SYMBOL from the caller of
3053 the edge specified in the rdesc. Return false if either the symbol or the
3054 reference could not be found, otherwise return true. */
3055
3056static bool
3057try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
3058{
3059 struct ipa_cst_ref_desc *rdesc;
3060 if (jfunc->type == IPA_JF_CONST
3061 && (rdesc = jfunc_rdesc_usable (jfunc))
3062 && --rdesc->refcount == 0)
3063 {
5e20cdc9 3064 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
568cda29
MJ
3065 if (!symbol)
3066 return false;
3067
3068 return remove_described_reference (symbol, rdesc);
3069 }
3070 return true;
3071}
3072
b258210c
MJ
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  If it can be
   determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc,
				  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  /* Cache the flag before ipa_make_edge_direct_to_target may replace IE;
     it is consulted again after the redirection.  */
  bool agg_contents = ie->indirect_info->agg_contents;

  if (ie->indirect_info->agg_contents)
    /* The callee address lives inside an aggregate passed to the call;
       look it up by offset in the aggregate jump function.  */
    target = ipa_find_agg_cst_for_param (&jfunc->agg,
					 ie->indirect_info->offset,
					 ie->indirect_info->by_ref);
  else
    target = ipa_value_from_jfunc (new_root_info, jfunc);
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      /* Sanity check: if a constant jfunc described a function, the new
	 direct callee must agree with it (unless the edge was not in fact
	 redirected).  */
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      /* The described use of the constant has been consumed by making the
	 edge direct, so drop its refcount.  */
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
3112
bec81025
MJ
3113/* Return the target to be used in cases of impossible devirtualization. IE
3114 and target (the latter can be NULL) are dumped when dumping is enabled. */
3115
72972c22
MJ
3116tree
3117ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
bec81025
MJ
3118{
3119 if (dump_file)
3120 {
3121 if (target)
3122 fprintf (dump_file,
72972c22 3123 "Type inconsistent devirtualization: %s/%i->%s\n",
bec81025
MJ
3124 ie->caller->name (), ie->caller->order,
3125 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
3126 else
3127 fprintf (dump_file,
3128 "No devirtualization target in %s/%i\n",
3129 ie->caller->name (), ie->caller->order);
3130 }
3131 tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
d52f5295 3132 cgraph_node::get_create (new_target);
bec81025
MJ
3133 return new_target;
3134}
3135
d250540a
MJ
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  NEW_ROOT_INFO is the node info that JFUNC lattices
   are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   struct ipa_node_params *new_root_info,
				   struct ipa_polymorphic_call_context *ctx_ptr)
{
  tree binfo, target = NULL;
  /* Whether the devirtualization decided on below is only speculative.  */
  bool speculative = false;
  /* Whether the polymorphic context stored on the edge was refined.  */
  bool updated = false;

  if (!flag_devirtualize)
    return NULL;

  /* If this is call of a function parameter, restrict its type
     based on knowledge of the context.  */
  if (ctx_ptr && !ie->indirect_info->by_ref)
    {
      struct ipa_polymorphic_call_context ctx = *ctx_ptr;

      ctx.offset_by (ie->indirect_info->offset);

      if (ie->indirect_info->vptr_changed)
	ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
					  ie->indirect_info->otr_type);

      updated = ie->indirect_info->context.combine_with
		  (ctx, ie->indirect_info->otr_type);
    }

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->by_ref
      && (!ie->indirect_info->vptr_changed || flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
					   ie->indirect_info->offset,
					   true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset);
	  if (t)
	    {
	      if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
		   && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		}
	      else
		{
		  target = t;
		  /* The vtable lookup is only trustworthy if the vptr could
		     not have changed; otherwise treat it as speculation.  */
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  binfo = ipa_value_from_jfunc (new_root_info, jfunc);

  /* A non-BINFO value is used to build another context restriction.  */
  if (binfo && TREE_CODE (binfo) != TREE_BINFO)
    {
      struct ipa_polymorphic_call_context ctx (binfo,
					       ie->indirect_info->otr_type,
					       ie->indirect_info->offset);
      updated |= ie->indirect_info->context.combine_with
		  (ctx, ie->indirect_info->otr_type);
    }

  if (updated)
    {
      /* The context was refined; redo the possible-target analysis.  */
      ipa_polymorphic_call_context context (ie);
      vec <cgraph_node *>targets;
      bool final;

      targets = possible_polymorphic_call_targets
		 (ie->indirect_info->otr_type,
		  ie->indirect_info->otr_token,
		  context, &final);
      if (final && targets.length () <= 1)
	{
	  if (targets.length () == 1)
	    target = targets[0]->decl;
	  else
	    /* Complete list with no candidates: call cannot happen.  */
	    target = ipa_impossible_devirt_target (ie, NULL_TREE);
	}
      else if (!target && flag_devirtualize_speculatively
	       && !ie->speculative && ie->maybe_hot_p ())
	{
	  cgraph_node *n = try_speculative_devirtualization (ie->indirect_info->otr_type,
							     ie->indirect_info->otr_token,
							     ie->indirect_info->context);
	  if (n)
	    {
	      target = n->decl;
	      speculative = true;
	    }
	}
    }

  /* A real BINFO allows a direct (non-speculative) lookup which overrides
     any speculative choice made above.  */
  if (binfo && TREE_CODE (binfo) == TREE_BINFO)
    {
      binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
				   ie->indirect_info->otr_type);
      if (binfo)
	{
	  tree t = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
						     binfo);
	  if (t)
	    {
	      target = t;
	      speculative = false;
	    }
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p (ie, cgraph_node::get_create (target)))
	{
	  /* Do not create a speculative edge to an impossible target.  */
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
3275
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  /* Lattices are relative to the root of the inlined tree.  */
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;

      /* IE may be removed/replaced below; remember the successor now.  */
      next_ie = ie->next_callee;

      /* -1 means the callee does not depend on a formal parameter.  */
      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments: */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (!flag_indirect_inlining)
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context *ctx;
	  ctx = ipa_get_ith_polymorhic_call_context (top, param_index);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
							       new_root_info,
							       ctx);
	}
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							    new_root_info);
      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  /* Edge manipulation may have reallocated the args vector;
	     re-fetch TOP.  */
	  top = IPA_EDGE_REF (cs);
	  res = true;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (
		  new_direct_edge->call_stmt,
		  new_direct_edge->callee->decl, false);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  /* See above: re-fetch after edge manipulation.  */
	  top = IPA_EDGE_REF (cs);
	}
      else if (jfunc->type == IPA_JF_PASS_THROUGH
	       && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  /* The parameter is passed through unchanged; re-point the note at
	     the caller's formal parameter, unless required aggregate or type
	     information is not preserved across the call.  */
	  if ((ici->agg_contents
	       && !ipa_get_jf_pass_through_agg_preserved (jfunc))
	      || (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc)))
	    ici->param_index = -1;
	  else
	    ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if ((ici->agg_contents
	       && !ipa_get_jf_ancestor_agg_preserved (jfunc))
	      || (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc)))
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
3385
3386/* Recursively traverse subtree of NODE (including node) made of inlined
3387 cgraph_edges when CS has been inlined and invoke
e33c6cd6 3388 update_indirect_edges_after_inlining on all nodes and
3e293154
MJ
3389 update_jump_functions_after_inlining on all non-inlined edges that lead out
3390 of this subtree. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
3391 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were
3392 created. */
be95e2b9 3393
f8e2a1ed 3394static bool
3e293154
MJ
3395propagate_info_to_inlined_callees (struct cgraph_edge *cs,
3396 struct cgraph_node *node,
d52f5295 3397 vec<cgraph_edge *> *new_edges)
3e293154
MJ
3398{
3399 struct cgraph_edge *e;
f8e2a1ed 3400 bool res;
3e293154 3401
e33c6cd6 3402 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3e293154
MJ
3403
3404 for (e = node->callees; e; e = e->next_callee)
3405 if (!e->inline_failed)
f8e2a1ed 3406 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3e293154
MJ
3407 else
3408 update_jump_functions_after_inlining (cs, e);
5ee53a06
JH
3409 for (e = node->indirect_calls; e; e = e->next_callee)
3410 update_jump_functions_after_inlining (cs, e);
f8e2a1ed
MJ
3411
3412 return res;
3e293154
MJ
3413}
3414
4502fe8d
MJ
3415/* Combine two controlled uses counts as done during inlining. */
3416
3417static int
3418combine_controlled_uses_counters (int c, int d)
3419{
3420 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
3421 return IPA_UNDESCRIBED_USE;
3422 else
3423 return c + d - 1;
3424}
3425
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  /* Only arguments that have a matching formal parameter carry controlled
     use information.  */
  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      /* All controlled uses are gone in an IPA-CP clone; the
		 cloning-created reference to the known function value can
		 therefore be removed.  */
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_vals[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s/%i to %s/%i.\n",
			     xstrdup (new_root->name ()),
			     new_root->order,
			     xstrdup (n->name ()), n->order);
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				      == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  /* Walk up the chain of inline/IPA-CP clones and drop the
		     references that cloning created along the way.  */
		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s/%i to %s/%i.\n",
				     xstrdup (clone->name ()),
				     clone->order,
				     xstrdup (n->name ()),
				     n->order);
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  /* Arguments beyond the formal parameter count have no parameter to
     propagate to; mark their uses as undescribed.  */
  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
3543
3e293154
MJ
3544/* Update jump functions and call note functions on inlining the call site CS.
3545 CS is expected to lead to a node already cloned by
3546 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
3547 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff a new edge(s) were +
3548 created. */
be95e2b9 3549
f8e2a1ed 3550bool
3e293154 3551ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
d52f5295 3552 vec<cgraph_edge *> *new_edges)
3e293154 3553{
5ee53a06 3554 bool changed;
f8e2a1ed
MJ
3555 /* Do nothing if the preparation phase has not been carried out yet
3556 (i.e. during early inlining). */
9771b263 3557 if (!ipa_node_params_vector.exists ())
f8e2a1ed
MJ
3558 return false;
3559 gcc_assert (ipa_edge_args_vector);
3560
4502fe8d 3561 propagate_controlled_uses (cs);
5ee53a06
JH
3562 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
3563
5ee53a06 3564 return changed;
518dc859
RL
3565}
3566
771578a0
MJ
3567/* Frees all dynamically allocated structures that the argument info points
3568 to. */
be95e2b9 3569
518dc859 3570void
771578a0 3571ipa_free_edge_args_substructures (struct ipa_edge_args *args)
518dc859 3572{
9771b263 3573 vec_free (args->jump_functions);
771578a0 3574 memset (args, 0, sizeof (*args));
518dc859
RL
3575}
3576
771578a0 3577/* Free all ipa_edge structures. */
be95e2b9 3578
518dc859 3579void
771578a0 3580ipa_free_all_edge_args (void)
518dc859 3581{
771578a0
MJ
3582 int i;
3583 struct ipa_edge_args *args;
518dc859 3584
9771b263
DN
3585 if (!ipa_edge_args_vector)
3586 return;
3587
3588 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
771578a0
MJ
3589 ipa_free_edge_args_substructures (args);
3590
9771b263 3591 vec_free (ipa_edge_args_vector);
518dc859
RL
3592}
3593
771578a0
MJ
3594/* Frees all dynamically allocated structures that the param info points
3595 to. */
be95e2b9 3596
518dc859 3597void
771578a0 3598ipa_free_node_params_substructures (struct ipa_node_params *info)
518dc859 3599{
9771b263 3600 info->descriptors.release ();
310bc633
MJ
3601 free (info->lattices);
3602 /* Lattice values and their sources are deallocated with their alocation
3603 pool. */
9771b263 3604 info->known_vals.release ();
771578a0 3605 memset (info, 0, sizeof (*info));
518dc859
RL
3606}
3607
771578a0 3608/* Free all ipa_node_params structures. */
be95e2b9 3609
518dc859 3610void
771578a0 3611ipa_free_all_node_params (void)
518dc859 3612{
771578a0
MJ
3613 int i;
3614 struct ipa_node_params *info;
518dc859 3615
9771b263 3616 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
771578a0
MJ
3617 ipa_free_node_params_substructures (info);
3618
9771b263 3619 ipa_node_params_vector.release ();
771578a0
MJ
3620}
3621
2c9561b5
MJ
3622/* Set the aggregate replacements of NODE to be AGGVALS. */
3623
3624void
3625ipa_set_node_agg_value_chain (struct cgraph_node *node,
3626 struct ipa_agg_replacement_value *aggvals)
3627{
3dafb85c
ML
3628 if (vec_safe_length (ipa_node_agg_replacements)
3629 <= (unsigned) symtab->cgraph_max_uid)
3630 vec_safe_grow_cleared (ipa_node_agg_replacements,
3631 symtab->cgraph_max_uid + 1);
2c9561b5 3632
9771b263 3633 (*ipa_node_agg_replacements)[node->uid] = aggvals;
2c9561b5
MJ
3634}
/* Hook that is called by cgraph.c when an edge is removed.  Drops the
   reference-description counts held by the edge's constant jump functions
   and then frees the edge's argument info.  */

static void
ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
{
  struct ipa_edge_args *args;

  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
    return;

  args = IPA_EDGE_REF (cs);
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  /* If the rdesc still names this edge as its origin, clear the
	     back-pointer so it does not dangle after the edge is gone.  */
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }

  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
}
3665
771578a0 3666/* Hook that is called by cgraph.c when a node is removed. */
be95e2b9 3667
771578a0 3668static void
5c0466b5 3669ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 3670{
dd6d1ad7 3671 /* During IPA-CP updating we can be called on not-yet analyze clones. */
9771b263 3672 if (ipa_node_params_vector.length () > (unsigned)node->uid)
2c9561b5 3673 ipa_free_node_params_substructures (IPA_NODE_REF (node));
9771b263
DN
3674 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3675 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
771578a0
MJ
3676}
/* Hook that is called by cgraph.c when an edge is duplicated.  Copies the
   jump functions of SRC to DST and fixes up the associated constant
   reference descriptors (rdescs) and controlled-use counts so that
   reference counting stays consistent across clones and inline copies.  */

static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			   __attribute__((unused)) void *data)
{
  struct ipa_edge_args *old_args, *new_args;
  unsigned int i;

  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      /* vec_safe_copy above copied the items pointer shallowly; deep-copy
	 the aggregate item vector so the two edges do not share it.  */
      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      /* Same caller: clone the IPA reference and give the new edge
		 its own rdesc with the same refcount.  */
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      gcc_checking_assert (ipa_refdesc_pool);
	      struct ipa_cst_ref_desc *dst_rdesc
		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      /* SRC is the edge the rdesc originated from: chain a new
		 duplicate rdesc for DST onto the duplicate list.  */
	      struct ipa_cst_ref_desc *dst_rdesc;
	      gcc_checking_assert (ipa_refdesc_pool);
	      dst_rdesc
		= (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->global.inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->global.inlined_to
		    ? dst_rdesc->cs->caller->global.inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->global.inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  /* A pass-through duplicated within the same caller means another
	     described use of the formal parameter in the inline root; bump
	     its controlled-uses count if it is being tracked.  */
	  struct cgraph_node *inline_root = dst->caller->global.inlined_to
	    ? dst->caller->global.inlined_to : dst->caller;
	  struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
/* Hook that is called by cgraph.c when a node is duplicated.  Copies the
   parameter descriptors and analysis flags of SRC to DST and duplicates
   SRC's aggregate replacement chain for DST.  */

static void
ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
			   ATTRIBUTE_UNUSED void *data)
{
  struct ipa_node_params *old_info, *new_info;
  struct ipa_agg_replacement_value *old_av, *new_av;

  ipa_check_create_node_params ();
  old_info = IPA_NODE_REF (src);
  new_info = IPA_NODE_REF (dst);

  new_info->descriptors = old_info->descriptors.copy ();
  /* Lattices are not copied; they are recomputed when needed.  */
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (!old_av)
    return;

  /* Deep-copy the replacement chain.  Note that prepending each copied
     element reverses the order of the list relative to SRC's chain.  */
  new_av = NULL;
  while (old_av)
    {
      struct ipa_agg_replacement_value *v;

      v = ggc_alloc<ipa_agg_replacement_value> ();
      memcpy (v, old_av, sizeof (*v));
      v->next = new_av;
      new_av = v;
      old_av = old_av->next;
    }
  ipa_set_node_agg_value_chain (dst, new_av);
}
3819
40982661
JH
3820
3821/* Analyze newly added function into callgraph. */
3822
3823static void
3824ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3825{
d52f5295 3826 if (node->has_gimple_body_p ())
0136f8f0 3827 ipa_analyze_node (node);
40982661
JH
3828}
/* Register our cgraph hooks if they are not already there.  The removal
   and duplication hooks are guarded so repeated calls do not register
   twice; the insertion hook is installed unconditionally (the holder is
   simply overwritten).  */

void
ipa_register_cgraph_hooks (void)
{
  if (!edge_removal_hook_holder)
    edge_removal_hook_holder =
      symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
  if (!node_removal_hook_holder)
    node_removal_hook_holder =
      symtab->add_cgraph_removal_hook (&ipa_node_removal_hook, NULL);
  if (!edge_duplication_hook_holder)
    edge_duplication_hook_holder =
      symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
  if (!node_duplication_hook_holder)
    node_duplication_hook_holder =
      symtab->add_cgraph_duplication_hook (&ipa_node_duplication_hook, NULL);
  function_insertion_hook_holder =
      symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}
/* Unregister all of our cgraph hooks and clear the holders so that a later
   ipa_register_cgraph_hooks can install them afresh.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  symtab->remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  symtab->remove_cgraph_removal_hook (node_removal_hook_holder);
  node_removal_hook_holder = NULL;
  symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  symtab->remove_cgraph_duplication_hook (node_duplication_hook_holder);
  node_duplication_hook_holder = NULL;
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
3867
3868/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3869 longer needed after ipa-cp. */
be95e2b9 3870
771578a0 3871void
e33c6cd6 3872ipa_free_all_structures_after_ipa_cp (void)
3e293154 3873{
5ee53a06 3874 if (!optimize)
3e293154
MJ
3875 {
3876 ipa_free_all_edge_args ();
3877 ipa_free_all_node_params ();
310bc633
MJ
3878 free_alloc_pool (ipcp_sources_pool);
3879 free_alloc_pool (ipcp_values_pool);
2c9561b5 3880 free_alloc_pool (ipcp_agg_lattice_pool);
3e293154 3881 ipa_unregister_cgraph_hooks ();
4502fe8d
MJ
3882 if (ipa_refdesc_pool)
3883 free_alloc_pool (ipa_refdesc_pool);
3e293154
MJ
3884 }
3885}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  Unlike the post-ipa-cp variant,
   the ipcp pools are guarded here because they may never have been
   created on this path.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  if (ipcp_sources_pool)
    free_alloc_pool (ipcp_sources_pool);
  if (ipcp_values_pool)
    free_alloc_pool (ipcp_values_pool);
  if (ipcp_agg_lattice_pool)
    free_alloc_pool (ipcp_agg_lattice_pool);
  if (ipa_refdesc_pool)
    free_alloc_pool (ipa_refdesc_pool);
}
3905
dcd416e3 3906/* Print ipa_tree_map data structures of all functions in the
518dc859 3907 callgraph to F. */
be95e2b9 3908
518dc859 3909void
2c9561b5 3910ipa_print_node_params (FILE *f, struct cgraph_node *node)
518dc859
RL
3911{
3912 int i, count;
3e293154 3913 struct ipa_node_params *info;
518dc859 3914
67348ccc 3915 if (!node->definition)
3e293154
MJ
3916 return;
3917 info = IPA_NODE_REF (node);
9de04252 3918 fprintf (f, " function %s/%i parameter descriptors:\n",
fec39fa6 3919 node->name (), node->order);
3e293154
MJ
3920 count = ipa_get_param_count (info);
3921 for (i = 0; i < count; i++)
518dc859 3922 {
4502fe8d
MJ
3923 int c;
3924
a4e33812 3925 fprintf (f, " ");
e067bd43 3926 ipa_dump_param (f, info, i);
339f49ec
JH
3927 if (ipa_is_param_used (info, i))
3928 fprintf (f, " used");
4502fe8d
MJ
3929 c = ipa_get_controlled_uses (info, i);
3930 if (c == IPA_UNDESCRIBED_USE)
3931 fprintf (f, " undescribed_use");
3932 else
3933 fprintf (f, " controlled_uses=%i", c);
3e293154 3934 fprintf (f, "\n");
518dc859
RL
3935 }
3936}
dcd416e3 3937
ca30a539 3938/* Print ipa_tree_map data structures of all functions in the
3e293154 3939 callgraph to F. */
be95e2b9 3940
3e293154 3941void
ca30a539 3942ipa_print_all_params (FILE * f)
3e293154
MJ
3943{
3944 struct cgraph_node *node;
3945
ca30a539 3946 fprintf (f, "\nFunction parameters:\n");
65c70e6b 3947 FOR_EACH_FUNCTION (node)
ca30a539 3948 ipa_print_node_params (f, node);
3e293154 3949}
3f84bf08
MJ
3950
3951/* Return a heap allocated vector containing formal parameters of FNDECL. */
3952
9771b263 3953vec<tree>
3f84bf08
MJ
3954ipa_get_vector_of_formal_parms (tree fndecl)
3955{
9771b263 3956 vec<tree> args;
3f84bf08
MJ
3957 int count;
3958 tree parm;
3959
0e8853ee 3960 gcc_assert (!flag_wpa);
310bc633 3961 count = count_formal_params (fndecl);
9771b263 3962 args.create (count);
910ad8de 3963 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
9771b263 3964 args.quick_push (parm);
3f84bf08
MJ
3965
3966 return args;
3967}
3968
3969/* Return a heap allocated vector containing types of formal parameters of
3970 function type FNTYPE. */
3971
31519c38
AH
3972vec<tree>
3973ipa_get_vector_of_formal_parm_types (tree fntype)
3f84bf08 3974{
9771b263 3975 vec<tree> types;
3f84bf08
MJ
3976 int count = 0;
3977 tree t;
3978
3979 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3980 count++;
3981
9771b263 3982 types.create (count);
3f84bf08 3983 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
9771b263 3984 types.quick_push (TREE_VALUE (t));
3f84bf08
MJ
3985
3986 return types;
3987}
/* Modify the function declaration FNDECL and its type according to the plan in
   ADJUSTMENTS.  It also sets base fields of individual adjustments structures
   to reflect the actual parameters being modified which are determined by the
   base_index field.  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
{
  vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
  tree orig_type = TREE_TYPE (fndecl);
  tree old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well... */
  bool care_for_types = (old_arg_types != NULL_TREE);
  bool last_parm_void;
  vec<tree> otypes;
  if (care_for_types)
    {
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = ipa_get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (oparms.length () + 1 == otypes.length ());
      else
	gcc_assert (oparms.length () == otypes.length ());
    }
  else
    {
      last_parm_void = false;
      otypes.create (0);
    }

  /* Rebuild both DECL_ARGUMENTS (via LINK) and the TYPE_ARG_TYPES list
     (collected in NEW_ARG_TYPES, in reverse) in one walk over the plan.  */
  int len = adjustments.length ();
  tree *link = &DECL_ARGUMENTS (fndecl);
  tree new_arg_types = NULL;
  for (int i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      gcc_assert (link);

      adj = &adjustments[i];
      tree parm;
      if (adj->op == IPA_PARM_OP_NEW)
	parm = NULL;
      else
	parm = oparms[adj->base_index];
      adj->base = parm;

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  /* Keep the original PARM_DECL and its type as-is.  */
	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
				       new_arg_types);
	  *link = parm;
	  link = &DECL_CHAIN (parm);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  /* Synthesize a fresh PARM_DECL for a new or reduced parameter.  */
	  tree new_parm;
	  tree ptype;

	  if (adj->by_ref)
	    ptype = build_pointer_type (adj->type);
	  else
	    {
	      ptype = adj->type;
	      if (is_gimple_reg_type (ptype))
		{
		  /* Promote the alignment of register-type parameters to
		     at least their mode alignment.  */
		  unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
		  if (TYPE_ALIGN (ptype) < malign)
		    ptype = build_aligned_type (ptype, malign);
		}
	    }

	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 ptype);
	  const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
	  DECL_NAME (new_parm) = create_tmp_var_name (prefix);
	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = ptype;
	  DECL_CONTEXT (new_parm) = fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  layout_decl (new_parm, 0);

	  if (adj->op == IPA_PARM_OP_NEW)
	    adj->base = NULL;
	  else
	    adj->base = parm;
	  adj->new_decl = new_parm;

	  *link = new_parm;
	  link = &DECL_CHAIN (new_parm);
	}
    }

  *link = NULL_TREE;

  tree new_reversed = NULL;
  if (care_for_types)
    {
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
	{
	  /* After nreverse, NEW_ARG_TYPES points at the last cons cell;
	     terminate the list with the void sentinel again.  */
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  tree new_type = NULL;
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || (adjustments[0].op == IPA_PARM_OP_COPY
	  && adjustments[0].base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  DECL_LANG_SPECIFIC (fndecl) = NULL;
  otypes.release ();
  oparms.release ();
}
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.  The original call statement
   STMT is replaced by a freshly built one; reference bookkeeping of the
   current node is updated accordingly.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
  vec<tree> vargs;
  vec<tree, va_gc> **debug_args = NULL;
  gimple new_stmt;
  gimple_stmt_iterator gsi, prev_gsi;
  tree callee_decl;
  int i, len;

  len = adjustments.length ();
  vargs.create (len);
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
  /* References held by the old statement are dropped now and re-recorded
     for all newly inserted statements at the end of this function.  */
  current_node->remove_stmt_references (stmt);

  gsi = gsi_for_stmt (stmt);
  /* Remember the statement just before STMT so we can later walk forward
     over everything inserted in between.  */
  prev_gsi = gsi;
  gsi_prev (&prev_gsi);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  /* Pass the original actual argument through unchanged.  */
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  vargs.quick_push (arg);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree expr, base, off;
	  location_t loc;
	  unsigned int deref_align = 0;
	  bool deref_base = false;

	  /* We create a new parameter out of the value of the old one, we can
	     do the following kind of transformations:

	     - A scalar passed by reference is converted to a scalar passed by
               value.  (adj->by_ref is false and the type of the original
               actual argument is a pointer to a scalar).

	     - A part of an aggregate is passed instead of the whole aggregate.
               The part can be passed either by value or by reference, this is
               determined by value of adj->by_ref.  Moreover, the code below
               handles both situations when the original aggregate is passed by
               value (its type is not a pointer) and when it is passed by
               reference (it is a pointer to an aggregate).

	     When the new argument is passed by reference (adj->by_ref is true)
	     it must be a part of an aggregate and therefore we form it by
	     simply taking the address of a reference inside the original
	     aggregate.  */

	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
	  base = gimple_call_arg (stmt, adj->base_index);
	  loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
			      : EXPR_LOCATION (base);

	  if (TREE_CODE (base) != ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (base)))
	    /* The argument is already a pointer; just add the offset.  */
	    off = build_int_cst (adj->alias_ptr_type,
				 adj->offset / BITS_PER_UNIT);
	  else
	    {
	      /* Otherwise decompose the argument into a base address and a
		 constant unit offset, folding through ADDR_EXPRs and
		 MEM_REFs where possible.  */
	      HOST_WIDE_INT base_offset;
	      tree prev_base;
	      bool addrof;

	      if (TREE_CODE (base) == ADDR_EXPR)
		{
		  base = TREE_OPERAND (base, 0);
		  addrof = true;
		}
	      else
		addrof = false;
	      prev_base = base;
	      base = get_addr_base_and_unit_offset (base, &base_offset);
	      /* Aggregate arguments can have non-invariant addresses.  */
	      if (!base)
		{
		  base = build_fold_addr_expr (prev_base);
		  off = build_int_cst (adj->alias_ptr_type,
				       adj->offset / BITS_PER_UNIT);
		}
	      else if (TREE_CODE (base) == MEM_REF)
		{
		  if (!addrof)
		    {
		      deref_base = true;
		      deref_align = TYPE_ALIGN (TREE_TYPE (base));
		    }
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
					 off);
		  base = TREE_OPERAND (base, 0);
		}
	      else
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  base = build_fold_addr_expr (base);
		}
	    }

	  if (!adj->by_ref)
	    {
	      tree type = adj->type;
	      unsigned int align;
	      unsigned HOST_WIDE_INT misalign;

	      /* Work out the alignment actually guaranteed at BASE + OFF
		 and, if it is lower than the type's, use a lower-aligned
		 variant of the type for the load.  */
	      if (deref_base)
		{
		  align = deref_align;
		  misalign = 0;
		}
	      else
		{
		  get_pointer_alignment_1 (base, &align, &misalign);
		  if (TYPE_ALIGN (type) > align)
		    align = TYPE_ALIGN (type);
		}
	      misalign += (offset_int::from (off, SIGNED).to_short_addr ()
			   * BITS_PER_UNIT);
	      misalign = misalign & (align - 1);
	      if (misalign != 0)
		align = (misalign & -misalign);
	      if (align < TYPE_ALIGN (type))
		type = build_aligned_type (type, align);
	      base = force_gimple_operand_gsi (&gsi, base,
					       true, NULL, true, GSI_SAME_STMT);
	      expr = fold_build2_loc (loc, MEM_REF, type, base, off);
	      /* If expr is not a valid gimple call argument emit
	         a load into a temporary.  */
	      if (is_gimple_reg_type (TREE_TYPE (expr)))
		{
		  gimple tem = gimple_build_assign (NULL_TREE, expr);
		  if (gimple_in_ssa_p (cfun))
		    {
		      gimple_set_vuse (tem, gimple_vuse (stmt));
		      expr = make_ssa_name (TREE_TYPE (expr), tem);
		    }
		  else
		    expr = create_tmp_reg (TREE_TYPE (expr), NULL);
		  gimple_assign_set_lhs (tem, expr);
		  gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
		}
	    }
	  else
	    {
	      /* By-reference: pass the address of the selected piece.  */
	      expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
	      expr = build_fold_addr_expr (expr);
	      expr = force_gimple_operand_gsi (&gsi, expr,
					       true, NULL, true, GSI_SAME_STMT);
	    }
	  vargs.quick_push (expr);
	}
      if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
	{
	  /* Emit a debug bind so the value of the original parameter stays
	     available to the debugger even though it is no longer passed.  */
	  unsigned int ix;
	  tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
	  gimple def_temp;

	  arg = gimple_call_arg (stmt, adj->base_index);
	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
	    {
	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
		continue;
	      arg = fold_convert_loc (gimple_location (stmt),
				      TREE_TYPE (origin), arg);
	    }
	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (callee_decl);
	  /* DEBUG_ARGS holds (origin, debug-decl) pairs; reuse an existing
	     entry for ORIGIN if there is one.  */
	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
	    if (ddecl == origin)
	      {
		ddecl = (**debug_args)[ix + 1];
		break;
	      }
	  if (ddecl == NULL)
	    {
	      ddecl = make_node (DEBUG_EXPR_DECL);
	      DECL_ARTIFICIAL (ddecl) = 1;
	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
	      DECL_MODE (ddecl) = DECL_MODE (origin);

	      vec_safe_push (*debug_args, origin);
	      vec_safe_push (*debug_args, ddecl);
	    }
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  if (gimple_in_ssa_p (cfun))
    {
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      if (gimple_vdef (stmt))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cs->set_call_stmt (new_stmt);
  /* Re-record references for the new call statement and for everything we
     inserted before it, walking back to the remembered predecessor.  */
  do
    {
      current_node->record_stmt_references (gsi_stmt (gsi));
      gsi_prev (&gsi);
    }
  while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
}
4387
31519c38
AH
4388/* If the expression *EXPR should be replaced by a reduction of a parameter, do
4389 so. ADJUSTMENTS is a pointer to a vector of adjustments. CONVERT
4390 specifies whether the function should care about type incompatibility the
4391 current and new expressions. If it is false, the function will leave
4392 incompatibility issues to the caller. Return true iff the expression
4393 was modified. */
4394
4395bool
4396ipa_modify_expr (tree *expr, bool convert,
4397 ipa_parm_adjustment_vec adjustments)
4398{
4399 struct ipa_parm_adjustment *cand
4400 = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
4401 if (!cand)
4402 return false;
4403
4404 tree src;
4405 if (cand->by_ref)
4406 src = build_simple_mem_ref (cand->new_decl);
4407 else
4408 src = cand->new_decl;
4409
4410 if (dump_file && (dump_flags & TDF_DETAILS))
4411 {
4412 fprintf (dump_file, "About to replace expr ");
4413 print_generic_expr (dump_file, *expr, 0);
4414 fprintf (dump_file, " with ");
4415 print_generic_expr (dump_file, src, 0);
4416 fprintf (dump_file, "\n");
4417 }
4418
4419 if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
4420 {
4421 tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
4422 *expr = vce;
4423 }
4424 else
4425 *expr = src;
4426 return true;
4427}
4428
4429/* If T is an SSA_NAME, return NULL if it is not a default def or
4430 return its base variable if it is. If IGNORE_DEFAULT_DEF is true,
4431 the base variable is always returned, regardless if it is a default
4432 def. Return T if it is not an SSA_NAME. */
4433
4434static tree
4435get_ssa_base_param (tree t, bool ignore_default_def)
4436{
4437 if (TREE_CODE (t) == SSA_NAME)
4438 {
4439 if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
4440 return SSA_NAME_VAR (t);
4441 else
4442 return NULL_TREE;
4443 }
4444 return t;
4445}
/* Given an expression, return an adjustment entry specifying the
   transformation to be done on EXPR.  If no suitable adjustment entry
   was found, returns NULL.

   If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
   default def, otherwise bail on them.

   If CONVERT is non-NULL, this function will set *CONVERT if the
   expression provided is a component reference.  ADJUSTMENTS is the
   adjustments vector.  */

ipa_parm_adjustment *
ipa_get_adjustment_candidate (tree **expr, bool *convert,
			      ipa_parm_adjustment_vec adjustments,
			      bool ignore_default_def)
{
  /* Look through wrappers that select a part of a value; the adjustment
     applies to the underlying operand, and a conversion is then needed.  */
  if (TREE_CODE (**expr) == BIT_FIELD_REF
      || TREE_CODE (**expr) == IMAGPART_EXPR
      || TREE_CODE (**expr) == REALPART_EXPR)
    {
      *expr = &TREE_OPERAND (**expr, 0);
      if (convert)
	*convert = true;
    }

  HOST_WIDE_INT offset, size, max_size;
  tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return NULL;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base, ignore_default_def);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return NULL;

  /* Find the adjustment describing this parameter at this offset.  */
  struct ipa_parm_adjustment *cand = NULL;
  unsigned int len = adjustments.length ();
  for (unsigned i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj = &adjustments[i];

      if (adj->base == base
	  && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
	{
	  cand = adj;
	  break;
	}
    }

  /* Only adjustments that actually replace the access are candidates;
     plain copies and removals do not transform the expression.  */
  if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
    return NULL;
  return cand;
}
4505
3f84bf08
MJ
4506/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
4507
4508static bool
4509index_in_adjustments_multiple_times_p (int base_index,
4510 ipa_parm_adjustment_vec adjustments)
4511{
9771b263 4512 int i, len = adjustments.length ();
3f84bf08
MJ
4513 bool one = false;
4514
4515 for (i = 0; i < len; i++)
4516 {
4517 struct ipa_parm_adjustment *adj;
9771b263 4518 adj = &adjustments[i];
3f84bf08
MJ
4519
4520 if (adj->base_index == base_index)
4521 {
4522 if (one)
4523 return true;
4524 else
4525 one = true;
4526 }
4527 }
4528 return false;
4529}
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments in
   INNER and then by adjustments in OUTER.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  /* TMP is INNER with removals filtered out, so that OUTER's base_index
     values (which refer to the post-INNER parameter list) index it
     directly.  */
  tmp.create (inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	removals++;
      else
	{
	  /* FIXME: Handling of new arguments are not implemented yet.  */
	  gcc_assert (n->op != IPA_PARM_OP_NEW);
	  tmp.quick_push (*n);
	}
    }

  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (in->op != IPA_PARM_OP_REMOVE);
      if (out->op == IPA_PARM_OP_REMOVE)
	{
	  /* Record the removal only once per original parameter.  */
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r.op = IPA_PARM_OP_REMOVE;
	      adjustments.quick_push (r);
	    }
	  continue;
	}
      else
	{
	  /* FIXME: Handling of new arguments are not implemented yet.  */
	  gcc_assert (out->op != IPA_PARM_OP_NEW);
	}

      r.base_index = in->base_index;
      r.type = out->type;

      /* FIXME:  Create nonlocal value too.  */

      /* Compose the offsets: a copy contributes nothing, two reductions
	 add up.  */
      if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
	r.op = IPA_PARM_OP_COPY;
      else if (in->op == IPA_PARM_OP_COPY)
	r.offset = out->offset;
      else if (out->op == IPA_PARM_OP_COPY)
	r.offset = in->offset;
      else
	r.offset = in->offset + out->offset;
      adjustments.quick_push (r);
    }

  /* Parameters removed by INNER stay removed in the combination.  */
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	adjustments.quick_push (*n);
    }

  tmp.release ();
  return adjustments;
}
4612
/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a human
   friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
			    tree fndecl)
{
  int i, len = adjustments.length ();
  bool first = true;
  /* Formal parameters of FNDECL, indexed by base_index.  */
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (!first)
	fprintf (file, " ");
      else
	first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, parms[adj->base_index], 0);
      if (adj->base)
	{
	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);
	}
      if (adj->new_decl)
	{
	  fprintf (file, ", new_decl: ");
	  print_generic_expr (file, adj->new_decl, 0);
	}
      if (adj->new_ssa_base)
	{
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);
	}

      /* Describe the operation; non-copy, non-remove adjustments carry an
	 offset into the original parameter.  */
      if (adj->op == IPA_PARM_OP_COPY)
	fprintf (file, ", copy_param");
      else if (adj->op == IPA_PARM_OP_REMOVE)
	fprintf (file, ", remove_param");
      else
	fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  parms.release ();
}
4666
2c9561b5
MJ
4667/* Dump the AV linked list. */
4668
4669void
4670ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
4671{
4672 bool comma = false;
4673 fprintf (f, " Aggregate replacements:");
4674 for (; av; av = av->next)
4675 {
4676 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
4677 av->index, av->offset);
4678 print_generic_expr (f, av->value, 0);
4679 comma = true;
4680 }
4681 fprintf (f, "\n");
4682}
4683
fb3f88cc
JH
/* Stream out jump function JUMP_FUNC to OB.  The format written here must be
   kept in sync with ipa_read_jump_function.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_KNOWN_TYPE:
      streamer_write_uhwi (ob, jump_func->value.known_type.offset);
      stream_write_tree (ob, jump_func->value.known_type.base_type, true);
      stream_write_tree (ob, jump_func->value.known_type.component_type, true);
      break;
    case IPA_JF_CONST:
      /* Constants with locations cannot be streamed here; the location
	 would be meaningless in another translation unit.  */
      gcc_assert (
	  EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  /* Simple pass-through: formal id plus preservation flags.  */
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else
	{
	  /* Arithmetic pass-through: second operand then formal id.  */
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      stream_write_tree (ob, jump_func->value.ancestor.type, true);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  /* Aggregate part: item count, then (only when non-empty) the by_ref flag,
     then the items themselves.  */
  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }
}
4751
/* Read in jump function JUMP_FUNC from IB.  Must mirror the format produced
   by ipa_write_jump_function exactly.  CS is the edge the jump function
   belongs to; DATA_IN supplies tree streaming state.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      jump_func->type = IPA_JF_UNKNOWN;
      break;
    case IPA_JF_KNOWN_TYPE:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	tree base_type = stream_read_tree (ib, data_in);
	tree component_type = stream_read_tree (ib, data_in);

	ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
	break;
      }
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  /* Simple pass-through: formal id followed by a bitpack of the
	     two preservation flags.  */
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  bool type_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
					  type_preserved);
	}
      else
	{
	  /* Arithmetic pass-through: operand tree then formal id.  */
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	tree type = stream_read_tree (ib, data_in);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	bool type_preserved = bp_unpack_value (&bp, 1);

	ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
			     type_preserved);
	break;
      }
    }

  /* Aggregate part: count, then (when non-empty) the by_ref flag, then the
     individual offset/value items.  */
  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }
}
4831
e33c6cd6
MJ
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  Must be kept in sync with
   ipa_read_indirect_edge_info.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  /* The offset is only meaningful for aggregate-passed or polymorphic
     calls; otherwise it must be zero and is not streamed.  */
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      /* Devirtualization data: OBJ_TYPE_REF token, type and context.  */
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
4862
e33c6cd6
MJ
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  Mirrors
   ipa_write_indirect_edge_info.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
			     struct data_in *data_in,
			     struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  /* The offset was only streamed for aggregate or polymorphic calls.  */
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
}
4892
fb3f88cc
JH
/* Stream out NODE info to OB.  Layout: node reference, parameter count,
   per-parameter move costs, a bitpack of used flags, per-parameter
   controlled-uses counts, then for every direct and indirect call edge the
   argument count (doubled, with bit 0 flagging presence of polymorphic call
   contexts) followed by the jump functions.  Must be kept in sync with
   ipa_read_node_info.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  /* Only fully analyzed (or parameterless) nodes may be streamed.  */
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      /* Argument count is doubled so the low bit can carry whether
	 polymorphic call contexts follow each jump function.  */
      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
      /* Indirect edges additionally carry their indirect call info.  */
      ipa_write_indirect_edge_info (ob, e);
    }
}
4952
/* Stream in NODE info from IB.  Mirrors the layout written by
   ipa_write_node_info.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  /* The writer asserted analysis_done for any node with parameters.  */
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      /* Low bit flags presence of polymorphic call contexts; the rest is
	 the argument count.  */
      bool contexts_computed = count & 1;
      count /= 2;

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  if (contexts_computed)
	    vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    {
	      ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				      data_in);
	      if (contexts_computed)
		ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib, data_in);
	    }
	}
      /* Indirect edge info follows the jump functions even when there are
	 no arguments.  */
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
5021
5022/* Write jump functions for nodes in SET. */
5023
5024void
f27c1867 5025ipa_prop_write_jump_functions (void)
fb3f88cc
JH
5026{
5027 struct cgraph_node *node;
93536c97 5028 struct output_block *ob;
fb3f88cc 5029 unsigned int count = 0;
f27c1867
JH
5030 lto_symtab_encoder_iterator lsei;
5031 lto_symtab_encoder_t encoder;
5032
fb3f88cc 5033
9771b263 5034 if (!ipa_node_params_vector.exists ())
93536c97 5035 return;
fb3f88cc 5036
93536c97 5037 ob = create_output_block (LTO_section_jump_functions);
f27c1867 5038 encoder = ob->decl_state->symtab_node_encoder;
0b83e688 5039 ob->symbol = NULL;
f27c1867
JH
5040 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5041 lsei_next_function_in_partition (&lsei))
fb3f88cc 5042 {
f27c1867 5043 node = lsei_cgraph_node (lsei);
d52f5295 5044 if (node->has_gimple_body_p ()
c47d0034 5045 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
5046 count++;
5047 }
5048
412288f1 5049 streamer_write_uhwi (ob, count);
fb3f88cc
JH
5050
5051 /* Process all of the functions. */
f27c1867
JH
5052 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
5053 lsei_next_function_in_partition (&lsei))
fb3f88cc 5054 {
f27c1867 5055 node = lsei_cgraph_node (lsei);
d52f5295 5056 if (node->has_gimple_body_p ()
c47d0034 5057 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
5058 ipa_write_node_info (ob, node);
5059 }
412288f1 5060 streamer_write_char_stream (ob->main_stream, 0);
fb3f88cc
JH
5061 produce_asm (ob, NULL);
5062 destroy_output_block (ob);
5063}
5064
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  /* Section layout: header, CFG stream, main stream, string table.  */
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  /* Number of node records in this section, written by
     ipa_prop_write_jump_functions.  */
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
5105
5106/* Read ipcp jump functions. */
5107
5108void
5109ipa_prop_read_jump_functions (void)
5110{
5111 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5112 struct lto_file_decl_data *file_data;
5113 unsigned int j = 0;
5114
5115 ipa_check_create_node_params ();
5116 ipa_check_create_edge_args ();
5117 ipa_register_cgraph_hooks ();
5118
5119 while ((file_data = file_data_vec[j++]))
5120 {
5121 size_t len;
5122 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
5123
5124 if (data)
5125 ipa_prop_read_section (file_data, data, len);
5126 }
5127}
5128
b8698a0f 5129/* After merging units, we can get mismatch in argument counts.
61502ca8 5130 Also decl merging might've rendered parameter lists obsolete.
fb3f88cc
JH
5131 Also compute called_with_variable_arg info. */
5132
5133void
5134ipa_update_after_lto_read (void)
5135{
05d3aa37
MJ
5136 ipa_check_create_node_params ();
5137 ipa_check_create_edge_args ();
fb3f88cc 5138}
2c9561b5
MJ
5139
/* Stream out the aggregate replacement value chain of NODE to OB.  Must be
   kept in sync with read_agg_replacement_chain.  */

void
write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  /* Write the chain length first so the reader can loop precisely.  */
  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }
}
5170
5171/* Stream in the aggregate value replacement chain for NODE from IB. */
5172
5173static void
5174read_agg_replacement_chain (struct lto_input_block *ib,
5175 struct cgraph_node *node,
5176 struct data_in *data_in)
5177{
5178 struct ipa_agg_replacement_value *aggvals = NULL;
5179 unsigned int count, i;
5180
5181 count = streamer_read_uhwi (ib);
5182 for (i = 0; i <count; i++)
5183 {
5184 struct ipa_agg_replacement_value *av;
7b920a9a 5185 struct bitpack_d bp;
2c9561b5 5186
766090c2 5187 av = ggc_alloc<ipa_agg_replacement_value> ();
2c9561b5
MJ
5188 av->offset = streamer_read_uhwi (ib);
5189 av->index = streamer_read_uhwi (ib);
5190 av->value = stream_read_tree (ib, data_in);
7b920a9a
MJ
5191 bp = streamer_read_bitpack (ib);
5192 av->by_ref = bp_unpack_value (&bp, 1);
2c9561b5
MJ
5193 av->next = aggvals;
5194 aggvals = av;
5195 }
5196 ipa_set_node_agg_value_chain (node, aggvals);
5197}
5198
/* Write all aggregate replacement for nodes in set.  */

void
ipa_prop_write_all_agg_replacement (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  /* Nothing to stream when no aggregate replacements were computed.  */
  if (!ipa_node_agg_replacements)
    return;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  /* First pass: count nodes with replacement chains so the reader knows how
     many records follow.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && ipa_get_agg_replacements_for_node (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Second pass: stream out the chains for the same set of nodes.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && ipa_get_agg_replacements_for_node (node) != NULL)
	write_agg_replacement_chain (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
5239
5240/* Read replacements section in file FILE_DATA of length LEN with data
5241 DATA. */
5242
5243static void
5244read_replacements_section (struct lto_file_decl_data *file_data,
5245 const char *data,
5246 size_t len)
5247{
5248 const struct lto_function_header *header =
5249 (const struct lto_function_header *) data;
5250 const int cfg_offset = sizeof (struct lto_function_header);
5251 const int main_offset = cfg_offset + header->cfg_size;
5252 const int string_offset = main_offset + header->main_size;
5253 struct data_in *data_in;
2c9561b5
MJ
5254 unsigned int i;
5255 unsigned int count;
5256
207c68cd
RB
5257 lto_input_block ib_main ((const char *) data + main_offset,
5258 header->main_size);
2c9561b5
MJ
5259
5260 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 5261 header->string_size, vNULL);
2c9561b5
MJ
5262 count = streamer_read_uhwi (&ib_main);
5263
5264 for (i = 0; i < count; i++)
5265 {
5266 unsigned int index;
5267 struct cgraph_node *node;
5268 lto_symtab_encoder_t encoder;
5269
5270 index = streamer_read_uhwi (&ib_main);
5271 encoder = file_data->symtab_node_encoder;
d52f5295
ML
5272 node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
5273 index));
67348ccc 5274 gcc_assert (node->definition);
2c9561b5
MJ
5275 read_agg_replacement_chain (&ib_main, node, data_in);
5276 }
5277 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
5278 len);
5279 lto_data_in_delete (data_in);
5280}
5281
5282/* Read IPA-CP aggregate replacements. */
5283
5284void
5285ipa_prop_read_all_agg_replacement (void)
5286{
5287 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
5288 struct lto_file_decl_data *file_data;
5289 unsigned int j = 0;
5290
5291 while ((file_data = file_data_vec[j++]))
5292 {
5293 size_t len;
5294 const char *data = lto_get_section_data (file_data,
5295 LTO_section_ipcp_transform,
5296 NULL, &len);
5297 if (data)
5298 read_replacements_section (file_data, data, len);
5299 }
5300}
5301
5302/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
5303 NODE. */
5304
5305static void
5306adjust_agg_replacement_values (struct cgraph_node *node,
5307 struct ipa_agg_replacement_value *aggval)
5308{
5309 struct ipa_agg_replacement_value *v;
5310 int i, c = 0, d = 0, *adj;
5311
5312 if (!node->clone.combined_args_to_skip)
5313 return;
5314
5315 for (v = aggval; v; v = v->next)
5316 {
5317 gcc_assert (v->index >= 0);
5318 if (c < v->index)
5319 c = v->index;
5320 }
5321 c++;
5322
5323 adj = XALLOCAVEC (int, c);
5324 for (i = 0; i < c; i++)
5325 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
5326 {
5327 adj[i] = -1;
5328 d++;
5329 }
5330 else
5331 adj[i] = i - d;
5332
5333 for (v = aggval; v; v = v->next)
5334 v->index = adj[v->index];
5335}
5336
8aab5218
MJ
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct func_body_info *fbi,
			 vec<ipa_param_descriptor> descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual void before_dom_children (basic_block);

private:
  /* Function body analysis info of the function being transformed.  */
  struct func_body_info *m_fbi;
  /* Descriptors of the function's formal parameters.  */
  vec<ipa_param_descriptor> m_descriptors;
  /* Chain of aggregate replacement values to substitute into loads.  */
  struct ipa_agg_replacement_value *m_aggval;
  /* Out-flags: a statement was modified / EH cleanup changed the CFG.  */
  bool *m_something_changed, *m_cfg_changed;
};
5357
5358void
5359ipcp_modif_dom_walker::before_dom_children (basic_block bb)
5360{
5361 gimple_stmt_iterator gsi;
5362 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
5363 {
5364 struct ipa_agg_replacement_value *v;
5365 gimple stmt = gsi_stmt (gsi);
5366 tree rhs, val, t;
5367 HOST_WIDE_INT offset, size;
5368 int index;
5369 bool by_ref, vce;
5370
5371 if (!gimple_assign_load_p (stmt))
5372 continue;
5373 rhs = gimple_assign_rhs1 (stmt);
5374 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
5375 continue;
2c9561b5 5376
8aab5218
MJ
5377 vce = false;
5378 t = rhs;
5379 while (handled_component_p (t))
5380 {
5381 /* V_C_E can do things like convert an array of integers to one
5382 bigger integer and similar things we do not handle below. */
5383 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
5384 {
5385 vce = true;
5386 break;
5387 }
5388 t = TREE_OPERAND (t, 0);
5389 }
5390 if (vce)
5391 continue;
5392
5393 if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
5394 &offset, &size, &by_ref))
5395 continue;
5396 for (v = m_aggval; v; v = v->next)
5397 if (v->index == index
5398 && v->offset == offset)
5399 break;
5400 if (!v
5401 || v->by_ref != by_ref
5402 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
5403 continue;
5404
5405 gcc_checking_assert (is_gimple_ip_invariant (v->value));
5406 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
5407 {
5408 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
5409 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
5410 else if (TYPE_SIZE (TREE_TYPE (rhs))
5411 == TYPE_SIZE (TREE_TYPE (v->value)))
5412 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
5413 else
5414 {
5415 if (dump_file)
5416 {
5417 fprintf (dump_file, " const ");
5418 print_generic_expr (dump_file, v->value, 0);
5419 fprintf (dump_file, " can't be converted to type of ");
5420 print_generic_expr (dump_file, rhs, 0);
5421 fprintf (dump_file, "\n");
5422 }
5423 continue;
5424 }
5425 }
5426 else
5427 val = v->value;
5428
5429 if (dump_file && (dump_flags & TDF_DETAILS))
5430 {
5431 fprintf (dump_file, "Modifying stmt:\n ");
5432 print_gimple_stmt (dump_file, stmt, 0, 0);
5433 }
5434 gimple_assign_set_rhs_from_tree (&gsi, val);
5435 update_stmt (stmt);
5436
5437 if (dump_file && (dump_flags & TDF_DETAILS))
5438 {
5439 fprintf (dump_file, "into:\n ");
5440 print_gimple_stmt (dump_file, stmt, 0, 0);
5441 fprintf (dump_file, "\n");
5442 }
5443
5444 *m_something_changed = true;
5445 if (maybe_clean_eh_stmt (stmt)
5446 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
5447 *m_cfg_changed = true;
5448 }
5449
5450}
5451
/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor> descriptors = vNULL;
  struct func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
	     node->name (), node->order);

  /* Nothing to do without replacement values or formal parameters.  */
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  /* Remap replacement indices to account for parameters the clone
     skipped.  */
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  /* Set up the function-body analysis state used by the walker's
     load-from-parameter queries.  */
  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  /* Release the analysis state and consume this node's replacements.  */
  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  (*ipa_node_agg_replacements)[node->uid] = NULL;
  descriptors.release ();

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}
This page took 4.627002 seconds and 5 git commands to generate.