/* Interprocedural analyses.
   Copyright (C) 2005-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "expr.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimplify.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "target.h"
#include "ipa-prop.h"
#include "bitmap.h"
#include "gimple-ssa.h"
#include "tree-cfg.h"
#include "tree-phinodes.h"
#include "ssa-iterators.h"
#include "tree-into-ssa.h"
#include "tree-dfa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "flags.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "params.h"
#include "ipa-utils.h"

/* Intermediate information about a parameter that is only useful during the
   run of ipa_analyze_node and is not kept afterwards.  */

struct param_analysis_info
{
  bool parm_modified, ref_modified, pt_modified;
  bitmap parm_visited_statements, pt_visited_statements;
};

/* Vector where the parameter infos are actually stored.  */
vec<ipa_node_params_t> ipa_node_params_vector;
/* Vector of known aggregate values in cloned nodes.  */
vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
/* Vector where the edge argument infos are actually stored.  */
vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_node_hook_list *node_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_2node_hook_list *node_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static alloc_pool ipa_refdesc_pool;

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);
  struct cl_optimization *os;

  if (!fs_opts)
    return false;
  os = TREE_OPTIMIZATION (fs_opts);
  return !os->x_optimize || !os->x_flag_ipa_cp;
}

/* Return index of the formal whose tree is PTREE in the function described by
   DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl fields in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
                          vec<ipa_param_descriptor_t> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

static inline int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the Ith formal parameter of the function corresponding to INFO to FILE.
   Note there is no setter function as this array is built just once
   using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
        fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_KNOWN_TYPE)
        {
          fprintf (f, "KNOWN TYPE: base ");
          print_generic_expr (f, jump_func->value.known_type.base_type, 0);
          fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
                   jump_func->value.known_type.offset);
          print_generic_expr (f, jump_func->value.known_type.component_type, 0);
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_CONST)
        {
          tree val = jump_func->value.constant.value;
          fprintf (f, "CONST: ");
          print_generic_expr (f, val, 0);
          if (TREE_CODE (val) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
            {
              fprintf (f, " -> ");
              print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
                                  0);
            }
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_PASS_THROUGH)
        {
          fprintf (f, "PASS THROUGH: ");
          fprintf (f, "%d, op %s",
                   jump_func->value.pass_through.formal_id,
                   get_tree_code_name (jump_func->value.pass_through.operation));
          if (jump_func->value.pass_through.operation != NOP_EXPR)
            {
              fprintf (f, " ");
              print_generic_expr (f,
                                  jump_func->value.pass_through.operand, 0);
            }
          if (jump_func->value.pass_through.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.pass_through.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }
      else if (type == IPA_JF_ANCESTOR)
        {
          fprintf (f, "ANCESTOR: ");
          fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
                   jump_func->value.ancestor.formal_id,
                   jump_func->value.ancestor.offset);
          print_generic_expr (f, jump_func->value.ancestor.type, 0);
          if (jump_func->value.ancestor.agg_preserved)
            fprintf (f, ", agg_preserved");
          if (jump_func->value.ancestor.type_preserved)
            fprintf (f, ", type_preserved");
          fprintf (f, "\n");
        }

      if (jump_func->agg.items)
        {
          struct ipa_agg_jf_item *item;
          int j;

          fprintf (f, "         Aggregate passed by %s:\n",
                   jump_func->agg.by_ref ? "reference" : "value");
          FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
            {
              fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
                       item->offset);
              if (TYPE_P (item->value))
                fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
                         tree_to_uhwi (TYPE_SIZE (item->value)));
              else
                {
                  fprintf (f, "cst: ");
                  print_generic_expr (f, item->value, 0);
                }
              fprintf (f, "\n");
            }
        }
    }
}


/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
           node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
               xstrdup (node->name ()), node->order,
               xstrdup (cs->callee->name ()),
               cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
        continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
        fprintf (f, "    indirect %s callsite, calling param %i, "
                 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
                 ii->member_ptr ? "member ptr" : "aggregate",
                 ii->param_index, ii->offset,
                 ii->by_ref ? "by reference" : "by_value");
      else
        fprintf (f, "    indirect %s callsite, calling param %i",
                 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);

      if (cs->call_stmt)
        {
          fprintf (f, ", for stmt ");
          print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
        }
      else
        fprintf (f, "\n");
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

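/* The setters that follow initialize the individual kinds of jump functions
   used in this file: IPA_JF_KNOWN_TYPE, IPA_JF_CONST, IPA_JF_PASS_THROUGH
   (simple or arithmetic) and IPA_JF_ANCESTOR.  The corresponding data
   structures are declared in ipa-prop.h.  */
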
/* Set JFUNC to be a known type jump function.  */

static void
ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                       tree base_type, tree component_type)
{
  gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
              && TYPE_BINFO (component_type));
  jfunc->type = IPA_JF_KNOWN_TYPE;
  jfunc->value.known_type.offset = offset;
  jfunc->value.known_type.base_type = base_type;
  jfunc->value.known_type.component_type = component_type;
  gcc_assert (component_type);
}

/* Set JFUNC to be a copy of another jump function (to be used by jump function
   combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
                     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
                     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;
      if (!ipa_refdesc_pool)
        ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
                                              sizeof (struct ipa_cst_ref_desc), 32);

      rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}

/* Set JFUNC to be a simple pass-through jump function.  */
static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                                bool agg_preserved, bool type_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
  jfunc->value.pass_through.type_preserved = type_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
                               tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
  jfunc->value.pass_through.type_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
                     tree type, int formal_id, bool agg_preserved,
                     bool type_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.type = type;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
  jfunc->value.ancestor.type_preserved = type_preserved;
}

/* Extract the actual BINFO being described by JFUNC which must be a known type
   jump function.  */

tree
ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
{
  tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
  if (!base_binfo)
    return NULL_TREE;
  return get_binfo_at_offset (base_binfo,
                              jfunc->value.known_type.offset,
                              jfunc->value.known_type.component_type);
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* If we actually can tell the type that the object has changed to, it is
     stored in this field.  Otherwise it remains NULL_TREE.  */
  tree known_current_type;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
  /* Set to true if multiple types have been encountered.  known_current_type
     must be disregarded in that case.  */
  bool multiple_types_encountered;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type associated with the constructor.

   3) Only afterwards, other stuff such as constructors of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
        {
          if (flag_strict_aliasing
              && !POINTER_TYPE_P (TREE_TYPE (lhs)))
            return false;

          if (TREE_CODE (lhs) == COMPONENT_REF
              && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
            return false;
          /* In the future we might want to use get_base_ref_and_offset to find
             if there is a field corresponding to the offset and if so, proceed
             almost like if it was a component ref.  */
        }
    }
  return true;
}

/* If STMT can be proved to be an assignment to the virtual method table
   pointer of the object described by TCI, and the type associated with the new
   table can be identified, return that type.  Otherwise return NULL_TREE.  */
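/* As a hypothetical illustration (the exact GIMPLE spelling varies with the
   target and front end), the kind of statement being matched looks like

     this_2(D)->_vptr.A = &_ZTV1B[2];

   i.e. a store of the address of a virtual table variable into a field that
   has DECL_VIRTUAL_P set.  */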

static tree
extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
{
  HOST_WIDE_INT offset, size, max_size;
  tree lhs, rhs, base;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  lhs = gimple_assign_lhs (stmt);
  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (lhs) != COMPONENT_REF
      || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
      || TREE_CODE (rhs) != ADDR_EXPR)
    return NULL_TREE;
  rhs = get_base_address (TREE_OPERAND (rhs, 0));
  if (!rhs
      || TREE_CODE (rhs) != VAR_DECL
      || !DECL_VIRTUAL_P (rhs))
    return NULL_TREE;

  base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
  if (offset != tci->offset
      || size != POINTER_SIZE
      || max_size != POINTER_SIZE)
    return NULL_TREE;
  if (TREE_CODE (base) == MEM_REF)
    {
      if (TREE_CODE (tci->object) != MEM_REF
          || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
          || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
                                  TREE_OPERAND (base, 1)))
        return NULL_TREE;
    }
  else if (tci->object != base)
    return NULL_TREE;

  return DECL_CONTEXT (rhs);
}

/* Callback of walk_aliased_vdefs and a helper function for
   detect_type_change to check whether a particular statement may modify
   the virtual table pointer, and if possible also determine the new type of
   the (sub-)object.  It stores its result into DATA, which points to a
   type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct type_change_info *tci = (struct type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tree type;
      type = extr_type_from_vtbl_ptr_store (stmt, tci);
      if (tci->type_maybe_changed
          && type != tci->known_current_type)
        tci->multiple_types_encountered = true;
      tci->known_current_type = type;
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}


/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it has, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gimple call,
                    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  struct type_change_info tci;
  ao_ref ao;

  gcc_checking_assert (DECL_P (arg)
                       || TREE_CODE (arg) == MEM_REF
                       || handled_component_p (arg));
  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (comp_type)
      || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
    return false;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.known_current_type = NULL_TREE;
  tci.type_maybe_changed = false;
  tci.multiple_types_encountered = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
                      &tci, NULL);
  if (!tci.type_maybe_changed)
    return false;

  if (!tci.known_current_type
      || tci.multiple_types_encountered
      || offset != 0)
    jfunc->type = IPA_JF_UNKNOWN;
  else
    ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);

  return true;
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
                        gimple call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
                build_int_cst (ptr_type_node, 0));

  return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
               void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  PARM_AINFO is a pointer to a structure containing temporary
   information about the parameter.  */

static bool
parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
                              gimple stmt, tree parm_load)
{
  bool modified = false;
  bitmap *visited_stmts;
  ao_ref refd;

  if (parm_ainfo && parm_ainfo->parm_modified)
    return false;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  /* We can cache visited statements only when parm_ainfo is available and when
     we are looking at a naked load of the whole parameter.  */
  if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
    visited_stmts = NULL;
  else
    visited_stmts = &parm_ainfo->parm_visited_statements;
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      visited_stmts);
  if (parm_ainfo && modified)
    parm_ainfo->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration
   that has not been modified, return the index of the parameter in
   ipa_node_params.  Otherwise return -1.  */

static int
load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
                            struct param_analysis_info *parms_ainfo,
                            gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                        : NULL, stmt, op1))
    return -1;

  return index;
}

/* Return true if memory reference REF loads data that are known to be
   unmodified in this function before reaching statement STMT.  PARM_AINFO, if
   non-NULL, is a pointer to a structure containing temporary information about
   PARM.  */

static bool
parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
                           gimple stmt, tree ref)
{
  bool modified = false;
  ao_ref refd;

  gcc_checking_assert (gimple_vuse (stmt));
  if (parm_ainfo && parm_ainfo->ref_modified)
    return false;

  ao_ref_init (&refd, ref);
  walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
                      NULL);
  if (parm_ainfo && modified)
    parm_ainfo->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM is known to be unmodified in this
   function before reaching call statement CALL into which it is passed.
   PARM_AINFO is a pointer to a structure containing temporary information
   about PARM.  */

static bool
parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
                              gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm)))
    return false;

  if (parm_ainfo->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
                      parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
  if (modified)
    parm_ainfo->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If the function returns
   true, *INDEX_P, *OFFSET_P and *BY_REF_P are filled with the parameter index,
   offset within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
                          struct param_analysis_info *parms_ainfo, gimple stmt,
                          tree op, int *index_p, HOST_WIDE_INT *offset_p,
                          HOST_WIDE_INT *size_p, bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
          && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
                                           : NULL, stmt, op))
        {
          *index_p = index;
          *by_ref_p = false;
          if (size_p)
            *size_p = size;
          return true;
        }
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
         gimple register, for example:

         void hip7(S*) (struct S * p)
         {
           void (*<T2e4>) (struct S *) D.1867;
           struct S * p.1;

           <bb 2>:
           p.1_1 = p;
           D.1867_2 = p.1_1->f;
           D.1867_2 ();
           gdp = &p;
         */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (descriptors, parms_ainfo, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
                                    stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
        *size_p = size;
      return true;
    }
  return false;
}

/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
                        tree op, int *index_p, HOST_WIDE_INT *offset_p,
                        bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
                                   offset_p, NULL, by_ref_p);
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
        int a.0;

        a.0_2 = a;
        bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
        int D.2064;

        D.2064_4 = a.1(D) + 4;
        bar (D.2064_4);

   This case can also occur in combination with the previous one, e.g.:

      foo (int a, int z)
      {
        int a.0;
        int D.2064;

        a.0_3 = a;
        D.2064_4 = a.0_3 + 4;
        foo (D.2064_4);

   3) The passed value is an address of an object within another one (which is
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       struct A * D.1845;

       D.1845_2 = &this_1(D)->D.1748;
       A::bar (D.1845_2);

   INFO is the structure describing individual parameters and is used across
   different stages of IPA optimizations.  PARMS_AINFO contains the information
   that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct ipa_node_params *info,
                                  struct param_analysis_info *parms_ainfo,
                                  struct ipa_jump_func *jfunc,
                                  gimple call, gimple stmt, tree name,
                                  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
        index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
        index = load_from_unmodified_param (info->descriptors, parms_ainfo,
                                            SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
        {
          if (!is_gimple_ip_invariant (op2)
              || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
                  && !useless_type_conversion_p (TREE_TYPE (name),
                                                 TREE_TYPE (op1))))
            return;

          ipa_set_jf_arith_pass_through (jfunc, index, op2,
                                         gimple_assign_rhs_code (stmt));
        }
      else if (gimple_assign_single_p (stmt))
        {
          bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                     call, tc_ssa);
          bool type_p = false;

          if (param_type && POINTER_TYPE_P (param_type))
            type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
                                              call, jfunc);
          if (type_p || jfunc->type == IPA_JF_UNKNOWN)
            ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
        }
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).low * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    {
      bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
                                         call, jfunc, offset);
      if (type_p || jfunc->type == IPA_JF_UNKNOWN)
        ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
                             parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                           call, ssa), type_p);
    }
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}


/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

   <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

   <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct ipa_node_params *info,
                                    struct param_analysis_info *parms_ainfo,
                                    struct ipa_jump_func *jfunc,
                                    gimple call, gimple phi, tree param_type)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  gcc_assert (index >= 0);

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
        return;
    }

  bool type_p = false;
  if (param_type && POINTER_TYPE_P (param_type))
    type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
                                  call, jfunc, offset);
  if (type_p || jfunc->type == IPA_JF_UNKNOWN)
    ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
                         parm_ref_data_pass_through_p (&parms_ainfo[index],
                                                       call, parm), type_p);
}

/* Given OP which is passed as an actual argument to a called function,
   determine if it is possible to construct a KNOWN_TYPE jump function for it
   and if so, create one and store it to JFUNC.
   EXPECTED_TYPE represents a type the argument should be in.  */

static void
compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
                              gimple call, tree expected_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree base;

  if (!flag_devirtualize
      || TREE_CODE (op) != ADDR_EXPR
      || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
      /* Be sure expected_type is polymorphic.  */
      || !expected_type
      || TREE_CODE (expected_type) != RECORD_TYPE
      || !TYPE_BINFO (expected_type)
      || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
    return;

  op = TREE_OPERAND (op, 0);
  base = get_ref_base_and_extent (op, &offset, &size, &max_size);
  if (!DECL_P (base)
      || max_size == -1
      || max_size != size
      || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
      || is_global_var (base))
    return;

  if (detect_type_change (op, base, expected_type, call, jfunc, offset))
    return;

  ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
                         expected_type);
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */
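/* For reference (a rough illustration only; the exact layout is defined by
   the C++ ABI in use), a pointer to member function is lowered into a record
   along the lines of

     struct
     {
       void (*__pfn) ();	function pointer, or a vtable offset
       ptrdiff_t __delta;	adjustment applied to the this pointer
     };

   which is what the field checks below look for.  */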

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static inline tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
        rhs = gimple_assign_rhs1 (def_stmt);
      else
        break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  JFUNC is the jump function into
   which the constants are subsequently stored.  */
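/* As a hypothetical illustration, for a call such as

     s.f1 = 1;
     s.f2 = 2;
     foo (&s);

   the list built below would record the offsets of f1 and f2 together with
   the constants 1 and 2, provided nothing clobbers "s" in between.  */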

static void
determine_known_aggregate_parts (gimple call, tree arg,
                                 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (TREE_TYPE (arg)))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
        {
          tree type_size;
          if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)))))
            return;
          check_ref = true;
          arg_base = arg;
          arg_offset = 0;
          type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
          arg_size = tree_to_uhwi (type_size);
          ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
        }
      else if (TREE_CODE (arg) == ADDR_EXPR)
        {
          HOST_WIDE_INT arg_max_size;

          arg = TREE_OPERAND (arg, 0);
          arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                              &arg_max_size);
          if (arg_max_size == -1
              || arg_max_size != arg_size
              || arg_offset < 0)
            return;
          if (DECL_P (arg_base))
            {
              tree size;
              check_ref = false;
              size = build_int_cst (integer_type_node, arg_size);
              ao_ref_init_from_ptr_and_size (&r, arg_base, size);
            }
          else
            return;
        }
      else
        return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
                                          &arg_max_size);
      if (arg_max_size == -1
          || arg_max_size != arg_size
          || arg_offset < 0)
        return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;
      bool partial_overlap;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
        continue;
      if (!gimple_assign_single_p (stmt))
        break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (rhs)
          || TREE_CODE (lhs) == BIT_FIELD_REF
          || contains_bitfld_component_ref_p (lhs))
        break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
                                          &lhs_max_size);
      if (lhs_max_size == -1
          || lhs_max_size != lhs_size
          || (lhs_offset < arg_offset
              && lhs_offset + lhs_size > arg_offset)
          || (lhs_offset < arg_offset + arg_size
              && lhs_offset + lhs_size > arg_offset + arg_size))
        break;

      if (check_ref)
        {
          if (TREE_CODE (lhs_base) != MEM_REF
              || TREE_OPERAND (lhs_base, 0) != arg_base
              || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
            break;
        }
      else if (lhs_base != arg_base)
        {
          if (DECL_P (lhs_base))
            continue;
          else
            break;
        }

      if (lhs_offset + lhs_size < arg_offset
          || lhs_offset >= (arg_offset + arg_size))
        continue;

      partial_overlap = false;
      p = &list;
      while (*p && (*p)->offset < lhs_offset)
        {
          if ((*p)->offset + (*p)->size > lhs_offset)
            {
              partial_overlap = true;
              break;
            }
          p = &(*p)->next;
        }
      if (partial_overlap)
        break;
      if (*p && (*p)->offset < lhs_offset + lhs_size)
        {
          if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
            /* We already know this value is subsequently overwritten with
               something else.  */
            continue;
          else
            /* Otherwise this is a partial overlap which we cannot
               represent.  */
            break;
        }

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
        {
          n->constant = rhs;
          const_count++;
        }
      else
        n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
          || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
        break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      vec_alloc (jfunc->agg.items, const_count);
      while (list)
        {
          if (list->constant)
            {
              struct ipa_agg_jf_item item;
              item.offset = list->offset - arg_offset;
              gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
              item.value = unshare_expr_without_location (list->constant);
              jfunc->agg.items->quick_push (item);
            }
          list = list->next;
        }
    }
}

/* Return the type of the Ith formal parameter of the function called by edge
   E, or NULL if it cannot be determined.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
               ? TREE_TYPE (e->callee->decl)
               : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
        break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
        return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}

3e293154
MJ
1560/* Compute jump function for all arguments of callsite CS and insert the
1561 information in the jump_functions array in the ipa_edge_args corresponding
1562 to this callsite. */
be95e2b9 1563
749aa96d 1564static void
c419671c 1565ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
062c604f 1566 struct cgraph_edge *cs)
3e293154
MJ
1567{
1568 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
606d9a09
MJ
1569 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1570 gimple call = cs->call_stmt;
8b7773a4 1571 int n, arg_num = gimple_call_num_args (call);
3e293154 1572
606d9a09 1573 if (arg_num == 0 || args->jump_functions)
3e293154 1574 return;
9771b263 1575 vec_safe_grow_cleared (args->jump_functions, arg_num);
3e293154 1576
96e24d49
JJ
1577 if (gimple_call_internal_p (call))
1578 return;
5fe8e757
MJ
1579 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1580 return;
1581
8b7773a4
MJ
1582 for (n = 0; n < arg_num; n++)
1583 {
1584 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1585 tree arg = gimple_call_arg (call, n);
06d65050 1586 tree param_type = ipa_get_callee_param_type (cs, n);
3e293154 1587
8b7773a4 1588 if (is_gimple_ip_invariant (arg))
4502fe8d 1589 ipa_set_jf_constant (jfunc, arg, cs);
8b7773a4
MJ
1590 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1591 && TREE_CODE (arg) == PARM_DECL)
1592 {
1593 int index = ipa_get_param_decl_index (info, arg);
1594
1595 gcc_assert (index >=0);
1596 /* Aggregate passed by value, check for pass-through, otherwise we
1597 will attempt to fill in aggregate contents later in this
1598 for cycle. */
1599 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1600 {
b8f6e610 1601 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
8b7773a4
MJ
1602 continue;
1603 }
1604 }
1605 else if (TREE_CODE (arg) == SSA_NAME)
1606 {
1607 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1608 {
1609 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
b8f6e610 1610 if (index >= 0)
8b7773a4 1611 {
b8f6e610 1612 bool agg_p, type_p;
8b7773a4
MJ
1613 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1614 call, arg);
06d65050
JH
1615 if (param_type && POINTER_TYPE_P (param_type))
1616 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1617 call, jfunc);
1618 else
1619 type_p = false;
b8f6e610 1620 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
06d65050
JH
1621 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1622 type_p);
8b7773a4
MJ
1623 }
1624 }
1625 else
1626 {
1627 gimple stmt = SSA_NAME_DEF_STMT (arg);
1628 if (is_gimple_assign (stmt))
1629 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
06d65050 1630 call, stmt, arg, param_type);
8b7773a4
MJ
1631 else if (gimple_code (stmt) == GIMPLE_PHI)
1632 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
06d65050 1633 call, stmt, param_type);
8b7773a4
MJ
1634 }
1635 }
1636 else
06d65050
JH
1637 compute_known_type_jump_func (arg, jfunc, call,
1638 param_type
1639 && POINTER_TYPE_P (param_type)
1640 ? TREE_TYPE (param_type)
1641 : NULL);
3e293154 1642
8b7773a4
MJ
1643 if ((jfunc->type != IPA_JF_PASS_THROUGH
1644 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1645 && (jfunc->type != IPA_JF_ANCESTOR
1646 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1647 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1648 || (POINTER_TYPE_P (TREE_TYPE (arg)))))
1649 determine_known_aggregate_parts (call, arg, jfunc);
1650 }
3e293154
MJ
1651}
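/* Illustrative sketch (hypothetical example, not taken from GCC sources or
   any testcase): for a caller such as

     struct S { int x; };
     extern int callee (int i, struct S s, struct S *p);

     static int
     caller (int a, struct S s, struct S *p)
     {
       return callee (7, s, p);
     }

   the constant 7 is a gimple IP invariant and receives an IPA_JF_CONST jump
   function.  The by-value aggregate S is a PARM_DECL of non-register type;
   if it is preserved before the call it becomes a simple pass-through,
   otherwise only its known parts are described by
   determine_known_aggregate_parts.  The pointer P is the SSA default
   definition of a parameter and typically becomes a simple pass-through,
   with agg_preserved computed by parm_ref_data_pass_through_p.  */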
1652
749aa96d
MJ
1653/* Compute jump functions for all edges - both direct and indirect - outgoing
1654 from NODE. Also count the actual arguments in the process. */
1655
062c604f
MJ
1656static void
1657ipa_compute_jump_functions (struct cgraph_node *node,
c419671c 1658 struct param_analysis_info *parms_ainfo)
749aa96d
MJ
1659{
1660 struct cgraph_edge *cs;
1661
1662 for (cs = node->callees; cs; cs = cs->next_callee)
1663 {
d7da5cc8
MJ
1664 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1665 NULL);
749aa96d
MJ
1666 /* We do not need to bother analyzing calls to unknown
1667 functions unless they may become known during lto/whopr. */
67348ccc 1668 if (!callee->definition && !flag_lto)
749aa96d 1669 continue;
c419671c 1670 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
749aa96d
MJ
1671 }
1672
1673 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
c419671c 1674 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
749aa96d
MJ
1675}
1676
8b7773a4
MJ
1677/* If STMT looks like a statement loading a value from a member pointer formal
1678 parameter, return that parameter and store the offset of the field to
1679 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1680 might be clobbered). If USE_DELTA, then we look for a use of the delta
1681 field rather than the pfn. */
be95e2b9 1682
3e293154 1683static tree
8b7773a4
MJ
1684ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1685 HOST_WIDE_INT *offset_p)
3e293154 1686{
8b7773a4
MJ
1687 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1688
1689 if (!gimple_assign_single_p (stmt))
1690 return NULL_TREE;
3e293154 1691
8b7773a4 1692 rhs = gimple_assign_rhs1 (stmt);
ae788515
EB
1693 if (TREE_CODE (rhs) == COMPONENT_REF)
1694 {
1695 ref_field = TREE_OPERAND (rhs, 1);
1696 rhs = TREE_OPERAND (rhs, 0);
1697 }
1698 else
1699 ref_field = NULL_TREE;
d242d063 1700 if (TREE_CODE (rhs) != MEM_REF)
3e293154 1701 return NULL_TREE;
3e293154 1702 rec = TREE_OPERAND (rhs, 0);
d242d063
MJ
1703 if (TREE_CODE (rec) != ADDR_EXPR)
1704 return NULL_TREE;
1705 rec = TREE_OPERAND (rec, 0);
3e293154 1706 if (TREE_CODE (rec) != PARM_DECL
6f7b8b70 1707 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
3e293154 1708 return NULL_TREE;
d242d063 1709 ref_offset = TREE_OPERAND (rhs, 1);
ae788515 1710
8b7773a4
MJ
1711 if (use_delta)
1712 fld = delta_field;
1713 else
1714 fld = ptr_field;
1715 if (offset_p)
1716 *offset_p = int_bit_position (fld);
1717
ae788515
EB
1718 if (ref_field)
1719 {
1720 if (integer_nonzerop (ref_offset))
1721 return NULL_TREE;
ae788515
EB
1722 return ref_field == fld ? rec : NULL_TREE;
1723 }
3e293154 1724 else
8b7773a4
MJ
1725 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1726 : NULL_TREE;
3e293154
MJ
1727}
1728
1729/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 1730
3e293154
MJ
1731static bool
1732ipa_is_ssa_with_stmt_def (tree t)
1733{
1734 if (TREE_CODE (t) == SSA_NAME
1735 && !SSA_NAME_IS_DEFAULT_DEF (t))
1736 return true;
1737 else
1738 return false;
1739}
1740
40591473
MJ
1741/* Find the indirect call graph edge corresponding to STMT and mark it as a
1742 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1743 indirect call graph edge. */
be95e2b9 1744
40591473
MJ
1745static struct cgraph_edge *
1746ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
3e293154 1747{
e33c6cd6 1748 struct cgraph_edge *cs;
3e293154 1749
5f902d76 1750 cs = cgraph_edge (node, stmt);
b258210c 1751 cs->indirect_info->param_index = param_index;
8b7773a4 1752 cs->indirect_info->agg_contents = 0;
c13bc3d9 1753 cs->indirect_info->member_ptr = 0;
40591473 1754 return cs;
3e293154
MJ
1755}
1756
e33c6cd6 1757/* Analyze the CALL and examine uses of formal parameters of the caller NODE
c419671c 1758 (described by INFO). PARMS_AINFO is a pointer to a vector containing
062c604f
MJ
1759 intermediate information about each formal parameter. Currently it checks
1760 whether the call calls a pointer that is a formal parameter and if so, the
1761 parameter is marked with the called flag and an indirect call graph edge
1762 describing the call is created. This is very simple for ordinary pointers
1763 represented in SSA but not-so-nice when it comes to member pointers. The
1764 ugly part of this function does nothing more than trying to match the
1765 pattern of such a call. An example of such a pattern is the gimple dump
1766 below; the call is on the last line:
3e293154 1767
ae788515
EB
1768 <bb 2>:
1769 f$__delta_5 = f.__delta;
1770 f$__pfn_24 = f.__pfn;
1771
1772 or
3e293154 1773 <bb 2>:
d242d063
MJ
1774 f$__delta_5 = MEM[(struct *)&f];
1775 f$__pfn_24 = MEM[(struct *)&f + 4B];
8aa29647 1776
ae788515 1777 and a few lines below:
8aa29647
MJ
1778
1779 <bb 5>
3e293154
MJ
1780 D.2496_3 = (int) f$__pfn_24;
1781 D.2497_4 = D.2496_3 & 1;
1782 if (D.2497_4 != 0)
1783 goto <bb 3>;
1784 else
1785 goto <bb 4>;
1786
8aa29647 1787 <bb 6>:
3e293154
MJ
1788 D.2500_7 = (unsigned int) f$__delta_5;
1789 D.2501_8 = &S + D.2500_7;
1790 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1791 D.2503_10 = *D.2502_9;
1792 D.2504_12 = f$__pfn_24 + -1;
1793 D.2505_13 = (unsigned int) D.2504_12;
1794 D.2506_14 = D.2503_10 + D.2505_13;
1795 D.2507_15 = *D.2506_14;
1796 iftmp.11_16 = (String:: *) D.2507_15;
1797
8aa29647 1798 <bb 7>:
3e293154
MJ
1799 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1800 D.2500_19 = (unsigned int) f$__delta_5;
1801 D.2508_20 = &S + D.2500_19;
1802 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1803
1804 Such patterns are results of simple calls to a member pointer:
1805
1806 int doprinting (int (MyString::* f)(int) const)
1807 {
1808 MyString S ("somestring");
1809
1810 return (S.*f)(4);
1811 }
8b7773a4
MJ
1812
1813 Moreover, the function also looks for called pointers loaded from aggregates
1814 passed by value or reference. */
3e293154
MJ
1815
1816static void
b258210c
MJ
1817ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1818 struct ipa_node_params *info,
c419671c 1819 struct param_analysis_info *parms_ainfo,
b258210c 1820 gimple call, tree target)
3e293154 1821{
726a989a 1822 gimple def;
3e293154 1823 tree n1, n2;
726a989a
RB
1824 gimple d1, d2;
1825 tree rec, rec2, cond;
1826 gimple branch;
3e293154 1827 int index;
3e293154 1828 basic_block bb, virt_bb, join;
8b7773a4
MJ
1829 HOST_WIDE_INT offset;
1830 bool by_ref;
3e293154 1831
3e293154
MJ
1832 if (SSA_NAME_IS_DEFAULT_DEF (target))
1833 {
b258210c 1834 tree var = SSA_NAME_VAR (target);
3e293154
MJ
1835 index = ipa_get_param_decl_index (info, var);
1836 if (index >= 0)
40591473 1837 ipa_note_param_call (node, index, call);
3e293154
MJ
1838 return;
1839 }
1840
8b7773a4
MJ
1841 def = SSA_NAME_DEF_STMT (target);
1842 if (gimple_assign_single_p (def)
d044dd17 1843 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
8b7773a4 1844 gimple_assign_rhs1 (def), &index, &offset,
3ff2ca23 1845 NULL, &by_ref))
8b7773a4
MJ
1846 {
1847 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
68377e53
JH
1848 if (cs->indirect_info->offset != offset)
1849 cs->indirect_info->outer_type = NULL;
8b7773a4
MJ
1850 cs->indirect_info->offset = offset;
1851 cs->indirect_info->agg_contents = 1;
1852 cs->indirect_info->by_ref = by_ref;
1853 return;
1854 }
1855
3e293154
MJ
1856 /* Now we need to try to match the complex pattern of calling a member
1857 pointer. */
8b7773a4
MJ
1858 if (gimple_code (def) != GIMPLE_PHI
1859 || gimple_phi_num_args (def) != 2
1860 || !POINTER_TYPE_P (TREE_TYPE (target))
3e293154
MJ
1861 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1862 return;
1863
3e293154
MJ
1864 /* First, we need to check whether one of these is a load from a member
1865 pointer that is a parameter to this function. */
1866 n1 = PHI_ARG_DEF (def, 0);
1867 n2 = PHI_ARG_DEF (def, 1);
1fc8feb5 1868 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
3e293154
MJ
1869 return;
1870 d1 = SSA_NAME_DEF_STMT (n1);
1871 d2 = SSA_NAME_DEF_STMT (n2);
1872
8aa29647 1873 join = gimple_bb (def);
8b7773a4 1874 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
3e293154 1875 {
8b7773a4 1876 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
3e293154
MJ
1877 return;
1878
8aa29647 1879 bb = EDGE_PRED (join, 0)->src;
726a989a 1880 virt_bb = gimple_bb (d2);
3e293154 1881 }
8b7773a4 1882 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
3e293154 1883 {
8aa29647 1884 bb = EDGE_PRED (join, 1)->src;
726a989a 1885 virt_bb = gimple_bb (d1);
3e293154
MJ
1886 }
1887 else
1888 return;
1889
1890 /* Second, we need to check that the basic blocks are laid out in the way
1891 corresponding to the pattern. */
1892
3e293154
MJ
1893 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1894 || single_pred (virt_bb) != bb
1895 || single_succ (virt_bb) != join)
1896 return;
1897
1898 /* Third, let's see that the branching is done depending on the least
1899 significant bit of the pfn. */
1900
1901 branch = last_stmt (bb);
8aa29647 1902 if (!branch || gimple_code (branch) != GIMPLE_COND)
3e293154
MJ
1903 return;
1904
12430896
RG
1905 if ((gimple_cond_code (branch) != NE_EXPR
1906 && gimple_cond_code (branch) != EQ_EXPR)
726a989a 1907 || !integer_zerop (gimple_cond_rhs (branch)))
3e293154 1908 return;
3e293154 1909
726a989a 1910 cond = gimple_cond_lhs (branch);
3e293154
MJ
1911 if (!ipa_is_ssa_with_stmt_def (cond))
1912 return;
1913
726a989a 1914 def = SSA_NAME_DEF_STMT (cond);
8b75fc9b 1915 if (!is_gimple_assign (def)
726a989a
RB
1916 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1917 || !integer_onep (gimple_assign_rhs2 (def)))
3e293154 1918 return;
726a989a
RB
1919
1920 cond = gimple_assign_rhs1 (def);
3e293154
MJ
1921 if (!ipa_is_ssa_with_stmt_def (cond))
1922 return;
1923
726a989a 1924 def = SSA_NAME_DEF_STMT (cond);
3e293154 1925
8b75fc9b
MJ
1926 if (is_gimple_assign (def)
1927 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3e293154 1928 {
726a989a 1929 cond = gimple_assign_rhs1 (def);
3e293154
MJ
1930 if (!ipa_is_ssa_with_stmt_def (cond))
1931 return;
726a989a 1932 def = SSA_NAME_DEF_STMT (cond);
3e293154
MJ
1933 }
1934
6f7b8b70
RE
1935 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1936 (TARGET_PTRMEMFUNC_VBIT_LOCATION
8b7773a4
MJ
1937 == ptrmemfunc_vbit_in_delta),
1938 NULL);
3e293154
MJ
1939 if (rec != rec2)
1940 return;
1941
1942 index = ipa_get_param_decl_index (info, rec);
8b7773a4
MJ
1943 if (index >= 0
1944 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1945 {
1946 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
68377e53
JH
1947 if (cs->indirect_info->offset != offset)
1948 cs->indirect_info->outer_type = NULL;
8b7773a4
MJ
1949 cs->indirect_info->offset = offset;
1950 cs->indirect_info->agg_contents = 1;
c13bc3d9 1951 cs->indirect_info->member_ptr = 1;
8b7773a4 1952 }
3e293154
MJ
1953
1954 return;
1955}
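/* Illustrative sketch of the two simpler cases handled above (hypothetical
   code, not from any testcase):

     struct S { void (*hook) (int); };

     static void
     call_param_directly (void (*fn) (int))
     {
       fn (1);
     }

     static void
     call_from_aggregate (struct S *s)
     {
       s->hook (2);
     }

   In the first function the called SSA name is the default definition of the
   formal parameter FN, so an indirect edge with param_index 0 is noted.  In
   the second, the callee is loaded from an aggregate pointed to by a
   parameter, so agg_contents and by_ref are set together with the offset of
   the HOOK field.  */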
1956
b258210c
MJ
1957/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1958 object referenced in the expression is a formal parameter of the caller
1959 (described by INFO), create a call note for the statement. */
1960
1961static void
1962ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1963 struct ipa_node_params *info, gimple call,
1964 tree target)
1965{
40591473
MJ
1966 struct cgraph_edge *cs;
1967 struct cgraph_indirect_call_info *ii;
f65cf2b7 1968 struct ipa_jump_func jfunc;
b258210c 1969 tree obj = OBJ_TYPE_REF_OBJECT (target);
b258210c 1970 int index;
40591473 1971 HOST_WIDE_INT anc_offset;
b258210c 1972
05842ff5
MJ
1973 if (!flag_devirtualize)
1974 return;
1975
40591473 1976 if (TREE_CODE (obj) != SSA_NAME)
b258210c
MJ
1977 return;
1978
40591473
MJ
1979 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1980 {
1981 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1982 return;
b258210c 1983
40591473
MJ
1984 anc_offset = 0;
1985 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1986 gcc_assert (index >= 0);
06d65050
JH
1987 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
1988 call, &jfunc))
40591473
MJ
1989 return;
1990 }
1991 else
1992 {
1993 gimple stmt = SSA_NAME_DEF_STMT (obj);
1994 tree expr;
1995
1996 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1997 if (!expr)
1998 return;
1999 index = ipa_get_param_decl_index (info,
2000 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
2001 gcc_assert (index >= 0);
06d65050
JH
2002 if (detect_type_change (obj, expr, obj_type_ref_class (target),
2003 call, &jfunc, anc_offset))
40591473
MJ
2004 return;
2005 }
2006
2007 cs = ipa_note_param_call (node, index, call);
2008 ii = cs->indirect_info;
8b7773a4 2009 ii->offset = anc_offset;
ae7e9ddd 2010 ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
c49bdb2e 2011 ii->otr_type = obj_type_ref_class (target);
40591473 2012 ii->polymorphic = 1;
b258210c
MJ
2013}
2014
2015/* Analyze a call statement CALL whether and how it utilizes formal parameters
c419671c 2016 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
062c604f 2017 containing intermediate information about each formal parameter. */
b258210c
MJ
2018
2019static void
2020ipa_analyze_call_uses (struct cgraph_node *node,
062c604f 2021 struct ipa_node_params *info,
c419671c 2022 struct param_analysis_info *parms_ainfo, gimple call)
b258210c
MJ
2023{
2024 tree target = gimple_call_fn (call);
2025
25583c4f
RS
2026 if (!target)
2027 return;
b258210c 2028 if (TREE_CODE (target) == SSA_NAME)
c419671c 2029 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1d5755ef 2030 else if (virtual_method_call_p (target))
b258210c
MJ
2031 ipa_analyze_virtual_call_uses (node, info, call, target);
2032}
2033
2034
e33c6cd6
MJ
2035/* Analyze the call statement STMT with respect to formal parameters (described
2036 in INFO) of caller given by NODE. Currently it only checks whether formal
c419671c 2037 parameters are called. PARMS_AINFO is a pointer to a vector containing
062c604f 2038 intermediate information about each formal parameter. */
be95e2b9 2039
3e293154 2040static void
e33c6cd6 2041ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
c419671c 2042 struct param_analysis_info *parms_ainfo, gimple stmt)
3e293154 2043{
726a989a 2044 if (is_gimple_call (stmt))
c419671c 2045 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
062c604f
MJ
2046}
2047
2048/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2049 If OP is a parameter declaration, mark it as used in the info structure
2050 passed in DATA. */
2051
2052static bool
2053visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
2054 tree op, void *data)
2055{
2056 struct ipa_node_params *info = (struct ipa_node_params *) data;
2057
2058 op = get_base_address (op);
2059 if (op
2060 && TREE_CODE (op) == PARM_DECL)
2061 {
2062 int index = ipa_get_param_decl_index (info, op);
2063 gcc_assert (index >= 0);
310bc633 2064 ipa_set_param_used (info, index, true);
062c604f
MJ
2065 }
2066
2067 return false;
3e293154
MJ
2068}
2069
2070/* Scan the function body of NODE and inspect the uses of formal parameters.
2071 Store the findings in various structures of the associated ipa_node_params
c419671c 2072 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
062c604f 2073 vector containing intermediate information about each formal parameter. */
be95e2b9 2074
062c604f
MJ
2075static void
2076ipa_analyze_params_uses (struct cgraph_node *node,
c419671c 2077 struct param_analysis_info *parms_ainfo)
3e293154 2078{
67348ccc 2079 tree decl = node->decl;
3e293154
MJ
2080 basic_block bb;
2081 struct function *func;
726a989a 2082 gimple_stmt_iterator gsi;
3e293154 2083 struct ipa_node_params *info = IPA_NODE_REF (node);
062c604f 2084 int i;
3e293154 2085
726a989a 2086 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
3e293154 2087 return;
3e293154 2088
5fe8e757
MJ
2089 info->uses_analysis_done = 1;
2090 if (ipa_func_spec_opts_forbid_analysis_p (node))
2091 {
2092 for (i = 0; i < ipa_get_param_count (info); i++)
2093 {
2094 ipa_set_param_used (info, i, true);
2095 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2096 }
2097 return;
2098 }
2099
062c604f
MJ
2100 for (i = 0; i < ipa_get_param_count (info); i++)
2101 {
2102 tree parm = ipa_get_param (info, i);
4502fe8d
MJ
2103 int controlled_uses = 0;
2104
062c604f
MJ
2105 /* For SSA regs see if parameter is used. For non-SSA we compute
2106 the flag during modification analysis. */
4502fe8d
MJ
2107 if (is_gimple_reg (parm))
2108 {
67348ccc 2109 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
4502fe8d
MJ
2110 parm);
2111 if (ddef && !has_zero_uses (ddef))
2112 {
2113 imm_use_iterator imm_iter;
2114 use_operand_p use_p;
2115
2116 ipa_set_param_used (info, i, true);
2117 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2118 if (!is_gimple_call (USE_STMT (use_p)))
2119 {
2120 controlled_uses = IPA_UNDESCRIBED_USE;
2121 break;
2122 }
2123 else
2124 controlled_uses++;
2125 }
2126 else
2127 controlled_uses = 0;
2128 }
2129 else
2130 controlled_uses = IPA_UNDESCRIBED_USE;
2131 ipa_set_controlled_uses (info, i, controlled_uses);
062c604f
MJ
2132 }
2133
3e293154
MJ
2134 func = DECL_STRUCT_FUNCTION (decl);
2135 FOR_EACH_BB_FN (bb, func)
2136 {
726a989a 2137 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3e293154 2138 {
726a989a 2139 gimple stmt = gsi_stmt (gsi);
062c604f
MJ
2140
2141 if (is_gimple_debug (stmt))
2142 continue;
2143
c419671c 2144 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
062c604f
MJ
2145 walk_stmt_load_store_addr_ops (stmt, info,
2146 visit_ref_for_mod_analysis,
2147 visit_ref_for_mod_analysis,
2148 visit_ref_for_mod_analysis);
518dc859 2149 }
355a7673 2150 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
062c604f
MJ
2151 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2152 visit_ref_for_mod_analysis,
2153 visit_ref_for_mod_analysis,
2154 visit_ref_for_mod_analysis);
518dc859 2155 }
3e293154
MJ
2156}
2157
2c9561b5
MJ
2158/* Free the bitmaps in PARMS_AINFO, assuming there are PARAM_COUNT parameters. */
2159
2160static void
2161free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2162{
2163 int i;
2164
2165 for (i = 0; i < param_count; i++)
2166 {
2167 if (parms_ainfo[i].parm_visited_statements)
2168 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2169 if (parms_ainfo[i].pt_visited_statements)
2170 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2171 }
2172}
2173
dd5a833e
MS
2174/* Initialize the array describing properties of formal parameters
2175 of NODE, analyze their uses and compute jump functions associated
2176 with actual arguments of calls from within NODE. */
062c604f
MJ
2177
2178void
2179ipa_analyze_node (struct cgraph_node *node)
2180{
57dbdc5a 2181 struct ipa_node_params *info;
c419671c 2182 struct param_analysis_info *parms_ainfo;
2c9561b5 2183 int param_count;
062c604f 2184
57dbdc5a
MJ
2185 ipa_check_create_node_params ();
2186 ipa_check_create_edge_args ();
2187 info = IPA_NODE_REF (node);
67348ccc 2188 push_cfun (DECL_STRUCT_FUNCTION (node->decl));
062c604f
MJ
2189 ipa_initialize_node_params (node);
2190
2191 param_count = ipa_get_param_count (info);
c419671c
MJ
2192 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2193 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
062c604f 2194
c419671c
MJ
2195 ipa_analyze_params_uses (node, parms_ainfo);
2196 ipa_compute_jump_functions (node, parms_ainfo);
062c604f 2197
2c9561b5 2198 free_parms_ainfo (parms_ainfo, param_count);
f65cf2b7 2199 pop_cfun ();
062c604f
MJ
2200}
2201
e248d83f
MJ
2202/* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2203 attempt a type-based devirtualization. If successful, return the
2204 target function declaration, otherwise return NULL. */
2205
2206tree
2207ipa_intraprocedural_devirtualization (gimple call)
2208{
2209 tree binfo, token, fndecl;
2210 struct ipa_jump_func jfunc;
2211 tree otr = gimple_call_fn (call);
2212
2213 jfunc.type = IPA_JF_UNKNOWN;
2214 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
06d65050 2215 call, obj_type_ref_class (otr));
e248d83f
MJ
2216 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2217 return NULL_TREE;
2218 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2219 if (!binfo)
2220 return NULL_TREE;
2221 token = OBJ_TYPE_REF_TOKEN (otr);
ae7e9ddd 2222 fndecl = gimple_get_virt_method_for_binfo (tree_to_uhwi (token),
e248d83f 2223 binfo);
450ad0cd
JH
2224#ifdef ENABLE_CHECKING
2225 if (fndecl)
2226 gcc_assert (possible_polymorphic_call_target_p
2227 (otr, cgraph_get_node (fndecl)));
2228#endif
e248d83f
MJ
2229 return fndecl;
2230}
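/* Illustrative sketch (hypothetical C++ code, assuming the dynamic type of
   the object can be derived from its construction):

     struct A { virtual int f (); };
     struct B : A { virtual int f () { return 1; } };

     int g ()
     {
       B b;
       A *p = &b;
       return p->f ();
     }

   When the OBJ_TYPE_REF object can be traced back to B, the known-type jump
   function records type B, and the call may then be folded directly to B::f
   through its BINFO and the OTR token.  */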
062c604f 2231
61502ca8 2232/* Update the jump function DST when the call graph edge corresponding to SRC is
b258210c
MJ
2233 being inlined, knowing that DST is of type ancestor and SRC of known
2234 type. */
2235
2236static void
2237combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2238 struct ipa_jump_func *dst)
2239{
c7573249
MJ
2240 HOST_WIDE_INT combined_offset;
2241 tree combined_type;
b258210c 2242
b8f6e610
MJ
2243 if (!ipa_get_jf_ancestor_type_preserved (dst))
2244 {
2245 dst->type = IPA_JF_UNKNOWN;
2246 return;
2247 }
2248
7b872d9e
MJ
2249 combined_offset = ipa_get_jf_known_type_offset (src)
2250 + ipa_get_jf_ancestor_offset (dst);
2251 combined_type = ipa_get_jf_ancestor_type (dst);
c7573249 2252
7b872d9e
MJ
2253 ipa_set_jf_known_type (dst, combined_offset,
2254 ipa_get_jf_known_type_base_type (src),
2255 combined_type);
b258210c
MJ
2256}
2257
be95e2b9 2258/* Update the jump functions associated with call graph edge E when the call
3e293154 2259 graph edge CS is being inlined, assuming that E->caller is already (possibly
b258210c 2260 indirectly) inlined into CS->callee and that E has not been inlined. */
be95e2b9 2261
3e293154
MJ
2262static void
2263update_jump_functions_after_inlining (struct cgraph_edge *cs,
2264 struct cgraph_edge *e)
2265{
2266 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2267 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2268 int count = ipa_get_cs_argument_count (args);
2269 int i;
2270
2271 for (i = 0; i < count; i++)
2272 {
b258210c 2273 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
3e293154 2274
685b0d13
MJ
2275 if (dst->type == IPA_JF_ANCESTOR)
2276 {
b258210c 2277 struct ipa_jump_func *src;
8b7773a4 2278 int dst_fid = dst->value.ancestor.formal_id;
685b0d13 2279
b258210c
MJ
2280 /* Variable number of arguments can cause havoc if we try to access
2281 one that does not exist in the inlined edge. So make sure we
2282 don't. */
8b7773a4 2283 if (dst_fid >= ipa_get_cs_argument_count (top))
b258210c
MJ
2284 {
2285 dst->type = IPA_JF_UNKNOWN;
2286 continue;
2287 }
2288
8b7773a4
MJ
2289 src = ipa_get_ith_jump_func (top, dst_fid);
2290
2291 if (src->agg.items
2292 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2293 {
2294 struct ipa_agg_jf_item *item;
2295 int j;
2296
2297 /* Currently we do not produce clobber aggregate jump functions,
2298 replace with merging when we do. */
2299 gcc_assert (!dst->agg.items);
2300
9771b263 2301 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2302 dst->agg.by_ref = src->agg.by_ref;
9771b263 2303 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
8b7773a4
MJ
2304 item->offset -= dst->value.ancestor.offset;
2305 }
2306
b258210c
MJ
2307 if (src->type == IPA_JF_KNOWN_TYPE)
2308 combine_known_type_and_ancestor_jfs (src, dst);
b258210c
MJ
2309 else if (src->type == IPA_JF_PASS_THROUGH
2310 && src->value.pass_through.operation == NOP_EXPR)
8b7773a4
MJ
2311 {
2312 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2313 dst->value.ancestor.agg_preserved &=
2314 src->value.pass_through.agg_preserved;
b8f6e610
MJ
2315 dst->value.ancestor.type_preserved &=
2316 src->value.pass_through.type_preserved;
8b7773a4 2317 }
b258210c
MJ
2318 else if (src->type == IPA_JF_ANCESTOR)
2319 {
2320 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2321 dst->value.ancestor.offset += src->value.ancestor.offset;
8b7773a4
MJ
2322 dst->value.ancestor.agg_preserved &=
2323 src->value.ancestor.agg_preserved;
b8f6e610
MJ
2324 dst->value.ancestor.type_preserved &=
2325 src->value.ancestor.type_preserved;
b258210c
MJ
2326 }
2327 else
2328 dst->type = IPA_JF_UNKNOWN;
2329 }
2330 else if (dst->type == IPA_JF_PASS_THROUGH)
3e293154 2331 {
b258210c
MJ
2332 struct ipa_jump_func *src;
2333 /* We must check range due to calls with variable number of arguments
2334 and we cannot combine jump functions with operations. */
2335 if (dst->value.pass_through.operation == NOP_EXPR
2336 && (dst->value.pass_through.formal_id
2337 < ipa_get_cs_argument_count (top)))
2338 {
8b7773a4
MJ
2339 int dst_fid = dst->value.pass_through.formal_id;
2340 src = ipa_get_ith_jump_func (top, dst_fid);
b8f6e610 2341 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
8b7773a4 2342
b8f6e610
MJ
2343 switch (src->type)
2344 {
2345 case IPA_JF_UNKNOWN:
2346 dst->type = IPA_JF_UNKNOWN;
2347 break;
2348 case IPA_JF_KNOWN_TYPE:
2349 ipa_set_jf_known_type (dst,
2350 ipa_get_jf_known_type_offset (src),
2351 ipa_get_jf_known_type_base_type (src),
2352 ipa_get_jf_known_type_base_type (src));
2353 break;
2354 case IPA_JF_CONST:
2355 ipa_set_jf_cst_copy (dst, src);
2356 break;
2357
2358 case IPA_JF_PASS_THROUGH:
2359 {
2360 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2361 enum tree_code operation;
2362 operation = ipa_get_jf_pass_through_operation (src);
2363
2364 if (operation == NOP_EXPR)
2365 {
2366 bool agg_p, type_p;
2367 agg_p = dst_agg_p
2368 && ipa_get_jf_pass_through_agg_preserved (src);
2369 type_p = ipa_get_jf_pass_through_type_preserved (src)
2370 && ipa_get_jf_pass_through_type_preserved (dst);
2371 ipa_set_jf_simple_pass_through (dst, formal_id,
2372 agg_p, type_p);
2373 }
2374 else
2375 {
2376 tree operand = ipa_get_jf_pass_through_operand (src);
2377 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2378 operation);
2379 }
2380 break;
2381 }
2382 case IPA_JF_ANCESTOR:
2383 {
2384 bool agg_p, type_p;
2385 agg_p = dst_agg_p
2386 && ipa_get_jf_ancestor_agg_preserved (src);
2387 type_p = ipa_get_jf_ancestor_type_preserved (src)
2388 && ipa_get_jf_pass_through_type_preserved (dst);
2389 ipa_set_ancestor_jf (dst,
2390 ipa_get_jf_ancestor_offset (src),
2391 ipa_get_jf_ancestor_type (src),
2392 ipa_get_jf_ancestor_formal_id (src),
2393 agg_p, type_p);
2394 break;
2395 }
2396 default:
2397 gcc_unreachable ();
2398 }
8b7773a4
MJ
2399
2400 if (src->agg.items
b8f6e610 2401 && (dst_agg_p || !src->agg.by_ref))
8b7773a4
MJ
2402 {
2403 /* Currently we do not produce clobber aggregate jump
2404 functions, replace with merging when we do. */
2405 gcc_assert (!dst->agg.items);
2406
2407 dst->agg.by_ref = src->agg.by_ref;
9771b263 2408 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2409 }
b258210c
MJ
2410 }
2411 else
2412 dst->type = IPA_JF_UNKNOWN;
3e293154 2413 }
b258210c
MJ
2414 }
2415}
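/* Illustrative worked example (hypothetical functions A, B and C): suppose A
   calls B passing the constant 5 and B merely forwards its parameter to C.
   Before inlining, the A->B jump function is a constant and the B->C jump
   function is a NOP_EXPR pass-through of formal 0.  When B is inlined into
   A, the pass-through of the now outgoing edge is combined with the constant
   above, so C's argument is known to be 5; had the formal id exceeded the
   argument count of the inlined call, the result would have been
   IPA_JF_UNKNOWN instead.  */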
2416
2417/* If TARGET is an addr_expr of a function declaration, make it the destination
81fa35bd 2418 of an indirect edge IE and return the edge. Otherwise, return NULL. */
b258210c 2419
3949c4a7 2420struct cgraph_edge *
81fa35bd 2421ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
b258210c
MJ
2422{
2423 struct cgraph_node *callee;
0f378cb5 2424 struct inline_edge_summary *es = inline_edge_summary (ie);
48b1474e 2425 bool unreachable = false;
b258210c 2426
ceeffab0
MJ
2427 if (TREE_CODE (target) == ADDR_EXPR)
2428 target = TREE_OPERAND (target, 0);
b258210c 2429 if (TREE_CODE (target) != FUNCTION_DECL)
a0a7b611
JH
2430 {
2431 target = canonicalize_constructor_val (target, NULL);
2432 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2433 {
c13bc3d9
MJ
2434 if (ie->indirect_info->member_ptr)
2435 /* Member pointer call that goes through a VMT lookup. */
2436 return NULL;
2437
a0a7b611
JH
2438 if (dump_file)
2439 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
48b1474e 2440 " in %s/%i, making it unreachable.\n",
fec39fa6 2441 ie->caller->name (), ie->caller->order);
48b1474e
MJ
2442 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2443 callee = cgraph_get_create_node (target);
2444 unreachable = true;
a0a7b611 2445 }
48b1474e
MJ
2446 else
2447 callee = cgraph_get_node (target);
a0a7b611 2448 }
48b1474e
MJ
2449 else
2450 callee = cgraph_get_node (target);
a0a7b611
JH
2451
2452 /* Because may-edges are not explicitly represented and the vtable may be external,
2453 we may create the first reference to the object in the unit. */
2454 if (!callee || callee->global.inlined_to)
2455 {
a0a7b611
JH
2456
2457 /* We had better make sure we can refer to it.
2458 In the case of static functions we are out of luck, since we have already
2459 removed their bodies. In the case of public functions we may or may
2460 not introduce the reference. */
2461 if (!canonicalize_constructor_val (target, NULL)
2462 || !TREE_PUBLIC (target))
2463 {
2464 if (dump_file)
2465 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2466 "(%s/%i -> %s/%i) but can not refer to it. Giving up.\n",
fec39fa6 2467 xstrdup (ie->caller->name ()),
67348ccc 2468 ie->caller->order,
fec39fa6 2469 xstrdup (ie->callee->name ()),
67348ccc 2470 ie->callee->order);
a0a7b611
JH
2471 return NULL;
2472 }
6f99e449 2473 callee = cgraph_get_create_node (target);
a0a7b611 2474 }
1dbee8c9 2475 ipa_check_create_node_params ();
ceeffab0 2476
81fa35bd
MJ
2477 /* We cannot make edges to inline clones. It is a bug that someone removed
2478 the cgraph node too early. */
17afc0fe
JH
2479 gcc_assert (!callee->global.inlined_to);
2480
48b1474e 2481 if (dump_file && !unreachable)
b258210c
MJ
2482 {
2483 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
ceeffab0 2484 "(%s/%i -> %s/%i), for stmt ",
b258210c 2485 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
fec39fa6 2486 xstrdup (ie->caller->name ()),
67348ccc 2487 ie->caller->order,
fec39fa6 2488 xstrdup (callee->name ()),
67348ccc 2489 callee->order);
b258210c
MJ
2490 if (ie->call_stmt)
2491 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2492 else
2493 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
042ae7d2
JH
2494 }
2495 ie = cgraph_make_edge_direct (ie, callee);
2496 es = inline_edge_summary (ie);
2497 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2498 - eni_size_weights.call_cost);
2499 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2500 - eni_time_weights.call_cost);
749aa96d 2501
b258210c 2502 return ie;
3e293154
MJ
2503}
2504
8b7773a4
MJ
2505/* Retrieve value from aggregate jump function AGG for the given OFFSET or
2506 return NULL if there is none. BY_REF specifies whether the value has to
2507 be passed by reference or by value. */
2508
2509tree
2510ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2511 HOST_WIDE_INT offset, bool by_ref)
2512{
2513 struct ipa_agg_jf_item *item;
2514 int i;
2515
2516 if (by_ref != agg->by_ref)
2517 return NULL;
2518
9771b263 2519 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2c9561b5
MJ
2520 if (item->offset == offset)
2521 {
2522 /* Currently we do not have clobber values, return NULL for them once
2523 we do. */
2524 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2525 return item->value;
2526 }
8b7773a4
MJ
2527 return NULL;
2528}
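/* Illustrative example (hypothetical code): if a caller does

     struct S { void (*cb) (void); } s;
     s.cb = my_callback;
     process (&s);

   the aggregate jump function of the first argument of process can record
   my_callback at the bit offset of the CB field with by_ref set, and this
   lookup retrieves that value when an indirect call through the CB field
   inside process is being resolved.  */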
2529
4502fe8d 2530/* Remove a reference to SYMBOL from the list of references of a node given by
568cda29
MJ
2531 reference description RDESC. Return true if the reference has been
2532 successfully found and removed. */
4502fe8d 2533
568cda29 2534static bool
5e20cdc9 2535remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
4502fe8d
MJ
2536{
2537 struct ipa_ref *to_del;
2538 struct cgraph_edge *origin;
2539
2540 origin = rdesc->cs;
a854f856
MJ
2541 if (!origin)
2542 return false;
67348ccc 2543 to_del = ipa_find_reference (origin->caller, symbol,
042ae7d2 2544 origin->call_stmt, origin->lto_stmt_uid);
568cda29
MJ
2545 if (!to_del)
2546 return false;
2547
4502fe8d
MJ
2548 ipa_remove_reference (to_del);
2549 if (dump_file)
2550 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
fec39fa6
TS
2551 xstrdup (origin->caller->name ()),
2552 origin->caller->order, xstrdup (symbol->name ()));
568cda29 2553 return true;
4502fe8d
MJ
2554}
2555
2556/* If JFUNC has a reference description with refcount different from
2557 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2558 NULL. JFUNC must be a constant jump function. */
2559
2560static struct ipa_cst_ref_desc *
2561jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2562{
2563 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2564 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2565 return rdesc;
2566 else
2567 return NULL;
2568}
2569
568cda29
MJ
2570/* If the value of constant jump function JFUNC is an address of a function
2571 declaration, return the associated call graph node. Otherwise return
2572 NULL. */
2573
2574static cgraph_node *
2575cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
2576{
2577 gcc_checking_assert (jfunc->type == IPA_JF_CONST);
2578 tree cst = ipa_get_jf_constant (jfunc);
2579 if (TREE_CODE (cst) != ADDR_EXPR
2580 || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
2581 return NULL;
2582
2583 return cgraph_get_node (TREE_OPERAND (cst, 0));
2584}
2585
2586
2587/* If JFUNC is a constant jump function with a usable rdesc, decrement its
2588 refcount and if it hits zero, remove reference to SYMBOL from the caller of
2589 the edge specified in the rdesc. Return false if either the symbol or the
2590 reference could not be found, otherwise return true. */
2591
2592static bool
2593try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
2594{
2595 struct ipa_cst_ref_desc *rdesc;
2596 if (jfunc->type == IPA_JF_CONST
2597 && (rdesc = jfunc_rdesc_usable (jfunc))
2598 && --rdesc->refcount == 0)
2599 {
5e20cdc9 2600 symtab_node *symbol = cgraph_node_for_jfunc (jfunc);
568cda29
MJ
2601 if (!symbol)
2602 return false;
2603
2604 return remove_described_reference (symbol, rdesc);
2605 }
2606 return true;
2607}
2608
b258210c
MJ
2609/* Try to find a destination for indirect edge IE that corresponds to a simple
2610 call or a call of a member function pointer and where the destination is a
2611 pointer formal parameter described by jump function JFUNC. If it can be
d250540a
MJ
2612 determined, return the newly direct edge, otherwise return NULL.
2613 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
be95e2b9 2614
b258210c
MJ
2615static struct cgraph_edge *
2616try_make_edge_direct_simple_call (struct cgraph_edge *ie,
d250540a
MJ
2617 struct ipa_jump_func *jfunc,
2618 struct ipa_node_params *new_root_info)
b258210c 2619{
4502fe8d 2620 struct cgraph_edge *cs;
b258210c 2621 tree target;
042ae7d2 2622 bool agg_contents = ie->indirect_info->agg_contents;
b258210c 2623
8b7773a4 2624 if (ie->indirect_info->agg_contents)
d250540a
MJ
2625 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2626 ie->indirect_info->offset,
2627 ie->indirect_info->by_ref);
b258210c 2628 else
d250540a
MJ
2629 target = ipa_value_from_jfunc (new_root_info, jfunc);
2630 if (!target)
2631 return NULL;
4502fe8d
MJ
2632 cs = ipa_make_edge_direct_to_target (ie, target);
2633
a12cd2db 2634 if (cs && !agg_contents)
568cda29
MJ
2635 {
2636 bool ok;
2637 gcc_checking_assert (cs->callee
ae6d0907
MJ
2638 && (cs != ie
2639 || jfunc->type != IPA_JF_CONST
568cda29
MJ
2640 || !cgraph_node_for_jfunc (jfunc)
2641 || cs->callee == cgraph_node_for_jfunc (jfunc)));
2642 ok = try_decrement_rdesc_refcount (jfunc);
2643 gcc_checking_assert (ok);
2644 }
4502fe8d
MJ
2645
2646 return cs;
b258210c
MJ
2647}
2648
d250540a
MJ
2649/* Try to find a destination for indirect edge IE that corresponds to a virtual
2650 call based on a formal parameter which is described by jump function JFUNC
2651 and if it can be determined, make it direct and return the direct edge.
2652 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2653 are relative to. */
b258210c
MJ
2654
2655static struct cgraph_edge *
2656try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
d250540a
MJ
2657 struct ipa_jump_func *jfunc,
2658 struct ipa_node_params *new_root_info)
3e293154 2659{
c7573249 2660 tree binfo, target;
b258210c 2661
d250540a
MJ
2662 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2663
da942ca0 2664 if (!binfo)
b258210c 2665 return NULL;
3e293154 2666
da942ca0
JH
2667 if (TREE_CODE (binfo) != TREE_BINFO)
2668 {
c49bdb2e
JH
2669 binfo = gimple_extract_devirt_binfo_from_cst
2670 (binfo, ie->indirect_info->otr_type);
da942ca0
JH
2671 if (!binfo)
2672 return NULL;
2673 }
2674
d250540a 2675 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
c7573249 2676 ie->indirect_info->otr_type);
b258210c 2677 if (binfo)
c7573249
MJ
2678 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2679 binfo);
b258210c
MJ
2680 else
2681 return NULL;
2682
2683 if (target)
450ad0cd
JH
2684 {
2685#ifdef ENABLE_CHECKING
2686 gcc_assert (possible_polymorphic_call_target_p
2687 (ie, cgraph_get_node (target)));
2688#endif
2689 return ipa_make_edge_direct_to_target (ie, target);
2690 }
b258210c
MJ
2691 else
2692 return NULL;
3e293154
MJ
2693}
2694
2695/* Update the param called notes associated with NODE when CS is being inlined,
2696 assuming NODE is (potentially indirectly) inlined into CS->callee.
2697 Moreover, if the callee is discovered to be constant, create a new cgraph
e56f5f3e 2698 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
f8e2a1ed 2699 unless NEW_EDGES is NULL. Return true iff a new edge(s) were created. */
be95e2b9 2700
f8e2a1ed 2701static bool
e33c6cd6
MJ
2702update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2703 struct cgraph_node *node,
9771b263 2704 vec<cgraph_edge_p> *new_edges)
3e293154 2705{
9e97ff61 2706 struct ipa_edge_args *top;
b258210c 2707 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
d250540a 2708 struct ipa_node_params *new_root_info;
f8e2a1ed 2709 bool res = false;
3e293154 2710
e33c6cd6 2711 ipa_check_create_edge_args ();
9e97ff61 2712 top = IPA_EDGE_REF (cs);
d250540a
MJ
2713 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2714 ? cs->caller->global.inlined_to
2715 : cs->caller);
e33c6cd6
MJ
2716
2717 for (ie = node->indirect_calls; ie; ie = next_ie)
3e293154 2718 {
e33c6cd6 2719 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3e293154 2720 struct ipa_jump_func *jfunc;
8b7773a4 2721 int param_index;
3e293154 2722
e33c6cd6 2723 next_ie = ie->next_callee;
3e293154 2724
5f902d76
JH
2725 if (ici->param_index == -1)
2726 continue;
e33c6cd6 2727
3e293154 2728 /* We must check range due to calls with variable number of arguments: */
e33c6cd6 2729 if (ici->param_index >= ipa_get_cs_argument_count (top))
3e293154 2730 {
5ee53a06 2731 ici->param_index = -1;
3e293154
MJ
2732 continue;
2733 }
2734
8b7773a4
MJ
2735 param_index = ici->param_index;
2736 jfunc = ipa_get_ith_jump_func (top, param_index);
5ee53a06
JH
2737
2738 if (!flag_indirect_inlining)
36b72910
JH
2739 new_direct_edge = NULL;
2740 else if (ici->polymorphic)
d250540a
MJ
2741 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2742 new_root_info);
b258210c 2743 else
d250540a
MJ
2744 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2745 new_root_info);
042ae7d2
JH
2746 /* If speculation was removed, then we need to do nothing. */
2747 if (new_direct_edge && new_direct_edge != ie)
2748 {
2749 new_direct_edge->indirect_inlining_edge = 1;
2750 top = IPA_EDGE_REF (cs);
2751 res = true;
2752 }
2753 else if (new_direct_edge)
685b0d13 2754 {
b258210c 2755 new_direct_edge->indirect_inlining_edge = 1;
89faf322
RG
2756 if (new_direct_edge->call_stmt)
2757 new_direct_edge->call_stmt_cannot_inline_p
4de09b85
DC
2758 = !gimple_check_call_matching_types (
2759 new_direct_edge->call_stmt,
67348ccc 2760 new_direct_edge->callee->decl, false);
b258210c
MJ
2761 if (new_edges)
2762 {
9771b263 2763 new_edges->safe_push (new_direct_edge);
b258210c
MJ
2764 res = true;
2765 }
042ae7d2 2766 top = IPA_EDGE_REF (cs);
685b0d13 2767 }
36b72910
JH
2768 else if (jfunc->type == IPA_JF_PASS_THROUGH
2769 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2770 {
2771 if (ici->agg_contents
2772 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2773 ici->param_index = -1;
2774 else
2775 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2776 }
2777 else if (jfunc->type == IPA_JF_ANCESTOR)
2778 {
2779 if (ici->agg_contents
2780 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2781 ici->param_index = -1;
2782 else
2783 {
2784 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
68377e53
JH
2785 if (ipa_get_jf_ancestor_offset (jfunc))
2786 ici->outer_type = NULL;
36b72910
JH
2787 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2788 }
2789 }
2790 else
2791 /* Either we can find a destination for this edge now or never. */
2792 ici->param_index = -1;
3e293154 2793 }
e33c6cd6 2794
f8e2a1ed 2795 return res;
3e293154
MJ
2796}
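/* Illustrative example (hypothetical): if the inlined callee contained an
   indirect call through its first parameter and the inlined call site passed
   the address of a known function for it, the corresponding jump function is
   a usable constant and the indirect edge is turned into a direct one above.
   If the jump function was instead a NOP_EXPR pass-through of the caller's
   formal 2, param_index is simply rewritten to 2 so that a later inlining
   step may still resolve the call.  */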
2797
2798/* Recursively traverse subtree of NODE (including node) made of inlined
2799 cgraph_edges when CS has been inlined and invoke
e33c6cd6 2800 update_indirect_edges_after_inlining on all nodes and
3e293154
MJ
2801 update_jump_functions_after_inlining on all non-inlined edges that lead out
2802 of this subtree. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
2803 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
2804 created. */
be95e2b9 2805
f8e2a1ed 2806static bool
3e293154
MJ
2807propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2808 struct cgraph_node *node,
9771b263 2809 vec<cgraph_edge_p> *new_edges)
3e293154
MJ
2810{
2811 struct cgraph_edge *e;
f8e2a1ed 2812 bool res;
3e293154 2813
e33c6cd6 2814 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3e293154
MJ
2815
2816 for (e = node->callees; e; e = e->next_callee)
2817 if (!e->inline_failed)
f8e2a1ed 2818 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3e293154
MJ
2819 else
2820 update_jump_functions_after_inlining (cs, e);
5ee53a06
JH
2821 for (e = node->indirect_calls; e; e = e->next_callee)
2822 update_jump_functions_after_inlining (cs, e);
f8e2a1ed
MJ
2823
2824 return res;
3e293154
MJ
2825}
2826
4502fe8d
MJ
2827/* Combine two controlled uses counts as done during inlining. */
2828
2829static int
2830combine_controlled_uses_counters (int c, int d)
2831{
2832 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2833 return IPA_UNDESCRIBED_USE;
2834 else
2835 return c + d - 1;
2836}
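/* Worked example (illustrative numbers only): if the new root's parameter
   had c == 3 described uses and the old root's parameter had d == 2, the
   combined count is 3 + 2 - 1 == 4, because the call site being inlined was
   itself one of the counted uses and disappears, while the uses from the
   inlined body now count against the caller's parameter.  */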
2837
2838/* Propagate the number of controlled uses from CS->callee to the new root of the
2839 tree of inlined nodes. */
2840
2841static void
2842propagate_controlled_uses (struct cgraph_edge *cs)
2843{
2844 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2845 struct cgraph_node *new_root = cs->caller->global.inlined_to
2846 ? cs->caller->global.inlined_to : cs->caller;
2847 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2848 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2849 int count, i;
2850
2851 count = MIN (ipa_get_cs_argument_count (args),
2852 ipa_get_param_count (old_root_info));
2853 for (i = 0; i < count; i++)
2854 {
2855 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2856 struct ipa_cst_ref_desc *rdesc;
2857
2858 if (jf->type == IPA_JF_PASS_THROUGH)
2859 {
2860 int src_idx, c, d;
2861 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2862 c = ipa_get_controlled_uses (new_root_info, src_idx);
2863 d = ipa_get_controlled_uses (old_root_info, i);
2864
2865 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2866 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2867 c = combine_controlled_uses_counters (c, d);
2868 ipa_set_controlled_uses (new_root_info, src_idx, c);
2869 if (c == 0 && new_root_info->ipcp_orig_node)
2870 {
2871 struct cgraph_node *n;
2872 struct ipa_ref *ref;
2873 tree t = new_root_info->known_vals[src_idx];
2874
2875 if (t && TREE_CODE (t) == ADDR_EXPR
2876 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2877 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
67348ccc
DM
2878 && (ref = ipa_find_reference (new_root,
2879 n, NULL, 0)))
4502fe8d
MJ
2880 {
2881 if (dump_file)
2882 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2883 "reference from %s/%i to %s/%i.\n",
fec39fa6 2884 xstrdup (new_root->name ()),
67348ccc 2885 new_root->order,
fec39fa6 2886 xstrdup (n->name ()), n->order);
4502fe8d
MJ
2887 ipa_remove_reference (ref);
2888 }
2889 }
2890 }
2891 else if (jf->type == IPA_JF_CONST
2892 && (rdesc = jfunc_rdesc_usable (jf)))
2893 {
2894 int d = ipa_get_controlled_uses (old_root_info, i);
2895 int c = rdesc->refcount;
2896 rdesc->refcount = combine_controlled_uses_counters (c, d);
2897 if (rdesc->refcount == 0)
2898 {
2899 tree cst = ipa_get_jf_constant (jf);
2900 struct cgraph_node *n;
2901 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2902 && TREE_CODE (TREE_OPERAND (cst, 0))
2903 == FUNCTION_DECL);
2904 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2905 if (n)
2906 {
2907 struct cgraph_node *clone;
568cda29 2908 bool ok;
67348ccc 2909 ok = remove_described_reference (n, rdesc);
568cda29 2910 gcc_checking_assert (ok);
4502fe8d
MJ
2911
2912 clone = cs->caller;
2913 while (clone->global.inlined_to
2914 && clone != rdesc->cs->caller
2915 && IPA_NODE_REF (clone)->ipcp_orig_node)
2916 {
2917 struct ipa_ref *ref;
67348ccc
DM
2918 ref = ipa_find_reference (clone,
2919 n, NULL, 0);
4502fe8d
MJ
2920 if (ref)
2921 {
2922 if (dump_file)
2923 fprintf (dump_file, "ipa-prop: Removing "
2924 "cloning-created reference "
2925 "from %s/%i to %s/%i.\n",
fec39fa6 2926 xstrdup (clone->name ()),
67348ccc 2927 clone->order,
fec39fa6 2928 xstrdup (n->name ()),
67348ccc 2929 n->order);
4502fe8d
MJ
2930 ipa_remove_reference (ref);
2931 }
2932 clone = clone->callers->caller;
2933 }
2934 }
2935 }
2936 }
2937 }
2938
2939 for (i = ipa_get_param_count (old_root_info);
2940 i < ipa_get_cs_argument_count (args);
2941 i++)
2942 {
2943 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2944
2945 if (jf->type == IPA_JF_CONST)
2946 {
2947 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
2948 if (rdesc)
2949 rdesc->refcount = IPA_UNDESCRIBED_USE;
2950 }
2951 else if (jf->type == IPA_JF_PASS_THROUGH)
2952 ipa_set_controlled_uses (new_root_info,
2953 jf->value.pass_through.formal_id,
2954 IPA_UNDESCRIBED_USE);
2955 }
2956}
2957
3e293154
MJ
2958/* Update jump functions and indirect call notes when inlining the call site CS.
2959 CS is expected to lead to a node already cloned by
2960 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
2961 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
2962 created. */
be95e2b9 2963
f8e2a1ed 2964bool
3e293154 2965ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
9771b263 2966 vec<cgraph_edge_p> *new_edges)
3e293154 2967{
5ee53a06 2968 bool changed;
f8e2a1ed
MJ
2969 /* Do nothing if the preparation phase has not been carried out yet
2970 (i.e. during early inlining). */
9771b263 2971 if (!ipa_node_params_vector.exists ())
f8e2a1ed
MJ
2972 return false;
2973 gcc_assert (ipa_edge_args_vector);
2974
4502fe8d 2975 propagate_controlled_uses (cs);
5ee53a06
JH
2976 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2977
5ee53a06 2978 return changed;
518dc859
RL
2979}
2980
771578a0
MJ
2981/* Frees all dynamically allocated structures that the argument info points
2982 to. */
be95e2b9 2983
518dc859 2984void
771578a0 2985ipa_free_edge_args_substructures (struct ipa_edge_args *args)
518dc859 2986{
9771b263 2987 vec_free (args->jump_functions);
771578a0 2988 memset (args, 0, sizeof (*args));
518dc859
RL
2989}
2990
771578a0 2991/* Free all ipa_edge_args structures. */
be95e2b9 2992
518dc859 2993void
771578a0 2994ipa_free_all_edge_args (void)
518dc859 2995{
771578a0
MJ
2996 int i;
2997 struct ipa_edge_args *args;
518dc859 2998
9771b263
DN
2999 if (!ipa_edge_args_vector)
3000 return;
3001
3002 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
771578a0
MJ
3003 ipa_free_edge_args_substructures (args);
3004
9771b263 3005 vec_free (ipa_edge_args_vector);
518dc859
RL
3006}
3007
771578a0
MJ
3008/* Frees all dynamically allocated structures that the param info points
3009 to. */
be95e2b9 3010
518dc859 3011void
771578a0 3012ipa_free_node_params_substructures (struct ipa_node_params *info)
518dc859 3013{
9771b263 3014 info->descriptors.release ();
310bc633
MJ
3015 free (info->lattices);
3016 /* Lattice values and their sources are deallocated with their alocation
3017 pool. */
9771b263 3018 info->known_vals.release ();
771578a0 3019 memset (info, 0, sizeof (*info));
518dc859
RL
3020}
3021
771578a0 3022/* Free all ipa_node_params structures. */
be95e2b9 3023
518dc859 3024void
771578a0 3025ipa_free_all_node_params (void)
518dc859 3026{
771578a0
MJ
3027 int i;
3028 struct ipa_node_params *info;
518dc859 3029
9771b263 3030 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
771578a0
MJ
3031 ipa_free_node_params_substructures (info);
3032
9771b263 3033 ipa_node_params_vector.release ();
771578a0
MJ
3034}
3035
2c9561b5
MJ
3036/* Set the aggregate replacements of NODE to be AGGVALS. */
3037
3038void
3039ipa_set_node_agg_value_chain (struct cgraph_node *node,
3040 struct ipa_agg_replacement_value *aggvals)
3041{
9771b263
DN
3042 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
3043 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
2c9561b5 3044
9771b263 3045 (*ipa_node_agg_replacements)[node->uid] = aggvals;
2c9561b5
MJ
3046}
3047
771578a0 3048/* Hook that is called by cgraph.c when an edge is removed. */
be95e2b9 3049
771578a0 3050static void
5c0466b5 3051ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
771578a0 3052{
568cda29
MJ
3053 struct ipa_edge_args *args;
3054
3055 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
9771b263 3056 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
c6f7cfc1 3057 return;
568cda29
MJ
3058
3059 args = IPA_EDGE_REF (cs);
3060 if (args->jump_functions)
3061 {
3062 struct ipa_jump_func *jf;
3063 int i;
3064 FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
a854f856
MJ
3065 {
3066 struct ipa_cst_ref_desc *rdesc;
3067 try_decrement_rdesc_refcount (jf);
3068 if (jf->type == IPA_JF_CONST
3069 && (rdesc = ipa_get_jf_constant_rdesc (jf))
3070 && rdesc->cs == cs)
3071 rdesc->cs = NULL;
3072 }
568cda29
MJ
3073 }
3074
771578a0 3075 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
518dc859
RL
3076}
3077
771578a0 3078/* Hook that is called by cgraph.c when a node is removed. */
be95e2b9 3079
771578a0 3080static void
5c0466b5 3081ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 3082{
dd6d1ad7 3083 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
9771b263 3084 if (ipa_node_params_vector.length () > (unsigned)node->uid)
2c9561b5 3085 ipa_free_node_params_substructures (IPA_NODE_REF (node));
9771b263
DN
3086 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
3087 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
771578a0
MJ
3088}
3089
8b7773a4 3090/* Hook that is called by cgraph.c when an edge is duplicated. */
be95e2b9 3091
771578a0
MJ
3092static void
3093ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
f8e2a1ed 3094 __attribute__((unused)) void *data)
771578a0
MJ
3095{
3096 struct ipa_edge_args *old_args, *new_args;
8b7773a4 3097 unsigned int i;
771578a0
MJ
3098
3099 ipa_check_create_edge_args ();
3100
3101 old_args = IPA_EDGE_REF (src);
3102 new_args = IPA_EDGE_REF (dst);
3103
9771b263 3104 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
8b7773a4 3105
9771b263 3106 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4502fe8d
MJ
3107 {
3108 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3109 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3110
3111 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3112
3113 if (src_jf->type == IPA_JF_CONST)
3114 {
3115 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3116
3117 if (!src_rdesc)
3118 dst_jf->value.constant.rdesc = NULL;
568cda29
MJ
3119 else if (src->caller == dst->caller)
3120 {
3121 struct ipa_ref *ref;
5e20cdc9 3122 symtab_node *n = cgraph_node_for_jfunc (src_jf);
568cda29 3123 gcc_checking_assert (n);
67348ccc 3124 ref = ipa_find_reference (src->caller, n,
568cda29
MJ
3125 src->call_stmt, src->lto_stmt_uid);
3126 gcc_checking_assert (ref);
67348ccc 3127 ipa_clone_ref (ref, dst->caller, ref->stmt);
568cda29
MJ
3128
3129 gcc_checking_assert (ipa_refdesc_pool);
3130 struct ipa_cst_ref_desc *dst_rdesc
3131 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3132 dst_rdesc->cs = dst;
3133 dst_rdesc->refcount = src_rdesc->refcount;
3134 dst_rdesc->next_duplicate = NULL;
3135 dst_jf->value.constant.rdesc = dst_rdesc;
3136 }
4502fe8d
MJ
3137 else if (src_rdesc->cs == src)
3138 {
3139 struct ipa_cst_ref_desc *dst_rdesc;
3140 gcc_checking_assert (ipa_refdesc_pool);
3141 dst_rdesc
3142 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3143 dst_rdesc->cs = dst;
4502fe8d 3144 dst_rdesc->refcount = src_rdesc->refcount;
2fd0985c
MJ
3145 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3146 src_rdesc->next_duplicate = dst_rdesc;
4502fe8d
MJ
3147 dst_jf->value.constant.rdesc = dst_rdesc;
3148 }
3149 else
3150 {
3151 struct ipa_cst_ref_desc *dst_rdesc;
3152 /* This can happen during inlining, when a JFUNC can refer to a
3153 reference taken in a function up in the tree of inline clones.
3154 We need to find the duplicate that refers to our tree of
3155 inline clones. */
3156
3157 gcc_assert (dst->caller->global.inlined_to);
3158 for (dst_rdesc = src_rdesc->next_duplicate;
3159 dst_rdesc;
3160 dst_rdesc = dst_rdesc->next_duplicate)
2fd0985c
MJ
3161 {
3162 struct cgraph_node *top;
3163 top = dst_rdesc->cs->caller->global.inlined_to
3164 ? dst_rdesc->cs->caller->global.inlined_to
3165 : dst_rdesc->cs->caller;
3166 if (dst->caller->global.inlined_to == top)
3167 break;
3168 }
44a60244 3169 gcc_assert (dst_rdesc);
4502fe8d
MJ
3170 dst_jf->value.constant.rdesc = dst_rdesc;
3171 }
3172 }
3173 }
771578a0
MJ
3174}
3175
3176/* Hook that is called by cgraph.c when a node is duplicated. */
be95e2b9 3177
771578a0
MJ
3178static void
3179ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
10a5dd5d 3180 ATTRIBUTE_UNUSED void *data)
771578a0
MJ
3181{
3182 struct ipa_node_params *old_info, *new_info;
2c9561b5 3183 struct ipa_agg_replacement_value *old_av, *new_av;
771578a0
MJ
3184
3185 ipa_check_create_node_params ();
3186 old_info = IPA_NODE_REF (src);
3187 new_info = IPA_NODE_REF (dst);
771578a0 3188
9771b263 3189 new_info->descriptors = old_info->descriptors.copy ();
310bc633 3190 new_info->lattices = NULL;
771578a0 3191 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3949c4a7 3192
3949c4a7
MJ
3193 new_info->uses_analysis_done = old_info->uses_analysis_done;
3194 new_info->node_enqueued = old_info->node_enqueued;
2c9561b5
MJ
3195
3196 old_av = ipa_get_agg_replacements_for_node (src);
3197 if (!old_av)
3198 return;
3199
3200 new_av = NULL;
3201 while (old_av)
3202 {
3203 struct ipa_agg_replacement_value *v;
3204
3205 v = ggc_alloc_ipa_agg_replacement_value ();
3206 memcpy (v, old_av, sizeof (*v));
3207 v->next = new_av;
3208 new_av = v;
3209 old_av = old_av->next;
3210 }
3211 ipa_set_node_agg_value_chain (dst, new_av);
771578a0
MJ
3212}
3213
40982661
JH
3214
3215/* Analyze newly added function into callgraph. */
3216
3217static void
3218ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3219{
3220 ipa_analyze_node (node);
3221}
3222
771578a0 3223/* Register our cgraph hooks if they are not already there. */
be95e2b9 3224
518dc859 3225void
771578a0 3226ipa_register_cgraph_hooks (void)
518dc859 3227{
771578a0
MJ
3228 if (!edge_removal_hook_holder)
3229 edge_removal_hook_holder =
3230 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3231 if (!node_removal_hook_holder)
3232 node_removal_hook_holder =
3233 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3234 if (!edge_duplication_hook_holder)
3235 edge_duplication_hook_holder =
3236 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3237 if (!node_duplication_hook_holder)
3238 node_duplication_hook_holder =
3239 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
40982661
JH
3240 function_insertion_hook_holder =
3241 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
771578a0 3242}
518dc859 3243
771578a0 3244/* Unregister our cgraph hooks. */
be95e2b9 3245
771578a0
MJ
3246static void
3247ipa_unregister_cgraph_hooks (void)
3248{
3249 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3250 edge_removal_hook_holder = NULL;
3251 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3252 node_removal_hook_holder = NULL;
3253 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3254 edge_duplication_hook_holder = NULL;
3255 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3256 node_duplication_hook_holder = NULL;
40982661
JH
3257 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3258 function_insertion_hook_holder = NULL;
771578a0
MJ
3259}
3260
3261/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3262 longer needed after ipa-cp. */
be95e2b9 3263
771578a0 3264void
e33c6cd6 3265ipa_free_all_structures_after_ipa_cp (void)
3e293154 3266{
5ee53a06 3267 if (!optimize)
3e293154
MJ
3268 {
3269 ipa_free_all_edge_args ();
3270 ipa_free_all_node_params ();
310bc633
MJ
3271 free_alloc_pool (ipcp_sources_pool);
3272 free_alloc_pool (ipcp_values_pool);
2c9561b5 3273 free_alloc_pool (ipcp_agg_lattice_pool);
3e293154 3274 ipa_unregister_cgraph_hooks ();
4502fe8d
MJ
3275 if (ipa_refdesc_pool)
3276 free_alloc_pool (ipa_refdesc_pool);
3e293154
MJ
3277 }
3278}
3279
3280/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3281 longer needed after indirect inlining. */
be95e2b9 3282
3e293154 3283void
e33c6cd6 3284ipa_free_all_structures_after_iinln (void)
771578a0
MJ
3285{
3286 ipa_free_all_edge_args ();
3287 ipa_free_all_node_params ();
3288 ipa_unregister_cgraph_hooks ();
310bc633
MJ
3289 if (ipcp_sources_pool)
3290 free_alloc_pool (ipcp_sources_pool);
3291 if (ipcp_values_pool)
3292 free_alloc_pool (ipcp_values_pool);
2c9561b5
MJ
3293 if (ipcp_agg_lattice_pool)
3294 free_alloc_pool (ipcp_agg_lattice_pool);
4502fe8d
MJ
3295 if (ipa_refdesc_pool)
3296 free_alloc_pool (ipa_refdesc_pool);
518dc859
RL
3297}
3298
dcd416e3 3299/* Print the parameter descriptors of NODE to F. */
be95e2b9 3301
518dc859 3302void
2c9561b5 3303ipa_print_node_params (FILE *f, struct cgraph_node *node)
518dc859
RL
3304{
3305 int i, count;
3e293154 3306 struct ipa_node_params *info;
518dc859 3307
67348ccc 3308 if (!node->definition)
3e293154
MJ
3309 return;
3310 info = IPA_NODE_REF (node);
9de04252 3311 fprintf (f, " function %s/%i parameter descriptors:\n",
fec39fa6 3312 node->name (), node->order);
3e293154
MJ
3313 count = ipa_get_param_count (info);
3314 for (i = 0; i < count; i++)
518dc859 3315 {
4502fe8d
MJ
3316 int c;
3317
e067bd43 3318 ipa_dump_param (f, info, i);
339f49ec
JH
3319 if (ipa_is_param_used (info, i))
3320 fprintf (f, " used");
4502fe8d
MJ
3321 c = ipa_get_controlled_uses (info, i);
3322 if (c == IPA_UNDESCRIBED_USE)
3323 fprintf (f, " undescribed_use");
3324 else
3325 fprintf (f, " controlled_uses=%i", c);
3e293154 3326 fprintf (f, "\n");
518dc859
RL
3327 }
3328}
dcd416e3 3329
ca30a539 3330/* Print the parameter descriptors of all functions in the
3e293154 3331 callgraph to F. */
be95e2b9 3332
3e293154 3333void
ca30a539 3334ipa_print_all_params (FILE * f)
3e293154
MJ
3335{
3336 struct cgraph_node *node;
3337
ca30a539 3338 fprintf (f, "\nFunction parameters:\n");
65c70e6b 3339 FOR_EACH_FUNCTION (node)
ca30a539 3340 ipa_print_node_params (f, node);
3e293154 3341}
3f84bf08
MJ
3342
3343/* Return a heap allocated vector containing formal parameters of FNDECL. */
3344
9771b263 3345vec<tree>
3f84bf08
MJ
3346ipa_get_vector_of_formal_parms (tree fndecl)
3347{
9771b263 3348 vec<tree> args;
3f84bf08
MJ
3349 int count;
3350 tree parm;
3351
0e8853ee 3352 gcc_assert (!flag_wpa);
310bc633 3353 count = count_formal_params (fndecl);
9771b263 3354 args.create (count);
910ad8de 3355 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
9771b263 3356 args.quick_push (parm);
3f84bf08
MJ
3357
3358 return args;
3359}
3360
3361/* Return a heap allocated vector containing types of formal parameters of
3362 function type FNTYPE. */
3363
9771b263 3364static inline vec<tree>
3f84bf08
MJ
3365get_vector_of_formal_parm_types (tree fntype)
3366{
9771b263 3367 vec<tree> types;
3f84bf08
MJ
3368 int count = 0;
3369 tree t;
3370
3371 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3372 count++;
3373
9771b263 3374 types.create (count);
3f84bf08 3375 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
9771b263 3376 types.quick_push (TREE_VALUE (t));
3f84bf08
MJ
3377
3378 return types;
3379}
3380
3381/* Modify the function declaration FNDECL and its type according to the plan in
3382 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3383 to reflect the actual parameters being modified which are determined by the
3384 base_index field. */
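/* For illustration (a hypothetical example): given the original declaration

     void foo (struct S *s, int i);

   and ADJUSTMENTS consisting of a scalar replacement of a piece of *s
   (neither copy_param nor remove_param set) followed by a plain copy of i,
   the first parameter is dropped from DECL_ARGUMENTS and a new artificial
   PARM_DECL of the requested type, named from SYNTH_PARM_PREFIX ("SYNTH"
   when none is given), is chained in its place, yielding roughly

     void foo (int SYNTH.3, int i);

   TYPE_ARG_TYPES is rebuilt to match. */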
3385
3386void
3387ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
3388 const char *synth_parm_prefix)
3389{
9771b263 3390 vec<tree> oparms, otypes;
3f84bf08
MJ
3391 tree orig_type, new_type = NULL;
3392 tree old_arg_types, t, new_arg_types = NULL;
3393 tree parm, *link = &DECL_ARGUMENTS (fndecl);
9771b263 3394 int i, len = adjustments.length ();
3f84bf08
MJ
3395 tree new_reversed = NULL;
3396 bool care_for_types, last_parm_void;
3397
3398 if (!synth_parm_prefix)
3399 synth_parm_prefix = "SYNTH";
3400
3401 oparms = ipa_get_vector_of_formal_parms (fndecl);
3402 orig_type = TREE_TYPE (fndecl);
3403 old_arg_types = TYPE_ARG_TYPES (orig_type);
3404
3405 /* The following test is an ugly hack; some functions simply don't have any
3406 arguments in their type. This is probably a bug but well... */
3407 care_for_types = (old_arg_types != NULL_TREE);
3408 if (care_for_types)
3409 {
3410 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3411 == void_type_node);
3412 otypes = get_vector_of_formal_parm_types (orig_type);
3413 if (last_parm_void)
9771b263 3414 gcc_assert (oparms.length () + 1 == otypes.length ());
3f84bf08 3415 else
9771b263 3416 gcc_assert (oparms.length () == otypes.length ());
3f84bf08
MJ
3417 }
3418 else
3419 {
3420 last_parm_void = false;
9771b263 3421 otypes.create (0);
3f84bf08
MJ
3422 }
3423
3424 for (i = 0; i < len; i++)
3425 {
3426 struct ipa_parm_adjustment *adj;
3427 gcc_assert (link);
3428
9771b263
DN
3429 adj = &adjustments[i];
3430 parm = oparms[adj->base_index];
3f84bf08
MJ
3431 adj->base = parm;
3432
3433 if (adj->copy_param)
3434 {
3435 if (care_for_types)
9771b263 3436 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3f84bf08
MJ
3437 new_arg_types);
3438 *link = parm;
910ad8de 3439 link = &DECL_CHAIN (parm);
3f84bf08
MJ
3440 }
3441 else if (!adj->remove_param)
3442 {
3443 tree new_parm;
3444 tree ptype;
3445
3446 if (adj->by_ref)
3447 ptype = build_pointer_type (adj->type);
3448 else
3449 ptype = adj->type;
3450
3451 if (care_for_types)
3452 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3453
3454 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3455 ptype);
3456 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
3457
3458 DECL_ARTIFICIAL (new_parm) = 1;
3459 DECL_ARG_TYPE (new_parm) = ptype;
3460 DECL_CONTEXT (new_parm) = fndecl;
3461 TREE_USED (new_parm) = 1;
3462 DECL_IGNORED_P (new_parm) = 1;
3463 layout_decl (new_parm, 0);
3464
3f84bf08
MJ
3465 adj->base = parm;
3466 adj->reduction = new_parm;
3467
3468 *link = new_parm;
3469
910ad8de 3470 link = &DECL_CHAIN (new_parm);
3f84bf08
MJ
3471 }
3472 }
3473
3474 *link = NULL_TREE;
3475
3476 if (care_for_types)
3477 {
3478 new_reversed = nreverse (new_arg_types);
3479 if (last_parm_void)
3480 {
3481 if (new_reversed)
3482 TREE_CHAIN (new_arg_types) = void_list_node;
3483 else
3484 new_reversed = void_list_node;
3485 }
3486 }
3487
3488 /* Use copy_node to preserve as much as possible from the original type
3489 (debug info, attribute lists etc.).
3490 The exception is METHOD_TYPEs, which must have a THIS argument;
3491 when we are asked to remove it, we need to build a new FUNCTION_TYPE
3492 instead. */
3493 if (TREE_CODE (orig_type) != METHOD_TYPE
9771b263
DN
3494 || (adjustments[0].copy_param
3495 && adjustments[0].base_index == 0))
3f84bf08 3496 {
4eb3f32c 3497 new_type = build_distinct_type_copy (orig_type);
3f84bf08
MJ
3498 TYPE_ARG_TYPES (new_type) = new_reversed;
3499 }
3500 else
3501 {
3502 new_type
3503 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3504 new_reversed));
3505 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3506 DECL_VINDEX (fndecl) = NULL_TREE;
3507 }
3508
d402c33d
JH
3509 /* When signature changes, we need to clear builtin info. */
3510 if (DECL_BUILT_IN (fndecl))
3511 {
3512 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3513 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3514 }
3515
3f84bf08
MJ
3516 /* This is a new type, not a copy of an old type. Need to reassociate
3517 variants. We can handle everything except the main variant lazily. */
3518 t = TYPE_MAIN_VARIANT (orig_type);
3519 if (orig_type != t)
3520 {
3521 TYPE_MAIN_VARIANT (new_type) = t;
3522 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3523 TYPE_NEXT_VARIANT (t) = new_type;
3524 }
3525 else
3526 {
3527 TYPE_MAIN_VARIANT (new_type) = new_type;
3528 TYPE_NEXT_VARIANT (new_type) = NULL;
3529 }
3530
3531 TREE_TYPE (fndecl) = new_type;
9b389a5e 3532 DECL_VIRTUAL_P (fndecl) = 0;
9771b263
DN
3533 otypes.release ();
3534 oparms.release ();
3f84bf08
MJ
3535}
3536
3537/* Modify the actual arguments of the call statement STMT as indicated in
3538 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL.
3539 Otherwise it must contain the corresponding call graph edge. */
3540
3541void
3542ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3543 ipa_parm_adjustment_vec adjustments)
3544{
82338059 3545 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
9771b263
DN
3546 vec<tree> vargs;
3547 vec<tree, va_gc> **debug_args = NULL;
3f84bf08 3548 gimple new_stmt;
82338059 3549 gimple_stmt_iterator gsi, prev_gsi;
3f84bf08
MJ
3550 tree callee_decl;
3551 int i, len;
3552
9771b263
DN
3553 len = adjustments.length ();
3554 vargs.create (len);
67348ccc
DM
3555 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
3556 ipa_remove_stmt_references (current_node, stmt);
3f84bf08
MJ
3557
3558 gsi = gsi_for_stmt (stmt);
82338059
MJ
3559 prev_gsi = gsi;
3560 gsi_prev (&prev_gsi);
3f84bf08
MJ
3561 for (i = 0; i < len; i++)
3562 {
3563 struct ipa_parm_adjustment *adj;
3564
9771b263 3565 adj = &adjustments[i];
3f84bf08
MJ
3566
3567 if (adj->copy_param)
3568 {
3569 tree arg = gimple_call_arg (stmt, adj->base_index);
3570
9771b263 3571 vargs.quick_push (arg);
3f84bf08
MJ
3572 }
3573 else if (!adj->remove_param)
3574 {
fffe1e40
MJ
3575 tree expr, base, off;
3576 location_t loc;
f43245d1 3577 unsigned int deref_align = 0;
c1ed6a01 3578 bool deref_base = false;
fffe1e40
MJ
3579
3580 /* We create a new parameter out of the value of the old one, we can
3581 do the following kind of transformations:
3582
3583 - A scalar passed by reference is converted to a scalar passed by
3584 value. (adj->by_ref is false and the type of the original
3585 actual argument is a pointer to a scalar).
3586
3587 - A part of an aggregate is passed instead of the whole aggregate.
3588 The part can be passed either by value or by reference, this is
3589 determined by value of adj->by_ref. Moreover, the code below
3590 handles both situations when the original aggregate is passed by
3591 value (its type is not a pointer) and when it is passed by
3592 reference (it is a pointer to an aggregate).
3593
3594 When the new argument is passed by reference (adj->by_ref is true)
3595 it must be a part of an aggregate and therefore we form it by
3596 simply taking the address of a reference inside the original
3597 aggregate. */
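	     For illustration (hypothetical examples): for a call foo (&s)
	     where only the scalar at byte offset 4 of s is needed and the
	     new parameter is passed by value, the argument becomes a load
	     of that piece, roughly

	       D.123 = MEM[(int *)&s + 4B];
	       foo (D.123);

	     whereas if the piece is passed by reference, the argument is
	     simply its address, &MEM[(int *)&s + 4B]. */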
3598
3599 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3600 base = gimple_call_arg (stmt, adj->base_index);
3a50da34
DC
3601 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3602 : EXPR_LOCATION (base);
fffe1e40 3603
82d49829
MJ
3604 if (TREE_CODE (base) != ADDR_EXPR
3605 && POINTER_TYPE_P (TREE_TYPE (base)))
3606 off = build_int_cst (adj->alias_ptr_type,
fffe1e40 3607 adj->offset / BITS_PER_UNIT);
3f84bf08 3608 else
3f84bf08 3609 {
fffe1e40
MJ
3610 HOST_WIDE_INT base_offset;
3611 tree prev_base;
c1ed6a01 3612 bool addrof;
fffe1e40
MJ
3613
3614 if (TREE_CODE (base) == ADDR_EXPR)
c1ed6a01
MJ
3615 {
3616 base = TREE_OPERAND (base, 0);
3617 addrof = true;
3618 }
3619 else
3620 addrof = false;
fffe1e40
MJ
3621 prev_base = base;
3622 base = get_addr_base_and_unit_offset (base, &base_offset);
3623 /* Aggregate arguments can have non-invariant addresses. */
3624 if (!base)
3625 {
3626 base = build_fold_addr_expr (prev_base);
82d49829 3627 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
3628 adj->offset / BITS_PER_UNIT);
3629 }
3630 else if (TREE_CODE (base) == MEM_REF)
3631 {
c1ed6a01
MJ
3632 if (!addrof)
3633 {
3634 deref_base = true;
3635 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3636 }
82d49829 3637 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
3638 base_offset
3639 + adj->offset / BITS_PER_UNIT);
3640 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
d35936ab 3641 off);
fffe1e40
MJ
3642 base = TREE_OPERAND (base, 0);
3643 }
3644 else
3645 {
82d49829 3646 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
3647 base_offset
3648 + adj->offset / BITS_PER_UNIT);
3649 base = build_fold_addr_expr (base);
3650 }
3f84bf08 3651 }
fffe1e40 3652
3a5a825a
RG
3653 if (!adj->by_ref)
3654 {
3655 tree type = adj->type;
3656 unsigned int align;
3657 unsigned HOST_WIDE_INT misalign;
644ffefd 3658
c1ed6a01
MJ
3659 if (deref_base)
3660 {
3661 align = deref_align;
3662 misalign = 0;
3663 }
3664 else
3665 {
3666 get_pointer_alignment_1 (base, &align, &misalign);
3667 if (TYPE_ALIGN (type) > align)
3668 align = TYPE_ALIGN (type);
3669 }
27bcd47c
LC
3670 misalign += (tree_to_double_int (off)
3671 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3a5a825a
RG
3672 * BITS_PER_UNIT);
3673 misalign = misalign & (align - 1);
3674 if (misalign != 0)
3675 align = (misalign & -misalign);
3676 if (align < TYPE_ALIGN (type))
3677 type = build_aligned_type (type, align);
3678 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3679 }
3680 else
3681 {
3682 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3683 expr = build_fold_addr_expr (expr);
3684 }
fffe1e40 3685
3f84bf08
MJ
3686 expr = force_gimple_operand_gsi (&gsi, expr,
3687 adj->by_ref
3688 || is_gimple_reg_type (adj->type),
3689 NULL, true, GSI_SAME_STMT);
9771b263 3690 vargs.quick_push (expr);
3f84bf08 3691 }
ddb555ed
JJ
3692 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
3693 {
3694 unsigned int ix;
3695 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3696 gimple def_temp;
3697
3698 arg = gimple_call_arg (stmt, adj->base_index);
3699 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3700 {
3701 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3702 continue;
3703 arg = fold_convert_loc (gimple_location (stmt),
3704 TREE_TYPE (origin), arg);
3705 }
3706 if (debug_args == NULL)
3707 debug_args = decl_debug_args_insert (callee_decl);
9771b263 3708 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
ddb555ed
JJ
3709 if (ddecl == origin)
3710 {
9771b263 3711 ddecl = (**debug_args)[ix + 1];
ddb555ed
JJ
3712 break;
3713 }
3714 if (ddecl == NULL)
3715 {
3716 ddecl = make_node (DEBUG_EXPR_DECL);
3717 DECL_ARTIFICIAL (ddecl) = 1;
3718 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3719 DECL_MODE (ddecl) = DECL_MODE (origin);
3720
9771b263
DN
3721 vec_safe_push (*debug_args, origin);
3722 vec_safe_push (*debug_args, ddecl);
ddb555ed 3723 }
9771b263 3724 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
ddb555ed
JJ
3725 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3726 }
3f84bf08
MJ
3727 }
3728
3729 if (dump_file && (dump_flags & TDF_DETAILS))
3730 {
3731 fprintf (dump_file, "replacing stmt:");
3732 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3733 }
3734
3f84bf08 3735 new_stmt = gimple_build_call_vec (callee_decl, vargs);
9771b263 3736 vargs.release ();
3f84bf08
MJ
3737 if (gimple_call_lhs (stmt))
3738 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3739
3740 gimple_set_block (new_stmt, gimple_block (stmt));
3741 if (gimple_has_location (stmt))
3742 gimple_set_location (new_stmt, gimple_location (stmt));
3f84bf08 3743 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
a7a296ab 3744 gimple_call_copy_flags (new_stmt, stmt);
3f84bf08
MJ
3745
3746 if (dump_file && (dump_flags & TDF_DETAILS))
3747 {
3748 fprintf (dump_file, "with stmt:");
3749 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3750 fprintf (dump_file, "\n");
3751 }
3752 gsi_replace (&gsi, new_stmt, true);
3753 if (cs)
3754 cgraph_set_call_stmt (cs, new_stmt);
82338059
MJ
3755 do
3756 {
3757 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3758 gsi_prev (&gsi);
3759 }
3760 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3761 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
3762
3f84bf08
MJ
3763 update_ssa (TODO_update_ssa);
3764 free_dominance_info (CDI_DOMINATORS);
3765}
3766
3767/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3768
3769static bool
3770index_in_adjustments_multiple_times_p (int base_index,
3771 ipa_parm_adjustment_vec adjustments)
3772{
9771b263 3773 int i, len = adjustments.length ();
3f84bf08
MJ
3774 bool one = false;
3775
3776 for (i = 0; i < len; i++)
3777 {
3778 struct ipa_parm_adjustment *adj;
9771b263 3779 adj = &adjustments[i];
3f84bf08
MJ
3780
3781 if (adj->base_index == base_index)
3782 {
3783 if (one)
3784 return true;
3785 else
3786 one = true;
3787 }
3788 }
3789 return false;
3790}
3791
3792
3793/* Return adjustments that should have the same effect on function parameters
3794 and call arguments as if they were first changed according to adjustments in
3795 INNER and then by adjustments in OUTER. */
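/* A worked (hypothetical) example: for original parameters (a, b, c), let
   INNER copy a, remove b and copy c, so the intermediate function has
   parameters (a, c).  If OUTER, expressed against that intermediate
   function, copies its parameter 0 and replaces its parameter 1 with a
   piece at offset 32, the combined vector expressed against the original
   parameters copies a (base_index 0), takes the piece of c at offset 32
   (base_index 2) and removes b (base_index 1). */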
3796
3797ipa_parm_adjustment_vec
3798ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3799 ipa_parm_adjustment_vec outer)
3800{
9771b263
DN
3801 int i, outlen = outer.length ();
3802 int inlen = inner.length ();
3f84bf08
MJ
3803 int removals = 0;
3804 ipa_parm_adjustment_vec adjustments, tmp;
3805
9771b263 3806 tmp.create (inlen);
3f84bf08
MJ
3807 for (i = 0; i < inlen; i++)
3808 {
3809 struct ipa_parm_adjustment *n;
9771b263 3810 n = &inner[i];
3f84bf08
MJ
3811
3812 if (n->remove_param)
3813 removals++;
3814 else
9771b263 3815 tmp.quick_push (*n);
3f84bf08
MJ
3816 }
3817
9771b263 3818 adjustments.create (outlen + removals);
3f84bf08
MJ
3819 for (i = 0; i < outlen; i++)
3820 {
f32682ca 3821 struct ipa_parm_adjustment r;
9771b263
DN
3822 struct ipa_parm_adjustment *out = &outer[i];
3823 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3f84bf08 3824
f32682ca 3825 memset (&r, 0, sizeof (r));
3f84bf08
MJ
3826 gcc_assert (!in->remove_param);
3827 if (out->remove_param)
3828 {
3829 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3830 {
f32682ca 3831 r.remove_param = true;
9771b263 3832 adjustments.quick_push (r);
3f84bf08
MJ
3833 }
3834 continue;
3835 }
3836
f32682ca
DN
3837 r.base_index = in->base_index;
3838 r.type = out->type;
3f84bf08
MJ
3839
3840 /* FIXME: Create nonlocal value too. */
3841
3842 if (in->copy_param && out->copy_param)
f32682ca 3843 r.copy_param = true;
3f84bf08 3844 else if (in->copy_param)
f32682ca 3845 r.offset = out->offset;
3f84bf08 3846 else if (out->copy_param)
f32682ca 3847 r.offset = in->offset;
3f84bf08 3848 else
f32682ca 3849 r.offset = in->offset + out->offset;
9771b263 3850 adjustments.quick_push (r);
3f84bf08
MJ
3851 }
3852
3853 for (i = 0; i < inlen; i++)
3854 {
9771b263 3855 struct ipa_parm_adjustment *n = &inner[i];
3f84bf08
MJ
3856
3857 if (n->remove_param)
9771b263 3858 adjustments.quick_push (*n);
3f84bf08
MJ
3859 }
3860
9771b263 3861 tmp.release ();
3f84bf08
MJ
3862 return adjustments;
3863}
3864
3865/* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human
3866 friendly way, assuming they are meant to be applied to FNDECL. */
3867
3868void
3869ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3870 tree fndecl)
3871{
9771b263 3872 int i, len = adjustments.length ();
3f84bf08 3873 bool first = true;
9771b263 3874 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3f84bf08
MJ
3875
3876 fprintf (file, "IPA param adjustments: ");
3877 for (i = 0; i < len; i++)
3878 {
3879 struct ipa_parm_adjustment *adj;
9771b263 3880 adj = &adjustments[i];
3f84bf08
MJ
3881
3882 if (!first)
3883 fprintf (file, " ");
3884 else
3885 first = false;
3886
3887 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
9771b263 3888 print_generic_expr (file, parms[adj->base_index], 0);
3f84bf08
MJ
3889 if (adj->base)
3890 {
3891 fprintf (file, ", base: ");
3892 print_generic_expr (file, adj->base, 0);
3893 }
3894 if (adj->reduction)
3895 {
3896 fprintf (file, ", reduction: ");
3897 print_generic_expr (file, adj->reduction, 0);
3898 }
3899 if (adj->new_ssa_base)
3900 {
3901 fprintf (file, ", new_ssa_base: ");
3902 print_generic_expr (file, adj->new_ssa_base, 0);
3903 }
3904
3905 if (adj->copy_param)
3906 fprintf (file, ", copy_param");
3907 else if (adj->remove_param)
3908 fprintf (file, ", remove_param");
3909 else
3910 fprintf (file, ", offset %li", (long) adj->offset);
3911 if (adj->by_ref)
3912 fprintf (file, ", by_ref");
3913 print_node_brief (file, ", type: ", adj->type, 0);
3914 fprintf (file, "\n");
3915 }
9771b263 3916 parms.release ();
3f84bf08
MJ
3917}
3918
2c9561b5
MJ
3919/* Dump the linked list of aggregate replacement values AV to F. */
3920
3921void
3922ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3923{
3924 bool comma = false;
3925 fprintf (f, " Aggregate replacements:");
3926 for (; av; av = av->next)
3927 {
3928 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3929 av->index, av->offset);
3930 print_generic_expr (f, av->value, 0);
3931 comma = true;
3932 }
3933 fprintf (f, "\n");
3934}
3935
fb3f88cc
JH
3936/* Stream out jump function JUMP_FUNC to OB. */
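/* The streamed layout, which ipa_read_jump_function below must mirror, is
   roughly: the jump function type as a uhwi; then the type-specific fields
   (for KNOWN_TYPE the offset and the base and component types, for CONST
   the constant tree, for PASS_THROUGH the operation followed either by the
   formal_id and a bitpack of agg/type_preserved for NOP_EXPR or by the
   operand tree and the formal_id otherwise, for ANCESTOR the offset, type,
   formal_id and a bitpack of agg/type_preserved); finally the number of
   aggregate items, a by_ref bitpack if there are any, and each item's
   offset and value. */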
3937
3938static void
3939ipa_write_jump_function (struct output_block *ob,
3940 struct ipa_jump_func *jump_func)
3941{
8b7773a4
MJ
3942 struct ipa_agg_jf_item *item;
3943 struct bitpack_d bp;
3944 int i, count;
fb3f88cc 3945
8b7773a4 3946 streamer_write_uhwi (ob, jump_func->type);
fb3f88cc
JH
3947 switch (jump_func->type)
3948 {
3949 case IPA_JF_UNKNOWN:
3950 break;
b258210c 3951 case IPA_JF_KNOWN_TYPE:
c7573249
MJ
3952 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3953 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3954 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
b258210c 3955 break;
fb3f88cc 3956 case IPA_JF_CONST:
5368224f 3957 gcc_assert (
4502fe8d
MJ
3958 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
3959 stream_write_tree (ob, jump_func->value.constant.value, true);
fb3f88cc
JH
3960 break;
3961 case IPA_JF_PASS_THROUGH:
412288f1 3962 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4a53743e
MJ
3963 if (jump_func->value.pass_through.operation == NOP_EXPR)
3964 {
3965 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3966 bp = bitpack_create (ob->main_stream);
3967 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
b8f6e610 3968 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4a53743e
MJ
3969 streamer_write_bitpack (&bp);
3970 }
3971 else
3972 {
3973 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3974 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3975 }
fb3f88cc
JH
3976 break;
3977 case IPA_JF_ANCESTOR:
412288f1 3978 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
b9393656 3979 stream_write_tree (ob, jump_func->value.ancestor.type, true);
412288f1 3980 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
8b7773a4
MJ
3981 bp = bitpack_create (ob->main_stream);
3982 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
b8f6e610 3983 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
8b7773a4 3984 streamer_write_bitpack (&bp);
fb3f88cc 3985 break;
8b7773a4
MJ
3986 }
3987
9771b263 3988 count = vec_safe_length (jump_func->agg.items);
8b7773a4
MJ
3989 streamer_write_uhwi (ob, count);
3990 if (count)
3991 {
3992 bp = bitpack_create (ob->main_stream);
3993 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3994 streamer_write_bitpack (&bp);
3995 }
3996
9771b263 3997 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
8b7773a4
MJ
3998 {
3999 streamer_write_uhwi (ob, item->offset);
4000 stream_write_tree (ob, item->value, true);
fb3f88cc
JH
4001 }
4002}
4003
4004/* Read in jump function JUMP_FUNC from IB. */
4005
4006static void
4007ipa_read_jump_function (struct lto_input_block *ib,
4008 struct ipa_jump_func *jump_func,
4502fe8d 4009 struct cgraph_edge *cs,
fb3f88cc
JH
4010 struct data_in *data_in)
4011{
4a53743e
MJ
4012 enum jump_func_type jftype;
4013 enum tree_code operation;
8b7773a4 4014 int i, count;
fb3f88cc 4015
4a53743e
MJ
4016 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
4017 switch (jftype)
fb3f88cc
JH
4018 {
4019 case IPA_JF_UNKNOWN:
4a53743e 4020 jump_func->type = IPA_JF_UNKNOWN;
fb3f88cc 4021 break;
b258210c 4022 case IPA_JF_KNOWN_TYPE:
4a53743e
MJ
4023 {
4024 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4025 tree base_type = stream_read_tree (ib, data_in);
4026 tree component_type = stream_read_tree (ib, data_in);
4027
4028 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
4029 break;
4030 }
fb3f88cc 4031 case IPA_JF_CONST:
4502fe8d 4032 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
fb3f88cc
JH
4033 break;
4034 case IPA_JF_PASS_THROUGH:
4a53743e
MJ
4035 operation = (enum tree_code) streamer_read_uhwi (ib);
4036 if (operation == NOP_EXPR)
4037 {
4038 int formal_id = streamer_read_uhwi (ib);
4039 struct bitpack_d bp = streamer_read_bitpack (ib);
4040 bool agg_preserved = bp_unpack_value (&bp, 1);
b8f6e610
MJ
4041 bool type_preserved = bp_unpack_value (&bp, 1);
4042 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
4043 type_preserved);
4a53743e
MJ
4044 }
4045 else
4046 {
4047 tree operand = stream_read_tree (ib, data_in);
4048 int formal_id = streamer_read_uhwi (ib);
4049 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
4050 operation);
4051 }
fb3f88cc
JH
4052 break;
4053 case IPA_JF_ANCESTOR:
4a53743e
MJ
4054 {
4055 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
4056 tree type = stream_read_tree (ib, data_in);
4057 int formal_id = streamer_read_uhwi (ib);
4058 struct bitpack_d bp = streamer_read_bitpack (ib);
4059 bool agg_preserved = bp_unpack_value (&bp, 1);
b8f6e610 4060 bool type_preserved = bp_unpack_value (&bp, 1);
4a53743e 4061
b8f6e610
MJ
4062 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
4063 type_preserved);
4a53743e
MJ
4064 break;
4065 }
8b7773a4
MJ
4066 }
4067
4068 count = streamer_read_uhwi (ib);
9771b263 4069 vec_alloc (jump_func->agg.items, count);
8b7773a4
MJ
4070 if (count)
4071 {
4a53743e 4072 struct bitpack_d bp = streamer_read_bitpack (ib);
8b7773a4
MJ
4073 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
4074 }
4075 for (i = 0; i < count; i++)
4076 {
f32682ca
DN
4077 struct ipa_agg_jf_item item;
4078 item.offset = streamer_read_uhwi (ib);
4079 item.value = stream_read_tree (ib, data_in);
9771b263 4080 jump_func->agg.items->quick_push (item);
fb3f88cc
JH
4081 }
4082}
4083
e33c6cd6
MJ
4084/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
4085 relevant to indirect inlining to OB. */
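/* The fields streamed here and read back by ipa_read_indirect_edge_info
   are: param_index and offset as HWIs, a bitpack with the polymorphic,
   agg_contents, member_ptr, by_ref, maybe_in_construction and
   maybe_derived_type flags, and, for polymorphic calls only, otr_token
   followed by the otr_type and outer_type trees. */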
661e7330
MJ
4086
4087static void
e33c6cd6
MJ
4088ipa_write_indirect_edge_info (struct output_block *ob,
4089 struct cgraph_edge *cs)
661e7330 4090{
e33c6cd6 4091 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 4092 struct bitpack_d bp;
e33c6cd6 4093
412288f1 4094 streamer_write_hwi (ob, ii->param_index);
8b7773a4 4095 streamer_write_hwi (ob, ii->offset);
2465dcc2
RG
4096 bp = bitpack_create (ob->main_stream);
4097 bp_pack_value (&bp, ii->polymorphic, 1);
8b7773a4 4098 bp_pack_value (&bp, ii->agg_contents, 1);
c13bc3d9 4099 bp_pack_value (&bp, ii->member_ptr, 1);
8b7773a4 4100 bp_pack_value (&bp, ii->by_ref, 1);
68377e53
JH
4101 bp_pack_value (&bp, ii->maybe_in_construction, 1);
4102 bp_pack_value (&bp, ii->maybe_derived_type, 1);
412288f1 4103 streamer_write_bitpack (&bp);
b258210c
MJ
4104
4105 if (ii->polymorphic)
4106 {
412288f1 4107 streamer_write_hwi (ob, ii->otr_token);
b9393656 4108 stream_write_tree (ob, ii->otr_type, true);
68377e53 4109 stream_write_tree (ob, ii->outer_type, true);
b258210c 4110 }
661e7330
MJ
4111}
4112
e33c6cd6
MJ
4113/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
4114 relevant to indirect inlining from IB. */
661e7330
MJ
4115
4116static void
e33c6cd6
MJ
4117ipa_read_indirect_edge_info (struct lto_input_block *ib,
4118 struct data_in *data_in ATTRIBUTE_UNUSED,
4119 struct cgraph_edge *cs)
661e7330 4120{
e33c6cd6 4121 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 4122 struct bitpack_d bp;
661e7330 4123
412288f1 4124 ii->param_index = (int) streamer_read_hwi (ib);
8b7773a4 4125 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
412288f1 4126 bp = streamer_read_bitpack (ib);
2465dcc2 4127 ii->polymorphic = bp_unpack_value (&bp, 1);
8b7773a4 4128 ii->agg_contents = bp_unpack_value (&bp, 1);
c13bc3d9 4129 ii->member_ptr = bp_unpack_value (&bp, 1);
8b7773a4 4130 ii->by_ref = bp_unpack_value (&bp, 1);
68377e53
JH
4131 ii->maybe_in_construction = bp_unpack_value (&bp, 1);
4132 ii->maybe_derived_type = bp_unpack_value (&bp, 1);
b258210c
MJ
4133 if (ii->polymorphic)
4134 {
412288f1 4135 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
b9393656 4136 ii->otr_type = stream_read_tree (ib, data_in);
68377e53 4137 ii->outer_type = stream_read_tree (ib, data_in);
b258210c 4138 }
661e7330
MJ
4139}
4140
fb3f88cc
JH
4141/* Stream out NODE info to OB. */
4142
4143static void
4144ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4145{
4146 int node_ref;
7380e6ef 4147 lto_symtab_encoder_t encoder;
fb3f88cc
JH
4148 struct ipa_node_params *info = IPA_NODE_REF (node);
4149 int j;
4150 struct cgraph_edge *e;
2465dcc2 4151 struct bitpack_d bp;
fb3f88cc 4152
7380e6ef 4153 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 4154 node_ref = lto_symtab_encoder_encode (encoder, node);
412288f1 4155 streamer_write_uhwi (ob, node_ref);
fb3f88cc 4156
0e8853ee
JH
4157 streamer_write_uhwi (ob, ipa_get_param_count (info));
4158 for (j = 0; j < ipa_get_param_count (info); j++)
4159 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
2465dcc2 4160 bp = bitpack_create (ob->main_stream);
062c604f 4161 gcc_assert (info->uses_analysis_done
661e7330 4162 || ipa_get_param_count (info) == 0);
fb3f88cc
JH
4163 gcc_assert (!info->node_enqueued);
4164 gcc_assert (!info->ipcp_orig_node);
4165 for (j = 0; j < ipa_get_param_count (info); j++)
310bc633 4166 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
412288f1 4167 streamer_write_bitpack (&bp);
4502fe8d
MJ
4168 for (j = 0; j < ipa_get_param_count (info); j++)
4169 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
fb3f88cc
JH
4170 for (e = node->callees; e; e = e->next_callee)
4171 {
4172 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4173
412288f1 4174 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
fb3f88cc
JH
4175 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4176 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4177 }
e33c6cd6 4178 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe
JH
4179 {
4180 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4181
412288f1 4182 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
c8246dbe
JH
4183 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4184 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4185 ipa_write_indirect_edge_info (ob, e);
4186 }
fb3f88cc
JH
4187}
4188
61502ca8 4189/* Stream in NODE info from IB. */
fb3f88cc
JH
4190
4191static void
4192ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4193 struct data_in *data_in)
4194{
4195 struct ipa_node_params *info = IPA_NODE_REF (node);
4196 int k;
4197 struct cgraph_edge *e;
2465dcc2 4198 struct bitpack_d bp;
fb3f88cc 4199
0e8853ee 4200 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
fb3f88cc 4201
0e8853ee
JH
4202 for (k = 0; k < ipa_get_param_count (info); k++)
4203 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4204
412288f1 4205 bp = streamer_read_bitpack (ib);
fb3f88cc 4206 if (ipa_get_param_count (info) != 0)
062c604f 4207 info->uses_analysis_done = true;
fb3f88cc
JH
4208 info->node_enqueued = false;
4209 for (k = 0; k < ipa_get_param_count (info); k++)
310bc633 4210 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
1b14621a
MJ
4211 for (k = 0; k < ipa_get_param_count (info); k++)
4212 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
fb3f88cc
JH
4213 for (e = node->callees; e; e = e->next_callee)
4214 {
4215 struct ipa_edge_args *args = IPA_EDGE_REF (e);
412288f1 4216 int count = streamer_read_uhwi (ib);
fb3f88cc 4217
fb3f88cc
JH
4218 if (!count)
4219 continue;
9771b263 4220 vec_safe_grow_cleared (args->jump_functions, count);
fb3f88cc 4221
fb3f88cc 4222 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4502fe8d
MJ
4223 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4224 data_in);
fb3f88cc 4225 }
e33c6cd6 4226 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe
JH
4227 {
4228 struct ipa_edge_args *args = IPA_EDGE_REF (e);
412288f1 4229 int count = streamer_read_uhwi (ib);
c8246dbe 4230
c8246dbe
JH
4231 if (count)
4232 {
9771b263 4233 vec_safe_grow_cleared (args->jump_functions, count);
c8246dbe 4234 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4502fe8d 4235 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
606d9a09 4236 data_in);
c8246dbe
JH
4237 }
4238 ipa_read_indirect_edge_info (ib, data_in, e);
4239 }
fb3f88cc
JH
4240}
4241
4242/* Write jump functions for nodes in SET. */
4243
4244void
f27c1867 4245ipa_prop_write_jump_functions (void)
fb3f88cc
JH
4246{
4247 struct cgraph_node *node;
93536c97 4248 struct output_block *ob;
fb3f88cc 4249 unsigned int count = 0;
f27c1867
JH
4250 lto_symtab_encoder_iterator lsei;
4251 lto_symtab_encoder_t encoder;
4252
fb3f88cc 4253
9771b263 4254 if (!ipa_node_params_vector.exists ())
93536c97 4255 return;
fb3f88cc 4256
93536c97 4257 ob = create_output_block (LTO_section_jump_functions);
f27c1867 4258 encoder = ob->decl_state->symtab_node_encoder;
93536c97 4259 ob->cgraph_node = NULL;
f27c1867
JH
4260 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4261 lsei_next_function_in_partition (&lsei))
fb3f88cc 4262 {
f27c1867 4263 node = lsei_cgraph_node (lsei);
c47d0034
JH
4264 if (cgraph_function_with_gimple_body_p (node)
4265 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
4266 count++;
4267 }
4268
412288f1 4269 streamer_write_uhwi (ob, count);
fb3f88cc
JH
4270
4271 /* Process all of the functions. */
f27c1867
JH
4272 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4273 lsei_next_function_in_partition (&lsei))
fb3f88cc 4274 {
f27c1867 4275 node = lsei_cgraph_node (lsei);
c47d0034
JH
4276 if (cgraph_function_with_gimple_body_p (node)
4277 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
4278 ipa_write_node_info (ob, node);
4279 }
412288f1 4280 streamer_write_char_stream (ob->main_stream, 0);
fb3f88cc
JH
4281 produce_asm (ob, NULL);
4282 destroy_output_block (ob);
4283}
4284
4285/* Read section in file FILE_DATA of length LEN with data DATA. */
4286
4287static void
4288ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4289 size_t len)
4290{
4291 const struct lto_function_header *header =
4292 (const struct lto_function_header *) data;
4ad9a9de
EB
4293 const int cfg_offset = sizeof (struct lto_function_header);
4294 const int main_offset = cfg_offset + header->cfg_size;
4295 const int string_offset = main_offset + header->main_size;
fb3f88cc
JH
4296 struct data_in *data_in;
4297 struct lto_input_block ib_main;
4298 unsigned int i;
4299 unsigned int count;
4300
4301 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4302 header->main_size);
4303
4304 data_in =
4305 lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 4306 header->string_size, vNULL);
412288f1 4307 count = streamer_read_uhwi (&ib_main);
fb3f88cc
JH
4308
4309 for (i = 0; i < count; i++)
4310 {
4311 unsigned int index;
4312 struct cgraph_node *node;
7380e6ef 4313 lto_symtab_encoder_t encoder;
fb3f88cc 4314
412288f1 4315 index = streamer_read_uhwi (&ib_main);
7380e6ef
JH
4316 encoder = file_data->symtab_node_encoder;
4317 node = cgraph (lto_symtab_encoder_deref (encoder, index));
67348ccc 4318 gcc_assert (node->definition);
fb3f88cc
JH
4319 ipa_read_node_info (&ib_main, node, data_in);
4320 }
4321 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4322 len);
4323 lto_data_in_delete (data_in);
4324}
4325
4326/* Read ipcp jump functions. */
4327
4328void
4329ipa_prop_read_jump_functions (void)
4330{
4331 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4332 struct lto_file_decl_data *file_data;
4333 unsigned int j = 0;
4334
4335 ipa_check_create_node_params ();
4336 ipa_check_create_edge_args ();
4337 ipa_register_cgraph_hooks ();
4338
4339 while ((file_data = file_data_vec[j++]))
4340 {
4341 size_t len;
4342 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4343
4344 if (data)
4345 ipa_prop_read_section (file_data, data, len);
4346 }
4347}
4348
b8698a0f 4349/* After merging units, we can get a mismatch in argument counts.
61502ca8 4350 Also, decl merging might have rendered parameter lists obsolete.
fb3f88cc
JH
4351 Make sure the basic per-node and per-edge data structures exist. */
4352
4353void
4354ipa_update_after_lto_read (void)
4355{
05d3aa37
MJ
4356 ipa_check_create_node_params ();
4357 ipa_check_create_edge_args ();
fb3f88cc 4358}
2c9561b5
MJ
4359
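/* Stream out the aggregate value replacement chain of NODE to OB. */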
4360void
4361write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4362{
4363 int node_ref;
4364 unsigned int count = 0;
4365 lto_symtab_encoder_t encoder;
4366 struct ipa_agg_replacement_value *aggvals, *av;
4367
4368 aggvals = ipa_get_agg_replacements_for_node (node);
4369 encoder = ob->decl_state->symtab_node_encoder;
67348ccc 4370 node_ref = lto_symtab_encoder_encode (encoder, node);
2c9561b5
MJ
4371 streamer_write_uhwi (ob, node_ref);
4372
4373 for (av = aggvals; av; av = av->next)
4374 count++;
4375 streamer_write_uhwi (ob, count);
4376
4377 for (av = aggvals; av; av = av->next)
4378 {
7b920a9a
MJ
4379 struct bitpack_d bp;
4380
2c9561b5
MJ
4381 streamer_write_uhwi (ob, av->offset);
4382 streamer_write_uhwi (ob, av->index);
4383 stream_write_tree (ob, av->value, true);
7b920a9a
MJ
4384
4385 bp = bitpack_create (ob->main_stream);
4386 bp_pack_value (&bp, av->by_ref, 1);
4387 streamer_write_bitpack (&bp);
2c9561b5
MJ
4388 }
4389}
4390
4391/* Stream in the aggregate value replacement chain for NODE from IB. */
4392
4393static void
4394read_agg_replacement_chain (struct lto_input_block *ib,
4395 struct cgraph_node *node,
4396 struct data_in *data_in)
4397{
4398 struct ipa_agg_replacement_value *aggvals = NULL;
4399 unsigned int count, i;
4400
4401 count = streamer_read_uhwi (ib);
4402 for (i = 0; i < count; i++)
4403 {
4404 struct ipa_agg_replacement_value *av;
7b920a9a 4405 struct bitpack_d bp;
2c9561b5
MJ
4406
4407 av = ggc_alloc_ipa_agg_replacement_value ();
4408 av->offset = streamer_read_uhwi (ib);
4409 av->index = streamer_read_uhwi (ib);
4410 av->value = stream_read_tree (ib, data_in);
7b920a9a
MJ
4411 bp = streamer_read_bitpack (ib);
4412 av->by_ref = bp_unpack_value (&bp, 1);
2c9561b5
MJ
4413 av->next = aggvals;
4414 aggvals = av;
4415 }
4416 ipa_set_node_agg_value_chain (node, aggvals);
4417}
4418
4419/* Write all aggregate replacement values for nodes in the set. */
4420
4421void
4422ipa_prop_write_all_agg_replacement (void)
4423{
4424 struct cgraph_node *node;
4425 struct output_block *ob;
4426 unsigned int count = 0;
4427 lto_symtab_encoder_iterator lsei;
4428 lto_symtab_encoder_t encoder;
4429
4430 if (!ipa_node_agg_replacements)
4431 return;
4432
4433 ob = create_output_block (LTO_section_ipcp_transform);
4434 encoder = ob->decl_state->symtab_node_encoder;
4435 ob->cgraph_node = NULL;
4436 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4437 lsei_next_function_in_partition (&lsei))
4438 {
4439 node = lsei_cgraph_node (lsei);
4440 if (cgraph_function_with_gimple_body_p (node)
4441 && ipa_get_agg_replacements_for_node (node) != NULL)
4442 count++;
4443 }
4444
4445 streamer_write_uhwi (ob, count);
4446
4447 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4448 lsei_next_function_in_partition (&lsei))
4449 {
4450 node = lsei_cgraph_node (lsei);
4451 if (cgraph_function_with_gimple_body_p (node)
4452 && ipa_get_agg_replacements_for_node (node) != NULL)
4453 write_agg_replacement_chain (ob, node);
4454 }
4455 streamer_write_char_stream (ob->main_stream, 0);
4456 produce_asm (ob, NULL);
4457 destroy_output_block (ob);
4458}
4459
4460/* Read replacements section in file FILE_DATA of length LEN with data
4461 DATA. */
4462
4463static void
4464read_replacements_section (struct lto_file_decl_data *file_data,
4465 const char *data,
4466 size_t len)
4467{
4468 const struct lto_function_header *header =
4469 (const struct lto_function_header *) data;
4470 const int cfg_offset = sizeof (struct lto_function_header);
4471 const int main_offset = cfg_offset + header->cfg_size;
4472 const int string_offset = main_offset + header->main_size;
4473 struct data_in *data_in;
4474 struct lto_input_block ib_main;
4475 unsigned int i;
4476 unsigned int count;
4477
4478 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4479 header->main_size);
4480
4481 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 4482 header->string_size, vNULL);
2c9561b5
MJ
4483 count = streamer_read_uhwi (&ib_main);
4484
4485 for (i = 0; i < count; i++)
4486 {
4487 unsigned int index;
4488 struct cgraph_node *node;
4489 lto_symtab_encoder_t encoder;
4490
4491 index = streamer_read_uhwi (&ib_main);
4492 encoder = file_data->symtab_node_encoder;
4493 node = cgraph (lto_symtab_encoder_deref (encoder, index));
67348ccc 4494 gcc_assert (node->definition);
2c9561b5
MJ
4495 read_agg_replacement_chain (&ib_main, node, data_in);
4496 }
4497 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4498 len);
4499 lto_data_in_delete (data_in);
4500}
4501
4502/* Read IPA-CP aggregate replacements. */
4503
4504void
4505ipa_prop_read_all_agg_replacement (void)
4506{
4507 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4508 struct lto_file_decl_data *file_data;
4509 unsigned int j = 0;
4510
4511 while ((file_data = file_data_vec[j++]))
4512 {
4513 size_t len;
4514 const char *data = lto_get_section_data (file_data,
4515 LTO_section_ipcp_transform,
4516 NULL, &len);
4517 if (data)
4518 read_replacements_section (file_data, data, len);
4519 }
4520}
4521
4522/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4523 NODE. */
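/* For instance (a hypothetical example): if the clone NODE was created by
   skipping parameter 1 of four original parameters, the mapping built below
   is {0, -1, 1, 2}, so a replacement recorded for original parameter 2 is
   remapped to index 1 of the clone. */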
4524
4525static void
4526adjust_agg_replacement_values (struct cgraph_node *node,
4527 struct ipa_agg_replacement_value *aggval)
4528{
4529 struct ipa_agg_replacement_value *v;
4530 int i, c = 0, d = 0, *adj;
4531
4532 if (!node->clone.combined_args_to_skip)
4533 return;
4534
4535 for (v = aggval; v; v = v->next)
4536 {
4537 gcc_assert (v->index >= 0);
4538 if (c < v->index)
4539 c = v->index;
4540 }
4541 c++;
4542
4543 adj = XALLOCAVEC (int, c);
4544 for (i = 0; i < c; i++)
4545 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4546 {
4547 adj[i] = -1;
4548 d++;
4549 }
4550 else
4551 adj[i] = i - d;
4552
4553 for (v = aggval; v; v = v->next)
4554 v->index = adj[v->index];
4555}
4556
4557
4558/* Function body transformation phase. */
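/* It looks for loads from aggregate parameters whose value IPA-CP has
   determined to be constant and replaces them.  For illustration (a
   hypothetical example): if the replacement chain of this clone says the
   32-bit field at offset 0 of parameter 0 is always 7, a statement

     x_1 = p_2(D)->f;

   is rewritten to

     x_1 = 7;  */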
4559
4560unsigned int
4561ipcp_transform_function (struct cgraph_node *node)
4562{
6e1aa848 4563 vec<ipa_param_descriptor_t> descriptors = vNULL;
2c9561b5
MJ
4564 struct param_analysis_info *parms_ainfo;
4565 struct ipa_agg_replacement_value *aggval;
4566 gimple_stmt_iterator gsi;
4567 basic_block bb;
4568 int param_count;
4569 bool cfg_changed = false, something_changed = false;
4570
4571 gcc_checking_assert (cfun);
4572 gcc_checking_assert (current_function_decl);
4573
4574 if (dump_file)
4575 fprintf (dump_file, "Modification phase of node %s/%i\n",
fec39fa6 4576 node->name (), node->order);
2c9561b5
MJ
4577
4578 aggval = ipa_get_agg_replacements_for_node (node);
4579 if (!aggval)
4580 return 0;
67348ccc 4581 param_count = count_formal_params (node->decl);
2c9561b5
MJ
4582 if (param_count == 0)
4583 return 0;
4584 adjust_agg_replacement_values (node, aggval);
4585 if (dump_file)
4586 ipa_dump_agg_replacement_values (dump_file, aggval);
4587 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4588 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
9771b263 4589 descriptors.safe_grow_cleared (param_count);
2c9561b5
MJ
4590 ipa_populate_param_decls (node, descriptors);
4591
4592 FOR_EACH_BB (bb)
4593 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4594 {
4595 struct ipa_agg_replacement_value *v;
4596 gimple stmt = gsi_stmt (gsi);
4597 tree rhs, val, t;
3ff2ca23 4598 HOST_WIDE_INT offset, size;
2c9561b5
MJ
4599 int index;
4600 bool by_ref, vce;
4601
4602 if (!gimple_assign_load_p (stmt))
4603 continue;
4604 rhs = gimple_assign_rhs1 (stmt);
4605 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4606 continue;
4607
4608 vce = false;
4609 t = rhs;
4610 while (handled_component_p (t))
4611 {
4612 /* V_C_E can do things like convert an array of integers to one
4613 bigger integer and similar things we do not handle below. */
4614 if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
4615 {
4616 vce = true;
4617 break;
4618 }
4619 t = TREE_OPERAND (t, 0);
4620 }
4621 if (vce)
4622 continue;
4623
4624 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
3ff2ca23 4625 rhs, &index, &offset, &size, &by_ref))
2c9561b5
MJ
4626 continue;
4627 for (v = aggval; v; v = v->next)
4628 if (v->index == index
4629 && v->offset == offset)
4630 break;
3ff2ca23
JJ
4631 if (!v
4632 || v->by_ref != by_ref
9439e9a1 4633 || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
2c9561b5
MJ
4634 continue;
4635
4636 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4637 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4638 {
4639 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4640 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4641 else if (TYPE_SIZE (TREE_TYPE (rhs))
4642 == TYPE_SIZE (TREE_TYPE (v->value)))
4643 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4644 else
4645 {
4646 if (dump_file)
4647 {
4648 fprintf (dump_file, " const ");
4649 print_generic_expr (dump_file, v->value, 0);
4650 fprintf (dump_file, " can't be converted to type of ");
4651 print_generic_expr (dump_file, rhs, 0);
4652 fprintf (dump_file, "\n");
4653 }
4654 continue;
4655 }
4656 }
4657 else
4658 val = v->value;
4659
4660 if (dump_file && (dump_flags & TDF_DETAILS))
4661 {
4662 fprintf (dump_file, "Modifying stmt:\n ");
4663 print_gimple_stmt (dump_file, stmt, 0, 0);
4664 }
4665 gimple_assign_set_rhs_from_tree (&gsi, val);
4666 update_stmt (stmt);
4667
4668 if (dump_file && (dump_flags & TDF_DETAILS))
4669 {
4670 fprintf (dump_file, "into:\n ");
4671 print_gimple_stmt (dump_file, stmt, 0, 0);
4672 fprintf (dump_file, "\n");
4673 }
4674
4675 something_changed = true;
4676 if (maybe_clean_eh_stmt (stmt)
4677 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4678 cfg_changed = true;
4679 }
4680
9771b263 4681 (*ipa_node_agg_replacements)[node->uid] = NULL;
2c9561b5 4682 free_parms_ainfo (parms_ainfo, param_count);
9771b263 4683 descriptors.release ();
2c9561b5
MJ
4684
4685 if (!something_changed)
4686 return 0;
4687 else if (cfg_changed)
4688 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4689 else
4690 return TODO_update_ssa_only_virtuals;
4691}