gcc.gnu.org Git - gcc.git/blame - gcc/ipa-prop.c
518dc859 1/* Interprocedural analyses.
d1e082c2 2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
518dc859
RL
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
518dc859
RL
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
518dc859
RL
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tree.h"
24#include "langhooks.h"
25#include "ggc.h"
26#include "target.h"
27#include "cgraph.h"
28#include "ipa-prop.h"
29#include "tree-flow.h"
30#include "tree-pass.h"
771578a0 31#include "tree-inline.h"
0f378cb5 32#include "ipa-inline.h"
b258210c 33#include "gimple.h"
518dc859 34#include "flags.h"
3e293154 35#include "diagnostic.h"
cf835838 36#include "gimple-pretty-print.h"
fb3f88cc 37#include "lto-streamer.h"
f0efc7aa
DN
38#include "data-streamer.h"
39#include "tree-streamer.h"
dfea20f1 40#include "params.h"
771578a0 41
062c604f
MJ
42/* Intermediate information about a parameter that is only useful during the
43 run of ipa_analyze_node and is not kept afterwards. */
44
45struct param_analysis_info
46{
8b7773a4
MJ
47 bool parm_modified, ref_modified, pt_modified;
48 bitmap parm_visited_statements, pt_visited_statements;
062c604f
MJ
49};
50
771578a0 51/* Vector where the parameter infos are actually stored. */
9771b263 52vec<ipa_node_params_t> ipa_node_params_vector;
2c9561b5 53/* Vector of known aggregate values in cloned nodes. */
9771b263 54vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
771578a0 55/* Vector where the argument infos are actually stored. */
9771b263 56vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;
771578a0
MJ
57
58/* Holders of ipa cgraph hooks: */
e2c9111c
JH
59static struct cgraph_edge_hook_list *edge_removal_hook_holder;
60static struct cgraph_node_hook_list *node_removal_hook_holder;
61static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
62static struct cgraph_2node_hook_list *node_duplication_hook_holder;
40982661 63static struct cgraph_node_hook_list *function_insertion_hook_holder;
518dc859 64
4502fe8d
MJ
65/* Description of a reference to an IPA constant. */
66struct ipa_cst_ref_desc
67{
68 /* Edge that corresponds to the statement which took the reference. */
69 struct cgraph_edge *cs;
70 /* Linked list of duplicates created when call graph edges are cloned. */
71 struct ipa_cst_ref_desc *next_duplicate;
72 /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
73 is out of control. */
74 int refcount;
75};
76
77/* Allocation pool for reference descriptions. */
78
79static alloc_pool ipa_refdesc_pool;
80
5fe8e757
MJ
81/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
82 with NODE should prevent us from analyzing it for the purposes of IPA-CP. */
83
84static bool
85ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
86{
87 tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->symbol.decl);
88 struct cl_optimization *os;
89
90 if (!fs_opts)
91 return false;
92 os = TREE_OPTIMIZATION (fs_opts);
93 return !os->x_optimize || !os->x_flag_ipa_cp;
94}
95
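/* For illustration (an assumed example, not part of this file): a function
   declared as

     __attribute__ ((optimize ("O0"))) int keep_me_alone (int i) { return i; }

   gets DECL_FUNCTION_SPECIFIC_OPTIMIZATION with x_optimize == 0, so the
   predicate above returns true and IPA-CP analysis skips it.  The same happens
   when the per-function options turn off flag_ipa_cp itself.  */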
be95e2b9
MJ
96/* Return index of the formal whose tree is PTREE in the function whose
97 parameter descriptors are DESCRIPTORS. Return -1 if it is not found. */
98
d044dd17 99static int
9771b263 100ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
518dc859
RL
101{
102 int i, count;
103
9771b263 104 count = descriptors.length ();
518dc859 105 for (i = 0; i < count; i++)
9771b263 106 if (descriptors[i].decl == ptree)
518dc859
RL
107 return i;
108
109 return -1;
110}
111
d044dd17
MJ
112/* Return index of the formal whose tree is PTREE in the function which
113 corresponds to INFO. Return -1 if it is not found. */
114
115int
116ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
117{
118 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
119}
120
121/* Populate the decl and move_cost fields in parameter DESCRIPTORS that
122 correspond to NODE. */
be95e2b9 123
f8e2a1ed
MJ
124static void
125ipa_populate_param_decls (struct cgraph_node *node,
9771b263 126 vec<ipa_param_descriptor_t> &descriptors)
518dc859
RL
127{
128 tree fndecl;
129 tree fnargs;
130 tree parm;
131 int param_num;
3e293154 132
960bfb69 133 fndecl = node->symbol.decl;
0e8853ee 134 gcc_assert (gimple_has_body_p (fndecl));
518dc859
RL
135 fnargs = DECL_ARGUMENTS (fndecl);
136 param_num = 0;
910ad8de 137 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
518dc859 138 {
9771b263 139 descriptors[param_num].decl = parm;
0e8853ee 140 descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm));
518dc859
RL
141 param_num++;
142 }
143}
144
3f84bf08
MJ
145/* Return how many formal parameters FNDECL has. */
146
147static inline int
310bc633 148count_formal_params (tree fndecl)
3f84bf08
MJ
149{
150 tree parm;
151 int count = 0;
0e8853ee 152 gcc_assert (gimple_has_body_p (fndecl));
3f84bf08 153
910ad8de 154 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3f84bf08
MJ
155 count++;
156
157 return count;
158}
159
0e8853ee
JH
160/* Dump printable information about the Ith formal parameter of the function
161 corresponding to INFO to FILE. The parameter descriptors themselves are
162 built just once using ipa_initialize_node_params. */
163
164void
165ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
166{
167 fprintf (file, "param #%i", i);
168 if (info->descriptors[i].decl)
169 {
170 fprintf (file, " ");
171 print_generic_expr (file, info->descriptors[i].decl, 0);
172 }
173}
174
175/* Initialize the ipa_node_params structure associated with NODE
176 to hold PARAM_COUNT parameters. */
177
178void
179ipa_alloc_node_params (struct cgraph_node *node, int param_count)
180{
181 struct ipa_node_params *info = IPA_NODE_REF (node);
182
183 if (!info->descriptors.exists () && param_count)
184 info->descriptors.safe_grow_cleared (param_count);
185}
186
f8e2a1ed
MJ
187/* Initialize the ipa_node_params structure associated with NODE by counting
188 the function parameters, creating the descriptors and populating their
189 param_decls. */
be95e2b9 190
f8e2a1ed
MJ
191void
192ipa_initialize_node_params (struct cgraph_node *node)
193{
194 struct ipa_node_params *info = IPA_NODE_REF (node);
195
9771b263 196 if (!info->descriptors.exists ())
f8e2a1ed 197 {
0e8853ee
JH
198 ipa_alloc_node_params (node, count_formal_params (node->symbol.decl));
199 ipa_populate_param_decls (node, info->descriptors);
f8e2a1ed 200 }
518dc859
RL
201}
202
749aa96d
MJ
203/* Print the jump functions associated with call graph edge CS to file F. */
204
205static void
206ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
207{
208 int i, count;
209
210 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
211 for (i = 0; i < count; i++)
212 {
213 struct ipa_jump_func *jump_func;
214 enum jump_func_type type;
215
216 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
217 type = jump_func->type;
218
219 fprintf (f, " param %d: ", i);
220 if (type == IPA_JF_UNKNOWN)
221 fprintf (f, "UNKNOWN\n");
222 else if (type == IPA_JF_KNOWN_TYPE)
223 {
c7573249
MJ
224 fprintf (f, "KNOWN TYPE: base ");
225 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
226 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
227 jump_func->value.known_type.offset);
228 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
229 fprintf (f, "\n");
749aa96d
MJ
230 }
231 else if (type == IPA_JF_CONST)
232 {
4502fe8d 233 tree val = jump_func->value.constant.value;
749aa96d
MJ
234 fprintf (f, "CONST: ");
235 print_generic_expr (f, val, 0);
236 if (TREE_CODE (val) == ADDR_EXPR
237 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
238 {
239 fprintf (f, " -> ");
240 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
241 0);
242 }
243 fprintf (f, "\n");
244 }
749aa96d
MJ
245 else if (type == IPA_JF_PASS_THROUGH)
246 {
247 fprintf (f, "PASS THROUGH: ");
8b7773a4 248 fprintf (f, "%d, op %s",
749aa96d
MJ
249 jump_func->value.pass_through.formal_id,
250 tree_code_name[(int)
251 jump_func->value.pass_through.operation]);
252 if (jump_func->value.pass_through.operation != NOP_EXPR)
8b7773a4
MJ
253 {
254 fprintf (f, " ");
255 print_generic_expr (f,
256 jump_func->value.pass_through.operand, 0);
257 }
258 if (jump_func->value.pass_through.agg_preserved)
259 fprintf (f, ", agg_preserved");
b8f6e610
MJ
260 if (jump_func->value.pass_through.type_preserved)
261 fprintf (f, ", type_preserved");
3ea6239f 262 fprintf (f, "\n");
749aa96d
MJ
263 }
264 else if (type == IPA_JF_ANCESTOR)
265 {
266 fprintf (f, "ANCESTOR: ");
267 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
268 jump_func->value.ancestor.formal_id,
269 jump_func->value.ancestor.offset);
270 print_generic_expr (f, jump_func->value.ancestor.type, 0);
8b7773a4
MJ
271 if (jump_func->value.ancestor.agg_preserved)
272 fprintf (f, ", agg_preserved");
b8f6e610
MJ
273 if (jump_func->value.ancestor.type_preserved)
274 fprintf (f, ", type_preserved");
3ea6239f 275 fprintf (f, "\n");
749aa96d 276 }
8b7773a4
MJ
277
278 if (jump_func->agg.items)
279 {
280 struct ipa_agg_jf_item *item;
281 int j;
282
283 fprintf (f, " Aggregate passed by %s:\n",
284 jump_func->agg.by_ref ? "reference" : "value");
9771b263 285 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
8b7773a4
MJ
286 {
287 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
288 item->offset);
289 if (TYPE_P (item->value))
290 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
291 tree_low_cst (TYPE_SIZE (item->value), 1));
292 else
293 {
294 fprintf (f, "cst: ");
295 print_generic_expr (f, item->value, 0);
296 }
297 fprintf (f, "\n");
298 }
299 }
749aa96d
MJ
300 }
301}
302
303
be95e2b9
MJ
304/* Print the jump functions of all arguments on all call graph edges going from
305 NODE to file F. */
306
518dc859 307void
3e293154 308ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
518dc859 309{
3e293154 310 struct cgraph_edge *cs;
518dc859 311
9de04252
MJ
312 fprintf (f, " Jump functions of caller %s/%i:\n", cgraph_node_name (node),
313 node->symbol.order);
3e293154
MJ
314 for (cs = node->callees; cs; cs = cs->next_callee)
315 {
316 if (!ipa_edge_args_info_available_for_edge_p (cs))
317 continue;
318
749aa96d 319 fprintf (f, " callsite %s/%i -> %s/%i : \n",
9de04252
MJ
320 xstrdup (cgraph_node_name (node)), node->symbol.order,
321 xstrdup (cgraph_node_name (cs->callee)),
322 cs->callee->symbol.order);
749aa96d
MJ
323 ipa_print_node_jump_functions_for_edge (f, cs);
324 }
518dc859 325
9de04252 326 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
749aa96d 327 {
9de04252 328 struct cgraph_indirect_call_info *ii;
749aa96d
MJ
329 if (!ipa_edge_args_info_available_for_edge_p (cs))
330 continue;
3e293154 331
9de04252
MJ
332 ii = cs->indirect_info;
333 if (ii->agg_contents)
c13bc3d9 334 fprintf (f, " indirect %s callsite, calling param %i, "
9de04252 335 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
c13bc3d9 336 ii->member_ptr ? "member ptr" : "aggregate",
9de04252
MJ
337 ii->param_index, ii->offset,
338 ii->by_ref ? "by reference" : "by value");
339 else
340 fprintf (f, " indirect %s callsite, calling param %i",
341 ii->polymorphic ? "polymorphic" : "simple", ii->param_index);
342
749aa96d
MJ
343 if (cs->call_stmt)
344 {
9de04252 345 fprintf (f, ", for stmt ");
749aa96d 346 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
3e293154 347 }
749aa96d 348 else
9de04252 349 fprintf (f, "\n");
749aa96d 350 ipa_print_node_jump_functions_for_edge (f, cs);
3e293154
MJ
351 }
352}
353
354/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
be95e2b9 355
3e293154
MJ
356void
357ipa_print_all_jump_functions (FILE *f)
358{
359 struct cgraph_node *node;
360
ca30a539 361 fprintf (f, "\nJump functions:\n");
65c70e6b 362 FOR_EACH_FUNCTION (node)
3e293154
MJ
363 {
364 ipa_print_node_jump_functions (f, node);
365 }
366}
367
7b872d9e
MJ
368/* Set JFUNC to be a known type jump function. */
369
370static void
371ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
372 tree base_type, tree component_type)
373{
06d65050
JH
374 gcc_assert (TREE_CODE (component_type) == RECORD_TYPE
375 && TYPE_BINFO (component_type));
7b872d9e
MJ
376 jfunc->type = IPA_JF_KNOWN_TYPE;
377 jfunc->value.known_type.offset = offset,
378 jfunc->value.known_type.base_type = base_type;
379 jfunc->value.known_type.component_type = component_type;
380}
381
b8f6e610
MJ
382/* Set JFUNC to be a copy of another jump function (to be used by the jump
383 function combination code). The two functions will share their rdesc. */
384
385static void
386ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
387 struct ipa_jump_func *src)
388
389{
390 gcc_checking_assert (src->type == IPA_JF_CONST);
391 dst->type = IPA_JF_CONST;
392 dst->value.constant = src->value.constant;
393}
394
7b872d9e
MJ
395/* Set JFUNC to be a constant jump function. */
396
397static void
4502fe8d
MJ
398ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
399 struct cgraph_edge *cs)
7b872d9e 400{
5368224f
DC
401 constant = unshare_expr (constant);
402 if (constant && EXPR_P (constant))
403 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
7b872d9e 404 jfunc->type = IPA_JF_CONST;
4502fe8d
MJ
405 jfunc->value.constant.value = unshare_expr_without_location (constant);
406
407 if (TREE_CODE (constant) == ADDR_EXPR
408 && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
409 {
410 struct ipa_cst_ref_desc *rdesc;
411 if (!ipa_refdesc_pool)
412 ipa_refdesc_pool = create_alloc_pool ("IPA-PROP ref descriptions",
413 sizeof (struct ipa_cst_ref_desc), 32);
414
415 rdesc = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
416 rdesc->cs = cs;
417 rdesc->next_duplicate = NULL;
418 rdesc->refcount = 1;
419 jfunc->value.constant.rdesc = rdesc;
420 }
421 else
422 jfunc->value.constant.rdesc = NULL;
7b872d9e
MJ
423}
424
425/* Set JFUNC to be a simple pass-through jump function. */
426static void
8b7773a4 427ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
b8f6e610 428 bool agg_preserved, bool type_preserved)
7b872d9e
MJ
429{
430 jfunc->type = IPA_JF_PASS_THROUGH;
431 jfunc->value.pass_through.operand = NULL_TREE;
432 jfunc->value.pass_through.formal_id = formal_id;
433 jfunc->value.pass_through.operation = NOP_EXPR;
8b7773a4 434 jfunc->value.pass_through.agg_preserved = agg_preserved;
b8f6e610 435 jfunc->value.pass_through.type_preserved = type_preserved;
7b872d9e
MJ
436}
437
438/* Set JFUNC to be an arithmetic pass through jump function. */
439
440static void
441ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
442 tree operand, enum tree_code operation)
443{
444 jfunc->type = IPA_JF_PASS_THROUGH;
d1f98542 445 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
7b872d9e
MJ
446 jfunc->value.pass_through.formal_id = formal_id;
447 jfunc->value.pass_through.operation = operation;
8b7773a4 448 jfunc->value.pass_through.agg_preserved = false;
b8f6e610 449 jfunc->value.pass_through.type_preserved = false;
7b872d9e
MJ
450}
451
452/* Set JFUNC to be an ancestor jump function. */
453
454static void
455ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
b8f6e610
MJ
456 tree type, int formal_id, bool agg_preserved,
457 bool type_preserved)
7b872d9e
MJ
458{
459 jfunc->type = IPA_JF_ANCESTOR;
460 jfunc->value.ancestor.formal_id = formal_id;
461 jfunc->value.ancestor.offset = offset;
462 jfunc->value.ancestor.type = type;
8b7773a4 463 jfunc->value.ancestor.agg_preserved = agg_preserved;
b8f6e610 464 jfunc->value.ancestor.type_preserved = type_preserved;
7b872d9e
MJ
465}
466
e248d83f
MJ
467/* Extract the actual BINFO being described by JFUNC which must be a known type
468 jump function. */
469
470tree
471ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
472{
473 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
474 if (!base_binfo)
475 return NULL_TREE;
476 return get_binfo_at_offset (base_binfo,
477 jfunc->value.known_type.offset,
478 jfunc->value.known_type.component_type);
479}
480
f65cf2b7
MJ
481/* Structure to be passed in between detect_type_change and
482 check_stmt_for_type_change. */
483
484struct type_change_info
485{
290ebcb7
MJ
486 /* Offset into the object where there is the virtual method pointer we are
487 looking for. */
488 HOST_WIDE_INT offset;
489 /* The declaration or SSA_NAME pointer of the base that we are checking for
490 type change. */
491 tree object;
492 /* If we actually can tell the type that the object has changed to, it is
493 stored in this field. Otherwise it remains NULL_TREE. */
494 tree known_current_type;
f65cf2b7
MJ
495 /* Set to true if dynamic type change has been detected. */
496 bool type_maybe_changed;
290ebcb7
MJ
497 /* Set to true if multiple types have been encountered. known_current_type
498 must be disregarded in that case. */
499 bool multiple_types_encountered;
f65cf2b7
MJ
500};
501
502/* Return true if STMT can modify a virtual method table pointer.
503
504 This function makes special assumptions about both constructors and
505 destructors which are all the functions that are allowed to alter the VMT
506 pointers. It assumes that destructors begin with assignment into all VMT
507 pointers and that constructors essentially work in the following way:
508
509 1) The very first thing they do is that they call constructors of ancestor
510 sub-objects that have them.
511
512 2) Then the VMT pointers of this object and all its ancestors are set to new
513 values corresponding to the type associated with the constructor.
514
515 3) Only afterwards, other stuff such as constructor of member sub-objects
516 and the code written by the user is run. Only this may include calling
517 virtual functions, directly or indirectly.
518
519 There is no way to call a constructor of an ancestor sub-object in any
520 other way.
521
522 This means that we do not have to care whether constructors get the correct
523 type information because they will always change it (in fact, if we define
524 the type to be given by the VMT pointer, it is undefined).
525
526 The most important fact to derive from the above is that if, for some
527 statement in the section 3, we try to detect whether the dynamic type has
528 changed, we can safely ignore all calls as we examine the function body
529 backwards until we reach statements in section 2 because these calls cannot
530 be ancestor constructors or destructors (if the input is not bogus) and so
531 do not change the dynamic type (this holds true only for automatically
532 allocated objects but at the moment we devirtualize only these). We then
533 must detect that statements in section 2 change the dynamic type and can try
534 to derive the new type. That is enough and we can stop, we will never see
535 the calls into constructors of sub-objects in this code. Therefore we can
536 safely ignore all call statements that we traverse.
537 */
538
539static bool
540stmt_may_be_vtbl_ptr_store (gimple stmt)
541{
542 if (is_gimple_call (stmt))
543 return false;
544 else if (is_gimple_assign (stmt))
545 {
546 tree lhs = gimple_assign_lhs (stmt);
547
0004f992
MJ
548 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
549 {
550 if (flag_strict_aliasing
551 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
552 return false;
553
554 if (TREE_CODE (lhs) == COMPONENT_REF
555 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
f65cf2b7 556 return false;
0004f992
MJ
557 /* In the future we might want to use get_base_ref_and_offset to find
558 if there is a field corresponding to the offset and if so, proceed
559 almost like if it was a component ref. */
560 }
f65cf2b7
MJ
561 }
562 return true;
563}
564
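/* An assumed illustration of the kind of statement the predicate above has to
   treat as a potential VMT store (the exact dump syntax may differ):

     this_2(D)->_vptr.A = &_ZTV1A + 16;

   The left hand side is a COMPONENT_REF whose FIELD_DECL has DECL_VIRTUAL_P
   set, which is why such assignments make the function return true.  */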
290ebcb7
MJ
565/* If STMT can be proved to be an assignment to the virtual method table
566 pointer of the object described by TCI and the type associated with the new
567 table can be identified, return that type. Otherwise return NULL_TREE. */
568
569static tree
570extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
571{
572 HOST_WIDE_INT offset, size, max_size;
573 tree lhs, rhs, base;
574
575 if (!gimple_assign_single_p (stmt))
576 return NULL_TREE;
577
578 lhs = gimple_assign_lhs (stmt);
579 rhs = gimple_assign_rhs1 (stmt);
580 if (TREE_CODE (lhs) != COMPONENT_REF
581 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
582 || TREE_CODE (rhs) != ADDR_EXPR)
583 return NULL_TREE;
584 rhs = get_base_address (TREE_OPERAND (rhs, 0));
585 if (!rhs
586 || TREE_CODE (rhs) != VAR_DECL
587 || !DECL_VIRTUAL_P (rhs))
588 return NULL_TREE;
589
590 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
591 if (offset != tci->offset
592 || size != POINTER_SIZE
593 || max_size != POINTER_SIZE)
594 return NULL_TREE;
595 if (TREE_CODE (base) == MEM_REF)
596 {
597 if (TREE_CODE (tci->object) != MEM_REF
598 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
599 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
600 TREE_OPERAND (base, 1)))
601 return NULL_TREE;
602 }
603 else if (tci->object != base)
604 return NULL_TREE;
605
606 return DECL_CONTEXT (rhs);
607}
608
61502ca8 609/* Callback of walk_aliased_vdefs and a helper function for
f65cf2b7
MJ
610 detect_type_change to check whether a particular statement may modify
611 the virtual table pointer, and if possible also determine the new type of
612 the (sub-)object. It stores its result into DATA, which points to a
613 type_change_info structure. */
614
615static bool
616check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
617{
618 gimple stmt = SSA_NAME_DEF_STMT (vdef);
619 struct type_change_info *tci = (struct type_change_info *) data;
620
621 if (stmt_may_be_vtbl_ptr_store (stmt))
622 {
290ebcb7
MJ
623 tree type;
624 type = extr_type_from_vtbl_ptr_store (stmt, tci);
625 if (tci->type_maybe_changed
626 && type != tci->known_current_type)
627 tci->multiple_types_encountered = true;
628 tci->known_current_type = type;
f65cf2b7
MJ
629 tci->type_maybe_changed = true;
630 return true;
631 }
632 else
633 return false;
634}
635
290ebcb7
MJ
636
637
06d65050
JH
638/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
639 callsite CALL) by looking for assignments to its virtual table pointer. If
640 it has, return true and fill in the jump function JFUNC with relevant type
641 information or set it to unknown. ARG is the object itself (not a pointer
642 to it, unless dereferenced). BASE is the base of the memory access as
643 returned by get_ref_base_and_extent, as is OFFSET. */
f65cf2b7
MJ
644
645static bool
06d65050
JH
646detect_type_change (tree arg, tree base, tree comp_type, gimple call,
647 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
f65cf2b7
MJ
648{
649 struct type_change_info tci;
650 ao_ref ao;
651
652 gcc_checking_assert (DECL_P (arg)
653 || TREE_CODE (arg) == MEM_REF
654 || handled_component_p (arg));
655 /* Const calls cannot call virtual methods through VMT and so type changes do
656 not matter. */
06d65050
JH
657 if (!flag_devirtualize || !gimple_vuse (call)
658 /* Be sure expected_type is polymorphic. */
659 || !comp_type
660 || TREE_CODE (comp_type) != RECORD_TYPE
661 || !TYPE_BINFO (comp_type)
662 || !BINFO_VTABLE (TYPE_BINFO (comp_type)))
f65cf2b7
MJ
663 return false;
664
dd887943 665 ao_ref_init (&ao, arg);
f65cf2b7
MJ
666 ao.base = base;
667 ao.offset = offset;
668 ao.size = POINTER_SIZE;
669 ao.max_size = ao.size;
f65cf2b7 670
290ebcb7
MJ
671 tci.offset = offset;
672 tci.object = get_base_address (arg);
673 tci.known_current_type = NULL_TREE;
674 tci.type_maybe_changed = false;
675 tci.multiple_types_encountered = false;
676
f65cf2b7
MJ
677 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
678 &tci, NULL);
679 if (!tci.type_maybe_changed)
680 return false;
681
290ebcb7
MJ
682 if (!tci.known_current_type
683 || tci.multiple_types_encountered
684 || offset != 0)
685 jfunc->type = IPA_JF_UNKNOWN;
686 else
7b872d9e 687 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
290ebcb7 688
f65cf2b7
MJ
689 return true;
690}
691
692/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
693 SSA name (its dereference will become the base and the offset is assumed to
694 be zero). */
695
696static bool
06d65050
JH
697detect_type_change_ssa (tree arg, tree comp_type,
698 gimple call, struct ipa_jump_func *jfunc)
f65cf2b7
MJ
699{
700 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
05842ff5 701 if (!flag_devirtualize
06d65050 702 || !POINTER_TYPE_P (TREE_TYPE (arg)))
f65cf2b7
MJ
703 return false;
704
705 arg = build2 (MEM_REF, ptr_type_node, arg,
290ebcb7 706 build_int_cst (ptr_type_node, 0));
f65cf2b7 707
06d65050 708 return detect_type_change (arg, arg, comp_type, call, jfunc, 0);
f65cf2b7
MJ
709}
710
fdb0e1b4
MJ
711/* Callback of walk_aliased_vdefs. Flags that it has been invoked by setting
712 the boolean variable pointed to by DATA to true. */
713
714static bool
715mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
716 void *data)
717{
718 bool *b = (bool *) data;
719 *b = true;
720 return true;
721}
722
688010ba 723/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
8b7773a4
MJ
724 a value known not to be modified in this function before reaching the
725 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
726 information about the parameter. */
fdb0e1b4
MJ
727
728static bool
8b7773a4
MJ
729parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
730 gimple stmt, tree parm_load)
fdb0e1b4
MJ
731{
732 bool modified = false;
8b7773a4 733 bitmap *visited_stmts;
fdb0e1b4
MJ
734 ao_ref refd;
735
8b7773a4
MJ
736 if (parm_ainfo && parm_ainfo->parm_modified)
737 return false;
fdb0e1b4
MJ
738
739 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
8b7773a4
MJ
740 ao_ref_init (&refd, parm_load);
741 /* We can cache visited statements only when parm_ainfo is available and when
742 we are looking at a naked load of the whole parameter. */
743 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
744 visited_stmts = NULL;
745 else
746 visited_stmts = &parm_ainfo->parm_visited_statements;
747 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
748 visited_stmts);
749 if (parm_ainfo && modified)
750 parm_ainfo->parm_modified = true;
751 return !modified;
fdb0e1b4
MJ
752}
753
754/* If STMT is an assignment that loads a value from an parameter declaration,
755 return the index of the parameter in ipa_node_params which has not been
756 modified. Otherwise return -1. */
757
758static int
9771b263 759load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
fdb0e1b4
MJ
760 struct param_analysis_info *parms_ainfo,
761 gimple stmt)
762{
763 int index;
764 tree op1;
765
766 if (!gimple_assign_single_p (stmt))
767 return -1;
768
769 op1 = gimple_assign_rhs1 (stmt);
770 if (TREE_CODE (op1) != PARM_DECL)
771 return -1;
772
d044dd17 773 index = ipa_get_param_decl_index_1 (descriptors, op1);
fdb0e1b4 774 if (index < 0
8b7773a4
MJ
775 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
776 : NULL, stmt, op1))
fdb0e1b4
MJ
777 return -1;
778
779 return index;
780}
f65cf2b7 781
8b7773a4
MJ
782/* Return true if memory reference REF loads data that are known to be
783 unmodified in this function before reaching statement STMT. PARM_AINFO, if
784 non-NULL, is a pointer to a structure containing temporary information about
785 PARM. */
786
787static bool
788parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
789 gimple stmt, tree ref)
790{
791 bool modified = false;
792 ao_ref refd;
793
794 gcc_checking_assert (gimple_vuse (stmt));
795 if (parm_ainfo && parm_ainfo->ref_modified)
796 return false;
797
798 ao_ref_init (&refd, ref);
799 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
800 NULL);
801 if (parm_ainfo && modified)
802 parm_ainfo->ref_modified = true;
803 return !modified;
804}
805
806/* Return true if the data pointed to by PARM is known to be unmodified in this
807 function before reaching call statement CALL into which it is passed.
808 PARM_AINFO is a pointer to a structure containing temporary information
809 about PARM. */
810
811static bool
812parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
813 gimple call, tree parm)
814{
815 bool modified = false;
816 ao_ref refd;
817
818 /* It's unnecessary to calculate anything about memory contents for a const
819 function because it is not going to use it. But do not cache the result
820 either. Also, no such calculations for non-pointers. */
821 if (!gimple_vuse (call)
822 || !POINTER_TYPE_P (TREE_TYPE (parm)))
823 return false;
824
825 if (parm_ainfo->pt_modified)
826 return false;
827
828 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
829 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
830 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
831 if (modified)
832 parm_ainfo->pt_modified = true;
833 return !modified;
834}
835
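/* A minimal assumed example for the helper above: in a caller such as

     void caller (struct S *p) { do_something (p->x); callee (p); }

   no statement between the function entry and the call clobbers *p, so
   parm_ref_data_pass_through_p returns true for P at the call to callee and a
   pass-through jump function for it may set agg_preserved.  */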
836/* Return true if we can prove that OP is a memory reference loading unmodified
837 data from an aggregate passed as a parameter and if the aggregate is passed
838 by reference, that the alias type of the load corresponds to the type of the
839 formal parameter (so that we can rely on this type for TBAA in callers).
840 INFO and PARMS_AINFO describe parameters of the current function (but the
841 latter can be NULL), STMT is the load statement. If function returns true,
842 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
843 within the aggregate and whether it is a load from a value passed by
844 reference respectively. */
845
846static bool
9771b263 847ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
8b7773a4
MJ
848 struct param_analysis_info *parms_ainfo, gimple stmt,
849 tree op, int *index_p, HOST_WIDE_INT *offset_p,
850 bool *by_ref_p)
851{
852 int index;
853 HOST_WIDE_INT size, max_size;
854 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
855
856 if (max_size == -1 || max_size != size || *offset_p < 0)
857 return false;
858
859 if (DECL_P (base))
860 {
d044dd17 861 int index = ipa_get_param_decl_index_1 (descriptors, base);
8b7773a4
MJ
862 if (index >= 0
863 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
864 : NULL, stmt, op))
865 {
866 *index_p = index;
867 *by_ref_p = false;
868 return true;
869 }
870 return false;
871 }
872
873 if (TREE_CODE (base) != MEM_REF
874 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
875 || !integer_zerop (TREE_OPERAND (base, 1)))
876 return false;
877
878 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
879 {
880 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
d044dd17 881 index = ipa_get_param_decl_index_1 (descriptors, parm);
8b7773a4
MJ
882 }
883 else
884 {
885 /* This branch catches situations where a pointer parameter is not a
886 gimple register, for example:
887
888 void hip7(S*) (struct S * p)
889 {
890 void (*<T2e4>) (struct S *) D.1867;
891 struct S * p.1;
892
893 <bb 2>:
894 p.1_1 = p;
895 D.1867_2 = p.1_1->f;
896 D.1867_2 ();
897 gdp = &p;
898 */
899
900 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
d044dd17 901 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
8b7773a4
MJ
902 }
903
904 if (index >= 0
905 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
906 stmt, op))
907 {
908 *index_p = index;
909 *by_ref_p = true;
910 return true;
911 }
912 return false;
913}
914
915/* Just like the previous function, just without the param_analysis_info
916 pointer, for users outside of this file. */
917
918bool
919ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
920 tree op, int *index_p, HOST_WIDE_INT *offset_p,
921 bool *by_ref_p)
922{
d044dd17
MJ
923 return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
924 offset_p, by_ref_p);
8b7773a4
MJ
925}
926
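/* Assumed examples of loads the two functions above recognize: with a formal
   parameter "struct S *p" (aggregate passed by reference) and "struct S s"
   (passed by value),

     x_1 = p_2(D)->f;   ->  *INDEX_P = index of p, *OFFSET_P = bit offset of f,
                            *BY_REF_P = true
     y_3 = s.f;         ->  *INDEX_P = index of s, *OFFSET_P = bit offset of f,
                            *BY_REF_P = false

   provided the aggregate contents are not modified before the load.  */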
b258210c 927/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
fdb0e1b4
MJ
928 of an assignment statement STMT, try to determine whether we are actually
929 handling any of the following cases and construct an appropriate jump
930 function into JFUNC if so:
931
932 1) The passed value is loaded from a formal parameter which is not a gimple
933 register (most probably because it is addressable, the value has to be
934 scalar) and we can guarantee the value has not changed. This case can
935 therefore be described by a simple pass-through jump function. For example:
936
937 foo (int a)
938 {
939 int a.0;
940
941 a.0_2 = a;
942 bar (a.0_2);
943
944 2) The passed value can be described by a simple arithmetic pass-through
945 jump function. E.g.
946
947 foo (int a)
948 {
949 int D.2064;
950
951 D.2064_4 = a.1(D) + 4;
952 bar (D.2064_4);
953
954 This case can also occur in combination of the previous one, e.g.:
955
956 foo (int a, int z)
957 {
958 int a.0;
959 int D.2064;
960
961 a.0_3 = a;
962 D.2064_4 = a.0_3 + 4;
963 foo (D.2064_4);
964
965 3) The passed value is an address of an object within another one (which
966 also passed by reference). Such situations are described by an ancestor
967 jump function and describe situations such as:
968
969 B::foo() (struct B * const this)
970 {
971 struct A * D.1845;
972
973 D.1845_2 = &this_1(D)->D.1748;
974 A::bar (D.1845_2);
975
976 INFO is the structure describing individual parameters that is used across
977 different stages of IPA optimizations. PARMS_AINFO contains the information
978 that is only needed for intraprocedural analysis. */
685b0d13
MJ
979
980static void
b258210c 981compute_complex_assign_jump_func (struct ipa_node_params *info,
fdb0e1b4 982 struct param_analysis_info *parms_ainfo,
b258210c 983 struct ipa_jump_func *jfunc,
06d65050
JH
984 gimple call, gimple stmt, tree name,
985 tree param_type)
685b0d13
MJ
986{
987 HOST_WIDE_INT offset, size, max_size;
fdb0e1b4 988 tree op1, tc_ssa, base, ssa;
685b0d13 989 int index;
685b0d13 990
685b0d13 991 op1 = gimple_assign_rhs1 (stmt);
685b0d13 992
fdb0e1b4 993 if (TREE_CODE (op1) == SSA_NAME)
685b0d13 994 {
fdb0e1b4
MJ
995 if (SSA_NAME_IS_DEFAULT_DEF (op1))
996 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
997 else
d044dd17 998 index = load_from_unmodified_param (info->descriptors, parms_ainfo,
fdb0e1b4
MJ
999 SSA_NAME_DEF_STMT (op1));
1000 tc_ssa = op1;
1001 }
1002 else
1003 {
d044dd17 1004 index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
fdb0e1b4
MJ
1005 tc_ssa = gimple_assign_lhs (stmt);
1006 }
1007
1008 if (index >= 0)
1009 {
1010 tree op2 = gimple_assign_rhs2 (stmt);
685b0d13 1011
b258210c 1012 if (op2)
685b0d13 1013 {
b258210c
MJ
1014 if (!is_gimple_ip_invariant (op2)
1015 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
1016 && !useless_type_conversion_p (TREE_TYPE (name),
1017 TREE_TYPE (op1))))
1018 return;
1019
7b872d9e
MJ
1020 ipa_set_jf_arith_pass_through (jfunc, index, op2,
1021 gimple_assign_rhs_code (stmt));
685b0d13 1022 }
b8f6e610 1023 else if (gimple_assign_single_p (stmt))
8b7773a4
MJ
1024 {
1025 bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1026 call, tc_ssa);
06d65050
JH
1027 bool type_p = false;
1028
1029 if (param_type && POINTER_TYPE_P (param_type))
1030 type_p = !detect_type_change_ssa (tc_ssa, TREE_TYPE (param_type),
1031 call, jfunc);
b8f6e610
MJ
1032 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1033 ipa_set_jf_simple_pass_through (jfunc, index, agg_p, type_p);
8b7773a4 1034 }
685b0d13
MJ
1035 return;
1036 }
1037
1038 if (TREE_CODE (op1) != ADDR_EXPR)
1039 return;
1040 op1 = TREE_OPERAND (op1, 0);
f65cf2b7 1041 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
b258210c 1042 return;
32aa622c
MJ
1043 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
1044 if (TREE_CODE (base) != MEM_REF
1a15bfdc
RG
1045 /* If this is a varying address, punt. */
1046 || max_size == -1
1047 || max_size != size)
685b0d13 1048 return;
32aa622c 1049 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
f65cf2b7
MJ
1050 ssa = TREE_OPERAND (base, 0);
1051 if (TREE_CODE (ssa) != SSA_NAME
1052 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
280fedf0 1053 || offset < 0)
685b0d13
MJ
1054 return;
1055
b8f6e610 1056 /* Dynamic types are changed in constructors and destructors. */
f65cf2b7 1057 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
06d65050 1058 if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
b8f6e610 1059 {
06d65050
JH
1060 bool type_p = !detect_type_change (op1, base, TREE_TYPE (param_type),
1061 call, jfunc, offset);
b8f6e610
MJ
1062 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
1063 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
1064 parm_ref_data_pass_through_p (&parms_ainfo[index],
1065 call, ssa), type_p);
1066 }
685b0d13
MJ
1067}
1068
40591473
MJ
1069/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
1070 it looks like:
1071
1072 iftmp.1_3 = &obj_2(D)->D.1762;
1073
1074 The base of the MEM_REF must be a default definition SSA NAME of a
1075 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
1076 whole MEM_REF expression is returned and the offset calculated from any
1077 handled components and the MEM_REF itself is stored into *OFFSET. The whole
1078 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
1079
1080static tree
1081get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
1082{
1083 HOST_WIDE_INT size, max_size;
1084 tree expr, parm, obj;
1085
1086 if (!gimple_assign_single_p (assign))
1087 return NULL_TREE;
1088 expr = gimple_assign_rhs1 (assign);
1089
1090 if (TREE_CODE (expr) != ADDR_EXPR)
1091 return NULL_TREE;
1092 expr = TREE_OPERAND (expr, 0);
1093 obj = expr;
1094 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
1095
1096 if (TREE_CODE (expr) != MEM_REF
1097 /* If this is a varying address, punt. */
1098 || max_size == -1
1099 || max_size != size
1100 || *offset < 0)
1101 return NULL_TREE;
1102 parm = TREE_OPERAND (expr, 0);
1103 if (TREE_CODE (parm) != SSA_NAME
1104 || !SSA_NAME_IS_DEFAULT_DEF (parm)
1105 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
1106 return NULL_TREE;
1107
1108 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
1109 *obj_p = obj;
1110 return expr;
1111}
1112
685b0d13 1113
b258210c
MJ
1114/* Given that an actual argument is an SSA_NAME that is a result of a phi
1115 statement PHI, try to find out whether NAME is in fact a
1116 multiple-inheritance typecast from a descendant into an ancestor of a formal
1117 parameter and thus can be described by an ancestor jump function and if so,
1118 write the appropriate function into JFUNC.
1119
1120 Essentially we want to match the following pattern:
1121
1122 if (obj_2(D) != 0B)
1123 goto <bb 3>;
1124 else
1125 goto <bb 4>;
1126
1127 <bb 3>:
1128 iftmp.1_3 = &obj_2(D)->D.1762;
1129
1130 <bb 4>:
1131 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1132 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1133 return D.1879_6; */
1134
1135static void
1136compute_complex_ancestor_jump_func (struct ipa_node_params *info,
8b7773a4 1137 struct param_analysis_info *parms_ainfo,
b258210c 1138 struct ipa_jump_func *jfunc,
06d65050 1139 gimple call, gimple phi, tree param_type)
b258210c 1140{
40591473 1141 HOST_WIDE_INT offset;
b258210c
MJ
1142 gimple assign, cond;
1143 basic_block phi_bb, assign_bb, cond_bb;
f65cf2b7 1144 tree tmp, parm, expr, obj;
b258210c
MJ
1145 int index, i;
1146
54e348cb 1147 if (gimple_phi_num_args (phi) != 2)
b258210c
MJ
1148 return;
1149
54e348cb
MJ
1150 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1151 tmp = PHI_ARG_DEF (phi, 0);
1152 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1153 tmp = PHI_ARG_DEF (phi, 1);
1154 else
1155 return;
b258210c
MJ
1156 if (TREE_CODE (tmp) != SSA_NAME
1157 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1158 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1159 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1160 return;
1161
1162 assign = SSA_NAME_DEF_STMT (tmp);
1163 assign_bb = gimple_bb (assign);
40591473 1164 if (!single_pred_p (assign_bb))
b258210c 1165 return;
40591473
MJ
1166 expr = get_ancestor_addr_info (assign, &obj, &offset);
1167 if (!expr)
b258210c
MJ
1168 return;
1169 parm = TREE_OPERAND (expr, 0);
b258210c 1170 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
40591473 1171 gcc_assert (index >= 0);
b258210c
MJ
1172
1173 cond_bb = single_pred (assign_bb);
1174 cond = last_stmt (cond_bb);
69610617
SB
1175 if (!cond
1176 || gimple_code (cond) != GIMPLE_COND
b258210c
MJ
1177 || gimple_cond_code (cond) != NE_EXPR
1178 || gimple_cond_lhs (cond) != parm
1179 || !integer_zerop (gimple_cond_rhs (cond)))
1180 return;
1181
b258210c
MJ
1182 phi_bb = gimple_bb (phi);
1183 for (i = 0; i < 2; i++)
1184 {
1185 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1186 if (pred != assign_bb && pred != cond_bb)
1187 return;
1188 }
1189
06d65050
JH
1190 bool type_p = false;
1191 if (param_type && POINTER_TYPE_P (param_type))
1192 type_p = !detect_type_change (obj, expr, TREE_TYPE (param_type),
1193 call, jfunc, offset);
b8f6e610 1194 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
8b7773a4
MJ
1195 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
1196 parm_ref_data_pass_through_p (&parms_ainfo[index],
b8f6e610 1197 call, parm), type_p);
b258210c
MJ
1198}
1199
61502ca8 1200/* Given OP which is passed as an actual argument to a called function,
b258210c 1201 determine if it is possible to construct a KNOWN_TYPE jump function for it
06d65050
JH
1202 and if so, create one and store it to JFUNC.
1203 EXPECTED_TYPE represents the type the argument should be in. */
b258210c
MJ
1204
1205static void
f65cf2b7 1206compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
06d65050 1207 gimple call, tree expected_type)
b258210c 1208{
32aa622c 1209 HOST_WIDE_INT offset, size, max_size;
c7573249 1210 tree base;
b258210c 1211
05842ff5
MJ
1212 if (!flag_devirtualize
1213 || TREE_CODE (op) != ADDR_EXPR
06d65050
JH
1214 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE
1215 /* Be sure expected_type is polymorphic. */
1216 || !expected_type
1217 || TREE_CODE (expected_type) != RECORD_TYPE
1218 || !TYPE_BINFO (expected_type)
1219 || !BINFO_VTABLE (TYPE_BINFO (expected_type)))
b258210c
MJ
1220 return;
1221
1222 op = TREE_OPERAND (op, 0);
32aa622c
MJ
1223 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1224 if (!DECL_P (base)
1225 || max_size == -1
1226 || max_size != size
1227 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1228 || is_global_var (base))
1229 return;
1230
06d65050 1231 if (detect_type_change (op, base, expected_type, call, jfunc, offset))
f65cf2b7
MJ
1232 return;
1233
06d65050
JH
1234 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base),
1235 expected_type);
b258210c
MJ
1236}
1237
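/* An assumed example for compute_known_type_jump_func: given an automatic
   variable of a derived polymorphic class passed to a callee that expects a
   pointer to its base,

     struct D d;  ...  consume_base (&d);

   the argument &d produces a KNOWN_TYPE jump function whose base type is D,
   whose component type is the expected base class and whose offset is the
   position of that base sub-object within D, unless a possible dynamic type
   change before the call has been detected.  */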
be95e2b9
MJ
1238/* Inspect the given TYPE and return true iff it has the same structure (the
1239 same number of fields of the same types) as a C++ member pointer. If
1240 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1241 corresponding fields there. */
1242
3e293154
MJ
1243static bool
1244type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1245{
1246 tree fld;
1247
1248 if (TREE_CODE (type) != RECORD_TYPE)
1249 return false;
1250
1251 fld = TYPE_FIELDS (type);
1252 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
8b7773a4
MJ
1253 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1254 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
3e293154
MJ
1255 return false;
1256
1257 if (method_ptr)
1258 *method_ptr = fld;
1259
910ad8de 1260 fld = DECL_CHAIN (fld);
8b7773a4
MJ
1261 if (!fld || INTEGRAL_TYPE_P (fld)
1262 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
3e293154
MJ
1263 return false;
1264 if (delta)
1265 *delta = fld;
1266
910ad8de 1267 if (DECL_CHAIN (fld))
3e293154
MJ
1268 return false;
1269
1270 return true;
1271}
1272
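/* For reference, a simplified (assumed) sketch of the record shape the
   predicate above accepts, roughly the usual C++ pointer-to-member-function
   representation:

     /* conceptually */  struct assumed_pmf { void (*__pfn) (void); long __delta; };

   i.e. a RECORD_TYPE whose first field is a pointer (internally pointing to a
   METHOD_TYPE rather than a plain function type) and whose second and last
   field is the integer this-pointer adjustment delta.  */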
61502ca8 1273/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
8b7773a4
MJ
1274 return the rhs of its defining statement. Otherwise return RHS as it
1275 is. */
7ec49257
MJ
1276
1277static inline tree
1278get_ssa_def_if_simple_copy (tree rhs)
1279{
1280 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1281 {
1282 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1283
1284 if (gimple_assign_single_p (def_stmt))
1285 rhs = gimple_assign_rhs1 (def_stmt);
9961eb45
MJ
1286 else
1287 break;
7ec49257
MJ
1288 }
1289 return rhs;
1290}
1291
8b7773a4
MJ
1292/* Simple linked list, describing known contents of an aggregate before a
1293 call. */
1294
1295struct ipa_known_agg_contents_list
1296{
1297 /* Offset and size of the described part of the aggregate. */
1298 HOST_WIDE_INT offset, size;
1299 /* Known constant value or NULL if the contents are known to be unknown. */
1300 tree constant;
1301 /* Pointer to the next structure in the list. */
1302 struct ipa_known_agg_contents_list *next;
1303};
3e293154 1304
8b7773a4
MJ
1305/* Traverse statements from CALL backwards, scanning whether an aggregate given
1306 in ARG is filled in with constant values. ARG can either be an aggregate
1307 expression or a pointer to an aggregate. JFUNC is the jump function into
1308 which the constants are subsequently stored. */
be95e2b9 1309
3e293154 1310static void
8b7773a4
MJ
1311determine_known_aggregate_parts (gimple call, tree arg,
1312 struct ipa_jump_func *jfunc)
3e293154 1313{
8b7773a4
MJ
1314 struct ipa_known_agg_contents_list *list = NULL;
1315 int item_count = 0, const_count = 0;
1316 HOST_WIDE_INT arg_offset, arg_size;
726a989a 1317 gimple_stmt_iterator gsi;
8b7773a4
MJ
1318 tree arg_base;
1319 bool check_ref, by_ref;
1320 ao_ref r;
3e293154 1321
8b7773a4
MJ
1322 /* The function operates in three stages. First, we prepare check_ref, r,
1323 arg_base and arg_offset based on what is actually passed as an actual
1324 argument. */
3e293154 1325
8b7773a4
MJ
1326 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1327 {
1328 by_ref = true;
1329 if (TREE_CODE (arg) == SSA_NAME)
1330 {
1331 tree type_size;
1332 if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
1333 return;
1334 check_ref = true;
1335 arg_base = arg;
1336 arg_offset = 0;
1337 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
1338 arg_size = tree_low_cst (type_size, 1);
1339 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1340 }
1341 else if (TREE_CODE (arg) == ADDR_EXPR)
1342 {
1343 HOST_WIDE_INT arg_max_size;
1344
1345 arg = TREE_OPERAND (arg, 0);
1346 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1347 &arg_max_size);
1348 if (arg_max_size == -1
1349 || arg_max_size != arg_size
1350 || arg_offset < 0)
1351 return;
1352 if (DECL_P (arg_base))
1353 {
1354 tree size;
1355 check_ref = false;
1356 size = build_int_cst (integer_type_node, arg_size);
1357 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1358 }
1359 else
1360 return;
1361 }
1362 else
1363 return;
1364 }
1365 else
1366 {
1367 HOST_WIDE_INT arg_max_size;
1368
1369 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1370
1371 by_ref = false;
1372 check_ref = false;
1373 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1374 &arg_max_size);
1375 if (arg_max_size == -1
1376 || arg_max_size != arg_size
1377 || arg_offset < 0)
1378 return;
1379
1380 ao_ref_init (&r, arg);
1381 }
1382
1383 /* Second stage walks back the BB, looks at individual statements and as long
1384 as it is confident of how the statements affect contents of the
1385 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
1386 structures describing it. */
1387 gsi = gsi_for_stmt (call);
726a989a
RB
1388 gsi_prev (&gsi);
1389 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
3e293154 1390 {
8b7773a4 1391 struct ipa_known_agg_contents_list *n, **p;
726a989a 1392 gimple stmt = gsi_stmt (gsi);
8b7773a4
MJ
1393 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1394 tree lhs, rhs, lhs_base;
1395 bool partial_overlap;
3e293154 1396
8b7773a4 1397 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
8aa29647 1398 continue;
8b75fc9b 1399 if (!gimple_assign_single_p (stmt))
8b7773a4 1400 break;
3e293154 1401
726a989a
RB
1402 lhs = gimple_assign_lhs (stmt);
1403 rhs = gimple_assign_rhs1 (stmt);
7d2fb524
MJ
1404 if (!is_gimple_reg_type (rhs)
1405 || TREE_CODE (lhs) == BIT_FIELD_REF
1406 || contains_bitfld_component_ref_p (lhs))
8b7773a4 1407 break;
3e293154 1408
8b7773a4
MJ
1409 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1410 &lhs_max_size);
1411 if (lhs_max_size == -1
1412 || lhs_max_size != lhs_size
1413 || (lhs_offset < arg_offset
1414 && lhs_offset + lhs_size > arg_offset)
1415 || (lhs_offset < arg_offset + arg_size
1416 && lhs_offset + lhs_size > arg_offset + arg_size))
1417 break;
3e293154 1418
8b7773a4 1419 if (check_ref)
518dc859 1420 {
8b7773a4
MJ
1421 if (TREE_CODE (lhs_base) != MEM_REF
1422 || TREE_OPERAND (lhs_base, 0) != arg_base
1423 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1424 break;
3e293154 1425 }
8b7773a4 1426 else if (lhs_base != arg_base)
774b8a55
MJ
1427 {
1428 if (DECL_P (lhs_base))
1429 continue;
1430 else
1431 break;
1432 }
3e293154 1433
8b7773a4
MJ
1434 if (lhs_offset + lhs_size < arg_offset
1435 || lhs_offset >= (arg_offset + arg_size))
1436 continue;
1437
1438 partial_overlap = false;
1439 p = &list;
1440 while (*p && (*p)->offset < lhs_offset)
3e293154 1441 {
8b7773a4 1442 if ((*p)->offset + (*p)->size > lhs_offset)
3e293154 1443 {
8b7773a4
MJ
1444 partial_overlap = true;
1445 break;
3e293154 1446 }
8b7773a4
MJ
1447 p = &(*p)->next;
1448 }
1449 if (partial_overlap)
1450 break;
1451 if (*p && (*p)->offset < lhs_offset + lhs_size)
1452 {
1453 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1454 /* We already know this value is subsequently overwritten with
1455 something else. */
1456 continue;
3e293154 1457 else
8b7773a4
MJ
1458 /* Otherwise this is a partial overlap which we cannot
1459 represent. */
1460 break;
3e293154 1461 }
3e293154 1462
8b7773a4
MJ
1463 rhs = get_ssa_def_if_simple_copy (rhs);
1464 n = XALLOCA (struct ipa_known_agg_contents_list);
1465 n->size = lhs_size;
1466 n->offset = lhs_offset;
1467 if (is_gimple_ip_invariant (rhs))
1468 {
1469 n->constant = rhs;
1470 const_count++;
1471 }
1472 else
1473 n->constant = NULL_TREE;
1474 n->next = *p;
1475 *p = n;
3e293154 1476
8b7773a4 1477 item_count++;
dfea20f1
MJ
1478 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1479 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
8b7773a4
MJ
1480 break;
1481 }
be95e2b9 1482
8b7773a4
MJ
1483 /* Third stage just goes over the list and creates an appropriate vector of
1484 ipa_agg_jf_item structures out of it, of course only if there are
1485 any known constants to begin with. */
3e293154 1486
8b7773a4 1487 if (const_count)
3e293154 1488 {
8b7773a4 1489 jfunc->agg.by_ref = by_ref;
9771b263 1490 vec_alloc (jfunc->agg.items, const_count);
8b7773a4
MJ
1491 while (list)
1492 {
1493 if (list->constant)
1494 {
f32682ca
DN
1495 struct ipa_agg_jf_item item;
1496 item.offset = list->offset - arg_offset;
7d2fb524 1497 gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
d1f98542 1498 item.value = unshare_expr_without_location (list->constant);
9771b263 1499 jfunc->agg.items->quick_push (item);
8b7773a4
MJ
1500 }
1501 list = list->next;
1502 }
3e293154
MJ
1503 }
1504}
1505
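/* An assumed example for determine_known_aggregate_parts: for a call site like

     struct S s;
     s.a = 1;
     s.b = 64;
     consume (&s);

   the backward walk above records the two constant stores, and the jump
   function of the first argument of consume ends up with two ipa_agg_jf_item
   entries (at the offsets of a and b, with values 1 and 64) and agg.by_ref
   set, because the aggregate is passed by reference.  */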
06d65050
JH
1506static tree
1507ipa_get_callee_param_type (struct cgraph_edge *e, int i)
1508{
1509 int n;
1510 tree type = (e->callee
1511 ? TREE_TYPE (e->callee->symbol.decl)
1512 : gimple_call_fntype (e->call_stmt));
1513 tree t = TYPE_ARG_TYPES (type);
1514
1515 for (n = 0; n < i; n++)
1516 {
1517 if (!t)
1518 break;
1519 t = TREE_CHAIN (t);
1520 }
1521 if (t)
1522 return TREE_VALUE (t);
1523 if (!e->callee)
1524 return NULL;
1525 t = DECL_ARGUMENTS (e->callee->symbol.decl);
1526 for (n = 0; n < i; n++)
1527 {
1528 if (!t)
1529 return NULL;
1530 t = TREE_CHAIN (t);
1531 }
1532 if (t)
1533 return TREE_TYPE (t);
1534 return NULL;
1535}
1536
3e293154
MJ
1537/* Compute jump function for all arguments of callsite CS and insert the
1538 information in the jump_functions array in the ipa_edge_args corresponding
1539 to this callsite. */
be95e2b9 1540
749aa96d 1541static void
c419671c 1542ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
062c604f 1543 struct cgraph_edge *cs)
3e293154
MJ
1544{
1545 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
606d9a09
MJ
1546 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1547 gimple call = cs->call_stmt;
8b7773a4 1548 int n, arg_num = gimple_call_num_args (call);
3e293154 1549
606d9a09 1550 if (arg_num == 0 || args->jump_functions)
3e293154 1551 return;
9771b263 1552 vec_safe_grow_cleared (args->jump_functions, arg_num);
3e293154 1553
5fe8e757
MJ
1554 if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
1555 return;
1556
8b7773a4
MJ
1557 for (n = 0; n < arg_num; n++)
1558 {
1559 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1560 tree arg = gimple_call_arg (call, n);
06d65050 1561 tree param_type = ipa_get_callee_param_type (cs, n);
3e293154 1562
8b7773a4 1563 if (is_gimple_ip_invariant (arg))
4502fe8d 1564 ipa_set_jf_constant (jfunc, arg, cs);
8b7773a4
MJ
1565 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1566 && TREE_CODE (arg) == PARM_DECL)
1567 {
1568 int index = ipa_get_param_decl_index (info, arg);
1569
1570 gcc_assert (index >= 0);
1571 /* Aggregate passed by value, check for pass-through, otherwise we
1572 will attempt to fill in aggregate contents later in this
1573 for cycle. */
1574 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1575 {
b8f6e610 1576 ipa_set_jf_simple_pass_through (jfunc, index, false, false);
8b7773a4
MJ
1577 continue;
1578 }
1579 }
1580 else if (TREE_CODE (arg) == SSA_NAME)
1581 {
1582 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1583 {
1584 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
b8f6e610 1585 if (index >= 0)
8b7773a4 1586 {
b8f6e610 1587 bool agg_p, type_p;
8b7773a4
MJ
1588 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1589 call, arg);
06d65050
JH
1590 if (param_type && POINTER_TYPE_P (param_type))
1591 type_p = !detect_type_change_ssa (arg, TREE_TYPE (param_type),
1592 call, jfunc);
1593 else
1594 type_p = false;
b8f6e610 1595 if (type_p || jfunc->type == IPA_JF_UNKNOWN)
06d65050
JH
1596 ipa_set_jf_simple_pass_through (jfunc, index, agg_p,
1597 type_p);
8b7773a4
MJ
1598 }
1599 }
1600 else
1601 {
1602 gimple stmt = SSA_NAME_DEF_STMT (arg);
1603 if (is_gimple_assign (stmt))
1604 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
06d65050 1605 call, stmt, arg, param_type);
8b7773a4
MJ
1606 else if (gimple_code (stmt) == GIMPLE_PHI)
1607 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
06d65050 1608 call, stmt, param_type);
8b7773a4
MJ
1609 }
1610 }
1611 else
06d65050
JH
1612 compute_known_type_jump_func (arg, jfunc, call,
1613 param_type
1614 && POINTER_TYPE_P (param_type)
1615 ? TREE_TYPE (param_type)
1616 : NULL);
3e293154 1617
8b7773a4
MJ
1618 if ((jfunc->type != IPA_JF_PASS_THROUGH
1619 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1620 && (jfunc->type != IPA_JF_ANCESTOR
1621 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1622 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1623 || (POINTER_TYPE_P (TREE_TYPE (arg)))))
1624 determine_known_aggregate_parts (call, arg, jfunc);
1625 }
3e293154
MJ
1626}
1627
749aa96d
MJ
1628/* Compute jump functions for all edges - both direct and indirect - outgoing
1629 from NODE. Also count the actual arguments in the process. */
1630
062c604f
MJ
1631static void
1632ipa_compute_jump_functions (struct cgraph_node *node,
c419671c 1633 struct param_analysis_info *parms_ainfo)
749aa96d
MJ
1634{
1635 struct cgraph_edge *cs;
1636
1637 for (cs = node->callees; cs; cs = cs->next_callee)
1638 {
d7da5cc8
MJ
1639 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1640 NULL);
749aa96d
MJ
1641 /* We do not need to bother analyzing calls to unknown
1642 functions unless they may become known during lto/whopr. */
e70670cf 1643 if (!callee->symbol.definition && !flag_lto)
749aa96d 1644 continue;
c419671c 1645 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
749aa96d
MJ
1646 }
1647
1648 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
c419671c 1649 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
749aa96d
MJ
1650}
1651
8b7773a4
MJ
1652/* If STMT looks like a statement loading a value from a member pointer formal
1653 parameter, return that parameter and store the offset of the field to
1654 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1655 might be clobbered). If USE_DELTA, then we look for a use of the delta
1656 field rather than the pfn. */
be95e2b9 1657
3e293154 1658static tree
8b7773a4
MJ
1659ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1660 HOST_WIDE_INT *offset_p)
3e293154 1661{
8b7773a4
MJ
1662 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1663
1664 if (!gimple_assign_single_p (stmt))
1665 return NULL_TREE;
3e293154 1666
8b7773a4 1667 rhs = gimple_assign_rhs1 (stmt);
ae788515
EB
1668 if (TREE_CODE (rhs) == COMPONENT_REF)
1669 {
1670 ref_field = TREE_OPERAND (rhs, 1);
1671 rhs = TREE_OPERAND (rhs, 0);
1672 }
1673 else
1674 ref_field = NULL_TREE;
d242d063 1675 if (TREE_CODE (rhs) != MEM_REF)
3e293154 1676 return NULL_TREE;
3e293154 1677 rec = TREE_OPERAND (rhs, 0);
d242d063
MJ
1678 if (TREE_CODE (rec) != ADDR_EXPR)
1679 return NULL_TREE;
1680 rec = TREE_OPERAND (rec, 0);
3e293154 1681 if (TREE_CODE (rec) != PARM_DECL
6f7b8b70 1682 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
3e293154 1683 return NULL_TREE;
d242d063 1684 ref_offset = TREE_OPERAND (rhs, 1);
ae788515 1685
8b7773a4
MJ
1686 if (use_delta)
1687 fld = delta_field;
1688 else
1689 fld = ptr_field;
1690 if (offset_p)
1691 *offset_p = int_bit_position (fld);
1692
ae788515
EB
1693 if (ref_field)
1694 {
1695 if (integer_nonzerop (ref_offset))
1696 return NULL_TREE;
ae788515
EB
1697 return ref_field == fld ? rec : NULL_TREE;
1698 }
3e293154 1699 else
8b7773a4
MJ
1700 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1701 : NULL_TREE;
3e293154
MJ
1702}
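/* Hedged example (for illustration only; A and CALL_IT are invented): the
   loads this function is meant to recognize typically come from C++ code
   such as

     struct A { int f (int); };
     int call_it (int (A::*pmf) (int), A *a) { return (a->*pmf) (3); }

   which the front end lowers into a record with __pfn and __delta fields,
   so the gimplifier emits statements like

     pmf$__pfn_1 = pmf.__pfn;        matched when USE_DELTA is false
     pmf$__delta_2 = pmf.__delta;    matched when USE_DELTA is true

   The exact representation depends on TARGET_PTRMEMFUNC_VBIT_LOCATION, so
   this is only a sketch of the common case.  */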
1703
1704/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 1705
3e293154
MJ
1706static bool
1707ipa_is_ssa_with_stmt_def (tree t)
1708{
1709 if (TREE_CODE (t) == SSA_NAME
1710 && !SSA_NAME_IS_DEFAULT_DEF (t))
1711 return true;
1712 else
1713 return false;
1714}
1715
40591473
MJ
1716/* Find the indirect call graph edge corresponding to STMT and mark it as a
1717   call to the parameter with index PARAM_INDEX.  NODE is the caller.  Return the
1718 indirect call graph edge. */
be95e2b9 1719
40591473
MJ
1720static struct cgraph_edge *
1721ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
3e293154 1722{
e33c6cd6 1723 struct cgraph_edge *cs;
3e293154 1724
5f902d76 1725 cs = cgraph_edge (node, stmt);
b258210c 1726 cs->indirect_info->param_index = param_index;
8b7773a4 1727 cs->indirect_info->offset = 0;
40591473 1728 cs->indirect_info->polymorphic = 0;
8b7773a4 1729 cs->indirect_info->agg_contents = 0;
c13bc3d9 1730 cs->indirect_info->member_ptr = 0;
40591473 1731 return cs;
3e293154
MJ
1732}
1733
e33c6cd6 1734/* Analyze the CALL and examine uses of formal parameters of the caller NODE
c419671c 1735 (described by INFO). PARMS_AINFO is a pointer to a vector containing
062c604f
MJ
1736 intermediate information about each formal parameter. Currently it checks
1737 whether the call calls a pointer that is a formal parameter and if so, the
1738 parameter is marked with the called flag and an indirect call graph edge
1739 describing the call is created. This is very simple for ordinary pointers
1740 represented in SSA but not-so-nice when it comes to member pointers. The
1741 ugly part of this function does nothing more than trying to match the
1742 pattern of such a call. An example of such a pattern is the gimple dump
1743 below, the call is on the last line:
3e293154 1744
ae788515
EB
1745 <bb 2>:
1746 f$__delta_5 = f.__delta;
1747 f$__pfn_24 = f.__pfn;
1748
1749 or
3e293154 1750 <bb 2>:
d242d063
MJ
1751 f$__delta_5 = MEM[(struct *)&f];
1752 f$__pfn_24 = MEM[(struct *)&f + 4B];
8aa29647 1753
ae788515 1754 and a few lines below:
8aa29647
MJ
1755
1756 <bb 5>
3e293154
MJ
1757 D.2496_3 = (int) f$__pfn_24;
1758 D.2497_4 = D.2496_3 & 1;
1759 if (D.2497_4 != 0)
1760 goto <bb 3>;
1761 else
1762 goto <bb 4>;
1763
8aa29647 1764 <bb 6>:
3e293154
MJ
1765 D.2500_7 = (unsigned int) f$__delta_5;
1766 D.2501_8 = &S + D.2500_7;
1767 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1768 D.2503_10 = *D.2502_9;
1769 D.2504_12 = f$__pfn_24 + -1;
1770 D.2505_13 = (unsigned int) D.2504_12;
1771 D.2506_14 = D.2503_10 + D.2505_13;
1772 D.2507_15 = *D.2506_14;
1773 iftmp.11_16 = (String:: *) D.2507_15;
1774
8aa29647 1775 <bb 7>:
3e293154
MJ
1776 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1777 D.2500_19 = (unsigned int) f$__delta_5;
1778 D.2508_20 = &S + D.2500_19;
1779 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1780
1781 Such patterns are results of simple calls to a member pointer:
1782
1783 int doprinting (int (MyString::* f)(int) const)
1784 {
1785 MyString S ("somestring");
1786
1787 return (S.*f)(4);
1788 }
8b7773a4
MJ
1789
1790 Moreover, the function also looks for called pointers loaded from aggregates
1791 passed by value or reference. */
3e293154
MJ
1792
1793static void
b258210c
MJ
1794ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1795 struct ipa_node_params *info,
c419671c 1796 struct param_analysis_info *parms_ainfo,
b258210c 1797 gimple call, tree target)
3e293154 1798{
726a989a 1799 gimple def;
3e293154 1800 tree n1, n2;
726a989a
RB
1801 gimple d1, d2;
1802 tree rec, rec2, cond;
1803 gimple branch;
3e293154 1804 int index;
3e293154 1805 basic_block bb, virt_bb, join;
8b7773a4
MJ
1806 HOST_WIDE_INT offset;
1807 bool by_ref;
3e293154 1808
3e293154
MJ
1809 if (SSA_NAME_IS_DEFAULT_DEF (target))
1810 {
b258210c 1811 tree var = SSA_NAME_VAR (target);
3e293154
MJ
1812 index = ipa_get_param_decl_index (info, var);
1813 if (index >= 0)
40591473 1814 ipa_note_param_call (node, index, call);
3e293154
MJ
1815 return;
1816 }
1817
8b7773a4
MJ
1818 def = SSA_NAME_DEF_STMT (target);
1819 if (gimple_assign_single_p (def)
d044dd17 1820 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
8b7773a4
MJ
1821 gimple_assign_rhs1 (def), &index, &offset,
1822 &by_ref))
1823 {
1824 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1825 cs->indirect_info->offset = offset;
1826 cs->indirect_info->agg_contents = 1;
1827 cs->indirect_info->by_ref = by_ref;
1828 return;
1829 }
1830
3e293154
MJ
1831 /* Now we need to try to match the complex pattern of calling a member
1832 pointer. */
8b7773a4
MJ
1833 if (gimple_code (def) != GIMPLE_PHI
1834 || gimple_phi_num_args (def) != 2
1835 || !POINTER_TYPE_P (TREE_TYPE (target))
3e293154
MJ
1836 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1837 return;
1838
3e293154
MJ
1839 /* First, we need to check whether one of these is a load from a member
1840 pointer that is a parameter to this function. */
1841 n1 = PHI_ARG_DEF (def, 0);
1842 n2 = PHI_ARG_DEF (def, 1);
1fc8feb5 1843 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
3e293154
MJ
1844 return;
1845 d1 = SSA_NAME_DEF_STMT (n1);
1846 d2 = SSA_NAME_DEF_STMT (n2);
1847
8aa29647 1848 join = gimple_bb (def);
8b7773a4 1849 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
3e293154 1850 {
8b7773a4 1851 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
3e293154
MJ
1852 return;
1853
8aa29647 1854 bb = EDGE_PRED (join, 0)->src;
726a989a 1855 virt_bb = gimple_bb (d2);
3e293154 1856 }
8b7773a4 1857 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
3e293154 1858 {
8aa29647 1859 bb = EDGE_PRED (join, 1)->src;
726a989a 1860 virt_bb = gimple_bb (d1);
3e293154
MJ
1861 }
1862 else
1863 return;
1864
1865 /* Second, we need to check that the basic blocks are laid out in the way
1866 corresponding to the pattern. */
1867
3e293154
MJ
1868 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1869 || single_pred (virt_bb) != bb
1870 || single_succ (virt_bb) != join)
1871 return;
1872
1873 /* Third, let's see that the branching is done depending on the least
1874 significant bit of the pfn. */
1875
1876 branch = last_stmt (bb);
8aa29647 1877 if (!branch || gimple_code (branch) != GIMPLE_COND)
3e293154
MJ
1878 return;
1879
12430896
RG
1880 if ((gimple_cond_code (branch) != NE_EXPR
1881 && gimple_cond_code (branch) != EQ_EXPR)
726a989a 1882 || !integer_zerop (gimple_cond_rhs (branch)))
3e293154 1883 return;
3e293154 1884
726a989a 1885 cond = gimple_cond_lhs (branch);
3e293154
MJ
1886 if (!ipa_is_ssa_with_stmt_def (cond))
1887 return;
1888
726a989a 1889 def = SSA_NAME_DEF_STMT (cond);
8b75fc9b 1890 if (!is_gimple_assign (def)
726a989a
RB
1891 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1892 || !integer_onep (gimple_assign_rhs2 (def)))
3e293154 1893 return;
726a989a
RB
1894
1895 cond = gimple_assign_rhs1 (def);
3e293154
MJ
1896 if (!ipa_is_ssa_with_stmt_def (cond))
1897 return;
1898
726a989a 1899 def = SSA_NAME_DEF_STMT (cond);
3e293154 1900
8b75fc9b
MJ
1901 if (is_gimple_assign (def)
1902 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3e293154 1903 {
726a989a 1904 cond = gimple_assign_rhs1 (def);
3e293154
MJ
1905 if (!ipa_is_ssa_with_stmt_def (cond))
1906 return;
726a989a 1907 def = SSA_NAME_DEF_STMT (cond);
3e293154
MJ
1908 }
1909
6f7b8b70
RE
1910 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1911 (TARGET_PTRMEMFUNC_VBIT_LOCATION
8b7773a4
MJ
1912 == ptrmemfunc_vbit_in_delta),
1913 NULL);
3e293154
MJ
1914 if (rec != rec2)
1915 return;
1916
1917 index = ipa_get_param_decl_index (info, rec);
8b7773a4
MJ
1918 if (index >= 0
1919 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1920 {
1921 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1922 cs->indirect_info->offset = offset;
1923 cs->indirect_info->agg_contents = 1;
c13bc3d9 1924 cs->indirect_info->member_ptr = 1;
8b7773a4 1925 }
3e293154
MJ
1926
1927 return;
1928}
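/* Illustrative sketch (the struct and field names are invented): the two
   simple cases handled at the beginning of the function above correspond to
   callers such as

     int call_direct (int (*fn) (int)) { return fn (5); }
     int call_loaded (struct ops *o)   { return o->hook (5); }

   In the first case the called value is the default-definition SSA name of
   FN, so the parameter index is recorded directly.  In the second, the
   called pointer is loaded from the aggregate O points to, so
   ipa_load_from_parm_agg_1 supplies the offset of the field and the
   indirect call info is marked with agg_contents and by_ref.  */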
1929
b258210c
MJ
1930/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1931 object referenced in the expression is a formal parameter of the caller
1932 (described by INFO), create a call note for the statement. */
1933
1934static void
1935ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1936 struct ipa_node_params *info, gimple call,
1937 tree target)
1938{
40591473
MJ
1939 struct cgraph_edge *cs;
1940 struct cgraph_indirect_call_info *ii;
f65cf2b7 1941 struct ipa_jump_func jfunc;
b258210c 1942 tree obj = OBJ_TYPE_REF_OBJECT (target);
b258210c 1943 int index;
40591473 1944 HOST_WIDE_INT anc_offset;
b258210c 1945
05842ff5
MJ
1946 if (!flag_devirtualize)
1947 return;
1948
40591473 1949 if (TREE_CODE (obj) != SSA_NAME)
b258210c
MJ
1950 return;
1951
40591473
MJ
1952 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1953 {
1954 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1955 return;
b258210c 1956
40591473
MJ
1957 anc_offset = 0;
1958 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1959 gcc_assert (index >= 0);
06d65050
JH
1960 if (detect_type_change_ssa (obj, obj_type_ref_class (target),
1961 call, &jfunc))
40591473
MJ
1962 return;
1963 }
1964 else
1965 {
1966 gimple stmt = SSA_NAME_DEF_STMT (obj);
1967 tree expr;
1968
1969 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1970 if (!expr)
1971 return;
1972 index = ipa_get_param_decl_index (info,
1973 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1974 gcc_assert (index >= 0);
06d65050
JH
1975 if (detect_type_change (obj, expr, obj_type_ref_class (target),
1976 call, &jfunc, anc_offset))
40591473
MJ
1977 return;
1978 }
1979
1980 cs = ipa_note_param_call (node, index, call);
1981 ii = cs->indirect_info;
8b7773a4 1982 ii->offset = anc_offset;
40591473 1983 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
c49bdb2e 1984 ii->otr_type = obj_type_ref_class (target);
40591473 1985 ii->polymorphic = 1;
b258210c
MJ
1986}
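/* Hedged example (B and USE are invented names): the OBJ_TYPE_REF case
   handled above covers C++ virtual calls made through a formal parameter,
   e.g.

     struct B { virtual int get (); };
     int use (B *b) { return b->get (); }

   Here OBJ_TYPE_REF_OBJECT is the SSA name of B, so the parameter index of
   B is recorded and otr_token/otr_type identify the virtual table slot,
   allowing later propagation to try to devirtualize the call.  */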
1987
1988/* Analyze a call statement CALL whether and how it utilizes formal parameters
c419671c 1989 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
062c604f 1990 containing intermediate information about each formal parameter. */
b258210c
MJ
1991
1992static void
1993ipa_analyze_call_uses (struct cgraph_node *node,
062c604f 1994 struct ipa_node_params *info,
c419671c 1995 struct param_analysis_info *parms_ainfo, gimple call)
b258210c
MJ
1996{
1997 tree target = gimple_call_fn (call);
1998
25583c4f
RS
1999 if (!target)
2000 return;
b258210c 2001 if (TREE_CODE (target) == SSA_NAME)
c419671c 2002 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
1d5755ef 2003 else if (virtual_method_call_p (target))
b258210c
MJ
2004 ipa_analyze_virtual_call_uses (node, info, call, target);
2005}
2006
2007
e33c6cd6
MJ
2008/* Analyze the call statement STMT with respect to formal parameters (described
2009 in INFO) of caller given by NODE. Currently it only checks whether formal
c419671c 2010 parameters are called. PARMS_AINFO is a pointer to a vector containing
062c604f 2011 intermediate information about each formal parameter. */
be95e2b9 2012
3e293154 2013static void
e33c6cd6 2014ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
c419671c 2015 struct param_analysis_info *parms_ainfo, gimple stmt)
3e293154 2016{
726a989a 2017 if (is_gimple_call (stmt))
c419671c 2018 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
062c604f
MJ
2019}
2020
2021/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
2022 If OP is a parameter declaration, mark it as used in the info structure
2023 passed in DATA. */
2024
2025static bool
2026visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
2027 tree op, void *data)
2028{
2029 struct ipa_node_params *info = (struct ipa_node_params *) data;
2030
2031 op = get_base_address (op);
2032 if (op
2033 && TREE_CODE (op) == PARM_DECL)
2034 {
2035 int index = ipa_get_param_decl_index (info, op);
2036 gcc_assert (index >= 0);
310bc633 2037 ipa_set_param_used (info, index, true);
062c604f
MJ
2038 }
2039
2040 return false;
3e293154
MJ
2041}
2042
2043/* Scan the function body of NODE and inspect the uses of formal parameters.
2044 Store the findings in various structures of the associated ipa_node_params
c419671c 2045 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
062c604f 2046 vector containing intermediate information about each formal parameter. */
be95e2b9 2047
062c604f
MJ
2048static void
2049ipa_analyze_params_uses (struct cgraph_node *node,
c419671c 2050 struct param_analysis_info *parms_ainfo)
3e293154 2051{
960bfb69 2052 tree decl = node->symbol.decl;
3e293154
MJ
2053 basic_block bb;
2054 struct function *func;
726a989a 2055 gimple_stmt_iterator gsi;
3e293154 2056 struct ipa_node_params *info = IPA_NODE_REF (node);
062c604f 2057 int i;
3e293154 2058
726a989a 2059 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
3e293154 2060 return;
3e293154 2061
5fe8e757
MJ
2062 info->uses_analysis_done = 1;
2063 if (ipa_func_spec_opts_forbid_analysis_p (node))
2064 {
2065 for (i = 0; i < ipa_get_param_count (info); i++)
2066 {
2067 ipa_set_param_used (info, i, true);
2068 ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
2069 }
2070 return;
2071 }
2072
062c604f
MJ
2073 for (i = 0; i < ipa_get_param_count (info); i++)
2074 {
2075 tree parm = ipa_get_param (info, i);
4502fe8d
MJ
2076 int controlled_uses = 0;
2077
062c604f
MJ
2078 /* For SSA regs see if parameter is used. For non-SSA we compute
2079 the flag during modification analysis. */
4502fe8d
MJ
2080 if (is_gimple_reg (parm))
2081 {
2082 tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
2083 parm);
2084 if (ddef && !has_zero_uses (ddef))
2085 {
2086 imm_use_iterator imm_iter;
2087 use_operand_p use_p;
2088
2089 ipa_set_param_used (info, i, true);
2090 FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
2091 if (!is_gimple_call (USE_STMT (use_p)))
2092 {
2093 controlled_uses = IPA_UNDESCRIBED_USE;
2094 break;
2095 }
2096 else
2097 controlled_uses++;
2098 }
2099 else
2100 controlled_uses = 0;
2101 }
2102 else
2103 controlled_uses = IPA_UNDESCRIBED_USE;
2104 ipa_set_controlled_uses (info, i, controlled_uses);
062c604f
MJ
2105 }
2106
3e293154
MJ
2107 func = DECL_STRUCT_FUNCTION (decl);
2108 FOR_EACH_BB_FN (bb, func)
2109 {
726a989a 2110 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3e293154 2111 {
726a989a 2112 gimple stmt = gsi_stmt (gsi);
062c604f
MJ
2113
2114 if (is_gimple_debug (stmt))
2115 continue;
2116
c419671c 2117 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
062c604f
MJ
2118 walk_stmt_load_store_addr_ops (stmt, info,
2119 visit_ref_for_mod_analysis,
2120 visit_ref_for_mod_analysis,
2121 visit_ref_for_mod_analysis);
518dc859 2122 }
355a7673 2123 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
062c604f
MJ
2124 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
2125 visit_ref_for_mod_analysis,
2126 visit_ref_for_mod_analysis,
2127 visit_ref_for_mod_analysis);
518dc859 2128 }
3e293154
MJ
2129}
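/* Worked example of the controlled-uses accounting above (a sketch with an
   invented function HELPER): in

     static void helper (int (*cb) (int)) { cb (1); cb (2); }

   the SSA default definition of CB has two uses, both of them call
   statements, so controlled_uses becomes 2.  If CB were additionally stored
   to memory or compared against another pointer, the inner loop would give
   up and record IPA_UNDESCRIBED_USE instead.  */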
2130
2c9561b5
MJ
2131/* Free the resources held in PARMS_AINFO, assuming there are PARAM_COUNT parameters.  */
2132
2133static void
2134free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
2135{
2136 int i;
2137
2138 for (i = 0; i < param_count; i++)
2139 {
2140 if (parms_ainfo[i].parm_visited_statements)
2141 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
2142 if (parms_ainfo[i].pt_visited_statements)
2143 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
2144 }
2145}
2146
dd5a833e
MS
2147/* Initialize the array describing properties of formal parameters
2148 of NODE, analyze their uses and compute jump functions associated
2149 with actual arguments of calls from within NODE. */
062c604f
MJ
2150
2151void
2152ipa_analyze_node (struct cgraph_node *node)
2153{
57dbdc5a 2154 struct ipa_node_params *info;
c419671c 2155 struct param_analysis_info *parms_ainfo;
2c9561b5 2156 int param_count;
062c604f 2157
57dbdc5a
MJ
2158 ipa_check_create_node_params ();
2159 ipa_check_create_edge_args ();
2160 info = IPA_NODE_REF (node);
960bfb69 2161 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
062c604f
MJ
2162 ipa_initialize_node_params (node);
2163
2164 param_count = ipa_get_param_count (info);
c419671c
MJ
2165 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
2166 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
062c604f 2167
c419671c
MJ
2168 ipa_analyze_params_uses (node, parms_ainfo);
2169 ipa_compute_jump_functions (node, parms_ainfo);
062c604f 2170
2c9561b5 2171 free_parms_ainfo (parms_ainfo, param_count);
f65cf2b7 2172 pop_cfun ();
062c604f
MJ
2173}
2174
e248d83f
MJ
2175/* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
2176 attempt a type-based devirtualization. If successful, return the
2177 target function declaration, otherwise return NULL. */
2178
2179tree
2180ipa_intraprocedural_devirtualization (gimple call)
2181{
2182 tree binfo, token, fndecl;
2183 struct ipa_jump_func jfunc;
2184 tree otr = gimple_call_fn (call);
2185
2186 jfunc.type = IPA_JF_UNKNOWN;
2187 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
06d65050 2188 call, obj_type_ref_class (otr));
e248d83f
MJ
2189 if (jfunc.type != IPA_JF_KNOWN_TYPE)
2190 return NULL_TREE;
2191 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
2192 if (!binfo)
2193 return NULL_TREE;
2194 token = OBJ_TYPE_REF_TOKEN (otr);
2195 fndecl = gimple_get_virt_method_for_binfo (tree_low_cst (token, 1),
2196 binfo);
2197 return fndecl;
2198}
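/* Hedged usage sketch (the surrounding statements are assumptions about a
   typical caller, not code taken from one): a pass holding a GIMPLE_CALL
   whose callee is an OBJ_TYPE_REF might use the helper above roughly as

     tree fn = gimple_call_fn (stmt);
     if (fn && virtual_method_call_p (fn))
       {
         tree fndecl = ipa_intraprocedural_devirtualization (stmt);
         if (fndecl)
           gimple_call_set_fndecl (stmt, fndecl);
       }

   i.e. the function only answers which FUNCTION_DECL the OBJ_TYPE_REF would
   resolve to; rewriting the call statement is left to the caller.  */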
062c604f 2199
61502ca8 2200/* Update the jump function DST when the call graph edge corresponding to SRC
b258210c
MJ
2201   is being inlined, knowing that DST is of type ancestor and SRC of known
2202 type. */
2203
2204static void
2205combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2206 struct ipa_jump_func *dst)
2207{
c7573249
MJ
2208 HOST_WIDE_INT combined_offset;
2209 tree combined_type;
b258210c 2210
b8f6e610
MJ
2211 if (!ipa_get_jf_ancestor_type_preserved (dst))
2212 {
2213 dst->type = IPA_JF_UNKNOWN;
2214 return;
2215 }
2216
7b872d9e
MJ
2217 combined_offset = ipa_get_jf_known_type_offset (src)
2218 + ipa_get_jf_ancestor_offset (dst);
2219 combined_type = ipa_get_jf_ancestor_type (dst);
c7573249 2220
7b872d9e
MJ
2221 ipa_set_jf_known_type (dst, combined_offset,
2222 ipa_get_jf_known_type_base_type (src),
2223 combined_type);
b258210c
MJ
2224}
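/* Worked example (illustrative numbers): if SRC says "known type with base
   type D at offset 0" and DST is an ancestor jump function that adds offset
   8 to reach type B, the combination above produces a known-type jump
   function at offset 0 + 8 = 8 with base type D (from SRC) and component
   type B (from DST).  If the ancestor function does not claim the dynamic
   type is preserved, the result degrades to IPA_JF_UNKNOWN instead.  */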
2225
be95e2b9 2226/* Update the jump functions associated with call graph edge E when the call
3e293154 2227 graph edge CS is being inlined, assuming that E->caller is already (possibly
b258210c 2228 indirectly) inlined into CS->callee and that E has not been inlined. */
be95e2b9 2229
3e293154
MJ
2230static void
2231update_jump_functions_after_inlining (struct cgraph_edge *cs,
2232 struct cgraph_edge *e)
2233{
2234 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2235 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2236 int count = ipa_get_cs_argument_count (args);
2237 int i;
2238
2239 for (i = 0; i < count; i++)
2240 {
b258210c 2241 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
3e293154 2242
685b0d13
MJ
2243 if (dst->type == IPA_JF_ANCESTOR)
2244 {
b258210c 2245 struct ipa_jump_func *src;
8b7773a4 2246 int dst_fid = dst->value.ancestor.formal_id;
685b0d13 2247
b258210c
MJ
2248 /* Variable number of arguments can cause havoc if we try to access
2249 one that does not exist in the inlined edge. So make sure we
2250 don't. */
8b7773a4 2251 if (dst_fid >= ipa_get_cs_argument_count (top))
b258210c
MJ
2252 {
2253 dst->type = IPA_JF_UNKNOWN;
2254 continue;
2255 }
2256
8b7773a4
MJ
2257 src = ipa_get_ith_jump_func (top, dst_fid);
2258
2259 if (src->agg.items
2260 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2261 {
2262 struct ipa_agg_jf_item *item;
2263 int j;
2264
2265 /* Currently we do not produce clobber aggregate jump functions,
2266 replace with merging when we do. */
2267 gcc_assert (!dst->agg.items);
2268
9771b263 2269 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2270 dst->agg.by_ref = src->agg.by_ref;
9771b263 2271 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
8b7773a4
MJ
2272 item->offset -= dst->value.ancestor.offset;
2273 }
2274
b258210c
MJ
2275 if (src->type == IPA_JF_KNOWN_TYPE)
2276 combine_known_type_and_ancestor_jfs (src, dst);
b258210c
MJ
2277 else if (src->type == IPA_JF_PASS_THROUGH
2278 && src->value.pass_through.operation == NOP_EXPR)
8b7773a4
MJ
2279 {
2280 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2281 dst->value.ancestor.agg_preserved &=
2282 src->value.pass_through.agg_preserved;
b8f6e610
MJ
2283 dst->value.ancestor.type_preserved &=
2284 src->value.pass_through.type_preserved;
8b7773a4 2285 }
b258210c
MJ
2286 else if (src->type == IPA_JF_ANCESTOR)
2287 {
2288 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2289 dst->value.ancestor.offset += src->value.ancestor.offset;
8b7773a4
MJ
2290 dst->value.ancestor.agg_preserved &=
2291 src->value.ancestor.agg_preserved;
b8f6e610
MJ
2292 dst->value.ancestor.type_preserved &=
2293 src->value.ancestor.type_preserved;
b258210c
MJ
2294 }
2295 else
2296 dst->type = IPA_JF_UNKNOWN;
2297 }
2298 else if (dst->type == IPA_JF_PASS_THROUGH)
3e293154 2299 {
b258210c
MJ
2300 struct ipa_jump_func *src;
2301 /* We must check range due to calls with variable number of arguments
2302 and we cannot combine jump functions with operations. */
2303 if (dst->value.pass_through.operation == NOP_EXPR
2304 && (dst->value.pass_through.formal_id
2305 < ipa_get_cs_argument_count (top)))
2306 {
8b7773a4
MJ
2307 int dst_fid = dst->value.pass_through.formal_id;
2308 src = ipa_get_ith_jump_func (top, dst_fid);
b8f6e610 2309 bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
8b7773a4 2310
b8f6e610
MJ
2311 switch (src->type)
2312 {
2313 case IPA_JF_UNKNOWN:
2314 dst->type = IPA_JF_UNKNOWN;
2315 break;
2316 case IPA_JF_KNOWN_TYPE:
2317 ipa_set_jf_known_type (dst,
2318 ipa_get_jf_known_type_offset (src),
2319 ipa_get_jf_known_type_base_type (src),
2320 ipa_get_jf_known_type_base_type (src));
2321 break;
2322 case IPA_JF_CONST:
2323 ipa_set_jf_cst_copy (dst, src);
2324 break;
2325
2326 case IPA_JF_PASS_THROUGH:
2327 {
2328 int formal_id = ipa_get_jf_pass_through_formal_id (src);
2329 enum tree_code operation;
2330 operation = ipa_get_jf_pass_through_operation (src);
2331
2332 if (operation == NOP_EXPR)
2333 {
2334 bool agg_p, type_p;
2335 agg_p = dst_agg_p
2336 && ipa_get_jf_pass_through_agg_preserved (src);
2337 type_p = ipa_get_jf_pass_through_type_preserved (src)
2338 && ipa_get_jf_pass_through_type_preserved (dst);
2339 ipa_set_jf_simple_pass_through (dst, formal_id,
2340 agg_p, type_p);
2341 }
2342 else
2343 {
2344 tree operand = ipa_get_jf_pass_through_operand (src);
2345 ipa_set_jf_arith_pass_through (dst, formal_id, operand,
2346 operation);
2347 }
2348 break;
2349 }
2350 case IPA_JF_ANCESTOR:
2351 {
2352 bool agg_p, type_p;
2353 agg_p = dst_agg_p
2354 && ipa_get_jf_ancestor_agg_preserved (src);
2355 type_p = ipa_get_jf_ancestor_type_preserved (src)
2356 && ipa_get_jf_pass_through_type_preserved (dst);
2357 ipa_set_ancestor_jf (dst,
2358 ipa_get_jf_ancestor_offset (src),
2359 ipa_get_jf_ancestor_type (src),
2360 ipa_get_jf_ancestor_formal_id (src),
2361 agg_p, type_p);
2362 break;
2363 }
2364 default:
2365 gcc_unreachable ();
2366 }
8b7773a4
MJ
2367
2368 if (src->agg.items
b8f6e610 2369 && (dst_agg_p || !src->agg.by_ref))
8b7773a4
MJ
2370 {
2371 /* Currently we do not produce clobber aggregate jump
2372 functions, replace with merging when we do. */
2373 gcc_assert (!dst->agg.items);
2374
2375 dst->agg.by_ref = src->agg.by_ref;
9771b263 2376 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2377 }
b258210c
MJ
2378 }
2379 else
2380 dst->type = IPA_JF_UNKNOWN;
3e293154 2381 }
b258210c
MJ
2382 }
2383}
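/* Illustrative sketch (A, B and C are invented functions) of the
   pass-through composition performed above:

     void c (int x);
     void b (int y) { c (y); }       jump function of C's X: pass-through of Y
     void a (int z) { b (z); }       jump function of B's Y: pass-through of Z

   When B is inlined into A, the edge to C now originates from A, and the
   loop above rewrites the jump function of C's argument into a pass-through
   of A's parameter Z, i.e. the two original pass-throughs are composed.  */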
2384
2385/* If TARGET is an addr_expr of a function declaration, make it the destination
81fa35bd 2386 of an indirect edge IE and return the edge. Otherwise, return NULL. */
b258210c 2387
3949c4a7 2388struct cgraph_edge *
81fa35bd 2389ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
b258210c
MJ
2390{
2391 struct cgraph_node *callee;
0f378cb5 2392 struct inline_edge_summary *es = inline_edge_summary (ie);
48b1474e 2393 bool unreachable = false;
b258210c 2394
ceeffab0
MJ
2395 if (TREE_CODE (target) == ADDR_EXPR)
2396 target = TREE_OPERAND (target, 0);
b258210c 2397 if (TREE_CODE (target) != FUNCTION_DECL)
a0a7b611
JH
2398 {
2399 target = canonicalize_constructor_val (target, NULL);
2400 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2401 {
c13bc3d9
MJ
2402 if (ie->indirect_info->member_ptr)
2403 /* Member pointer call that goes through a VMT lookup. */
2404 return NULL;
2405
a0a7b611
JH
2406 if (dump_file)
2407 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
48b1474e 2408 " in %s/%i, making it unreachable.\n",
9de04252 2409 cgraph_node_name (ie->caller), ie->caller->symbol.order);
48b1474e
MJ
2410 target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
2411 callee = cgraph_get_create_node (target);
2412 unreachable = true;
a0a7b611 2413 }
48b1474e
MJ
2414 else
2415 callee = cgraph_get_node (target);
a0a7b611 2416 }
48b1474e
MJ
2417 else
2418 callee = cgraph_get_node (target);
a0a7b611
JH
2419
2420  /* Because may-edges are not explicitly represented and the vtable may be external,
2421 we may create the first reference to the object in the unit. */
2422 if (!callee || callee->global.inlined_to)
2423 {
a0a7b611
JH
2424
2425	 /* We had better ensure we can refer to it.
2426	    In the case of static functions we are out of luck, since we have
2427	    already removed their bodies.  In the case of public functions we may or may
2428 not introduce the reference. */
2429 if (!canonicalize_constructor_val (target, NULL)
2430 || !TREE_PUBLIC (target))
2431 {
2432 if (dump_file)
2433 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
2434		     "(%s/%i -> %s/%i) but cannot refer to it.  Giving up.\n",
9de04252
MJ
2435 xstrdup (cgraph_node_name (ie->caller)),
2436 ie->caller->symbol.order,
2437 xstrdup (cgraph_node_name (ie->callee)),
2438 ie->callee->symbol.order);
a0a7b611
JH
2439 return NULL;
2440 }
48f4a6fa 2441 callee = cgraph_get_create_real_symbol_node (target);
a0a7b611 2442 }
1dbee8c9 2443 ipa_check_create_node_params ();
ceeffab0 2444
81fa35bd
MJ
2445  /* We cannot make edges to inline clones.  It is a bug that someone removed
2446 the cgraph node too early. */
17afc0fe
JH
2447 gcc_assert (!callee->global.inlined_to);
2448
48b1474e 2449 if (dump_file && !unreachable)
b258210c
MJ
2450 {
2451 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
ceeffab0 2452 "(%s/%i -> %s/%i), for stmt ",
b258210c 2453 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
9de04252
MJ
2454 xstrdup (cgraph_node_name (ie->caller)),
2455 ie->caller->symbol.order,
042ae7d2
JH
2456 xstrdup (cgraph_node_name (callee)),
2457 callee->symbol.order);
b258210c
MJ
2458 if (ie->call_stmt)
2459 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2460 else
2461 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
042ae7d2
JH
2462 }
2463 ie = cgraph_make_edge_direct (ie, callee);
2464 es = inline_edge_summary (ie);
2465 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2466 - eni_size_weights.call_cost);
2467 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2468 - eni_time_weights.call_cost);
749aa96d 2469
b258210c 2470 return ie;
3e293154
MJ
2471}
2472
8b7773a4
MJ
2473/* Retrieve a value from the aggregate jump function AGG at the given OFFSET, or
2474   return NULL if there is none.  BY_REF specifies whether the value has to
2475 be passed by reference or by value. */
2476
2477tree
2478ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2479 HOST_WIDE_INT offset, bool by_ref)
2480{
2481 struct ipa_agg_jf_item *item;
2482 int i;
2483
2484 if (by_ref != agg->by_ref)
2485 return NULL;
2486
9771b263 2487 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2c9561b5
MJ
2488 if (item->offset == offset)
2489 {
2490 /* Currently we do not have clobber values, return NULL for them once
2491 we do. */
2492 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2493 return item->value;
2494 }
8b7773a4
MJ
2495 return NULL;
2496}
2497
4502fe8d
MJ
2498/* Remove a reference to SYMBOL from the list of references of a node given by
2499 reference description RDESC. */
2500
2501static void
2502remove_described_reference (symtab_node symbol, struct ipa_cst_ref_desc *rdesc)
2503{
2504 struct ipa_ref *to_del;
2505 struct cgraph_edge *origin;
2506
2507 origin = rdesc->cs;
2508 to_del = ipa_find_reference ((symtab_node) origin->caller, symbol,
042ae7d2 2509 origin->call_stmt, origin->lto_stmt_uid);
4502fe8d
MJ
2510 gcc_assert (to_del);
2511 ipa_remove_reference (to_del);
2512 if (dump_file)
2513 fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
2514 xstrdup (cgraph_node_name (origin->caller)),
9de04252 2515 origin->caller->symbol.order, xstrdup (symtab_node_name (symbol)));
4502fe8d
MJ
2516}
2517
2518/* If JFUNC has a reference description with refcount different from
2519 IPA_UNDESCRIBED_USE, return the reference description, otherwise return
2520 NULL. JFUNC must be a constant jump function. */
2521
2522static struct ipa_cst_ref_desc *
2523jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
2524{
2525 struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
2526 if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
2527 return rdesc;
2528 else
2529 return NULL;
2530}
2531
b258210c
MJ
2532/* Try to find a destination for indirect edge IE that corresponds to a simple
2533 call or a call of a member function pointer and where the destination is a
2534 pointer formal parameter described by jump function JFUNC. If it can be
d250540a
MJ
2535 determined, return the newly direct edge, otherwise return NULL.
2536 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
be95e2b9 2537
b258210c
MJ
2538static struct cgraph_edge *
2539try_make_edge_direct_simple_call (struct cgraph_edge *ie,
d250540a
MJ
2540 struct ipa_jump_func *jfunc,
2541 struct ipa_node_params *new_root_info)
b258210c 2542{
4502fe8d 2543 struct cgraph_edge *cs;
b258210c 2544 tree target;
042ae7d2
JH
2545 bool agg_contents = ie->indirect_info->agg_contents;
2546 bool speculative = ie->speculative;
2547 struct ipa_cst_ref_desc *rdesc;
b258210c 2548
8b7773a4 2549 if (ie->indirect_info->agg_contents)
d250540a
MJ
2550 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2551 ie->indirect_info->offset,
2552 ie->indirect_info->by_ref);
b258210c 2553 else
d250540a
MJ
2554 target = ipa_value_from_jfunc (new_root_info, jfunc);
2555 if (!target)
2556 return NULL;
4502fe8d
MJ
2557 cs = ipa_make_edge_direct_to_target (ie, target);
2558
042ae7d2
JH
2559 /* FIXME: speculative edges can be handled. */
2560 if (cs && !agg_contents && !speculative
4502fe8d
MJ
2561 && jfunc->type == IPA_JF_CONST
2562 && (rdesc = jfunc_rdesc_usable (jfunc))
2563 && --rdesc->refcount == 0)
2564 remove_described_reference ((symtab_node) cs->callee, rdesc);
2565
2566 return cs;
b258210c
MJ
2567}
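/* Hedged example (IMPL, DISPATCH and ENTRY are invented names):

     static int impl (int i) { return i + 1; }
     static int dispatch (int (*fn) (int), int i) { return fn (i); }
     int entry (int i) { return dispatch (impl, i); }

   The call fn (i) is recorded as an indirect call through DISPATCH's
   parameter 0.  When DISPATCH is inlined into ENTRY, the jump function of
   that argument at the ENTRY->DISPATCH call site is the constant &impl, so
   ipa_value_from_jfunc returns its ADDR_EXPR and the edge is turned into a
   direct call; the reference-description bookkeeping above then drops the
   now redundant reference when possible.  */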
2568
d250540a
MJ
2569/* Try to find a destination for indirect edge IE that corresponds to a virtual
2570 call based on a formal parameter which is described by jump function JFUNC
2571 and if it can be determined, make it direct and return the direct edge.
2572 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2573 are relative to. */
b258210c
MJ
2574
2575static struct cgraph_edge *
2576try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
d250540a
MJ
2577 struct ipa_jump_func *jfunc,
2578 struct ipa_node_params *new_root_info)
3e293154 2579{
c7573249 2580 tree binfo, target;
b258210c 2581
d250540a
MJ
2582 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2583
da942ca0 2584 if (!binfo)
b258210c 2585 return NULL;
3e293154 2586
da942ca0
JH
2587 if (TREE_CODE (binfo) != TREE_BINFO)
2588 {
c49bdb2e
JH
2589 binfo = gimple_extract_devirt_binfo_from_cst
2590 (binfo, ie->indirect_info->otr_type);
da942ca0
JH
2591 if (!binfo)
2592 return NULL;
2593 }
2594
d250540a 2595 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
c7573249 2596 ie->indirect_info->otr_type);
b258210c 2597 if (binfo)
c7573249
MJ
2598 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2599 binfo);
b258210c
MJ
2600 else
2601 return NULL;
2602
2603 if (target)
81fa35bd 2604 return ipa_make_edge_direct_to_target (ie, target);
b258210c
MJ
2605 else
2606 return NULL;
3e293154
MJ
2607}
2608
2609/* Update the param called notes associated with NODE when CS is being inlined,
2610 assuming NODE is (potentially indirectly) inlined into CS->callee.
2611 Moreover, if the callee is discovered to be constant, create a new cgraph
e56f5f3e 2612 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
f8e2a1ed 2613   unless NEW_EDGES is NULL.  Return true iff new edges were created. */
be95e2b9 2614
f8e2a1ed 2615static bool
e33c6cd6
MJ
2616update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2617 struct cgraph_node *node,
9771b263 2618 vec<cgraph_edge_p> *new_edges)
3e293154 2619{
9e97ff61 2620 struct ipa_edge_args *top;
b258210c 2621 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
d250540a 2622 struct ipa_node_params *new_root_info;
f8e2a1ed 2623 bool res = false;
3e293154 2624
e33c6cd6 2625 ipa_check_create_edge_args ();
9e97ff61 2626 top = IPA_EDGE_REF (cs);
d250540a
MJ
2627 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2628 ? cs->caller->global.inlined_to
2629 : cs->caller);
e33c6cd6
MJ
2630
2631 for (ie = node->indirect_calls; ie; ie = next_ie)
3e293154 2632 {
e33c6cd6 2633 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3e293154 2634 struct ipa_jump_func *jfunc;
8b7773a4 2635 int param_index;
3e293154 2636
e33c6cd6 2637 next_ie = ie->next_callee;
3e293154 2638
5f902d76
JH
2639 if (ici->param_index == -1)
2640 continue;
e33c6cd6 2641
3e293154 2642 /* We must check range due to calls with variable number of arguments: */
e33c6cd6 2643 if (ici->param_index >= ipa_get_cs_argument_count (top))
3e293154 2644 {
5ee53a06 2645 ici->param_index = -1;
3e293154
MJ
2646 continue;
2647 }
2648
8b7773a4
MJ
2649 param_index = ici->param_index;
2650 jfunc = ipa_get_ith_jump_func (top, param_index);
5ee53a06
JH
2651
2652 if (!flag_indirect_inlining)
36b72910
JH
2653 new_direct_edge = NULL;
2654 else if (ici->polymorphic)
d250540a
MJ
2655 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2656 new_root_info);
b258210c 2657 else
d250540a
MJ
2658 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2659 new_root_info);
042ae7d2
JH
2660 /* If speculation was removed, then we need to do nothing. */
2661 if (new_direct_edge && new_direct_edge != ie)
2662 {
2663 new_direct_edge->indirect_inlining_edge = 1;
2664 top = IPA_EDGE_REF (cs);
2665 res = true;
2666 }
2667 else if (new_direct_edge)
685b0d13 2668 {
b258210c 2669 new_direct_edge->indirect_inlining_edge = 1;
89faf322
RG
2670 if (new_direct_edge->call_stmt)
2671 new_direct_edge->call_stmt_cannot_inline_p
4de09b85
DC
2672 = !gimple_check_call_matching_types (
2673 new_direct_edge->call_stmt,
2674 new_direct_edge->callee->symbol.decl, false);
b258210c
MJ
2675 if (new_edges)
2676 {
9771b263 2677 new_edges->safe_push (new_direct_edge);
b258210c
MJ
2678 res = true;
2679 }
042ae7d2 2680 top = IPA_EDGE_REF (cs);
685b0d13 2681 }
36b72910
JH
2682 else if (jfunc->type == IPA_JF_PASS_THROUGH
2683 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2684 {
2685 if (ici->agg_contents
2686 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2687 ici->param_index = -1;
2688 else
2689 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2690 }
2691 else if (jfunc->type == IPA_JF_ANCESTOR)
2692 {
2693 if (ici->agg_contents
2694 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2695 ici->param_index = -1;
2696 else
2697 {
2698 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2699 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2700 }
2701 }
2702 else
2703 /* Either we can find a destination for this edge now or never. */
2704 ici->param_index = -1;
3e293154 2705 }
e33c6cd6 2706
f8e2a1ed 2707 return res;
3e293154
MJ
2708}
2709
2710/* Recursively traverse subtree of NODE (including node) made of inlined
2711 cgraph_edges when CS has been inlined and invoke
e33c6cd6 2712 update_indirect_edges_after_inlining on all nodes and
3e293154
MJ
2713 update_jump_functions_after_inlining on all non-inlined edges that lead out
2714 of this subtree. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
2715   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
2716 created. */
be95e2b9 2717
f8e2a1ed 2718static bool
3e293154
MJ
2719propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2720 struct cgraph_node *node,
9771b263 2721 vec<cgraph_edge_p> *new_edges)
3e293154
MJ
2722{
2723 struct cgraph_edge *e;
f8e2a1ed 2724 bool res;
3e293154 2725
e33c6cd6 2726 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3e293154
MJ
2727
2728 for (e = node->callees; e; e = e->next_callee)
2729 if (!e->inline_failed)
f8e2a1ed 2730 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3e293154
MJ
2731 else
2732 update_jump_functions_after_inlining (cs, e);
5ee53a06
JH
2733 for (e = node->indirect_calls; e; e = e->next_callee)
2734 update_jump_functions_after_inlining (cs, e);
f8e2a1ed
MJ
2735
2736 return res;
3e293154
MJ
2737}
2738
4502fe8d
MJ
2739/* Combine two controlled uses counts as done during inlining. */
2740
2741static int
2742combine_controlled_uses_counters (int c, int d)
2743{
2744 if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
2745 return IPA_UNDESCRIBED_USE;
2746 else
2747 return c + d - 1;
2748}
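/* Worked example: combining C == 2 controlled uses recorded in the new root
   with D == 3 uses recorded for the inlined callee yields 2 + 3 - 1 == 4;
   the subtraction reflects that the call site which has just been inlined
   no longer exists as a separate use.  If either count is
   IPA_UNDESCRIBED_USE, the result stays IPA_UNDESCRIBED_USE.  */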
2749
2750/* Propagate the number of controlled uses from CS->callee to the new root of the
2751 tree of inlined nodes. */
2752
2753static void
2754propagate_controlled_uses (struct cgraph_edge *cs)
2755{
2756 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
2757 struct cgraph_node *new_root = cs->caller->global.inlined_to
2758 ? cs->caller->global.inlined_to : cs->caller;
2759 struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
2760 struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
2761 int count, i;
2762
2763 count = MIN (ipa_get_cs_argument_count (args),
2764 ipa_get_param_count (old_root_info));
2765 for (i = 0; i < count; i++)
2766 {
2767 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2768 struct ipa_cst_ref_desc *rdesc;
2769
2770 if (jf->type == IPA_JF_PASS_THROUGH)
2771 {
2772 int src_idx, c, d;
2773 src_idx = ipa_get_jf_pass_through_formal_id (jf);
2774 c = ipa_get_controlled_uses (new_root_info, src_idx);
2775 d = ipa_get_controlled_uses (old_root_info, i);
2776
2777 gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
2778 == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
2779 c = combine_controlled_uses_counters (c, d);
2780 ipa_set_controlled_uses (new_root_info, src_idx, c);
2781 if (c == 0 && new_root_info->ipcp_orig_node)
2782 {
2783 struct cgraph_node *n;
2784 struct ipa_ref *ref;
2785 tree t = new_root_info->known_vals[src_idx];
2786
2787 if (t && TREE_CODE (t) == ADDR_EXPR
2788 && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
2789 && (n = cgraph_get_node (TREE_OPERAND (t, 0)))
2790 && (ref = ipa_find_reference ((symtab_node) new_root,
042ae7d2 2791 (symtab_node) n, NULL, 0)))
4502fe8d
MJ
2792 {
2793 if (dump_file)
2794 fprintf (dump_file, "ipa-prop: Removing cloning-created "
2795 "reference from %s/%i to %s/%i.\n",
2796 xstrdup (cgraph_node_name (new_root)),
9de04252
MJ
2797 new_root->symbol.order,
2798 xstrdup (cgraph_node_name (n)), n->symbol.order);
4502fe8d
MJ
2799 ipa_remove_reference (ref);
2800 }
2801 }
2802 }
2803 else if (jf->type == IPA_JF_CONST
2804 && (rdesc = jfunc_rdesc_usable (jf)))
2805 {
2806 int d = ipa_get_controlled_uses (old_root_info, i);
2807 int c = rdesc->refcount;
2808 rdesc->refcount = combine_controlled_uses_counters (c, d);
2809 if (rdesc->refcount == 0)
2810 {
2811 tree cst = ipa_get_jf_constant (jf);
2812 struct cgraph_node *n;
2813 gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
2814 && TREE_CODE (TREE_OPERAND (cst, 0))
2815 == FUNCTION_DECL);
2816 n = cgraph_get_node (TREE_OPERAND (cst, 0));
2817 if (n)
2818 {
2819 struct cgraph_node *clone;
2820 remove_described_reference ((symtab_node) n, rdesc);
2821
2822 clone = cs->caller;
2823 while (clone->global.inlined_to
2824 && clone != rdesc->cs->caller
2825 && IPA_NODE_REF (clone)->ipcp_orig_node)
2826 {
2827 struct ipa_ref *ref;
2828 ref = ipa_find_reference ((symtab_node) clone,
042ae7d2 2829 (symtab_node) n, NULL, 0);
4502fe8d
MJ
2830 if (ref)
2831 {
2832 if (dump_file)
2833 fprintf (dump_file, "ipa-prop: Removing "
2834 "cloning-created reference "
2835 "from %s/%i to %s/%i.\n",
2836 xstrdup (cgraph_node_name (clone)),
9de04252 2837 clone->symbol.order,
4502fe8d 2838 xstrdup (cgraph_node_name (n)),
9de04252 2839 n->symbol.order);
4502fe8d
MJ
2840 ipa_remove_reference (ref);
2841 }
2842 clone = clone->callers->caller;
2843 }
2844 }
2845 }
2846 }
2847 }
2848
2849 for (i = ipa_get_param_count (old_root_info);
2850 i < ipa_get_cs_argument_count (args);
2851 i++)
2852 {
2853 struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
2854
2855 if (jf->type == IPA_JF_CONST)
2856 {
2857 struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
2858 if (rdesc)
2859 rdesc->refcount = IPA_UNDESCRIBED_USE;
2860 }
2861 else if (jf->type == IPA_JF_PASS_THROUGH)
2862 ipa_set_controlled_uses (new_root_info,
2863 jf->value.pass_through.formal_id,
2864 IPA_UNDESCRIBED_USE);
2865 }
2866}
2867
3e293154
MJ
2868/* Update jump functions and call note functions on inlining the call site CS.
2869 CS is expected to lead to a node already cloned by
2870 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
2871   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff new edges were
2872 created. */
be95e2b9 2873
f8e2a1ed 2874bool
3e293154 2875ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
9771b263 2876 vec<cgraph_edge_p> *new_edges)
3e293154 2877{
5ee53a06 2878 bool changed;
f8e2a1ed
MJ
2879 /* Do nothing if the preparation phase has not been carried out yet
2880 (i.e. during early inlining). */
9771b263 2881 if (!ipa_node_params_vector.exists ())
f8e2a1ed
MJ
2882 return false;
2883 gcc_assert (ipa_edge_args_vector);
2884
4502fe8d 2885 propagate_controlled_uses (cs);
5ee53a06
JH
2886 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2887
5ee53a06 2888 return changed;
518dc859
RL
2889}
2890
771578a0
MJ
2891/* Frees all dynamically allocated structures that the argument info points
2892 to. */
be95e2b9 2893
518dc859 2894void
771578a0 2895ipa_free_edge_args_substructures (struct ipa_edge_args *args)
518dc859 2896{
9771b263 2897 vec_free (args->jump_functions);
771578a0 2898 memset (args, 0, sizeof (*args));
518dc859
RL
2899}
2900
771578a0 2901/* Free all ipa_edge structures. */
be95e2b9 2902
518dc859 2903void
771578a0 2904ipa_free_all_edge_args (void)
518dc859 2905{
771578a0
MJ
2906 int i;
2907 struct ipa_edge_args *args;
518dc859 2908
9771b263
DN
2909 if (!ipa_edge_args_vector)
2910 return;
2911
2912 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
771578a0
MJ
2913 ipa_free_edge_args_substructures (args);
2914
9771b263 2915 vec_free (ipa_edge_args_vector);
518dc859
RL
2916}
2917
771578a0
MJ
2918/* Frees all dynamically allocated structures that the param info points
2919 to. */
be95e2b9 2920
518dc859 2921void
771578a0 2922ipa_free_node_params_substructures (struct ipa_node_params *info)
518dc859 2923{
9771b263 2924 info->descriptors.release ();
310bc633
MJ
2925 free (info->lattices);
2926  /* Lattice values and their sources are deallocated with their allocation
2927 pool. */
9771b263 2928 info->known_vals.release ();
771578a0 2929 memset (info, 0, sizeof (*info));
518dc859
RL
2930}
2931
771578a0 2932/* Free all ipa_node_params structures. */
be95e2b9 2933
518dc859 2934void
771578a0 2935ipa_free_all_node_params (void)
518dc859 2936{
771578a0
MJ
2937 int i;
2938 struct ipa_node_params *info;
518dc859 2939
9771b263 2940 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
771578a0
MJ
2941 ipa_free_node_params_substructures (info);
2942
9771b263 2943 ipa_node_params_vector.release ();
771578a0
MJ
2944}
2945
2c9561b5
MJ
2946/* Set the aggregate replacements of NODE to be AGGVALS. */
2947
2948void
2949ipa_set_node_agg_value_chain (struct cgraph_node *node,
2950 struct ipa_agg_replacement_value *aggvals)
2951{
9771b263
DN
2952 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
2953 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
2c9561b5 2954
9771b263 2955 (*ipa_node_agg_replacements)[node->uid] = aggvals;
2c9561b5
MJ
2956}
2957
771578a0 2958/* Hook that is called by cgraph.c when an edge is removed. */
be95e2b9 2959
771578a0 2960static void
5c0466b5 2961ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
771578a0 2962{
c6f7cfc1 2963   /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
9771b263 2964 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
c6f7cfc1 2965 return;
771578a0 2966 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
518dc859
RL
2967}
2968
771578a0 2969/* Hook that is called by cgraph.c when a node is removed. */
be95e2b9 2970
771578a0 2971static void
5c0466b5 2972ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 2973{
dd6d1ad7 2974   /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
9771b263 2975 if (ipa_node_params_vector.length () > (unsigned)node->uid)
2c9561b5 2976 ipa_free_node_params_substructures (IPA_NODE_REF (node));
9771b263
DN
2977 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
2978 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
771578a0
MJ
2979}
2980
8b7773a4 2981/* Hook that is called by cgraph.c when an edge is duplicated. */
be95e2b9 2982
771578a0
MJ
2983static void
2984ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
f8e2a1ed 2985 __attribute__((unused)) void *data)
771578a0
MJ
2986{
2987 struct ipa_edge_args *old_args, *new_args;
8b7773a4 2988 unsigned int i;
771578a0
MJ
2989
2990 ipa_check_create_edge_args ();
2991
2992 old_args = IPA_EDGE_REF (src);
2993 new_args = IPA_EDGE_REF (dst);
2994
9771b263 2995 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
8b7773a4 2996
9771b263 2997 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
4502fe8d
MJ
2998 {
2999 struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
3000 struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);
3001
3002 dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);
3003
3004 if (src_jf->type == IPA_JF_CONST)
3005 {
3006 struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);
3007
3008 if (!src_rdesc)
3009 dst_jf->value.constant.rdesc = NULL;
3010 else if (src_rdesc->cs == src)
3011 {
3012 struct ipa_cst_ref_desc *dst_rdesc;
3013 gcc_checking_assert (ipa_refdesc_pool);
3014 dst_rdesc
3015 = (struct ipa_cst_ref_desc *) pool_alloc (ipa_refdesc_pool);
3016 dst_rdesc->cs = dst;
4502fe8d 3017 dst_rdesc->refcount = src_rdesc->refcount;
2fd0985c
MJ
3018 dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
3019 src_rdesc->next_duplicate = dst_rdesc;
4502fe8d
MJ
3020 dst_jf->value.constant.rdesc = dst_rdesc;
3021 }
3022 else
3023 {
3024 struct ipa_cst_ref_desc *dst_rdesc;
3025 /* This can happen during inlining, when a JFUNC can refer to a
3026 reference taken in a function up in the tree of inline clones.
3027 We need to find the duplicate that refers to our tree of
3028 inline clones. */
3029
3030 gcc_assert (dst->caller->global.inlined_to);
3031 for (dst_rdesc = src_rdesc->next_duplicate;
3032 dst_rdesc;
3033 dst_rdesc = dst_rdesc->next_duplicate)
2fd0985c
MJ
3034 {
3035 struct cgraph_node *top;
3036 top = dst_rdesc->cs->caller->global.inlined_to
3037 ? dst_rdesc->cs->caller->global.inlined_to
3038 : dst_rdesc->cs->caller;
3039 if (dst->caller->global.inlined_to == top)
3040 break;
3041 }
44a60244 3042 gcc_assert (dst_rdesc);
4502fe8d
MJ
3043 dst_jf->value.constant.rdesc = dst_rdesc;
3044 }
3045 }
3046 }
771578a0
MJ
3047}
3048
3049/* Hook that is called by cgraph.c when a node is duplicated. */
be95e2b9 3050
771578a0
MJ
3051static void
3052ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
10a5dd5d 3053 ATTRIBUTE_UNUSED void *data)
771578a0
MJ
3054{
3055 struct ipa_node_params *old_info, *new_info;
2c9561b5 3056 struct ipa_agg_replacement_value *old_av, *new_av;
771578a0
MJ
3057
3058 ipa_check_create_node_params ();
3059 old_info = IPA_NODE_REF (src);
3060 new_info = IPA_NODE_REF (dst);
771578a0 3061
9771b263 3062 new_info->descriptors = old_info->descriptors.copy ();
310bc633 3063 new_info->lattices = NULL;
771578a0 3064 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3949c4a7 3065
3949c4a7
MJ
3066 new_info->uses_analysis_done = old_info->uses_analysis_done;
3067 new_info->node_enqueued = old_info->node_enqueued;
2c9561b5
MJ
3068
3069 old_av = ipa_get_agg_replacements_for_node (src);
3070 if (!old_av)
3071 return;
3072
3073 new_av = NULL;
3074 while (old_av)
3075 {
3076 struct ipa_agg_replacement_value *v;
3077
3078 v = ggc_alloc_ipa_agg_replacement_value ();
3079 memcpy (v, old_av, sizeof (*v));
3080 v->next = new_av;
3081 new_av = v;
3082 old_av = old_av->next;
3083 }
3084 ipa_set_node_agg_value_chain (dst, new_av);
771578a0
MJ
3085}
3086
40982661
JH
3087
3088/* Analyze a function newly added to the callgraph.  */
3089
3090static void
3091ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
3092{
3093 ipa_analyze_node (node);
3094}
3095
771578a0 3096/* Register our cgraph hooks if they are not already there. */
be95e2b9 3097
518dc859 3098void
771578a0 3099ipa_register_cgraph_hooks (void)
518dc859 3100{
771578a0
MJ
3101 if (!edge_removal_hook_holder)
3102 edge_removal_hook_holder =
3103 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
3104 if (!node_removal_hook_holder)
3105 node_removal_hook_holder =
3106 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
3107 if (!edge_duplication_hook_holder)
3108 edge_duplication_hook_holder =
3109 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
3110 if (!node_duplication_hook_holder)
3111 node_duplication_hook_holder =
3112 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
40982661
JH
3113 function_insertion_hook_holder =
3114 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
771578a0 3115}
518dc859 3116
771578a0 3117/* Unregister our cgraph hooks.  */
be95e2b9 3118
771578a0
MJ
3119static void
3120ipa_unregister_cgraph_hooks (void)
3121{
3122 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
3123 edge_removal_hook_holder = NULL;
3124 cgraph_remove_node_removal_hook (node_removal_hook_holder);
3125 node_removal_hook_holder = NULL;
3126 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
3127 edge_duplication_hook_holder = NULL;
3128 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
3129 node_duplication_hook_holder = NULL;
40982661
JH
3130 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
3131 function_insertion_hook_holder = NULL;
771578a0
MJ
3132}
3133
3134/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3135 longer needed after ipa-cp. */
be95e2b9 3136
771578a0 3137void
e33c6cd6 3138ipa_free_all_structures_after_ipa_cp (void)
3e293154 3139{
5ee53a06 3140 if (!optimize)
3e293154
MJ
3141 {
3142 ipa_free_all_edge_args ();
3143 ipa_free_all_node_params ();
310bc633
MJ
3144 free_alloc_pool (ipcp_sources_pool);
3145 free_alloc_pool (ipcp_values_pool);
2c9561b5 3146 free_alloc_pool (ipcp_agg_lattice_pool);
3e293154 3147 ipa_unregister_cgraph_hooks ();
4502fe8d
MJ
3148 if (ipa_refdesc_pool)
3149 free_alloc_pool (ipa_refdesc_pool);
3e293154
MJ
3150 }
3151}
3152
3153/* Free all ipa_node_params and all ipa_edge_args structures if they are no
3154 longer needed after indirect inlining. */
be95e2b9 3155
3e293154 3156void
e33c6cd6 3157ipa_free_all_structures_after_iinln (void)
771578a0
MJ
3158{
3159 ipa_free_all_edge_args ();
3160 ipa_free_all_node_params ();
3161 ipa_unregister_cgraph_hooks ();
310bc633
MJ
3162 if (ipcp_sources_pool)
3163 free_alloc_pool (ipcp_sources_pool);
3164 if (ipcp_values_pool)
3165 free_alloc_pool (ipcp_values_pool);
2c9561b5
MJ
3166 if (ipcp_agg_lattice_pool)
3167 free_alloc_pool (ipcp_agg_lattice_pool);
4502fe8d
MJ
3168 if (ipa_refdesc_pool)
3169 free_alloc_pool (ipa_refdesc_pool);
518dc859
RL
3170}
3171
dcd416e3 3172/* Print the parameter descriptors (ipa_tree_map data structures) of
518dc859 3173   function NODE to F.  */
be95e2b9 3174
518dc859 3175void
2c9561b5 3176ipa_print_node_params (FILE *f, struct cgraph_node *node)
518dc859
RL
3177{
3178 int i, count;
3e293154 3179 struct ipa_node_params *info;
518dc859 3180
e70670cf 3181 if (!node->symbol.definition)
3e293154
MJ
3182 return;
3183 info = IPA_NODE_REF (node);
9de04252
MJ
3184 fprintf (f, " function %s/%i parameter descriptors:\n",
3185 cgraph_node_name (node), node->symbol.order);
3e293154
MJ
3186 count = ipa_get_param_count (info);
3187 for (i = 0; i < count; i++)
518dc859 3188 {
4502fe8d
MJ
3189 int c;
3190
e067bd43 3191 ipa_dump_param (f, info, i);
339f49ec
JH
3192 if (ipa_is_param_used (info, i))
3193 fprintf (f, " used");
4502fe8d
MJ
3194 c = ipa_get_controlled_uses (info, i);
3195 if (c == IPA_UNDESCRIBED_USE)
3196 fprintf (f, " undescribed_use");
3197 else
3198 fprintf (f, " controlled_uses=%i", c);
3e293154 3199 fprintf (f, "\n");
518dc859
RL
3200 }
3201}
dcd416e3 3202
ca30a539 3203/* Print ipa_tree_map data structures of all functions in the
3e293154 3204 callgraph to F. */
be95e2b9 3205
3e293154 3206void
ca30a539 3207ipa_print_all_params (FILE * f)
3e293154
MJ
3208{
3209 struct cgraph_node *node;
3210
ca30a539 3211 fprintf (f, "\nFunction parameters:\n");
65c70e6b 3212 FOR_EACH_FUNCTION (node)
ca30a539 3213 ipa_print_node_params (f, node);
3e293154 3214}
3f84bf08
MJ
3215
3216/* Return a heap allocated vector containing formal parameters of FNDECL. */
3217
9771b263 3218vec<tree>
3f84bf08
MJ
3219ipa_get_vector_of_formal_parms (tree fndecl)
3220{
9771b263 3221 vec<tree> args;
3f84bf08
MJ
3222 int count;
3223 tree parm;
3224
0e8853ee 3225 gcc_assert (!flag_wpa);
310bc633 3226 count = count_formal_params (fndecl);
9771b263 3227 args.create (count);
910ad8de 3228 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
9771b263 3229 args.quick_push (parm);
3f84bf08
MJ
3230
3231 return args;
3232}
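/* Hedged usage sketch (FNDECL stands for any function declaration): the
   returned vector is heap allocated, so callers are expected to release it,
   e.g.

     vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
     int i;
     tree p;

     FOR_EACH_VEC_ELT (parms, i, p)
       inspect_parm (p);
     parms.release ();

   The helper inspect_parm is hypothetical; only the iteration and the
   release pairing mirror how the vector API is used elsewhere in this
   file.  */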
3233
3234/* Return a heap allocated vector containing types of formal parameters of
3235 function type FNTYPE. */
3236
9771b263 3237static inline vec<tree>
3f84bf08
MJ
3238get_vector_of_formal_parm_types (tree fntype)
3239{
9771b263 3240 vec<tree> types;
3f84bf08
MJ
3241 int count = 0;
3242 tree t;
3243
3244 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
3245 count++;
3246
9771b263 3247 types.create (count);
3f84bf08 3248 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
9771b263 3249 types.quick_push (TREE_VALUE (t));
3f84bf08
MJ
3250
3251 return types;
3252}
3253
3254/* Modify the function declaration FNDECL and its type according to the plan in
3255 ADJUSTMENTS. It also sets base fields of individual adjustments structures
3256 to reflect the actual parameters being modified which are determined by the
3257 base_index field. */
3258
3259void
3260ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
3261 const char *synth_parm_prefix)
3262{
9771b263 3263 vec<tree> oparms, otypes;
3f84bf08
MJ
3264 tree orig_type, new_type = NULL;
3265 tree old_arg_types, t, new_arg_types = NULL;
3266 tree parm, *link = &DECL_ARGUMENTS (fndecl);
9771b263 3267 int i, len = adjustments.length ();
3f84bf08
MJ
3268 tree new_reversed = NULL;
3269 bool care_for_types, last_parm_void;
3270
3271 if (!synth_parm_prefix)
3272 synth_parm_prefix = "SYNTH";
3273
3274 oparms = ipa_get_vector_of_formal_parms (fndecl);
3275 orig_type = TREE_TYPE (fndecl);
3276 old_arg_types = TYPE_ARG_TYPES (orig_type);
3277
3278 /* The following test is an ugly hack; some functions simply do not have any
3279 arguments in their type. This is probably a bug, but we have to cope with it. */
3280 care_for_types = (old_arg_types != NULL_TREE);
3281 if (care_for_types)
3282 {
3283 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
3284 == void_type_node);
3285 otypes = get_vector_of_formal_parm_types (orig_type);
3286 if (last_parm_void)
9771b263 3287 gcc_assert (oparms.length () + 1 == otypes.length ());
3f84bf08 3288 else
9771b263 3289 gcc_assert (oparms.length () == otypes.length ());
3f84bf08
MJ
3290 }
3291 else
3292 {
3293 last_parm_void = false;
9771b263 3294 otypes.create (0);
3f84bf08
MJ
3295 }
3296
3297 for (i = 0; i < len; i++)
3298 {
3299 struct ipa_parm_adjustment *adj;
3300 gcc_assert (link);
3301
9771b263
DN
3302 adj = &adjustments[i];
3303 parm = oparms[adj->base_index];
3f84bf08
MJ
3304 adj->base = parm;
3305
3306 if (adj->copy_param)
3307 {
3308 if (care_for_types)
9771b263 3309 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3f84bf08
MJ
3310 new_arg_types);
3311 *link = parm;
910ad8de 3312 link = &DECL_CHAIN (parm);
3f84bf08
MJ
3313 }
3314 else if (!adj->remove_param)
3315 {
3316 tree new_parm;
3317 tree ptype;
3318
3319 if (adj->by_ref)
3320 ptype = build_pointer_type (adj->type);
3321 else
3322 ptype = adj->type;
3323
3324 if (care_for_types)
3325 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
3326
3327 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
3328 ptype);
3329 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
3330
3331 DECL_ARTIFICIAL (new_parm) = 1;
3332 DECL_ARG_TYPE (new_parm) = ptype;
3333 DECL_CONTEXT (new_parm) = fndecl;
3334 TREE_USED (new_parm) = 1;
3335 DECL_IGNORED_P (new_parm) = 1;
3336 layout_decl (new_parm, 0);
3337
3f84bf08
MJ
3338 adj->base = parm;
3339 adj->reduction = new_parm;
3340
3341 *link = new_parm;
3342
910ad8de 3343 link = &DECL_CHAIN (new_parm);
3f84bf08
MJ
3344 }
3345 }
3346
3347 *link = NULL_TREE;
3348
3349 if (care_for_types)
3350 {
3351 new_reversed = nreverse (new_arg_types);
3352 if (last_parm_void)
3353 {
3354 if (new_reversed)
3355 TREE_CHAIN (new_arg_types) = void_list_node;
3356 else
3357 new_reversed = void_list_node;
3358 }
3359 }
3360
3361 /* Copy the original type so as to preserve as much as possible from it
3362 (debug info, attribute lists etc.). The exception is a METHOD_TYPE,
3363 which must have a THIS argument; when we are asked to remove that
3364 argument we need to build a new FUNCTION_TYPE
3365 instead. */
3366 if (TREE_CODE (orig_type) != METHOD_TYPE
9771b263
DN
3367 || (adjustments[0].copy_param
3368 && adjustments[0].base_index == 0))
3f84bf08 3369 {
4eb3f32c 3370 new_type = build_distinct_type_copy (orig_type);
3f84bf08
MJ
3371 TYPE_ARG_TYPES (new_type) = new_reversed;
3372 }
3373 else
3374 {
3375 new_type
3376 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
3377 new_reversed));
3378 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
3379 DECL_VINDEX (fndecl) = NULL_TREE;
3380 }
3381
d402c33d
JH
3382 /* When the signature changes, we need to clear the builtin info. */
3383 if (DECL_BUILT_IN (fndecl))
3384 {
3385 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
3386 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
3387 }
3388
3f84bf08
MJ
3389 /* This is a new type, not a copy of an old type. Need to reassociate
3390 variants. We can handle everything except the main variant lazily. */
3391 t = TYPE_MAIN_VARIANT (orig_type);
3392 if (orig_type != t)
3393 {
3394 TYPE_MAIN_VARIANT (new_type) = t;
3395 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
3396 TYPE_NEXT_VARIANT (t) = new_type;
3397 }
3398 else
3399 {
3400 TYPE_MAIN_VARIANT (new_type) = new_type;
3401 TYPE_NEXT_VARIANT (new_type) = NULL;
3402 }
3403
3404 TREE_TYPE (fndecl) = new_type;
9b389a5e 3405 DECL_VIRTUAL_P (fndecl) = 0;
9771b263
DN
3406 otypes.release ();
3407 oparms.release ();
3f84bf08
MJ
3408}
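
The parameter-rebuilding loop above dispatches three ways on each adjustment: copy_param relinks the original PARM_DECL, remove_param drops it, and everything else gets a freshly built synthetic parameter. A standalone sketch of that dispatch, with a hypothetical simplified struct adj in place of struct ipa_parm_adjustment:

#include <stdio.h>

/* Hypothetical, simplified counterpart of struct ipa_parm_adjustment.  */
struct adj { int base_index; int copy_param; int remove_param; };

int
main (void)
{
  const char *oparms[] = { "a", "b", "c" };
  /* Keep "a", drop "b", replace "c" by a synthesized part of it.  */
  struct adj adjustments[] = {
    { 0, 1, 0 },
    { 1, 0, 1 },
    { 2, 0, 0 },
  };

  for (int i = 0; i < 3; i++)
    {
      struct adj *a = &adjustments[i];
      if (a->copy_param)
        printf ("keep   %s\n", oparms[a->base_index]);
      else if (a->remove_param)
        printf ("drop   %s\n", oparms[a->base_index]);
      else
        printf ("synth  SYNTH.%d (derived from %s)\n", i,
                oparms[a->base_index]);
    }
  return 0;
}
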
3409
3410/* Modify actual arguments of the function call STMT as indicated in
3411 ADJUSTMENTS. If this is a directly recursive call, CS must be NULL.
3412 Otherwise it must contain the corresponding call graph edge. */
3413
3414void
3415ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
3416 ipa_parm_adjustment_vec adjustments)
3417{
82338059 3418 struct cgraph_node *current_node = cgraph_get_node (current_function_decl);
9771b263
DN
3419 vec<tree> vargs;
3420 vec<tree, va_gc> **debug_args = NULL;
3f84bf08 3421 gimple new_stmt;
82338059 3422 gimple_stmt_iterator gsi, prev_gsi;
3f84bf08
MJ
3423 tree callee_decl;
3424 int i, len;
3425
9771b263
DN
3426 len = adjustments.length ();
3427 vargs.create (len);
960bfb69 3428 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
82338059 3429 ipa_remove_stmt_references ((symtab_node) current_node, stmt);
3f84bf08
MJ
3430
3431 gsi = gsi_for_stmt (stmt);
82338059
MJ
3432 prev_gsi = gsi;
3433 gsi_prev (&prev_gsi);
3f84bf08
MJ
3434 for (i = 0; i < len; i++)
3435 {
3436 struct ipa_parm_adjustment *adj;
3437
9771b263 3438 adj = &adjustments[i];
3f84bf08
MJ
3439
3440 if (adj->copy_param)
3441 {
3442 tree arg = gimple_call_arg (stmt, adj->base_index);
3443
9771b263 3444 vargs.quick_push (arg);
3f84bf08
MJ
3445 }
3446 else if (!adj->remove_param)
3447 {
fffe1e40
MJ
3448 tree expr, base, off;
3449 location_t loc;
c1ed6a01
MJ
3450 unsigned int deref_align;
3451 bool deref_base = false;
fffe1e40
MJ
3452
3453 /* When we create a new parameter out of the value of the old one, we can
3454 do the following kinds of transformations:
3455
3456 - A scalar passed by reference is converted to a scalar passed by
3457 value. (adj->by_ref is false and the type of the original
3458 actual argument is a pointer to a scalar).
3459
3460 - A part of an aggregate is passed instead of the whole aggregate.
3461 The part can be passed either by value or by reference, this is
3462 determined by value of adj->by_ref. Moreover, the code below
3463 handles both situations when the original aggregate is passed by
3464 value (its type is not a pointer) and when it is passed by
3465 reference (it is a pointer to an aggregate).
3466
3467 When the new argument is passed by reference (adj->by_ref is true)
3468 it must be a part of an aggregate and therefore we form it by
3469 simply taking the address of a reference inside the original
3470 aggregate. */
3471
3472 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
3473 base = gimple_call_arg (stmt, adj->base_index);
3a50da34
DC
3474 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
3475 : EXPR_LOCATION (base);
fffe1e40 3476
82d49829
MJ
3477 if (TREE_CODE (base) != ADDR_EXPR
3478 && POINTER_TYPE_P (TREE_TYPE (base)))
3479 off = build_int_cst (adj->alias_ptr_type,
fffe1e40 3480 adj->offset / BITS_PER_UNIT);
3f84bf08 3481 else
3f84bf08 3482 {
fffe1e40
MJ
3483 HOST_WIDE_INT base_offset;
3484 tree prev_base;
c1ed6a01 3485 bool addrof;
fffe1e40
MJ
3486
3487 if (TREE_CODE (base) == ADDR_EXPR)
c1ed6a01
MJ
3488 {
3489 base = TREE_OPERAND (base, 0);
3490 addrof = true;
3491 }
3492 else
3493 addrof = false;
fffe1e40
MJ
3494 prev_base = base;
3495 base = get_addr_base_and_unit_offset (base, &base_offset);
3496 /* Aggregate arguments can have non-invariant addresses. */
3497 if (!base)
3498 {
3499 base = build_fold_addr_expr (prev_base);
82d49829 3500 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
3501 adj->offset / BITS_PER_UNIT);
3502 }
3503 else if (TREE_CODE (base) == MEM_REF)
3504 {
c1ed6a01
MJ
3505 if (!addrof)
3506 {
3507 deref_base = true;
3508 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3509 }
82d49829 3510 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
3511 base_offset
3512 + adj->offset / BITS_PER_UNIT);
3513 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
d35936ab 3514 off);
fffe1e40
MJ
3515 base = TREE_OPERAND (base, 0);
3516 }
3517 else
3518 {
82d49829 3519 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
3520 base_offset
3521 + adj->offset / BITS_PER_UNIT);
3522 base = build_fold_addr_expr (base);
3523 }
3f84bf08 3524 }
fffe1e40 3525
3a5a825a
RG
3526 if (!adj->by_ref)
3527 {
3528 tree type = adj->type;
3529 unsigned int align;
3530 unsigned HOST_WIDE_INT misalign;
644ffefd 3531
c1ed6a01
MJ
3532 if (deref_base)
3533 {
3534 align = deref_align;
3535 misalign = 0;
3536 }
3537 else
3538 {
3539 get_pointer_alignment_1 (base, &align, &misalign);
3540 if (TYPE_ALIGN (type) > align)
3541 align = TYPE_ALIGN (type);
3542 }
27bcd47c
LC
3543 misalign += (tree_to_double_int (off)
3544 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3a5a825a
RG
3545 * BITS_PER_UNIT);
3546 misalign = misalign & (align - 1);
3547 if (misalign != 0)
3548 align = (misalign & -misalign);
3549 if (align < TYPE_ALIGN (type))
3550 type = build_aligned_type (type, align);
3551 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3552 }
3553 else
3554 {
3555 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3556 expr = build_fold_addr_expr (expr);
3557 }
fffe1e40 3558
3f84bf08
MJ
3559 expr = force_gimple_operand_gsi (&gsi, expr,
3560 adj->by_ref
3561 || is_gimple_reg_type (adj->type),
3562 NULL, true, GSI_SAME_STMT);
9771b263 3563 vargs.quick_push (expr);
3f84bf08 3564 }
ddb555ed
JJ
3565 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
3566 {
3567 unsigned int ix;
3568 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3569 gimple def_temp;
3570
3571 arg = gimple_call_arg (stmt, adj->base_index);
3572 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3573 {
3574 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3575 continue;
3576 arg = fold_convert_loc (gimple_location (stmt),
3577 TREE_TYPE (origin), arg);
3578 }
3579 if (debug_args == NULL)
3580 debug_args = decl_debug_args_insert (callee_decl);
9771b263 3581 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
ddb555ed
JJ
3582 if (ddecl == origin)
3583 {
9771b263 3584 ddecl = (**debug_args)[ix + 1];
ddb555ed
JJ
3585 break;
3586 }
3587 if (ddecl == NULL)
3588 {
3589 ddecl = make_node (DEBUG_EXPR_DECL);
3590 DECL_ARTIFICIAL (ddecl) = 1;
3591 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3592 DECL_MODE (ddecl) = DECL_MODE (origin);
3593
9771b263
DN
3594 vec_safe_push (*debug_args, origin);
3595 vec_safe_push (*debug_args, ddecl);
ddb555ed 3596 }
9771b263 3597 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
ddb555ed
JJ
3598 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3599 }
3f84bf08
MJ
3600 }
3601
3602 if (dump_file && (dump_flags & TDF_DETAILS))
3603 {
3604 fprintf (dump_file, "replacing stmt:");
3605 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3606 }
3607
3f84bf08 3608 new_stmt = gimple_build_call_vec (callee_decl, vargs);
9771b263 3609 vargs.release ();
3f84bf08
MJ
3610 if (gimple_call_lhs (stmt))
3611 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3612
3613 gimple_set_block (new_stmt, gimple_block (stmt));
3614 if (gimple_has_location (stmt))
3615 gimple_set_location (new_stmt, gimple_location (stmt));
3f84bf08 3616 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
a7a296ab 3617 gimple_call_copy_flags (new_stmt, stmt);
3f84bf08
MJ
3618
3619 if (dump_file && (dump_flags & TDF_DETAILS))
3620 {
3621 fprintf (dump_file, "with stmt:");
3622 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3623 fprintf (dump_file, "\n");
3624 }
3625 gsi_replace (&gsi, new_stmt, true);
3626 if (cs)
3627 cgraph_set_call_stmt (cs, new_stmt);
82338059
MJ
3628 do
3629 {
3630 ipa_record_stmt_references (current_node, gsi_stmt (gsi));
3631 gsi_prev (&gsi);
3632 }
3633 while ((gsi_end_p (prev_gsi) && !gsi_end_p (gsi))
3634 || (!gsi_end_p (prev_gsi) && gsi_stmt (gsi) == gsi_stmt (prev_gsi)));
3635
3f84bf08
MJ
3636 update_ssa (TODO_update_ssa);
3637 free_dominance_info (CDI_DOMINATORS);
3638}
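
The alignment bookkeeping in the by-value branch above reduces to bit arithmetic: add the access offset (in bits) to the pointer's known misalignment, keep only the bits below the starting alignment, and if anything is left the usable alignment drops to the lowest set bit. A standalone sketch of just that arithmetic, using plain integers instead of GCC's double_int and tree types (adjust_alignment is a hypothetical name):

#include <stdio.h>

/* Reduce a starting alignment according to a known misalignment plus an
   extra offset, the way the MEM_REF branch above does.  All values are
   in bits; ALIGN must be a power of two.  */
static unsigned int
adjust_alignment (unsigned int align, unsigned long long misalign,
                  unsigned long long offset_bits)
{
  misalign += offset_bits;
  misalign &= align - 1;            /* Only the low bits matter.  */
  if (misalign != 0)
    align = misalign & -misalign;   /* Lowest set bit of the misalignment.  */
  return align;
}

int
main (void)
{
  /* A 64-bit aligned base accessed 4 bytes (32 bits) in is only
     guaranteed to be 32-bit aligned.  */
  printf ("%u\n", adjust_alignment (64, 0, 32));   /* prints 32 */
  /* An offset of 16 bits on the same base: 16-bit alignment.  */
  printf ("%u\n", adjust_alignment (64, 0, 16));   /* prints 16 */
  return 0;
}
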
3639
3640/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3641
3642static bool
3643index_in_adjustments_multiple_times_p (int base_index,
3644 ipa_parm_adjustment_vec adjustments)
3645{
9771b263 3646 int i, len = adjustments.length ();
3f84bf08
MJ
3647 bool one = false;
3648
3649 for (i = 0; i < len; i++)
3650 {
3651 struct ipa_parm_adjustment *adj;
9771b263 3652 adj = &adjustments[i];
3f84bf08
MJ
3653
3654 if (adj->base_index == base_index)
3655 {
3656 if (one)
3657 return true;
3658 else
3659 one = true;
3660 }
3661 }
3662 return false;
3663}
3664
3665
3666/* Return adjustments that should have the same effect on function parameters
3667 and call arguments as if they were first changed according to adjustments in
3668 INNER and then by adjustments in OUTER. */
3669
3670ipa_parm_adjustment_vec
3671ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3672 ipa_parm_adjustment_vec outer)
3673{
9771b263
DN
3674 int i, outlen = outer.length ();
3675 int inlen = inner.length ();
3f84bf08
MJ
3676 int removals = 0;
3677 ipa_parm_adjustment_vec adjustments, tmp;
3678
9771b263 3679 tmp.create (inlen);
3f84bf08
MJ
3680 for (i = 0; i < inlen; i++)
3681 {
3682 struct ipa_parm_adjustment *n;
9771b263 3683 n = &inner[i];
3f84bf08
MJ
3684
3685 if (n->remove_param)
3686 removals++;
3687 else
9771b263 3688 tmp.quick_push (*n);
3f84bf08
MJ
3689 }
3690
9771b263 3691 adjustments.create (outlen + removals);
3f84bf08
MJ
3692 for (i = 0; i < outlen; i++)
3693 {
f32682ca 3694 struct ipa_parm_adjustment r;
9771b263
DN
3695 struct ipa_parm_adjustment *out = &outer[i];
3696 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3f84bf08 3697
f32682ca 3698 memset (&r, 0, sizeof (r));
3f84bf08
MJ
3699 gcc_assert (!in->remove_param);
3700 if (out->remove_param)
3701 {
3702 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3703 {
f32682ca 3704 r.remove_param = true;
9771b263 3705 adjustments.quick_push (r);
3f84bf08
MJ
3706 }
3707 continue;
3708 }
3709
f32682ca
DN
3710 r.base_index = in->base_index;
3711 r.type = out->type;
3f84bf08
MJ
3712
3713 /* FIXME: Create nonlocal value too. */
3714
3715 if (in->copy_param && out->copy_param)
f32682ca 3716 r.copy_param = true;
3f84bf08 3717 else if (in->copy_param)
f32682ca 3718 r.offset = out->offset;
3f84bf08 3719 else if (out->copy_param)
f32682ca 3720 r.offset = in->offset;
3f84bf08 3721 else
f32682ca 3722 r.offset = in->offset + out->offset;
9771b263 3723 adjustments.quick_push (r);
3f84bf08
MJ
3724 }
3725
3726 for (i = 0; i < inlen; i++)
3727 {
9771b263 3728 struct ipa_parm_adjustment *n = &inner[i];
3f84bf08
MJ
3729
3730 if (n->remove_param)
9771b263 3731 adjustments.quick_push (*n);
3f84bf08
MJ
3732 }
3733
9771b263 3734 tmp.release ();
3f84bf08
MJ
3735 return adjustments;
3736}
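
The offset rule is the heart of the composition above: when neither the inner nor the outer adjustment is a plain copy, the combined adjustment refers to a piece at the sum of the two offsets. A standalone check of that arithmetic with a simplified, hypothetical struct adj:

#include <stdio.h>

/* Simplified adjustment: either a plain copy or a component at OFFSET
   (in bits) of the parameter it is based on.  */
struct adj { int copy_param; long offset; };

/* Combine an inner adjustment with an outer one the way the
   non-removal case above does.  */
static struct adj
combine (struct adj in, struct adj out)
{
  struct adj r = { 0, 0 };
  if (in.copy_param && out.copy_param)
    r.copy_param = 1;
  else if (in.copy_param)
    r.offset = out.offset;
  else if (out.copy_param)
    r.offset = in.offset;
  else
    r.offset = in.offset + out.offset;
  return r;
}

int
main (void)
{
  struct adj inner = { 0, 64 };  /* take the piece at bit 64 */
  struct adj outer = { 0, 32 };  /* ... then the piece at bit 32 of that */
  struct adj r = combine (inner, outer);
  printf ("combined offset: %ld\n", r.offset);   /* prints 96 */
  return 0;
}
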
3737
3738/* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human
3739 friendly way, assuming they are meant to be applied to FNDECL. */
3740
3741void
3742ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3743 tree fndecl)
3744{
9771b263 3745 int i, len = adjustments.length ();
3f84bf08 3746 bool first = true;
9771b263 3747 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3f84bf08
MJ
3748
3749 fprintf (file, "IPA param adjustments: ");
3750 for (i = 0; i < len; i++)
3751 {
3752 struct ipa_parm_adjustment *adj;
9771b263 3753 adj = &adjustments[i];
3f84bf08
MJ
3754
3755 if (!first)
3756 fprintf (file, " ");
3757 else
3758 first = false;
3759
3760 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
9771b263 3761 print_generic_expr (file, parms[adj->base_index], 0);
3f84bf08
MJ
3762 if (adj->base)
3763 {
3764 fprintf (file, ", base: ");
3765 print_generic_expr (file, adj->base, 0);
3766 }
3767 if (adj->reduction)
3768 {
3769 fprintf (file, ", reduction: ");
3770 print_generic_expr (file, adj->reduction, 0);
3771 }
3772 if (adj->new_ssa_base)
3773 {
3774 fprintf (file, ", new_ssa_base: ");
3775 print_generic_expr (file, adj->new_ssa_base, 0);
3776 }
3777
3778 if (adj->copy_param)
3779 fprintf (file, ", copy_param");
3780 else if (adj->remove_param)
3781 fprintf (file, ", remove_param");
3782 else
3783 fprintf (file, ", offset %li", (long) adj->offset);
3784 if (adj->by_ref)
3785 fprintf (file, ", by_ref");
3786 print_node_brief (file, ", type: ", adj->type, 0);
3787 fprintf (file, "\n");
3788 }
9771b263 3789 parms.release ();
3f84bf08
MJ
3790}
3791
2c9561b5
MJ
3792/* Dump the AV linked list. */
3793
3794void
3795ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3796{
3797 bool comma = false;
3798 fprintf (f, " Aggregate replacements:");
3799 for (; av; av = av->next)
3800 {
3801 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3802 av->index, av->offset);
3803 print_generic_expr (f, av->value, 0);
3804 comma = true;
3805 }
3806 fprintf (f, "\n");
3807}
3808
fb3f88cc
JH
3809/* Stream out jump function JUMP_FUNC to OB. */
3810
3811static void
3812ipa_write_jump_function (struct output_block *ob,
3813 struct ipa_jump_func *jump_func)
3814{
8b7773a4
MJ
3815 struct ipa_agg_jf_item *item;
3816 struct bitpack_d bp;
3817 int i, count;
fb3f88cc 3818
8b7773a4 3819 streamer_write_uhwi (ob, jump_func->type);
fb3f88cc
JH
3820 switch (jump_func->type)
3821 {
3822 case IPA_JF_UNKNOWN:
3823 break;
b258210c 3824 case IPA_JF_KNOWN_TYPE:
c7573249
MJ
3825 streamer_write_uhwi (ob, jump_func->value.known_type.offset);
3826 stream_write_tree (ob, jump_func->value.known_type.base_type, true);
3827 stream_write_tree (ob, jump_func->value.known_type.component_type, true);
b258210c 3828 break;
fb3f88cc 3829 case IPA_JF_CONST:
5368224f 3830 gcc_assert (
4502fe8d
MJ
3831 EXPR_LOCATION (jump_func->value.constant.value) == UNKNOWN_LOCATION);
3832 stream_write_tree (ob, jump_func->value.constant.value, true);
fb3f88cc
JH
3833 break;
3834 case IPA_JF_PASS_THROUGH:
412288f1 3835 streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
4a53743e
MJ
3836 if (jump_func->value.pass_through.operation == NOP_EXPR)
3837 {
3838 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3839 bp = bitpack_create (ob->main_stream);
3840 bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
b8f6e610 3841 bp_pack_value (&bp, jump_func->value.pass_through.type_preserved, 1);
4a53743e
MJ
3842 streamer_write_bitpack (&bp);
3843 }
3844 else
3845 {
3846 stream_write_tree (ob, jump_func->value.pass_through.operand, true);
3847 streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
3848 }
fb3f88cc
JH
3849 break;
3850 case IPA_JF_ANCESTOR:
412288f1 3851 streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
b9393656 3852 stream_write_tree (ob, jump_func->value.ancestor.type, true);
412288f1 3853 streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
8b7773a4
MJ
3854 bp = bitpack_create (ob->main_stream);
3855 bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
b8f6e610 3856 bp_pack_value (&bp, jump_func->value.ancestor.type_preserved, 1);
8b7773a4 3857 streamer_write_bitpack (&bp);
fb3f88cc 3858 break;
8b7773a4
MJ
3859 }
3860
9771b263 3861 count = vec_safe_length (jump_func->agg.items);
8b7773a4
MJ
3862 streamer_write_uhwi (ob, count);
3863 if (count)
3864 {
3865 bp = bitpack_create (ob->main_stream);
3866 bp_pack_value (&bp, jump_func->agg.by_ref, 1);
3867 streamer_write_bitpack (&bp);
3868 }
3869
9771b263 3870 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
8b7773a4
MJ
3871 {
3872 streamer_write_uhwi (ob, item->offset);
3873 stream_write_tree (ob, item->value, true);
fb3f88cc
JH
3874 }
3875}
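
The streaming routines here repeatedly pack one-bit flags (agg_preserved, type_preserved, by_ref) into a single word with bp_pack_value before emitting it. The following standalone sketch models that idea with hypothetical bp_pack/bp_unpack helpers; it only illustrates the append-bits-at-a-position scheme and is not GCC's bitpack implementation:

#include <stdio.h>

/* Minimal stand-in for a bit-packing context: bits are appended at POS.  */
struct bitpack { unsigned long long word; unsigned pos; };

static void
bp_pack (struct bitpack *bp, unsigned long long value, unsigned nbits)
{
  bp->word |= (value & ((1ULL << nbits) - 1)) << bp->pos;
  bp->pos += nbits;
}

static unsigned long long
bp_unpack (struct bitpack *bp, unsigned nbits)
{
  unsigned long long v = (bp->word >> bp->pos) & ((1ULL << nbits) - 1);
  bp->pos += nbits;
  return v;
}

int
main (void)
{
  struct bitpack w = { 0, 0 };
  bp_pack (&w, 1, 1);   /* agg_preserved */
  bp_pack (&w, 0, 1);   /* type_preserved */

  struct bitpack r = { w.word, 0 };
  printf ("agg_preserved=%llu type_preserved=%llu\n",
          bp_unpack (&r, 1), bp_unpack (&r, 1));
  return 0;
}
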
3876
3877/* Read in jump function JUMP_FUNC from IB. */
3878
3879static void
3880ipa_read_jump_function (struct lto_input_block *ib,
3881 struct ipa_jump_func *jump_func,
4502fe8d 3882 struct cgraph_edge *cs,
fb3f88cc
JH
3883 struct data_in *data_in)
3884{
4a53743e
MJ
3885 enum jump_func_type jftype;
3886 enum tree_code operation;
8b7773a4 3887 int i, count;
fb3f88cc 3888
4a53743e
MJ
3889 jftype = (enum jump_func_type) streamer_read_uhwi (ib);
3890 switch (jftype)
fb3f88cc
JH
3891 {
3892 case IPA_JF_UNKNOWN:
4a53743e 3893 jump_func->type = IPA_JF_UNKNOWN;
fb3f88cc 3894 break;
b258210c 3895 case IPA_JF_KNOWN_TYPE:
4a53743e
MJ
3896 {
3897 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
3898 tree base_type = stream_read_tree (ib, data_in);
3899 tree component_type = stream_read_tree (ib, data_in);
3900
3901 ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
3902 break;
3903 }
fb3f88cc 3904 case IPA_JF_CONST:
4502fe8d 3905 ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
fb3f88cc
JH
3906 break;
3907 case IPA_JF_PASS_THROUGH:
4a53743e
MJ
3908 operation = (enum tree_code) streamer_read_uhwi (ib);
3909 if (operation == NOP_EXPR)
3910 {
3911 int formal_id = streamer_read_uhwi (ib);
3912 struct bitpack_d bp = streamer_read_bitpack (ib);
3913 bool agg_preserved = bp_unpack_value (&bp, 1);
b8f6e610
MJ
3914 bool type_preserved = bp_unpack_value (&bp, 1);
3915 ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved,
3916 type_preserved);
4a53743e
MJ
3917 }
3918 else
3919 {
3920 tree operand = stream_read_tree (ib, data_in);
3921 int formal_id = streamer_read_uhwi (ib);
3922 ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
3923 operation);
3924 }
fb3f88cc
JH
3925 break;
3926 case IPA_JF_ANCESTOR:
4a53743e
MJ
3927 {
3928 HOST_WIDE_INT offset = streamer_read_uhwi (ib);
3929 tree type = stream_read_tree (ib, data_in);
3930 int formal_id = streamer_read_uhwi (ib);
3931 struct bitpack_d bp = streamer_read_bitpack (ib);
3932 bool agg_preserved = bp_unpack_value (&bp, 1);
b8f6e610 3933 bool type_preserved = bp_unpack_value (&bp, 1);
4a53743e 3934
b8f6e610
MJ
3935 ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved,
3936 type_preserved);
4a53743e
MJ
3937 break;
3938 }
8b7773a4
MJ
3939 }
3940
3941 count = streamer_read_uhwi (ib);
9771b263 3942 vec_alloc (jump_func->agg.items, count);
8b7773a4
MJ
3943 if (count)
3944 {
4a53743e 3945 struct bitpack_d bp = streamer_read_bitpack (ib);
8b7773a4
MJ
3946 jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
3947 }
3948 for (i = 0; i < count; i++)
3949 {
f32682ca
DN
3950 struct ipa_agg_jf_item item;
3951 item.offset = streamer_read_uhwi (ib);
3952 item.value = stream_read_tree (ib, data_in);
9771b263 3953 jump_func->agg.items->quick_push (item);
fb3f88cc
JH
3954 }
3955}
3956
e33c6cd6
MJ
3957/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
3958 relevant to indirect inlining to OB. */
661e7330
MJ
3959
3960static void
e33c6cd6
MJ
3961ipa_write_indirect_edge_info (struct output_block *ob,
3962 struct cgraph_edge *cs)
661e7330 3963{
e33c6cd6 3964 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 3965 struct bitpack_d bp;
e33c6cd6 3966
412288f1 3967 streamer_write_hwi (ob, ii->param_index);
8b7773a4 3968 streamer_write_hwi (ob, ii->offset);
2465dcc2
RG
3969 bp = bitpack_create (ob->main_stream);
3970 bp_pack_value (&bp, ii->polymorphic, 1);
8b7773a4 3971 bp_pack_value (&bp, ii->agg_contents, 1);
c13bc3d9 3972 bp_pack_value (&bp, ii->member_ptr, 1);
8b7773a4 3973 bp_pack_value (&bp, ii->by_ref, 1);
412288f1 3974 streamer_write_bitpack (&bp);
b258210c
MJ
3975
3976 if (ii->polymorphic)
3977 {
412288f1 3978 streamer_write_hwi (ob, ii->otr_token);
b9393656 3979 stream_write_tree (ob, ii->otr_type, true);
b258210c 3980 }
661e7330
MJ
3981}
3982
e33c6cd6
MJ
3983/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
3984 relevant to indirect inlining from IB. */
661e7330
MJ
3985
3986static void
e33c6cd6
MJ
3987ipa_read_indirect_edge_info (struct lto_input_block *ib,
3988 struct data_in *data_in ATTRIBUTE_UNUSED,
3989 struct cgraph_edge *cs)
661e7330 3990{
e33c6cd6 3991 struct cgraph_indirect_call_info *ii = cs->indirect_info;
2465dcc2 3992 struct bitpack_d bp;
661e7330 3993
412288f1 3994 ii->param_index = (int) streamer_read_hwi (ib);
8b7773a4 3995 ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
412288f1 3996 bp = streamer_read_bitpack (ib);
2465dcc2 3997 ii->polymorphic = bp_unpack_value (&bp, 1);
8b7773a4 3998 ii->agg_contents = bp_unpack_value (&bp, 1);
c13bc3d9 3999 ii->member_ptr = bp_unpack_value (&bp, 1);
8b7773a4 4000 ii->by_ref = bp_unpack_value (&bp, 1);
b258210c
MJ
4001 if (ii->polymorphic)
4002 {
412288f1 4003 ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
b9393656 4004 ii->otr_type = stream_read_tree (ib, data_in);
b258210c 4005 }
661e7330
MJ
4006}
4007
fb3f88cc
JH
4008/* Stream out NODE info to OB. */
4009
4010static void
4011ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
4012{
4013 int node_ref;
7380e6ef 4014 lto_symtab_encoder_t encoder;
fb3f88cc
JH
4015 struct ipa_node_params *info = IPA_NODE_REF (node);
4016 int j;
4017 struct cgraph_edge *e;
2465dcc2 4018 struct bitpack_d bp;
fb3f88cc 4019
7380e6ef
JH
4020 encoder = ob->decl_state->symtab_node_encoder;
4021 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
412288f1 4022 streamer_write_uhwi (ob, node_ref);
fb3f88cc 4023
0e8853ee
JH
4024 streamer_write_uhwi (ob, ipa_get_param_count (info));
4025 for (j = 0; j < ipa_get_param_count (info); j++)
4026 streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
2465dcc2 4027 bp = bitpack_create (ob->main_stream);
062c604f 4028 gcc_assert (info->uses_analysis_done
661e7330 4029 || ipa_get_param_count (info) == 0);
fb3f88cc
JH
4030 gcc_assert (!info->node_enqueued);
4031 gcc_assert (!info->ipcp_orig_node);
4032 for (j = 0; j < ipa_get_param_count (info); j++)
310bc633 4033 bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
412288f1 4034 streamer_write_bitpack (&bp);
4502fe8d
MJ
4035 for (j = 0; j < ipa_get_param_count (info); j++)
4036 streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
fb3f88cc
JH
4037 for (e = node->callees; e; e = e->next_callee)
4038 {
4039 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4040
412288f1 4041 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
fb3f88cc
JH
4042 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4043 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4044 }
e33c6cd6 4045 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe
JH
4046 {
4047 struct ipa_edge_args *args = IPA_EDGE_REF (e);
4048
412288f1 4049 streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
c8246dbe
JH
4050 for (j = 0; j < ipa_get_cs_argument_count (args); j++)
4051 ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
4052 ipa_write_indirect_edge_info (ob, e);
4053 }
fb3f88cc
JH
4054}
4055
61502ca8 4056/* Stream in NODE info from IB. */
fb3f88cc
JH
4057
4058static void
4059ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
4060 struct data_in *data_in)
4061{
4062 struct ipa_node_params *info = IPA_NODE_REF (node);
4063 int k;
4064 struct cgraph_edge *e;
2465dcc2 4065 struct bitpack_d bp;
fb3f88cc 4066
0e8853ee 4067 ipa_alloc_node_params (node, streamer_read_uhwi (ib));
fb3f88cc 4068
0e8853ee
JH
4069 for (k = 0; k < ipa_get_param_count (info); k++)
4070 info->descriptors[k].move_cost = streamer_read_uhwi (ib);
4071
412288f1 4072 bp = streamer_read_bitpack (ib);
fb3f88cc 4073 if (ipa_get_param_count (info) != 0)
062c604f 4074 info->uses_analysis_done = true;
fb3f88cc
JH
4075 info->node_enqueued = false;
4076 for (k = 0; k < ipa_get_param_count (info); k++)
310bc633 4077 ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
1b14621a
MJ
4078 for (k = 0; k < ipa_get_param_count (info); k++)
4079 ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
fb3f88cc
JH
4080 for (e = node->callees; e; e = e->next_callee)
4081 {
4082 struct ipa_edge_args *args = IPA_EDGE_REF (e);
412288f1 4083 int count = streamer_read_uhwi (ib);
fb3f88cc 4084
fb3f88cc
JH
4085 if (!count)
4086 continue;
9771b263 4087 vec_safe_grow_cleared (args->jump_functions, count);
fb3f88cc 4088
fb3f88cc 4089 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4502fe8d
MJ
4090 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
4091 data_in);
fb3f88cc 4092 }
e33c6cd6 4093 for (e = node->indirect_calls; e; e = e->next_callee)
c8246dbe
JH
4094 {
4095 struct ipa_edge_args *args = IPA_EDGE_REF (e);
412288f1 4096 int count = streamer_read_uhwi (ib);
c8246dbe 4097
c8246dbe
JH
4098 if (count)
4099 {
9771b263 4100 vec_safe_grow_cleared (args->jump_functions, count);
c8246dbe 4101 for (k = 0; k < ipa_get_cs_argument_count (args); k++)
4502fe8d 4102 ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
606d9a09 4103 data_in);
c8246dbe
JH
4104 }
4105 ipa_read_indirect_edge_info (ib, data_in, e);
4106 }
fb3f88cc
JH
4107}
4108
4109/* Write jump functions for nodes in SET. */
4110
4111void
f27c1867 4112ipa_prop_write_jump_functions (void)
fb3f88cc
JH
4113{
4114 struct cgraph_node *node;
93536c97 4115 struct output_block *ob;
fb3f88cc 4116 unsigned int count = 0;
f27c1867
JH
4117 lto_symtab_encoder_iterator lsei;
4118 lto_symtab_encoder_t encoder;
4119
fb3f88cc 4120
9771b263 4121 if (!ipa_node_params_vector.exists ())
93536c97 4122 return;
fb3f88cc 4123
93536c97 4124 ob = create_output_block (LTO_section_jump_functions);
f27c1867 4125 encoder = ob->decl_state->symtab_node_encoder;
93536c97 4126 ob->cgraph_node = NULL;
f27c1867
JH
4127 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4128 lsei_next_function_in_partition (&lsei))
fb3f88cc 4129 {
f27c1867 4130 node = lsei_cgraph_node (lsei);
c47d0034
JH
4131 if (cgraph_function_with_gimple_body_p (node)
4132 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
4133 count++;
4134 }
4135
412288f1 4136 streamer_write_uhwi (ob, count);
fb3f88cc
JH
4137
4138 /* Process all of the functions. */
f27c1867
JH
4139 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4140 lsei_next_function_in_partition (&lsei))
fb3f88cc 4141 {
f27c1867 4142 node = lsei_cgraph_node (lsei);
c47d0034
JH
4143 if (cgraph_function_with_gimple_body_p (node)
4144 && IPA_NODE_REF (node) != NULL)
fb3f88cc
JH
4145 ipa_write_node_info (ob, node);
4146 }
412288f1 4147 streamer_write_char_stream (ob->main_stream, 0);
fb3f88cc
JH
4148 produce_asm (ob, NULL);
4149 destroy_output_block (ob);
4150}
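
The writer above walks the partition twice with the same predicate: the first pass only counts qualifying nodes so the count can be emitted before the records, and the second pass emits the records themselves. A standalone sketch of that count-then-emit shape over a plain array (struct node and write_section are hypothetical names):

#include <stdio.h>

struct node { const char *name; int has_ipa_info; };

/* Emit COUNT first, then one record per qualifying node, so a reader
   can loop exactly COUNT times, the same shape as the section writer
   above.  */
static void
write_section (FILE *out, struct node *nodes, int n)
{
  int count = 0;
  for (int i = 0; i < n; i++)
    if (nodes[i].has_ipa_info)
      count++;
  fprintf (out, "%d\n", count);

  for (int i = 0; i < n; i++)
    if (nodes[i].has_ipa_info)
      fprintf (out, "%s\n", nodes[i].name);
}

int
main (void)
{
  struct node nodes[] = { { "f", 1 }, { "g", 0 }, { "h", 1 } };
  write_section (stdout, nodes, 3);
  return 0;
}
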
4151
4152/* Read section in file FILE_DATA of length LEN with data DATA. */
4153
4154static void
4155ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
4156 size_t len)
4157{
4158 const struct lto_function_header *header =
4159 (const struct lto_function_header *) data;
4ad9a9de
EB
4160 const int cfg_offset = sizeof (struct lto_function_header);
4161 const int main_offset = cfg_offset + header->cfg_size;
4162 const int string_offset = main_offset + header->main_size;
fb3f88cc
JH
4163 struct data_in *data_in;
4164 struct lto_input_block ib_main;
4165 unsigned int i;
4166 unsigned int count;
4167
4168 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4169 header->main_size);
4170
4171 data_in =
4172 lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 4173 header->string_size, vNULL);
412288f1 4174 count = streamer_read_uhwi (&ib_main);
fb3f88cc
JH
4175
4176 for (i = 0; i < count; i++)
4177 {
4178 unsigned int index;
4179 struct cgraph_node *node;
7380e6ef 4180 lto_symtab_encoder_t encoder;
fb3f88cc 4181
412288f1 4182 index = streamer_read_uhwi (&ib_main);
7380e6ef
JH
4183 encoder = file_data->symtab_node_encoder;
4184 node = cgraph (lto_symtab_encoder_deref (encoder, index));
e70670cf 4185 gcc_assert (node->symbol.definition);
fb3f88cc
JH
4186 ipa_read_node_info (&ib_main, node, data_in);
4187 }
4188 lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
4189 len);
4190 lto_data_in_delete (data_in);
4191}
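
The reader above finds the sub-streams of a section purely by offset arithmetic on the header: the CFG stream starts right after the header, the main stream cfg_size bytes later, and the string table main_size bytes after that. A standalone sketch of the same arithmetic over a byte buffer, with a hypothetical two-field header in place of lto_function_header:

#include <stdio.h>
#include <string.h>

/* Hypothetical, simplified section header holding just the two sizes
   the offset computation above actually uses.  */
struct section_header { int cfg_size; int main_size; };

int
main (void)
{
  /* Build a fake section: header, then "CFG", then "MAIN", then "STR".  */
  char buf[64];
  struct section_header h = { 3, 4 };
  memcpy (buf, &h, sizeof (h));
  memcpy (buf + sizeof (h), "CFG", 3);
  memcpy (buf + sizeof (h) + 3, "MAIN", 4);
  memcpy (buf + sizeof (h) + 7, "STR", 4);

  struct section_header header;
  memcpy (&header, buf, sizeof (header));
  const int cfg_offset = sizeof (struct section_header);
  const int main_offset = cfg_offset + header.cfg_size;
  const int string_offset = main_offset + header.main_size;

  printf ("cfg at %d, main at %d ('%.4s'), strings at %d ('%s')\n",
          cfg_offset, main_offset, buf + main_offset, string_offset,
          buf + string_offset);
  return 0;
}
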
4192
4193/* Read ipcp jump functions. */
4194
4195void
4196ipa_prop_read_jump_functions (void)
4197{
4198 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4199 struct lto_file_decl_data *file_data;
4200 unsigned int j = 0;
4201
4202 ipa_check_create_node_params ();
4203 ipa_check_create_edge_args ();
4204 ipa_register_cgraph_hooks ();
4205
4206 while ((file_data = file_data_vec[j++]))
4207 {
4208 size_t len;
4209 const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);
4210
4211 if (data)
4212 ipa_prop_read_section (file_data, data, len);
4213 }
4214}
4215
b8698a0f 4216/* After merging units, we can get a mismatch in argument counts.
61502ca8 4217 Decl merging might also have rendered parameter lists obsolete.
fb3f88cc
JH
4218 Also compute called_with_variable_arg info. */
4219
4220void
4221ipa_update_after_lto_read (void)
4222{
05d3aa37
MJ
4223 ipa_check_create_node_params ();
4224 ipa_check_create_edge_args ();
fb3f88cc 4225}
2c9561b5
MJ
4226
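/* Stream out the aggregate value replacement chain of NODE to OB. */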
4227void
4228write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
4229{
4230 int node_ref;
4231 unsigned int count = 0;
4232 lto_symtab_encoder_t encoder;
4233 struct ipa_agg_replacement_value *aggvals, *av;
4234
4235 aggvals = ipa_get_agg_replacements_for_node (node);
4236 encoder = ob->decl_state->symtab_node_encoder;
4237 node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
4238 streamer_write_uhwi (ob, node_ref);
4239
4240 for (av = aggvals; av; av = av->next)
4241 count++;
4242 streamer_write_uhwi (ob, count);
4243
4244 for (av = aggvals; av; av = av->next)
4245 {
7b920a9a
MJ
4246 struct bitpack_d bp;
4247
2c9561b5
MJ
4248 streamer_write_uhwi (ob, av->offset);
4249 streamer_write_uhwi (ob, av->index);
4250 stream_write_tree (ob, av->value, true);
7b920a9a
MJ
4251
4252 bp = bitpack_create (ob->main_stream);
4253 bp_pack_value (&bp, av->by_ref, 1);
4254 streamer_write_bitpack (&bp);
2c9561b5
MJ
4255 }
4256}
4257
4258/* Stream in the aggregate value replacement chain for NODE from IB. */
4259
4260static void
4261read_agg_replacement_chain (struct lto_input_block *ib,
4262 struct cgraph_node *node,
4263 struct data_in *data_in)
4264{
4265 struct ipa_agg_replacement_value *aggvals = NULL;
4266 unsigned int count, i;
4267
4268 count = streamer_read_uhwi (ib);
4269 for (i = 0; i < count; i++)
4270 {
4271 struct ipa_agg_replacement_value *av;
7b920a9a 4272 struct bitpack_d bp;
2c9561b5
MJ
4273
4274 av = ggc_alloc_ipa_agg_replacement_value ();
4275 av->offset = streamer_read_uhwi (ib);
4276 av->index = streamer_read_uhwi (ib);
4277 av->value = stream_read_tree (ib, data_in);
7b920a9a
MJ
4278 bp = streamer_read_bitpack (ib);
4279 av->by_ref = bp_unpack_value (&bp, 1);
2c9561b5
MJ
4280 av->next = aggvals;
4281 aggvals = av;
4282 }
4283 ipa_set_node_agg_value_chain (node, aggvals);
4284}
4285
4286/* Write all aggregate replacements for nodes in the set. */
4287
4288void
4289ipa_prop_write_all_agg_replacement (void)
4290{
4291 struct cgraph_node *node;
4292 struct output_block *ob;
4293 unsigned int count = 0;
4294 lto_symtab_encoder_iterator lsei;
4295 lto_symtab_encoder_t encoder;
4296
4297 if (!ipa_node_agg_replacements)
4298 return;
4299
4300 ob = create_output_block (LTO_section_ipcp_transform);
4301 encoder = ob->decl_state->symtab_node_encoder;
4302 ob->cgraph_node = NULL;
4303 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4304 lsei_next_function_in_partition (&lsei))
4305 {
4306 node = lsei_cgraph_node (lsei);
4307 if (cgraph_function_with_gimple_body_p (node)
4308 && ipa_get_agg_replacements_for_node (node) != NULL)
4309 count++;
4310 }
4311
4312 streamer_write_uhwi (ob, count);
4313
4314 for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
4315 lsei_next_function_in_partition (&lsei))
4316 {
4317 node = lsei_cgraph_node (lsei);
4318 if (cgraph_function_with_gimple_body_p (node)
4319 && ipa_get_agg_replacements_for_node (node) != NULL)
4320 write_agg_replacement_chain (ob, node);
4321 }
4322 streamer_write_char_stream (ob->main_stream, 0);
4323 produce_asm (ob, NULL);
4324 destroy_output_block (ob);
4325}
4326
4327/* Read replacements section in file FILE_DATA of length LEN with data
4328 DATA. */
4329
4330static void
4331read_replacements_section (struct lto_file_decl_data *file_data,
4332 const char *data,
4333 size_t len)
4334{
4335 const struct lto_function_header *header =
4336 (const struct lto_function_header *) data;
4337 const int cfg_offset = sizeof (struct lto_function_header);
4338 const int main_offset = cfg_offset + header->cfg_size;
4339 const int string_offset = main_offset + header->main_size;
4340 struct data_in *data_in;
4341 struct lto_input_block ib_main;
4342 unsigned int i;
4343 unsigned int count;
4344
4345 LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
4346 header->main_size);
4347
4348 data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
6e1aa848 4349 header->string_size, vNULL);
2c9561b5
MJ
4350 count = streamer_read_uhwi (&ib_main);
4351
4352 for (i = 0; i < count; i++)
4353 {
4354 unsigned int index;
4355 struct cgraph_node *node;
4356 lto_symtab_encoder_t encoder;
4357
4358 index = streamer_read_uhwi (&ib_main);
4359 encoder = file_data->symtab_node_encoder;
4360 node = cgraph (lto_symtab_encoder_deref (encoder, index));
e70670cf 4361 gcc_assert (node->symbol.definition);
2c9561b5
MJ
4362 read_agg_replacement_chain (&ib_main, node, data_in);
4363 }
4364 lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
4365 len);
4366 lto_data_in_delete (data_in);
4367}
4368
4369/* Read IPA-CP aggregate replacements. */
4370
4371void
4372ipa_prop_read_all_agg_replacement (void)
4373{
4374 struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
4375 struct lto_file_decl_data *file_data;
4376 unsigned int j = 0;
4377
4378 while ((file_data = file_data_vec[j++]))
4379 {
4380 size_t len;
4381 const char *data = lto_get_section_data (file_data,
4382 LTO_section_ipcp_transform,
4383 NULL, &len);
4384 if (data)
4385 read_replacements_section (file_data, data, len);
4386 }
4387}
4388
4389/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
4390 NODE. */
4391
4392static void
4393adjust_agg_replacement_values (struct cgraph_node *node,
4394 struct ipa_agg_replacement_value *aggval)
4395{
4396 struct ipa_agg_replacement_value *v;
4397 int i, c = 0, d = 0, *adj;
4398
4399 if (!node->clone.combined_args_to_skip)
4400 return;
4401
4402 for (v = aggval; v; v = v->next)
4403 {
4404 gcc_assert (v->index >= 0);
4405 if (c < v->index)
4406 c = v->index;
4407 }
4408 c++;
4409
4410 adj = XALLOCAVEC (int, c);
4411 for (i = 0; i < c; i++)
4412 if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
4413 {
4414 adj[i] = -1;
4415 d++;
4416 }
4417 else
4418 adj[i] = i - d;
4419
4420 for (v = aggval; v; v = v->next)
4421 v->index = adj[v->index];
4422}
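
The remapping above boils down to building a translation table: a surviving parameter index is shifted down by the number of skipped parameters before it, and a skipped index maps to -1. A standalone version of just that table construction, with a plain array of flags standing in for the combined_args_to_skip bitmap:

#include <stdio.h>

int
main (void)
{
  /* 1 means the parameter was removed in the clone (stands in for
     node->clone.combined_args_to_skip).  */
  int skip[] = { 0, 1, 0, 1, 0 };
  int c = 5, d = 0, adj[5];

  for (int i = 0; i < c; i++)
    if (skip[i])
      {
        adj[i] = -1;
        d++;
      }
    else
      adj[i] = i - d;

  for (int i = 0; i < c; i++)
    printf ("old index %d -> new index %d\n", i, adj[i]);
  /* Prints 0->0, 1->-1, 2->1, 3->-1, 4->2.  */
  return 0;
}
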
4423
4424
4425/* Function body transformation phase. */
4426
4427unsigned int
4428ipcp_transform_function (struct cgraph_node *node)
4429{
6e1aa848 4430 vec<ipa_param_descriptor_t> descriptors = vNULL;
2c9561b5
MJ
4431 struct param_analysis_info *parms_ainfo;
4432 struct ipa_agg_replacement_value *aggval;
4433 gimple_stmt_iterator gsi;
4434 basic_block bb;
4435 int param_count;
4436 bool cfg_changed = false, something_changed = false;
4437
4438 gcc_checking_assert (cfun);
4439 gcc_checking_assert (current_function_decl);
4440
4441 if (dump_file)
4442 fprintf (dump_file, "Modification phase of node %s/%i\n",
9de04252 4443 cgraph_node_name (node), node->symbol.order);
2c9561b5
MJ
4444
4445 aggval = ipa_get_agg_replacements_for_node (node);
4446 if (!aggval)
4447 return 0;
4448 param_count = count_formal_params (node->symbol.decl);
4449 if (param_count == 0)
4450 return 0;
4451 adjust_agg_replacement_values (node, aggval);
4452 if (dump_file)
4453 ipa_dump_agg_replacement_values (dump_file, aggval);
4454 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
4455 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
9771b263 4456 descriptors.safe_grow_cleared (param_count);
2c9561b5
MJ
4457 ipa_populate_param_decls (node, descriptors);
4458
4459 FOR_EACH_BB (bb)
4460 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
4461 {
4462 struct ipa_agg_replacement_value *v;
4463 gimple stmt = gsi_stmt (gsi);
4464 tree rhs, val, t;
4465 HOST_WIDE_INT offset;
4466 int index;
4467 bool by_ref, vce;
4468
4469 if (!gimple_assign_load_p (stmt))
4470 continue;
4471 rhs = gimple_assign_rhs1 (stmt);
4472 if (!is_gimple_reg_type (TREE_TYPE (rhs)))
4473 continue;
4474
4475 vce = false;
4476 t = rhs;
4477 while (handled_component_p (t))
4478 {
4479 /* V_C_E can do things like convert an array of integers to one
4480 bigger integer and similar things we do not handle below. */
4481 if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
4482 {
4483 vce = true;
4484 break;
4485 }
4486 t = TREE_OPERAND (t, 0);
4487 }
4488 if (vce)
4489 continue;
4490
4491 if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
4492 rhs, &index, &offset, &by_ref))
4493 continue;
4494 for (v = aggval; v; v = v->next)
4495 if (v->index == index
4496 && v->offset == offset)
4497 break;
7b920a9a 4498 if (!v || v->by_ref != by_ref)
2c9561b5
MJ
4499 continue;
4500
4501 gcc_checking_assert (is_gimple_ip_invariant (v->value));
4502 if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
4503 {
4504 if (fold_convertible_p (TREE_TYPE (rhs), v->value))
4505 val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
4506 else if (TYPE_SIZE (TREE_TYPE (rhs))
4507 == TYPE_SIZE (TREE_TYPE (v->value)))
4508 val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
4509 else
4510 {
4511 if (dump_file)
4512 {
4513 fprintf (dump_file, " const ");
4514 print_generic_expr (dump_file, v->value, 0);
4515 fprintf (dump_file, " can't be converted to type of ");
4516 print_generic_expr (dump_file, rhs, 0);
4517 fprintf (dump_file, "\n");
4518 }
4519 continue;
4520 }
4521 }
4522 else
4523 val = v->value;
4524
4525 if (dump_file && (dump_flags & TDF_DETAILS))
4526 {
4527 fprintf (dump_file, "Modifying stmt:\n ");
4528 print_gimple_stmt (dump_file, stmt, 0, 0);
4529 }
4530 gimple_assign_set_rhs_from_tree (&gsi, val);
4531 update_stmt (stmt);
4532
4533 if (dump_file && (dump_flags & TDF_DETAILS))
4534 {
4535 fprintf (dump_file, "into:\n ");
4536 print_gimple_stmt (dump_file, stmt, 0, 0);
4537 fprintf (dump_file, "\n");
4538 }
4539
4540 something_changed = true;
4541 if (maybe_clean_eh_stmt (stmt)
4542 && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
4543 cfg_changed = true;
4544 }
4545
9771b263 4546 (*ipa_node_agg_replacements)[node->uid] = NULL;
2c9561b5 4547 free_parms_ainfo (parms_ainfo, param_count);
9771b263 4548 descriptors.release ();
2c9561b5
MJ
4549
4550 if (!something_changed)
4551 return 0;
4552 else if (cfg_changed)
4553 return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
4554 else
4555 return TODO_update_ssa_only_virtuals;
4556}
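
The transformation walk above matches each load from a parameter aggregate against the replacement chain by parameter index, offset and by_ref flag, and substitutes the recorded constant when everything agrees. A standalone model of that lookup over a small chain, using a hypothetical simplified struct agg_value with a plain integer payload instead of a tree value:

#include <stdio.h>

/* Hypothetical, simplified counterpart of ipa_agg_replacement_value.  */
struct agg_value { int index; long offset; int by_ref; long value;
                   struct agg_value *next; };

static const struct agg_value *
lookup (const struct agg_value *chain, int index, long offset, int by_ref)
{
  for (const struct agg_value *v = chain; v; v = v->next)
    if (v->index == index && v->offset == offset && v->by_ref == by_ref)
      return v;
  return NULL;
}

int
main (void)
{
  struct agg_value b = { 1, 64, 0, 42, NULL };
  struct agg_value a = { 0, 0, 0, 7, &b };

  const struct agg_value *hit = lookup (&a, 1, 64, 0);
  if (hit)
    printf ("replace load (param 1, offset 64) with constant %ld\n",
            hit->value);
  else
    printf ("no known constant, leave the load alone\n");
  return 0;
}
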