[gcc.git] / gcc / ipa-prop.c
518dc859 1/* Interprocedural analyses.
d1e082c2 2 Copyright (C) 2005-2013 Free Software Foundation, Inc.
518dc859
RL
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
518dc859
RL
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
518dc859
RL
19
20#include "config.h"
21#include "system.h"
22#include "coretypes.h"
23#include "tree.h"
24#include "langhooks.h"
25#include "ggc.h"
26#include "target.h"
27#include "cgraph.h"
28#include "ipa-prop.h"
29#include "tree-flow.h"
30#include "tree-pass.h"
771578a0 31#include "tree-inline.h"
0f378cb5 32#include "ipa-inline.h"
b258210c 33#include "gimple.h"
518dc859 34#include "flags.h"
3e293154 35#include "diagnostic.h"
cf835838 36#include "gimple-pretty-print.h"
fb3f88cc 37#include "lto-streamer.h"
f0efc7aa
DN
38#include "data-streamer.h"
39#include "tree-streamer.h"
dfea20f1 40#include "params.h"
771578a0 41
062c604f
MJ
42/* Intermediate information about a parameter that is only useful during the
43 run of ipa_analyze_node and is not kept afterwards. */
44
45struct param_analysis_info
46{
8b7773a4
MJ
47 bool parm_modified, ref_modified, pt_modified;
48 bitmap parm_visited_statements, pt_visited_statements;
062c604f
MJ
49};
50
771578a0 51/* Vector where the parameter infos are actually stored. */
9771b263 52vec<ipa_node_params_t> ipa_node_params_vector;
2c9561b5 53/* Vector of known aggregate values in cloned nodes. */
9771b263 54vec<ipa_agg_replacement_value_p, va_gc> *ipa_node_agg_replacements;
771578a0 55/* Vector where the edge argument infos are actually stored. */
9771b263 56vec<ipa_edge_args_t, va_gc> *ipa_edge_args_vector;
771578a0
MJ
57
58/* Holders of ipa cgraph hooks: */
e2c9111c
JH
59static struct cgraph_edge_hook_list *edge_removal_hook_holder;
60static struct cgraph_node_hook_list *node_removal_hook_holder;
61static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
62static struct cgraph_2node_hook_list *node_duplication_hook_holder;
40982661 63static struct cgraph_node_hook_list *function_insertion_hook_holder;
518dc859 64
be95e2b9
MJ
 65/* Return index of the formal whose tree is PTREE in the vector of parameter
 66 DESCRIPTORS. */
67
d044dd17 68static int
9771b263 69ipa_get_param_decl_index_1 (vec<ipa_param_descriptor_t> descriptors, tree ptree)
518dc859
RL
70{
71 int i, count;
72
9771b263 73 count = descriptors.length ();
518dc859 74 for (i = 0; i < count; i++)
9771b263 75 if (descriptors[i].decl == ptree)
518dc859
RL
76 return i;
77
78 return -1;
79}
80
d044dd17
MJ
81/* Return index of the formal whose tree is PTREE in function which corresponds
82 to INFO. */
83
84int
85ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
86{
87 return ipa_get_param_decl_index_1 (info->descriptors, ptree);
88}
89
90/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
91 NODE. */
be95e2b9 92
f8e2a1ed
MJ
93static void
94ipa_populate_param_decls (struct cgraph_node *node,
9771b263 95 vec<ipa_param_descriptor_t> &descriptors)
518dc859
RL
96{
97 tree fndecl;
98 tree fnargs;
99 tree parm;
100 int param_num;
3e293154 101
960bfb69 102 fndecl = node->symbol.decl;
518dc859
RL
103 fnargs = DECL_ARGUMENTS (fndecl);
104 param_num = 0;
910ad8de 105 for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
518dc859 106 {
9771b263 107 descriptors[param_num].decl = parm;
518dc859
RL
108 param_num++;
109 }
110}
111
3f84bf08
MJ
112/* Return how many formal parameters FNDECL has. */
113
114static inline int
310bc633 115count_formal_params (tree fndecl)
3f84bf08
MJ
116{
117 tree parm;
118 int count = 0;
119
910ad8de 120 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
3f84bf08
MJ
121 count++;
122
123 return count;
124}
125
f8e2a1ed
MJ
126/* Initialize the ipa_node_params structure associated with NODE by counting
127 the function parameters, creating the descriptors and populating their
128 param_decls. */
be95e2b9 129
f8e2a1ed
MJ
130void
131ipa_initialize_node_params (struct cgraph_node *node)
132{
133 struct ipa_node_params *info = IPA_NODE_REF (node);
134
9771b263 135 if (!info->descriptors.exists ())
f8e2a1ed 136 {
310bc633
MJ
137 int param_count;
138
960bfb69 139 param_count = count_formal_params (node->symbol.decl);
310bc633
MJ
140 if (param_count)
141 {
9771b263 142 info->descriptors.safe_grow_cleared (param_count);
d044dd17 143 ipa_populate_param_decls (node, info->descriptors);
310bc633 144 }
f8e2a1ed 145 }
518dc859
RL
146}
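/* Editor's note: a minimal worked example of the initialization above
   (function and parameter names are made up).  For

       int f (int a, struct S *p);

   count_formal_params returns 2, so two cleared ipa_param_descriptor
   entries are grown and ipa_populate_param_decls points their decl fields
   at the PARM_DECLs of `a' and `p'.  ipa_get_param_decl_index can then map
   either declaration back to its position (0 or 1).  */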
147
749aa96d
MJ
148/* Print the jump functions associated with call graph edge CS to file F. */
149
150static void
151ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
152{
153 int i, count;
154
155 count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
156 for (i = 0; i < count; i++)
157 {
158 struct ipa_jump_func *jump_func;
159 enum jump_func_type type;
160
161 jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
162 type = jump_func->type;
163
164 fprintf (f, " param %d: ", i);
165 if (type == IPA_JF_UNKNOWN)
166 fprintf (f, "UNKNOWN\n");
167 else if (type == IPA_JF_KNOWN_TYPE)
168 {
c7573249
MJ
169 fprintf (f, "KNOWN TYPE: base ");
170 print_generic_expr (f, jump_func->value.known_type.base_type, 0);
171 fprintf (f, ", offset "HOST_WIDE_INT_PRINT_DEC", component ",
172 jump_func->value.known_type.offset);
173 print_generic_expr (f, jump_func->value.known_type.component_type, 0);
174 fprintf (f, "\n");
749aa96d
MJ
175 }
176 else if (type == IPA_JF_CONST)
177 {
178 tree val = jump_func->value.constant;
179 fprintf (f, "CONST: ");
180 print_generic_expr (f, val, 0);
181 if (TREE_CODE (val) == ADDR_EXPR
182 && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
183 {
184 fprintf (f, " -> ");
185 print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)),
186 0);
187 }
188 fprintf (f, "\n");
189 }
749aa96d
MJ
190 else if (type == IPA_JF_PASS_THROUGH)
191 {
192 fprintf (f, "PASS THROUGH: ");
8b7773a4 193 fprintf (f, "%d, op %s",
749aa96d
MJ
194 jump_func->value.pass_through.formal_id,
195 tree_code_name[(int)
196 jump_func->value.pass_through.operation]);
197 if (jump_func->value.pass_through.operation != NOP_EXPR)
8b7773a4
MJ
198 {
199 fprintf (f, " ");
200 print_generic_expr (f,
201 jump_func->value.pass_through.operand, 0);
202 }
203 if (jump_func->value.pass_through.agg_preserved)
204 fprintf (f, ", agg_preserved");
3ea6239f 205 fprintf (f, "\n");
749aa96d
MJ
206 }
207 else if (type == IPA_JF_ANCESTOR)
208 {
209 fprintf (f, "ANCESTOR: ");
210 fprintf (f, "%d, offset "HOST_WIDE_INT_PRINT_DEC", ",
211 jump_func->value.ancestor.formal_id,
212 jump_func->value.ancestor.offset);
213 print_generic_expr (f, jump_func->value.ancestor.type, 0);
8b7773a4
MJ
214 if (jump_func->value.ancestor.agg_preserved)
215 fprintf (f, ", agg_preserved");
3ea6239f 216 fprintf (f, "\n");
749aa96d 217 }
8b7773a4
MJ
218
219 if (jump_func->agg.items)
220 {
221 struct ipa_agg_jf_item *item;
222 int j;
223
224 fprintf (f, " Aggregate passed by %s:\n",
225 jump_func->agg.by_ref ? "reference" : "value");
9771b263 226 FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
8b7773a4
MJ
227 {
228 fprintf (f, " offset: " HOST_WIDE_INT_PRINT_DEC ", ",
229 item->offset);
230 if (TYPE_P (item->value))
231 fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
232 tree_low_cst (TYPE_SIZE (item->value), 1));
233 else
234 {
235 fprintf (f, "cst: ");
236 print_generic_expr (f, item->value, 0);
237 }
238 fprintf (f, "\n");
239 }
240 }
749aa96d
MJ
241 }
242}
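/* Editor's note: a hedged sketch of the dump text the function above
   produces, assembled from its fprintf formats (callee, values and exact
   spacing are illustrative only):

       param 0: CONST: 4
       param 1: PASS THROUGH: 0, op nop_expr, agg_preserved
        Aggregate passed by reference:
          offset: 0, cst: 7
*/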
243
244
be95e2b9
MJ
245/* Print the jump functions of all arguments on all call graph edges going from
246 NODE to file F. */
247
518dc859 248void
3e293154 249ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
518dc859 250{
3e293154 251 struct cgraph_edge *cs;
749aa96d 252 int i;
518dc859 253
ca30a539 254 fprintf (f, " Jump functions of caller %s:\n", cgraph_node_name (node));
3e293154
MJ
255 for (cs = node->callees; cs; cs = cs->next_callee)
256 {
257 if (!ipa_edge_args_info_available_for_edge_p (cs))
258 continue;
259
749aa96d 260 fprintf (f, " callsite %s/%i -> %s/%i : \n",
036c0102
UB
261 xstrdup (cgraph_node_name (node)), node->uid,
262 xstrdup (cgraph_node_name (cs->callee)), cs->callee->uid);
749aa96d
MJ
263 ipa_print_node_jump_functions_for_edge (f, cs);
264 }
518dc859 265
749aa96d
MJ
266 for (cs = node->indirect_calls, i = 0; cs; cs = cs->next_callee, i++)
267 {
268 if (!ipa_edge_args_info_available_for_edge_p (cs))
269 continue;
3e293154 270
749aa96d
MJ
271 if (cs->call_stmt)
272 {
273 fprintf (f, " indirect callsite %d for stmt ", i);
274 print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
3e293154 275 }
749aa96d
MJ
276 else
277 fprintf (f, " indirect callsite %d :\n", i);
278 ipa_print_node_jump_functions_for_edge (f, cs);
279
3e293154
MJ
280 }
281}
282
283/* Print ipa_jump_func data structures of all nodes in the call graph to F. */
be95e2b9 284
3e293154
MJ
285void
286ipa_print_all_jump_functions (FILE *f)
287{
288 struct cgraph_node *node;
289
ca30a539 290 fprintf (f, "\nJump functions:\n");
65c70e6b 291 FOR_EACH_FUNCTION (node)
3e293154
MJ
292 {
293 ipa_print_node_jump_functions (f, node);
294 }
295}
296
7b872d9e
MJ
297/* Set JFUNC to be a known type jump function. */
298
299static void
300ipa_set_jf_known_type (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
301 tree base_type, tree component_type)
302{
303 jfunc->type = IPA_JF_KNOWN_TYPE;
304 jfunc->value.known_type.offset = offset,
305 jfunc->value.known_type.base_type = base_type;
306 jfunc->value.known_type.component_type = component_type;
307}
308
 309/* Set JFUNC to be a constant jump function. */
310
311static void
312ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant)
313{
5368224f
DC
314 constant = unshare_expr (constant);
315 if (constant && EXPR_P (constant))
316 SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
7b872d9e 317 jfunc->type = IPA_JF_CONST;
d1f98542 318 jfunc->value.constant = unshare_expr_without_location (constant);
7b872d9e
MJ
319}
320
321/* Set JFUNC to be a simple pass-through jump function. */
322static void
8b7773a4
MJ
323ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
324 bool agg_preserved)
7b872d9e
MJ
325{
326 jfunc->type = IPA_JF_PASS_THROUGH;
327 jfunc->value.pass_through.operand = NULL_TREE;
328 jfunc->value.pass_through.formal_id = formal_id;
329 jfunc->value.pass_through.operation = NOP_EXPR;
8b7773a4 330 jfunc->value.pass_through.agg_preserved = agg_preserved;
7b872d9e
MJ
331}
332
333/* Set JFUNC to be an arithmetic pass through jump function. */
334
335static void
336ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
337 tree operand, enum tree_code operation)
338{
339 jfunc->type = IPA_JF_PASS_THROUGH;
d1f98542 340 jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
7b872d9e
MJ
341 jfunc->value.pass_through.formal_id = formal_id;
342 jfunc->value.pass_through.operation = operation;
8b7773a4 343 jfunc->value.pass_through.agg_preserved = false;
7b872d9e
MJ
344}
345
346/* Set JFUNC to be an ancestor jump function. */
347
348static void
349ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
8b7773a4 350 tree type, int formal_id, bool agg_preserved)
7b872d9e
MJ
351{
352 jfunc->type = IPA_JF_ANCESTOR;
353 jfunc->value.ancestor.formal_id = formal_id;
354 jfunc->value.ancestor.offset = offset;
355 jfunc->value.ancestor.type = type;
8b7773a4 356 jfunc->value.ancestor.agg_preserved = agg_preserved;
7b872d9e
MJ
357}
358
e248d83f
MJ
 359/* Extract the actual BINFO being described by JFUNC which must be a known type
360 jump function. */
361
362tree
363ipa_binfo_from_known_type_jfunc (struct ipa_jump_func *jfunc)
364{
365 tree base_binfo = TYPE_BINFO (jfunc->value.known_type.base_type);
366 if (!base_binfo)
367 return NULL_TREE;
368 return get_binfo_at_offset (base_binfo,
369 jfunc->value.known_type.offset,
370 jfunc->value.known_type.component_type);
371}
372
f65cf2b7
MJ
373/* Structure to be passed in between detect_type_change and
374 check_stmt_for_type_change. */
375
376struct type_change_info
377{
290ebcb7
MJ
378 /* Offset into the object where there is the virtual method pointer we are
379 looking for. */
380 HOST_WIDE_INT offset;
381 /* The declaration or SSA_NAME pointer of the base that we are checking for
382 type change. */
383 tree object;
384 /* If we actually can tell the type that the object has changed to, it is
385 stored in this field. Otherwise it remains NULL_TREE. */
386 tree known_current_type;
f65cf2b7
MJ
387 /* Set to true if dynamic type change has been detected. */
388 bool type_maybe_changed;
290ebcb7
MJ
389 /* Set to true if multiple types have been encountered. known_current_type
390 must be disregarded in that case. */
391 bool multiple_types_encountered;
f65cf2b7
MJ
392};
393
394/* Return true if STMT can modify a virtual method table pointer.
395
396 This function makes special assumptions about both constructors and
397 destructors which are all the functions that are allowed to alter the VMT
398 pointers. It assumes that destructors begin with assignment into all VMT
399 pointers and that constructors essentially look in the following way:
400
401 1) The very first thing they do is that they call constructors of ancestor
402 sub-objects that have them.
403
 404 2) Then the VMT pointers of this and all its ancestors are set to new
 405 values corresponding to the type associated with the constructor.
406
 407 3) Only afterwards are other things run, such as constructors of member
 408 sub-objects and the code written by the user. Only this code may include
 409 calls to virtual functions, directly or indirectly.
410
411 There is no way to call a constructor of an ancestor sub-object in any
412 other way.
413
414 This means that we do not have to care whether constructors get the correct
415 type information because they will always change it (in fact, if we define
416 the type to be given by the VMT pointer, it is undefined).
417
418 The most important fact to derive from the above is that if, for some
419 statement in the section 3, we try to detect whether the dynamic type has
420 changed, we can safely ignore all calls as we examine the function body
421 backwards until we reach statements in section 2 because these calls cannot
422 be ancestor constructors or destructors (if the input is not bogus) and so
423 do not change the dynamic type (this holds true only for automatically
424 allocated objects but at the moment we devirtualize only these). We then
425 must detect that statements in section 2 change the dynamic type and can try
426 to derive the new type. That is enough and we can stop, we will never see
427 the calls into constructors of sub-objects in this code. Therefore we can
428 safely ignore all call statements that we traverse.
429 */
430
431static bool
432stmt_may_be_vtbl_ptr_store (gimple stmt)
433{
434 if (is_gimple_call (stmt))
435 return false;
436 else if (is_gimple_assign (stmt))
437 {
438 tree lhs = gimple_assign_lhs (stmt);
439
0004f992
MJ
440 if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
441 {
442 if (flag_strict_aliasing
443 && !POINTER_TYPE_P (TREE_TYPE (lhs)))
444 return false;
445
446 if (TREE_CODE (lhs) == COMPONENT_REF
447 && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
f65cf2b7 448 return false;
0004f992
MJ
449 /* In the future we might want to use get_base_ref_and_offset to find
450 if there is a field corresponding to the offset and if so, proceed
451 almost like if it was a component ref. */
452 }
f65cf2b7
MJ
453 }
454 return true;
455}
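/* Editor's note: a hedged C++ illustration of the constructor shape assumed
   by the comment above (class and symbol names are made up):

       struct A { virtual void f (); };
       struct B : A { B (); virtual void f (); };

   B::B () is conceptually emitted as

       B::B (struct B *this)
       {
         A::A (this);                     // 1) ancestor constructors run first
         this->_vptr.A = &vtable_for_B;   // 2) then the VMT pointers are stored
         user_code (this);                // 3) only then user code, which may
       }                                  //    call virtual functions

   so a backward walk that starts from a statement in section 3 may safely
   skip over call statements until it reaches the VMT pointer stores of
   section 2.  */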
456
290ebcb7
MJ
457/* If STMT can be proved to be an assignment to the virtual method table
 458 pointer of the object described by TCI and the type associated with the new table
459 identified, return the type. Otherwise return NULL_TREE. */
460
461static tree
462extr_type_from_vtbl_ptr_store (gimple stmt, struct type_change_info *tci)
463{
464 HOST_WIDE_INT offset, size, max_size;
465 tree lhs, rhs, base;
466
467 if (!gimple_assign_single_p (stmt))
468 return NULL_TREE;
469
470 lhs = gimple_assign_lhs (stmt);
471 rhs = gimple_assign_rhs1 (stmt);
472 if (TREE_CODE (lhs) != COMPONENT_REF
473 || !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1))
474 || TREE_CODE (rhs) != ADDR_EXPR)
475 return NULL_TREE;
476 rhs = get_base_address (TREE_OPERAND (rhs, 0));
477 if (!rhs
478 || TREE_CODE (rhs) != VAR_DECL
479 || !DECL_VIRTUAL_P (rhs))
480 return NULL_TREE;
481
482 base = get_ref_base_and_extent (lhs, &offset, &size, &max_size);
483 if (offset != tci->offset
484 || size != POINTER_SIZE
485 || max_size != POINTER_SIZE)
486 return NULL_TREE;
487 if (TREE_CODE (base) == MEM_REF)
488 {
489 if (TREE_CODE (tci->object) != MEM_REF
490 || TREE_OPERAND (tci->object, 0) != TREE_OPERAND (base, 0)
491 || !tree_int_cst_equal (TREE_OPERAND (tci->object, 1),
492 TREE_OPERAND (base, 1)))
493 return NULL_TREE;
494 }
495 else if (tci->object != base)
496 return NULL_TREE;
497
498 return DECL_CONTEXT (rhs);
499}
500
61502ca8 501/* Callback of walk_aliased_vdefs and a helper function for
f65cf2b7
MJ
502 detect_type_change to check whether a particular statement may modify
503 the virtual table pointer, and if possible also determine the new type of
504 the (sub-)object. It stores its result into DATA, which points to a
505 type_change_info structure. */
506
507static bool
508check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
509{
510 gimple stmt = SSA_NAME_DEF_STMT (vdef);
511 struct type_change_info *tci = (struct type_change_info *) data;
512
513 if (stmt_may_be_vtbl_ptr_store (stmt))
514 {
290ebcb7
MJ
515 tree type;
516 type = extr_type_from_vtbl_ptr_store (stmt, tci);
517 if (tci->type_maybe_changed
518 && type != tci->known_current_type)
519 tci->multiple_types_encountered = true;
520 tci->known_current_type = type;
f65cf2b7
MJ
521 tci->type_maybe_changed = true;
522 return true;
523 }
524 else
525 return false;
526}
527
290ebcb7
MJ
528
529
 530/* Like detect_type_change but with an extra argument COMP_TYPE which will
 531 become the component type part of the new JFUNC if a dynamic type change is
 532 detected and the new base type is identified. */
f65cf2b7
MJ
533
534static bool
290ebcb7
MJ
535detect_type_change_1 (tree arg, tree base, tree comp_type, gimple call,
536 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
f65cf2b7
MJ
537{
538 struct type_change_info tci;
539 ao_ref ao;
540
541 gcc_checking_assert (DECL_P (arg)
542 || TREE_CODE (arg) == MEM_REF
543 || handled_component_p (arg));
544 /* Const calls cannot call virtual methods through VMT and so type changes do
545 not matter. */
05842ff5 546 if (!flag_devirtualize || !gimple_vuse (call))
f65cf2b7
MJ
547 return false;
548
dd887943 549 ao_ref_init (&ao, arg);
f65cf2b7
MJ
550 ao.base = base;
551 ao.offset = offset;
552 ao.size = POINTER_SIZE;
553 ao.max_size = ao.size;
f65cf2b7 554
290ebcb7
MJ
555 tci.offset = offset;
556 tci.object = get_base_address (arg);
557 tci.known_current_type = NULL_TREE;
558 tci.type_maybe_changed = false;
559 tci.multiple_types_encountered = false;
560
f65cf2b7
MJ
561 walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
562 &tci, NULL);
563 if (!tci.type_maybe_changed)
564 return false;
565
290ebcb7
MJ
566 if (!tci.known_current_type
567 || tci.multiple_types_encountered
568 || offset != 0)
569 jfunc->type = IPA_JF_UNKNOWN;
570 else
7b872d9e 571 ipa_set_jf_known_type (jfunc, 0, tci.known_current_type, comp_type);
290ebcb7 572
f65cf2b7
MJ
573 return true;
574}
575
290ebcb7
MJ
576/* Detect whether the dynamic type of ARG has changed (before callsite CALL) by
577 looking for assignments to its virtual table pointer. If it is, return true
578 and fill in the jump function JFUNC with relevant type information or set it
579 to unknown. ARG is the object itself (not a pointer to it, unless
580 dereferenced). BASE is the base of the memory access as returned by
581 get_ref_base_and_extent, as is the offset. */
582
583static bool
584detect_type_change (tree arg, tree base, gimple call,
585 struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
586{
587 return detect_type_change_1 (arg, base, TREE_TYPE (arg), call, jfunc, offset);
588}
589
f65cf2b7
MJ
590/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
591 SSA name (its dereference will become the base and the offset is assumed to
592 be zero). */
593
594static bool
595detect_type_change_ssa (tree arg, gimple call, struct ipa_jump_func *jfunc)
596{
290ebcb7
MJ
597 tree comp_type;
598
f65cf2b7 599 gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
05842ff5
MJ
600 if (!flag_devirtualize
601 || !POINTER_TYPE_P (TREE_TYPE (arg))
f65cf2b7
MJ
602 || TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) != RECORD_TYPE)
603 return false;
604
290ebcb7 605 comp_type = TREE_TYPE (TREE_TYPE (arg));
f65cf2b7 606 arg = build2 (MEM_REF, ptr_type_node, arg,
290ebcb7 607 build_int_cst (ptr_type_node, 0));
f65cf2b7 608
290ebcb7 609 return detect_type_change_1 (arg, arg, comp_type, call, jfunc, 0);
f65cf2b7
MJ
610}
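/* Editor's note: a hedged example of the situation the two functions above
   guard against (types and names are made up):

       struct A { virtual void f (); };
       struct B : A { virtual void f (); };

       void
       caller (struct A *a)
       {
         a->~A ();
         new (a) B;        // stores a new VMT pointer into *a
         callee (a);
       }

   Walking the virtual definitions backwards from the call finds the VMT
   pointer store, so instead of trusting the declared type of *a the jump
   function is either downgraded to IPA_JF_UNKNOWN or, when a single new
   type can be extracted, turned into a KNOWN_TYPE of that type.  */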
611
fdb0e1b4
MJ
612/* Callback of walk_aliased_vdefs. Flags that it has been invoked to the
613 boolean variable pointed to by DATA. */
614
615static bool
616mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
617 void *data)
618{
619 bool *b = (bool *) data;
620 *b = true;
621 return true;
622}
623
8b7773a4
MJ
 624/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
625 a value known not to be modified in this function before reaching the
626 statement STMT. PARM_AINFO is a pointer to a structure containing temporary
627 information about the parameter. */
fdb0e1b4
MJ
628
629static bool
8b7773a4
MJ
630parm_preserved_before_stmt_p (struct param_analysis_info *parm_ainfo,
631 gimple stmt, tree parm_load)
fdb0e1b4
MJ
632{
633 bool modified = false;
8b7773a4 634 bitmap *visited_stmts;
fdb0e1b4
MJ
635 ao_ref refd;
636
8b7773a4
MJ
637 if (parm_ainfo && parm_ainfo->parm_modified)
638 return false;
fdb0e1b4
MJ
639
640 gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
8b7773a4
MJ
641 ao_ref_init (&refd, parm_load);
642 /* We can cache visited statements only when parm_ainfo is available and when
643 we are looking at a naked load of the whole parameter. */
644 if (!parm_ainfo || TREE_CODE (parm_load) != PARM_DECL)
645 visited_stmts = NULL;
646 else
647 visited_stmts = &parm_ainfo->parm_visited_statements;
648 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
649 visited_stmts);
650 if (parm_ainfo && modified)
651 parm_ainfo->parm_modified = true;
652 return !modified;
fdb0e1b4
MJ
653}
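/* Editor's note: a hedged GIMPLE-style illustration (names are made up):

       foo (int a)
       {
         int a.0;

         a = 5;          // a store into the parameter ...
         a.0_2 = a;      // ... is found by walk_aliased_vdefs, so the
         bar (a.0_2);    // function above returns false for this load
       }

   Without the store there is no clobbering virtual definition between the
   function entry and the load, mark_modified is never invoked and the
   function returns true, allowing a pass-through jump function.  */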
654
 655/* If STMT is an assignment that loads a value from a parameter declaration,
656 return the index of the parameter in ipa_node_params which has not been
657 modified. Otherwise return -1. */
658
659static int
9771b263 660load_from_unmodified_param (vec<ipa_param_descriptor_t> descriptors,
fdb0e1b4
MJ
661 struct param_analysis_info *parms_ainfo,
662 gimple stmt)
663{
664 int index;
665 tree op1;
666
667 if (!gimple_assign_single_p (stmt))
668 return -1;
669
670 op1 = gimple_assign_rhs1 (stmt);
671 if (TREE_CODE (op1) != PARM_DECL)
672 return -1;
673
d044dd17 674 index = ipa_get_param_decl_index_1 (descriptors, op1);
fdb0e1b4 675 if (index < 0
8b7773a4
MJ
676 || !parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
677 : NULL, stmt, op1))
fdb0e1b4
MJ
678 return -1;
679
680 return index;
681}
f65cf2b7 682
8b7773a4
MJ
683/* Return true if memory reference REF loads data that are known to be
684 unmodified in this function before reaching statement STMT. PARM_AINFO, if
685 non-NULL, is a pointer to a structure containing temporary information about
686 PARM. */
687
688static bool
689parm_ref_data_preserved_p (struct param_analysis_info *parm_ainfo,
690 gimple stmt, tree ref)
691{
692 bool modified = false;
693 ao_ref refd;
694
695 gcc_checking_assert (gimple_vuse (stmt));
696 if (parm_ainfo && parm_ainfo->ref_modified)
697 return false;
698
699 ao_ref_init (&refd, ref);
700 walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified, &modified,
701 NULL);
702 if (parm_ainfo && modified)
703 parm_ainfo->ref_modified = true;
704 return !modified;
705}
706
707/* Return true if the data pointed to by PARM is known to be unmodified in this
708 function before reaching call statement CALL into which it is passed.
709 PARM_AINFO is a pointer to a structure containing temporary information
710 about PARM. */
711
712static bool
713parm_ref_data_pass_through_p (struct param_analysis_info *parm_ainfo,
714 gimple call, tree parm)
715{
716 bool modified = false;
717 ao_ref refd;
718
 719 /* It's unnecessary to calculate anything about memory contents for a const
 720 function because it is not going to use it. But do not cache the result
721 either. Also, no such calculations for non-pointers. */
722 if (!gimple_vuse (call)
723 || !POINTER_TYPE_P (TREE_TYPE (parm)))
724 return false;
725
726 if (parm_ainfo->pt_modified)
727 return false;
728
729 ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
730 walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified, &modified,
731 parm_ainfo ? &parm_ainfo->pt_visited_statements : NULL);
732 if (modified)
733 parm_ainfo->pt_modified = true;
734 return !modified;
735}
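/* Editor's note: a hedged illustration of the check above (names made up):

       middle (struct S *p)
       {
         callee (p);     // nothing clobbers *p before the call, so the
       }                 // pass-through can be marked agg_preserved

   whereas a store such as `p->x = 0;' before the call would be found by
   walk_aliased_vdefs, pt_modified would be set and the function would
   return false.  */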
736
737/* Return true if we can prove that OP is a memory reference loading unmodified
738 data from an aggregate passed as a parameter and if the aggregate is passed
739 by reference, that the alias type of the load corresponds to the type of the
740 formal parameter (so that we can rely on this type for TBAA in callers).
741 INFO and PARMS_AINFO describe parameters of the current function (but the
742 latter can be NULL), STMT is the load statement. If function returns true,
743 *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
744 within the aggregate and whether it is a load from a value passed by
745 reference respectively. */
746
747static bool
9771b263 748ipa_load_from_parm_agg_1 (vec<ipa_param_descriptor_t> descriptors,
8b7773a4
MJ
749 struct param_analysis_info *parms_ainfo, gimple stmt,
750 tree op, int *index_p, HOST_WIDE_INT *offset_p,
751 bool *by_ref_p)
752{
753 int index;
754 HOST_WIDE_INT size, max_size;
755 tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);
756
757 if (max_size == -1 || max_size != size || *offset_p < 0)
758 return false;
759
760 if (DECL_P (base))
761 {
d044dd17 762 int index = ipa_get_param_decl_index_1 (descriptors, base);
8b7773a4
MJ
763 if (index >= 0
764 && parm_preserved_before_stmt_p (parms_ainfo ? &parms_ainfo[index]
765 : NULL, stmt, op))
766 {
767 *index_p = index;
768 *by_ref_p = false;
769 return true;
770 }
771 return false;
772 }
773
774 if (TREE_CODE (base) != MEM_REF
775 || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
776 || !integer_zerop (TREE_OPERAND (base, 1)))
777 return false;
778
779 if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
780 {
781 tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
d044dd17 782 index = ipa_get_param_decl_index_1 (descriptors, parm);
8b7773a4
MJ
783 }
784 else
785 {
786 /* This branch catches situations where a pointer parameter is not a
787 gimple register, for example:
788
789 void hip7(S*) (struct S * p)
790 {
791 void (*<T2e4>) (struct S *) D.1867;
792 struct S * p.1;
793
794 <bb 2>:
795 p.1_1 = p;
796 D.1867_2 = p.1_1->f;
797 D.1867_2 ();
798 gdp = &p;
799 */
800
801 gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
d044dd17 802 index = load_from_unmodified_param (descriptors, parms_ainfo, def);
8b7773a4
MJ
803 }
804
805 if (index >= 0
806 && parm_ref_data_preserved_p (parms_ainfo ? &parms_ainfo[index] : NULL,
807 stmt, op))
808 {
809 *index_p = index;
810 *by_ref_p = true;
811 return true;
812 }
813 return false;
814}
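/* Editor's note: a hedged example of the by-value case handled above (names
   are made up):

       struct S { int f; };

       int
       user (struct S s)
       {
         return s.f;     // base of the load is the PARM_DECL `s'
       }

   Here get_ref_base_and_extent returns `s', which is DECL_P, so *INDEX_P is
   set to the index of `s', *BY_REF_P to false and *OFFSET_P to the bit
   position of field `f', provided the aggregate is not modified before the
   statement.  */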
815
816/* Just like the previous function, just without the param_analysis_info
817 pointer, for users outside of this file. */
818
819bool
820ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
821 tree op, int *index_p, HOST_WIDE_INT *offset_p,
822 bool *by_ref_p)
823{
d044dd17
MJ
824 return ipa_load_from_parm_agg_1 (info->descriptors, NULL, stmt, op, index_p,
825 offset_p, by_ref_p);
8b7773a4
MJ
826}
827
b258210c 828/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
fdb0e1b4
MJ
829 of an assignment statement STMT, try to determine whether we are actually
830 handling any of the following cases and construct an appropriate jump
831 function into JFUNC if so:
832
833 1) The passed value is loaded from a formal parameter which is not a gimple
834 register (most probably because it is addressable, the value has to be
835 scalar) and we can guarantee the value has not changed. This case can
836 therefore be described by a simple pass-through jump function. For example:
837
838 foo (int a)
839 {
840 int a.0;
841
842 a.0_2 = a;
843 bar (a.0_2);
844
845 2) The passed value can be described by a simple arithmetic pass-through
846 jump function. E.g.
847
848 foo (int a)
849 {
850 int D.2064;
851
852 D.2064_4 = a.1(D) + 4;
853 bar (D.2064_4);
854
855 This case can also occur in combination of the previous one, e.g.:
856
857 foo (int a, int z)
858 {
859 int a.0;
860 int D.2064;
861
862 a.0_3 = a;
863 D.2064_4 = a.0_3 + 4;
864 foo (D.2064_4);
865
866 3) The passed value is an address of an object within another one (which
867 also passed by reference). Such situations are described by an ancestor
868 jump function and describe situations such as:
869
870 B::foo() (struct B * const this)
871 {
872 struct A * D.1845;
873
874 D.1845_2 = &this_1(D)->D.1748;
875 A::bar (D.1845_2);
876
 877 INFO is the structure describing individual parameters, accessed in different
878 stages of IPA optimizations. PARMS_AINFO contains the information that is
879 only needed for intraprocedural analysis. */
685b0d13
MJ
880
881static void
b258210c 882compute_complex_assign_jump_func (struct ipa_node_params *info,
fdb0e1b4 883 struct param_analysis_info *parms_ainfo,
b258210c 884 struct ipa_jump_func *jfunc,
f65cf2b7 885 gimple call, gimple stmt, tree name)
685b0d13
MJ
886{
887 HOST_WIDE_INT offset, size, max_size;
fdb0e1b4 888 tree op1, tc_ssa, base, ssa;
685b0d13 889 int index;
685b0d13 890
685b0d13 891 op1 = gimple_assign_rhs1 (stmt);
685b0d13 892
fdb0e1b4 893 if (TREE_CODE (op1) == SSA_NAME)
685b0d13 894 {
fdb0e1b4
MJ
895 if (SSA_NAME_IS_DEFAULT_DEF (op1))
896 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
897 else
d044dd17 898 index = load_from_unmodified_param (info->descriptors, parms_ainfo,
fdb0e1b4
MJ
899 SSA_NAME_DEF_STMT (op1));
900 tc_ssa = op1;
901 }
902 else
903 {
d044dd17 904 index = load_from_unmodified_param (info->descriptors, parms_ainfo, stmt);
fdb0e1b4
MJ
905 tc_ssa = gimple_assign_lhs (stmt);
906 }
907
908 if (index >= 0)
909 {
910 tree op2 = gimple_assign_rhs2 (stmt);
685b0d13 911
b258210c 912 if (op2)
685b0d13 913 {
b258210c
MJ
914 if (!is_gimple_ip_invariant (op2)
915 || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
916 && !useless_type_conversion_p (TREE_TYPE (name),
917 TREE_TYPE (op1))))
918 return;
919
7b872d9e
MJ
920 ipa_set_jf_arith_pass_through (jfunc, index, op2,
921 gimple_assign_rhs_code (stmt));
685b0d13 922 }
fdb0e1b4
MJ
923 else if (gimple_assign_single_p (stmt)
924 && !detect_type_change_ssa (tc_ssa, call, jfunc))
8b7773a4
MJ
925 {
926 bool agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
927 call, tc_ssa);
928 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
929 }
685b0d13
MJ
930 return;
931 }
932
933 if (TREE_CODE (op1) != ADDR_EXPR)
934 return;
935 op1 = TREE_OPERAND (op1, 0);
f65cf2b7 936 if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
b258210c 937 return;
32aa622c
MJ
938 base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
939 if (TREE_CODE (base) != MEM_REF
1a15bfdc
RG
940 /* If this is a varying address, punt. */
941 || max_size == -1
942 || max_size != size)
685b0d13 943 return;
32aa622c 944 offset += mem_ref_offset (base).low * BITS_PER_UNIT;
f65cf2b7
MJ
945 ssa = TREE_OPERAND (base, 0);
946 if (TREE_CODE (ssa) != SSA_NAME
947 || !SSA_NAME_IS_DEFAULT_DEF (ssa)
280fedf0 948 || offset < 0)
685b0d13
MJ
949 return;
950
32aa622c 951 /* Dynamic types are changed only in constructors and destructors and */
f65cf2b7
MJ
952 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
953 if (index >= 0
954 && !detect_type_change (op1, base, call, jfunc, offset))
8b7773a4
MJ
955 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (op1), index,
956 parm_ref_data_pass_through_p (&parms_ainfo[index],
957 call, ssa));
685b0d13
MJ
958}
959
40591473
MJ
960/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
961 it looks like:
962
963 iftmp.1_3 = &obj_2(D)->D.1762;
964
965 The base of the MEM_REF must be a default definition SSA NAME of a
 966 parameter. Return NULL_TREE if it looks otherwise. In case of success, the
967 whole MEM_REF expression is returned and the offset calculated from any
968 handled components and the MEM_REF itself is stored into *OFFSET. The whole
969 RHS stripped off the ADDR_EXPR is stored into *OBJ_P. */
970
971static tree
972get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
973{
974 HOST_WIDE_INT size, max_size;
975 tree expr, parm, obj;
976
977 if (!gimple_assign_single_p (assign))
978 return NULL_TREE;
979 expr = gimple_assign_rhs1 (assign);
980
981 if (TREE_CODE (expr) != ADDR_EXPR)
982 return NULL_TREE;
983 expr = TREE_OPERAND (expr, 0);
984 obj = expr;
985 expr = get_ref_base_and_extent (expr, offset, &size, &max_size);
986
987 if (TREE_CODE (expr) != MEM_REF
988 /* If this is a varying address, punt. */
989 || max_size == -1
990 || max_size != size
991 || *offset < 0)
992 return NULL_TREE;
993 parm = TREE_OPERAND (expr, 0);
994 if (TREE_CODE (parm) != SSA_NAME
995 || !SSA_NAME_IS_DEFAULT_DEF (parm)
996 || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
997 return NULL_TREE;
998
999 *offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
1000 *obj_p = obj;
1001 return expr;
1002}
1003
685b0d13 1004
b258210c
MJ
1005/* Given that an actual argument is an SSA_NAME that is a result of a phi
1006 statement PHI, try to find out whether NAME is in fact a
1007 multiple-inheritance typecast from a descendant into an ancestor of a formal
1008 parameter and thus can be described by an ancestor jump function and if so,
1009 write the appropriate function into JFUNC.
1010
1011 Essentially we want to match the following pattern:
1012
1013 if (obj_2(D) != 0B)
1014 goto <bb 3>;
1015 else
1016 goto <bb 4>;
1017
1018 <bb 3>:
1019 iftmp.1_3 = &obj_2(D)->D.1762;
1020
1021 <bb 4>:
1022 # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
1023 D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
1024 return D.1879_6; */
1025
1026static void
1027compute_complex_ancestor_jump_func (struct ipa_node_params *info,
8b7773a4 1028 struct param_analysis_info *parms_ainfo,
b258210c 1029 struct ipa_jump_func *jfunc,
f65cf2b7 1030 gimple call, gimple phi)
b258210c 1031{
40591473 1032 HOST_WIDE_INT offset;
b258210c
MJ
1033 gimple assign, cond;
1034 basic_block phi_bb, assign_bb, cond_bb;
f65cf2b7 1035 tree tmp, parm, expr, obj;
b258210c
MJ
1036 int index, i;
1037
54e348cb 1038 if (gimple_phi_num_args (phi) != 2)
b258210c
MJ
1039 return;
1040
54e348cb
MJ
1041 if (integer_zerop (PHI_ARG_DEF (phi, 1)))
1042 tmp = PHI_ARG_DEF (phi, 0);
1043 else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
1044 tmp = PHI_ARG_DEF (phi, 1);
1045 else
1046 return;
b258210c
MJ
1047 if (TREE_CODE (tmp) != SSA_NAME
1048 || SSA_NAME_IS_DEFAULT_DEF (tmp)
1049 || !POINTER_TYPE_P (TREE_TYPE (tmp))
1050 || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
1051 return;
1052
1053 assign = SSA_NAME_DEF_STMT (tmp);
1054 assign_bb = gimple_bb (assign);
40591473 1055 if (!single_pred_p (assign_bb))
b258210c 1056 return;
40591473
MJ
1057 expr = get_ancestor_addr_info (assign, &obj, &offset);
1058 if (!expr)
b258210c
MJ
1059 return;
1060 parm = TREE_OPERAND (expr, 0);
b258210c 1061 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
40591473 1062 gcc_assert (index >= 0);
b258210c
MJ
1063
1064 cond_bb = single_pred (assign_bb);
1065 cond = last_stmt (cond_bb);
69610617
SB
1066 if (!cond
1067 || gimple_code (cond) != GIMPLE_COND
b258210c
MJ
1068 || gimple_cond_code (cond) != NE_EXPR
1069 || gimple_cond_lhs (cond) != parm
1070 || !integer_zerop (gimple_cond_rhs (cond)))
1071 return;
1072
b258210c
MJ
1073 phi_bb = gimple_bb (phi);
1074 for (i = 0; i < 2; i++)
1075 {
1076 basic_block pred = EDGE_PRED (phi_bb, i)->src;
1077 if (pred != assign_bb && pred != cond_bb)
1078 return;
1079 }
1080
f65cf2b7 1081 if (!detect_type_change (obj, expr, call, jfunc, offset))
8b7773a4
MJ
1082 ipa_set_ancestor_jf (jfunc, offset, TREE_TYPE (obj), index,
1083 parm_ref_data_pass_through_p (&parms_ainfo[index],
1084 call, parm));
b258210c
MJ
1085}
1086
61502ca8 1087/* Given OP which is passed as an actual argument to a called function,
b258210c
MJ
1088 determine if it is possible to construct a KNOWN_TYPE jump function for it
1089 and if so, create one and store it to JFUNC. */
1090
1091static void
f65cf2b7
MJ
1092compute_known_type_jump_func (tree op, struct ipa_jump_func *jfunc,
1093 gimple call)
b258210c 1094{
32aa622c 1095 HOST_WIDE_INT offset, size, max_size;
c7573249 1096 tree base;
b258210c 1097
05842ff5
MJ
1098 if (!flag_devirtualize
1099 || TREE_CODE (op) != ADDR_EXPR
32aa622c 1100 || TREE_CODE (TREE_TYPE (TREE_TYPE (op))) != RECORD_TYPE)
b258210c
MJ
1101 return;
1102
1103 op = TREE_OPERAND (op, 0);
32aa622c
MJ
1104 base = get_ref_base_and_extent (op, &offset, &size, &max_size);
1105 if (!DECL_P (base)
1106 || max_size == -1
1107 || max_size != size
1108 || TREE_CODE (TREE_TYPE (base)) != RECORD_TYPE
1109 || is_global_var (base))
1110 return;
1111
4e03d849
MJ
1112 if (!TYPE_BINFO (TREE_TYPE (base))
1113 || detect_type_change (op, base, call, jfunc, offset))
f65cf2b7
MJ
1114 return;
1115
7b872d9e 1116 ipa_set_jf_known_type (jfunc, offset, TREE_TYPE (base), TREE_TYPE (op));
b258210c
MJ
1117}
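/* Editor's note: a hedged example of an argument for which the function
   above can build a KNOWN_TYPE jump function (names are made up):

       caller ()
       {
         struct B b;        // automatic, non-global aggregate
         callee (&b.head);  // address of a record-typed component of b
       }

   The base of the reference is the declaration `b', so base_type becomes
   `struct B', offset the bit position of the `head' field and
   component_type the type of that field, unless detect_type_change finds a
   possible dynamic type change before the call.  */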
1118
be95e2b9
MJ
1119/* Inspect the given TYPE and return true iff it has the same structure (the
1120 same number of fields of the same types) as a C++ member pointer. If
1121 METHOD_PTR and DELTA are non-NULL, store the trees representing the
1122 corresponding fields there. */
1123
3e293154
MJ
1124static bool
1125type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
1126{
1127 tree fld;
1128
1129 if (TREE_CODE (type) != RECORD_TYPE)
1130 return false;
1131
1132 fld = TYPE_FIELDS (type);
1133 if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
8b7773a4
MJ
1134 || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
1135 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
3e293154
MJ
1136 return false;
1137
1138 if (method_ptr)
1139 *method_ptr = fld;
1140
910ad8de 1141 fld = DECL_CHAIN (fld);
8b7773a4
MJ
1142 if (!fld || INTEGRAL_TYPE_P (fld)
1143 || !host_integerp (DECL_FIELD_OFFSET (fld), 1))
3e293154
MJ
1144 return false;
1145 if (delta)
1146 *delta = fld;
1147
910ad8de 1148 if (DECL_CHAIN (fld))
3e293154
MJ
1149 return false;
1150
1151 return true;
1152}
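/* Editor's note: a hedged sketch of the layout the predicate above looks
   for.  Under the Itanium C++ ABI a pointer to member function is lowered
   to a record roughly equivalent to

       struct
       {
         <method type> *__pfn;   // pointer whose pointee is a METHOD_TYPE
         long __delta;           // integral this-pointer adjustment
       };

   i.e. a RECORD_TYPE whose first field is a pointer to a METHOD_TYPE and
   whose second and last field is the integral delta, which is what the
   checks above test field by field.  */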
1153
61502ca8 1154/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
8b7773a4
MJ
1155 return the rhs of its defining statement. Otherwise return RHS as it
1156 is. */
7ec49257
MJ
1157
1158static inline tree
1159get_ssa_def_if_simple_copy (tree rhs)
1160{
1161 while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
1162 {
1163 gimple def_stmt = SSA_NAME_DEF_STMT (rhs);
1164
1165 if (gimple_assign_single_p (def_stmt))
1166 rhs = gimple_assign_rhs1 (def_stmt);
9961eb45
MJ
1167 else
1168 break;
7ec49257
MJ
1169 }
1170 return rhs;
1171}
1172
8b7773a4
MJ
 1173/* Simple linked list, describing known contents of an aggregate before a
 1174 call. */
1175
1176struct ipa_known_agg_contents_list
1177{
1178 /* Offset and size of the described part of the aggregate. */
1179 HOST_WIDE_INT offset, size;
1180 /* Known constant value or NULL if the contents is known to be unknown. */
1181 tree constant;
1182 /* Pointer to the next structure in the list. */
1183 struct ipa_known_agg_contents_list *next;
1184};
3e293154 1185
8b7773a4
MJ
1186/* Traverse statements from CALL backwards, scanning whether an aggregate given
1187 in ARG is filled in with constant values. ARG can either be an aggregate
1188 expression or a pointer to an aggregate. JFUNC is the jump function into
1189 which the constants are subsequently stored. */
be95e2b9 1190
3e293154 1191static void
8b7773a4
MJ
1192determine_known_aggregate_parts (gimple call, tree arg,
1193 struct ipa_jump_func *jfunc)
3e293154 1194{
8b7773a4
MJ
1195 struct ipa_known_agg_contents_list *list = NULL;
1196 int item_count = 0, const_count = 0;
1197 HOST_WIDE_INT arg_offset, arg_size;
726a989a 1198 gimple_stmt_iterator gsi;
8b7773a4
MJ
1199 tree arg_base;
1200 bool check_ref, by_ref;
1201 ao_ref r;
3e293154 1202
8b7773a4
MJ
1203 /* The function operates in three stages. First, we prepare check_ref, r,
1204 arg_base and arg_offset based on what is actually passed as an actual
1205 argument. */
3e293154 1206
8b7773a4
MJ
1207 if (POINTER_TYPE_P (TREE_TYPE (arg)))
1208 {
1209 by_ref = true;
1210 if (TREE_CODE (arg) == SSA_NAME)
1211 {
1212 tree type_size;
1213 if (!host_integerp (TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg))), 1))
1214 return;
1215 check_ref = true;
1216 arg_base = arg;
1217 arg_offset = 0;
1218 type_size = TYPE_SIZE (TREE_TYPE (TREE_TYPE (arg)));
1219 arg_size = tree_low_cst (type_size, 1);
1220 ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
1221 }
1222 else if (TREE_CODE (arg) == ADDR_EXPR)
1223 {
1224 HOST_WIDE_INT arg_max_size;
1225
1226 arg = TREE_OPERAND (arg, 0);
1227 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1228 &arg_max_size);
1229 if (arg_max_size == -1
1230 || arg_max_size != arg_size
1231 || arg_offset < 0)
1232 return;
1233 if (DECL_P (arg_base))
1234 {
1235 tree size;
1236 check_ref = false;
1237 size = build_int_cst (integer_type_node, arg_size);
1238 ao_ref_init_from_ptr_and_size (&r, arg_base, size);
1239 }
1240 else
1241 return;
1242 }
1243 else
1244 return;
1245 }
1246 else
1247 {
1248 HOST_WIDE_INT arg_max_size;
1249
1250 gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));
1251
1252 by_ref = false;
1253 check_ref = false;
1254 arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
1255 &arg_max_size);
1256 if (arg_max_size == -1
1257 || arg_max_size != arg_size
1258 || arg_offset < 0)
1259 return;
1260
1261 ao_ref_init (&r, arg);
1262 }
1263
1264 /* Second stage walks back the BB, looks at individual statements and as long
1265 as it is confident of how the statements affect contents of the
 1266 aggregates, it builds a sorted linked list of ipa_known_agg_contents_list
 1267 structures describing it. */
1268 gsi = gsi_for_stmt (call);
726a989a
RB
1269 gsi_prev (&gsi);
1270 for (; !gsi_end_p (gsi); gsi_prev (&gsi))
3e293154 1271 {
8b7773a4 1272 struct ipa_known_agg_contents_list *n, **p;
726a989a 1273 gimple stmt = gsi_stmt (gsi);
8b7773a4
MJ
1274 HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
1275 tree lhs, rhs, lhs_base;
1276 bool partial_overlap;
3e293154 1277
8b7773a4 1278 if (!stmt_may_clobber_ref_p_1 (stmt, &r))
8aa29647 1279 continue;
8b75fc9b 1280 if (!gimple_assign_single_p (stmt))
8b7773a4 1281 break;
3e293154 1282
726a989a
RB
1283 lhs = gimple_assign_lhs (stmt);
1284 rhs = gimple_assign_rhs1 (stmt);
8b7773a4
MJ
1285 if (!is_gimple_reg_type (rhs))
1286 break;
3e293154 1287
8b7773a4
MJ
1288 lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
1289 &lhs_max_size);
1290 if (lhs_max_size == -1
1291 || lhs_max_size != lhs_size
1292 || (lhs_offset < arg_offset
1293 && lhs_offset + lhs_size > arg_offset)
1294 || (lhs_offset < arg_offset + arg_size
1295 && lhs_offset + lhs_size > arg_offset + arg_size))
1296 break;
3e293154 1297
8b7773a4 1298 if (check_ref)
518dc859 1299 {
8b7773a4
MJ
1300 if (TREE_CODE (lhs_base) != MEM_REF
1301 || TREE_OPERAND (lhs_base, 0) != arg_base
1302 || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
1303 break;
3e293154 1304 }
8b7773a4 1305 else if (lhs_base != arg_base)
774b8a55
MJ
1306 {
1307 if (DECL_P (lhs_base))
1308 continue;
1309 else
1310 break;
1311 }
3e293154 1312
8b7773a4
MJ
1313 if (lhs_offset + lhs_size < arg_offset
1314 || lhs_offset >= (arg_offset + arg_size))
1315 continue;
1316
1317 partial_overlap = false;
1318 p = &list;
1319 while (*p && (*p)->offset < lhs_offset)
3e293154 1320 {
8b7773a4 1321 if ((*p)->offset + (*p)->size > lhs_offset)
3e293154 1322 {
8b7773a4
MJ
1323 partial_overlap = true;
1324 break;
3e293154 1325 }
8b7773a4
MJ
1326 p = &(*p)->next;
1327 }
1328 if (partial_overlap)
1329 break;
1330 if (*p && (*p)->offset < lhs_offset + lhs_size)
1331 {
1332 if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
1333 /* We already know this value is subsequently overwritten with
1334 something else. */
1335 continue;
3e293154 1336 else
8b7773a4
MJ
1337 /* Otherwise this is a partial overlap which we cannot
1338 represent. */
1339 break;
3e293154 1340 }
3e293154 1341
8b7773a4
MJ
1342 rhs = get_ssa_def_if_simple_copy (rhs);
1343 n = XALLOCA (struct ipa_known_agg_contents_list);
1344 n->size = lhs_size;
1345 n->offset = lhs_offset;
1346 if (is_gimple_ip_invariant (rhs))
1347 {
1348 n->constant = rhs;
1349 const_count++;
1350 }
1351 else
1352 n->constant = NULL_TREE;
1353 n->next = *p;
1354 *p = n;
3e293154 1355
8b7773a4 1356 item_count++;
dfea20f1
MJ
1357 if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
1358 || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
8b7773a4
MJ
1359 break;
1360 }
be95e2b9 1361
8b7773a4
MJ
1362 /* Third stage just goes over the list and creates an appropriate vector of
 1363 ipa_agg_jf_item structures out of it, of course only if there are
1364 any known constants to begin with. */
3e293154 1365
8b7773a4 1366 if (const_count)
3e293154 1367 {
8b7773a4 1368 jfunc->agg.by_ref = by_ref;
9771b263 1369 vec_alloc (jfunc->agg.items, const_count);
8b7773a4
MJ
1370 while (list)
1371 {
1372 if (list->constant)
1373 {
f32682ca
DN
1374 struct ipa_agg_jf_item item;
1375 item.offset = list->offset - arg_offset;
d1f98542 1376 item.value = unshare_expr_without_location (list->constant);
9771b263 1377 jfunc->agg.items->quick_push (item);
8b7773a4
MJ
1378 }
1379 list = list->next;
1380 }
3e293154
MJ
1381 }
1382}
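/* Editor's note: a hedged example of a call site the function above
   understands (names are made up):

       struct S s;

       s.a = 1;
       s.b = 16;
       callee (&s);

   Walking backwards from the call records the two constant stores in a
   sorted ipa_known_agg_contents_list; since const_count is nonzero, the
   third stage emits an aggregate jump function with by_ref set (the
   argument is an ADDR_EXPR) and items roughly { offset 0, value 1 } and
   { offset of b, value 16 }, offsets being relative to the start of s.  */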
1383
1384/* Compute jump function for all arguments of callsite CS and insert the
1385 information in the jump_functions array in the ipa_edge_args corresponding
1386 to this callsite. */
be95e2b9 1387
749aa96d 1388static void
c419671c 1389ipa_compute_jump_functions_for_edge (struct param_analysis_info *parms_ainfo,
062c604f 1390 struct cgraph_edge *cs)
3e293154
MJ
1391{
1392 struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
606d9a09
MJ
1393 struct ipa_edge_args *args = IPA_EDGE_REF (cs);
1394 gimple call = cs->call_stmt;
8b7773a4 1395 int n, arg_num = gimple_call_num_args (call);
3e293154 1396
606d9a09 1397 if (arg_num == 0 || args->jump_functions)
3e293154 1398 return;
9771b263 1399 vec_safe_grow_cleared (args->jump_functions, arg_num);
3e293154 1400
8b7773a4
MJ
1401 for (n = 0; n < arg_num; n++)
1402 {
1403 struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
1404 tree arg = gimple_call_arg (call, n);
3e293154 1405
8b7773a4
MJ
1406 if (is_gimple_ip_invariant (arg))
1407 ipa_set_jf_constant (jfunc, arg);
1408 else if (!is_gimple_reg_type (TREE_TYPE (arg))
1409 && TREE_CODE (arg) == PARM_DECL)
1410 {
1411 int index = ipa_get_param_decl_index (info, arg);
1412
1413 gcc_assert (index >=0);
1414 /* Aggregate passed by value, check for pass-through, otherwise we
1415 will attempt to fill in aggregate contents later in this
 1416 loop. */
1417 if (parm_preserved_before_stmt_p (&parms_ainfo[index], call, arg))
1418 {
1419 ipa_set_jf_simple_pass_through (jfunc, index, false);
1420 continue;
1421 }
1422 }
1423 else if (TREE_CODE (arg) == SSA_NAME)
1424 {
1425 if (SSA_NAME_IS_DEFAULT_DEF (arg))
1426 {
1427 int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
1428 if (index >= 0
1429 && !detect_type_change_ssa (arg, call, jfunc))
1430 {
1431 bool agg_p;
1432 agg_p = parm_ref_data_pass_through_p (&parms_ainfo[index],
1433 call, arg);
1434 ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
1435 }
1436 }
1437 else
1438 {
1439 gimple stmt = SSA_NAME_DEF_STMT (arg);
1440 if (is_gimple_assign (stmt))
1441 compute_complex_assign_jump_func (info, parms_ainfo, jfunc,
1442 call, stmt, arg);
1443 else if (gimple_code (stmt) == GIMPLE_PHI)
1444 compute_complex_ancestor_jump_func (info, parms_ainfo, jfunc,
1445 call, stmt);
1446 }
1447 }
1448 else
1449 compute_known_type_jump_func (arg, jfunc, call);
3e293154 1450
8b7773a4
MJ
1451 if ((jfunc->type != IPA_JF_PASS_THROUGH
1452 || !ipa_get_jf_pass_through_agg_preserved (jfunc))
1453 && (jfunc->type != IPA_JF_ANCESTOR
1454 || !ipa_get_jf_ancestor_agg_preserved (jfunc))
1455 && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
1456 || (POINTER_TYPE_P (TREE_TYPE (arg)))))
1457 determine_known_aggregate_parts (call, arg, jfunc);
1458 }
3e293154
MJ
1459}
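/* Editor's note: a hedged summary of how the dispatch above classifies the
   arguments of a call such as (names are made up)

       callee (7, a_1(D), s, &l.f);

   7 is a gimple IP invariant (constant jump function); a_1(D) is the
   default definition of a parameter of the caller (simple pass-through if
   no type change is detected); `s' is a caller parameter of aggregate type
   passed by value (pass-through when preserved, otherwise its known
   constant parts are recorded below); and &l.f is handled by
   compute_known_type_jump_func.  SSA names with defining statements go
   through the complex assign or ancestor helpers instead.  */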
1460
749aa96d
MJ
1461/* Compute jump functions for all edges - both direct and indirect - outgoing
1462 from NODE. Also count the actual arguments in the process. */
1463
062c604f
MJ
1464static void
1465ipa_compute_jump_functions (struct cgraph_node *node,
c419671c 1466 struct param_analysis_info *parms_ainfo)
749aa96d
MJ
1467{
1468 struct cgraph_edge *cs;
1469
1470 for (cs = node->callees; cs; cs = cs->next_callee)
1471 {
d7da5cc8
MJ
1472 struct cgraph_node *callee = cgraph_function_or_thunk_node (cs->callee,
1473 NULL);
749aa96d
MJ
1474 /* We do not need to bother analyzing calls to unknown
1475 functions unless they may become known during lto/whopr. */
d7da5cc8 1476 if (!callee->analyzed && !flag_lto)
749aa96d 1477 continue;
c419671c 1478 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
749aa96d
MJ
1479 }
1480
1481 for (cs = node->indirect_calls; cs; cs = cs->next_callee)
c419671c 1482 ipa_compute_jump_functions_for_edge (parms_ainfo, cs);
749aa96d
MJ
1483}
1484
8b7773a4
MJ
1485/* If STMT looks like a statement loading a value from a member pointer formal
1486 parameter, return that parameter and store the offset of the field to
1487 *OFFSET_P, if it is non-NULL. Otherwise return NULL (but *OFFSET_P still
1488 might be clobbered). If USE_DELTA, then we look for a use of the delta
1489 field rather than the pfn. */
be95e2b9 1490
3e293154 1491static tree
8b7773a4
MJ
1492ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
1493 HOST_WIDE_INT *offset_p)
3e293154 1494{
8b7773a4
MJ
1495 tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;
1496
1497 if (!gimple_assign_single_p (stmt))
1498 return NULL_TREE;
3e293154 1499
8b7773a4 1500 rhs = gimple_assign_rhs1 (stmt);
ae788515
EB
1501 if (TREE_CODE (rhs) == COMPONENT_REF)
1502 {
1503 ref_field = TREE_OPERAND (rhs, 1);
1504 rhs = TREE_OPERAND (rhs, 0);
1505 }
1506 else
1507 ref_field = NULL_TREE;
d242d063 1508 if (TREE_CODE (rhs) != MEM_REF)
3e293154 1509 return NULL_TREE;
3e293154 1510 rec = TREE_OPERAND (rhs, 0);
d242d063
MJ
1511 if (TREE_CODE (rec) != ADDR_EXPR)
1512 return NULL_TREE;
1513 rec = TREE_OPERAND (rec, 0);
3e293154 1514 if (TREE_CODE (rec) != PARM_DECL
6f7b8b70 1515 || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
3e293154 1516 return NULL_TREE;
d242d063 1517 ref_offset = TREE_OPERAND (rhs, 1);
ae788515 1518
8b7773a4
MJ
1519 if (use_delta)
1520 fld = delta_field;
1521 else
1522 fld = ptr_field;
1523 if (offset_p)
1524 *offset_p = int_bit_position (fld);
1525
ae788515
EB
1526 if (ref_field)
1527 {
1528 if (integer_nonzerop (ref_offset))
1529 return NULL_TREE;
ae788515
EB
1530 return ref_field == fld ? rec : NULL_TREE;
1531 }
3e293154 1532 else
8b7773a4
MJ
1533 return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
1534 : NULL_TREE;
3e293154
MJ
1535}
1536
1537/* Returns true iff T is an SSA_NAME defined by a statement. */
be95e2b9 1538
3e293154
MJ
1539static bool
1540ipa_is_ssa_with_stmt_def (tree t)
1541{
1542 if (TREE_CODE (t) == SSA_NAME
1543 && !SSA_NAME_IS_DEFAULT_DEF (t))
1544 return true;
1545 else
1546 return false;
1547}
1548
40591473
MJ
1549/* Find the indirect call graph edge corresponding to STMT and mark it as a
1550 call to a parameter number PARAM_INDEX. NODE is the caller. Return the
1551 indirect call graph edge. */
be95e2b9 1552
40591473
MJ
1553static struct cgraph_edge *
1554ipa_note_param_call (struct cgraph_node *node, int param_index, gimple stmt)
3e293154 1555{
e33c6cd6 1556 struct cgraph_edge *cs;
3e293154 1557
5f902d76 1558 cs = cgraph_edge (node, stmt);
b258210c 1559 cs->indirect_info->param_index = param_index;
8b7773a4 1560 cs->indirect_info->offset = 0;
40591473 1561 cs->indirect_info->polymorphic = 0;
8b7773a4 1562 cs->indirect_info->agg_contents = 0;
40591473 1563 return cs;
3e293154
MJ
1564}
1565
e33c6cd6 1566/* Analyze the CALL and examine uses of formal parameters of the caller NODE
c419671c 1567 (described by INFO). PARMS_AINFO is a pointer to a vector containing
062c604f
MJ
1568 intermediate information about each formal parameter. Currently it checks
1569 whether the call calls a pointer that is a formal parameter and if so, the
1570 parameter is marked with the called flag and an indirect call graph edge
1571 describing the call is created. This is very simple for ordinary pointers
1572 represented in SSA but not-so-nice when it comes to member pointers. The
1573 ugly part of this function does nothing more than trying to match the
1574 pattern of such a call. An example of such a pattern is the gimple dump
1575 below, the call is on the last line:
3e293154 1576
ae788515
EB
1577 <bb 2>:
1578 f$__delta_5 = f.__delta;
1579 f$__pfn_24 = f.__pfn;
1580
1581 or
3e293154 1582 <bb 2>:
d242d063
MJ
1583 f$__delta_5 = MEM[(struct *)&f];
1584 f$__pfn_24 = MEM[(struct *)&f + 4B];
8aa29647 1585
ae788515 1586 and a few lines below:
8aa29647
MJ
1587
1588 <bb 5>
3e293154
MJ
1589 D.2496_3 = (int) f$__pfn_24;
1590 D.2497_4 = D.2496_3 & 1;
1591 if (D.2497_4 != 0)
1592 goto <bb 3>;
1593 else
1594 goto <bb 4>;
1595
8aa29647 1596 <bb 6>:
3e293154
MJ
1597 D.2500_7 = (unsigned int) f$__delta_5;
1598 D.2501_8 = &S + D.2500_7;
1599 D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
1600 D.2503_10 = *D.2502_9;
1601 D.2504_12 = f$__pfn_24 + -1;
1602 D.2505_13 = (unsigned int) D.2504_12;
1603 D.2506_14 = D.2503_10 + D.2505_13;
1604 D.2507_15 = *D.2506_14;
1605 iftmp.11_16 = (String:: *) D.2507_15;
1606
8aa29647 1607 <bb 7>:
3e293154
MJ
1608 # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
1609 D.2500_19 = (unsigned int) f$__delta_5;
1610 D.2508_20 = &S + D.2500_19;
1611 D.2493_21 = iftmp.11_1 (D.2508_20, 4);
1612
1613 Such patterns are results of simple calls to a member pointer:
1614
1615 int doprinting (int (MyString::* f)(int) const)
1616 {
1617 MyString S ("somestring");
1618
1619 return (S.*f)(4);
1620 }
8b7773a4
MJ
1621
1622 Moreover, the function also looks for called pointers loaded from aggregates
1623 passed by value or reference. */
3e293154
MJ
1624
1625static void
b258210c
MJ
1626ipa_analyze_indirect_call_uses (struct cgraph_node *node,
1627 struct ipa_node_params *info,
c419671c 1628 struct param_analysis_info *parms_ainfo,
b258210c 1629 gimple call, tree target)
3e293154 1630{
726a989a 1631 gimple def;
3e293154 1632 tree n1, n2;
726a989a
RB
1633 gimple d1, d2;
1634 tree rec, rec2, cond;
1635 gimple branch;
3e293154 1636 int index;
3e293154 1637 basic_block bb, virt_bb, join;
8b7773a4
MJ
1638 HOST_WIDE_INT offset;
1639 bool by_ref;
3e293154 1640
3e293154
MJ
1641 if (SSA_NAME_IS_DEFAULT_DEF (target))
1642 {
b258210c 1643 tree var = SSA_NAME_VAR (target);
3e293154
MJ
1644 index = ipa_get_param_decl_index (info, var);
1645 if (index >= 0)
40591473 1646 ipa_note_param_call (node, index, call);
3e293154
MJ
1647 return;
1648 }
1649
8b7773a4
MJ
1650 def = SSA_NAME_DEF_STMT (target);
1651 if (gimple_assign_single_p (def)
d044dd17 1652 && ipa_load_from_parm_agg_1 (info->descriptors, parms_ainfo, def,
8b7773a4
MJ
1653 gimple_assign_rhs1 (def), &index, &offset,
1654 &by_ref))
1655 {
1656 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1657 cs->indirect_info->offset = offset;
1658 cs->indirect_info->agg_contents = 1;
1659 cs->indirect_info->by_ref = by_ref;
1660 return;
1661 }
1662
3e293154
MJ
1663 /* Now we need to try to match the complex pattern of calling a member
1664 pointer. */
8b7773a4
MJ
1665 if (gimple_code (def) != GIMPLE_PHI
1666 || gimple_phi_num_args (def) != 2
1667 || !POINTER_TYPE_P (TREE_TYPE (target))
3e293154
MJ
1668 || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
1669 return;
1670
3e293154
MJ
1671 /* First, we need to check whether one of these is a load from a member
1672 pointer that is a parameter to this function. */
1673 n1 = PHI_ARG_DEF (def, 0);
1674 n2 = PHI_ARG_DEF (def, 1);
1fc8feb5 1675 if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
3e293154
MJ
1676 return;
1677 d1 = SSA_NAME_DEF_STMT (n1);
1678 d2 = SSA_NAME_DEF_STMT (n2);
1679
8aa29647 1680 join = gimple_bb (def);
8b7773a4 1681 if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
3e293154 1682 {
8b7773a4 1683 if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
3e293154
MJ
1684 return;
1685
8aa29647 1686 bb = EDGE_PRED (join, 0)->src;
726a989a 1687 virt_bb = gimple_bb (d2);
3e293154 1688 }
8b7773a4 1689 else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
3e293154 1690 {
8aa29647 1691 bb = EDGE_PRED (join, 1)->src;
726a989a 1692 virt_bb = gimple_bb (d1);
3e293154
MJ
1693 }
1694 else
1695 return;
1696
1697 /* Second, we need to check that the basic blocks are laid out in the way
1698 corresponding to the pattern. */
1699
3e293154
MJ
1700 if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
1701 || single_pred (virt_bb) != bb
1702 || single_succ (virt_bb) != join)
1703 return;
1704
1705 /* Third, let's see that the branching is done depending on the least
1706 significant bit of the pfn. */
1707
1708 branch = last_stmt (bb);
8aa29647 1709 if (!branch || gimple_code (branch) != GIMPLE_COND)
3e293154
MJ
1710 return;
1711
12430896
RG
1712 if ((gimple_cond_code (branch) != NE_EXPR
1713 && gimple_cond_code (branch) != EQ_EXPR)
726a989a 1714 || !integer_zerop (gimple_cond_rhs (branch)))
3e293154 1715 return;
3e293154 1716
726a989a 1717 cond = gimple_cond_lhs (branch);
3e293154
MJ
1718 if (!ipa_is_ssa_with_stmt_def (cond))
1719 return;
1720
726a989a 1721 def = SSA_NAME_DEF_STMT (cond);
8b75fc9b 1722 if (!is_gimple_assign (def)
726a989a
RB
1723 || gimple_assign_rhs_code (def) != BIT_AND_EXPR
1724 || !integer_onep (gimple_assign_rhs2 (def)))
3e293154 1725 return;
726a989a
RB
1726
1727 cond = gimple_assign_rhs1 (def);
3e293154
MJ
1728 if (!ipa_is_ssa_with_stmt_def (cond))
1729 return;
1730
726a989a 1731 def = SSA_NAME_DEF_STMT (cond);
3e293154 1732
8b75fc9b
MJ
1733 if (is_gimple_assign (def)
1734 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3e293154 1735 {
726a989a 1736 cond = gimple_assign_rhs1 (def);
3e293154
MJ
1737 if (!ipa_is_ssa_with_stmt_def (cond))
1738 return;
726a989a 1739 def = SSA_NAME_DEF_STMT (cond);
3e293154
MJ
1740 }
1741
6f7b8b70
RE
1742 rec2 = ipa_get_stmt_member_ptr_load_param (def,
1743 (TARGET_PTRMEMFUNC_VBIT_LOCATION
8b7773a4
MJ
1744 == ptrmemfunc_vbit_in_delta),
1745 NULL);
3e293154
MJ
1746 if (rec != rec2)
1747 return;
1748
1749 index = ipa_get_param_decl_index (info, rec);
8b7773a4
MJ
1750 if (index >= 0
1751 && parm_preserved_before_stmt_p (&parms_ainfo[index], call, rec))
1752 {
1753 struct cgraph_edge *cs = ipa_note_param_call (node, index, call);
1754 cs->indirect_info->offset = offset;
1755 cs->indirect_info->agg_contents = 1;
1756 }
3e293154
MJ
1757
1758 return;
1759}
1760
b258210c
MJ
1761/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
1762 object referenced in the expression is a formal parameter of the caller
1763 (described by INFO), create a call note for the statement. */
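/* For illustration only (the class and function names below are invented):
   a C++ fragment such as

     struct A { virtual int get (void); };

     int query (A *a)
     {
       return a->get ();
     }

   reaches this function with TARGET being an OBJ_TYPE_REF whose object is
   the SSA name of the parameter a, so a polymorphic indirect call graph
   edge is created for the statement (provided -fdevirtualize is in
   effect).  */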
1764
1765static void
1766ipa_analyze_virtual_call_uses (struct cgraph_node *node,
1767 struct ipa_node_params *info, gimple call,
1768 tree target)
1769{
40591473
MJ
1770 struct cgraph_edge *cs;
1771 struct cgraph_indirect_call_info *ii;
f65cf2b7 1772 struct ipa_jump_func jfunc;
b258210c 1773 tree obj = OBJ_TYPE_REF_OBJECT (target);
b258210c 1774 int index;
40591473 1775 HOST_WIDE_INT anc_offset;
b258210c 1776
05842ff5
MJ
1777 if (!flag_devirtualize)
1778 return;
1779
40591473 1780 if (TREE_CODE (obj) != SSA_NAME)
b258210c
MJ
1781 return;
1782
40591473
MJ
1783 if (SSA_NAME_IS_DEFAULT_DEF (obj))
1784 {
1785 if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
1786 return;
b258210c 1787
40591473
MJ
1788 anc_offset = 0;
1789 index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
1790 gcc_assert (index >= 0);
1791 if (detect_type_change_ssa (obj, call, &jfunc))
1792 return;
1793 }
1794 else
1795 {
1796 gimple stmt = SSA_NAME_DEF_STMT (obj);
1797 tree expr;
1798
1799 expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
1800 if (!expr)
1801 return;
1802 index = ipa_get_param_decl_index (info,
1803 SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
1804 gcc_assert (index >= 0);
1805 if (detect_type_change (obj, expr, call, &jfunc, anc_offset))
1806 return;
1807 }
1808
1809 cs = ipa_note_param_call (node, index, call);
1810 ii = cs->indirect_info;
8b7773a4 1811 ii->offset = anc_offset;
40591473
MJ
1812 ii->otr_token = tree_low_cst (OBJ_TYPE_REF_TOKEN (target), 1);
1813 ii->otr_type = TREE_TYPE (TREE_TYPE (OBJ_TYPE_REF_OBJECT (target)));
1814 ii->polymorphic = 1;
b258210c
MJ
1815}
1816
1817/* Analyze a call statement CALL whether and how it utilizes formal parameters
c419671c 1818 of the caller (described by INFO). PARMS_AINFO is a pointer to a vector
062c604f 1819 containing intermediate information about each formal parameter. */
b258210c
MJ
1820
1821static void
1822ipa_analyze_call_uses (struct cgraph_node *node,
062c604f 1823 struct ipa_node_params *info,
c419671c 1824 struct param_analysis_info *parms_ainfo, gimple call)
b258210c
MJ
1825{
1826 tree target = gimple_call_fn (call);
1827
25583c4f
RS
1828 if (!target)
1829 return;
b258210c 1830 if (TREE_CODE (target) == SSA_NAME)
c419671c 1831 ipa_analyze_indirect_call_uses (node, info, parms_ainfo, call, target);
b258210c
MJ
1832 else if (TREE_CODE (target) == OBJ_TYPE_REF)
1833 ipa_analyze_virtual_call_uses (node, info, call, target);
1834}
1835
1836
e33c6cd6
MJ
1837/* Analyze the call statement STMT with respect to formal parameters (described
1838 in INFO) of caller given by NODE. Currently it only checks whether formal
c419671c 1839 parameters are called. PARMS_AINFO is a pointer to a vector containing
062c604f 1840 intermediate information about each formal parameter. */
be95e2b9 1841
3e293154 1842static void
e33c6cd6 1843ipa_analyze_stmt_uses (struct cgraph_node *node, struct ipa_node_params *info,
c419671c 1844 struct param_analysis_info *parms_ainfo, gimple stmt)
3e293154 1845{
726a989a 1846 if (is_gimple_call (stmt))
c419671c 1847 ipa_analyze_call_uses (node, info, parms_ainfo, stmt);
062c604f
MJ
1848}
1849
1850/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
1851 If OP is a parameter declaration, mark it as used in the info structure
1852 passed in DATA. */
1853
1854static bool
1855visit_ref_for_mod_analysis (gimple stmt ATTRIBUTE_UNUSED,
1856 tree op, void *data)
1857{
1858 struct ipa_node_params *info = (struct ipa_node_params *) data;
1859
1860 op = get_base_address (op);
1861 if (op
1862 && TREE_CODE (op) == PARM_DECL)
1863 {
1864 int index = ipa_get_param_decl_index (info, op);
1865 gcc_assert (index >= 0);
310bc633 1866 ipa_set_param_used (info, index, true);
062c604f
MJ
1867 }
1868
1869 return false;
3e293154
MJ
1870}
1871
1872/* Scan the function body of NODE and inspect the uses of formal parameters.
1873 Store the findings in various structures of the associated ipa_node_params
c419671c 1874 structure, such as parameter flags, notes etc. PARMS_AINFO is a pointer to a
062c604f 1875 vector containing intermediate information about each formal parameter. */
be95e2b9 1876
062c604f
MJ
1877static void
1878ipa_analyze_params_uses (struct cgraph_node *node,
c419671c 1879 struct param_analysis_info *parms_ainfo)
3e293154 1880{
960bfb69 1881 tree decl = node->symbol.decl;
3e293154
MJ
1882 basic_block bb;
1883 struct function *func;
726a989a 1884 gimple_stmt_iterator gsi;
3e293154 1885 struct ipa_node_params *info = IPA_NODE_REF (node);
062c604f 1886 int i;
3e293154 1887
726a989a 1888 if (ipa_get_param_count (info) == 0 || info->uses_analysis_done)
3e293154 1889 return;
3e293154 1890
062c604f
MJ
1891 for (i = 0; i < ipa_get_param_count (info); i++)
1892 {
1893 tree parm = ipa_get_param (info, i);
32244553 1894 tree ddef;
062c604f
MJ
1895 /* For SSA regs see if parameter is used. For non-SSA we compute
1896 the flag during modification analysis. */
1897 if (is_gimple_reg (parm)
32244553
RG
1898 && (ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->symbol.decl),
1899 parm)) != NULL_TREE
1900 && !has_zero_uses (ddef))
310bc633 1901 ipa_set_param_used (info, i, true);
062c604f
MJ
1902 }
1903
3e293154
MJ
1904 func = DECL_STRUCT_FUNCTION (decl);
1905 FOR_EACH_BB_FN (bb, func)
1906 {
726a989a 1907 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
3e293154 1908 {
726a989a 1909 gimple stmt = gsi_stmt (gsi);
062c604f
MJ
1910
1911 if (is_gimple_debug (stmt))
1912 continue;
1913
c419671c 1914 ipa_analyze_stmt_uses (node, info, parms_ainfo, stmt);
062c604f
MJ
1915 walk_stmt_load_store_addr_ops (stmt, info,
1916 visit_ref_for_mod_analysis,
1917 visit_ref_for_mod_analysis,
1918 visit_ref_for_mod_analysis);
518dc859 1919 }
355a7673 1920 for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
062c604f
MJ
1921 walk_stmt_load_store_addr_ops (gsi_stmt (gsi), info,
1922 visit_ref_for_mod_analysis,
1923 visit_ref_for_mod_analysis,
1924 visit_ref_for_mod_analysis);
518dc859 1925 }
3e293154
MJ
1926
1927 info->uses_analysis_done = 1;
1928}
1929
2c9561b5
MJ
 1930/* Free stuff in PARMS_AINFO, assuming there are PARAM_COUNT parameters. */
1931
1932static void
1933free_parms_ainfo (struct param_analysis_info *parms_ainfo, int param_count)
1934{
1935 int i;
1936
1937 for (i = 0; i < param_count; i++)
1938 {
1939 if (parms_ainfo[i].parm_visited_statements)
1940 BITMAP_FREE (parms_ainfo[i].parm_visited_statements);
1941 if (parms_ainfo[i].pt_visited_statements)
1942 BITMAP_FREE (parms_ainfo[i].pt_visited_statements);
1943 }
1944}
1945
dd5a833e
MS
 1946/* Initialize the array describing properties of formal parameters
1947 of NODE, analyze their uses and compute jump functions associated
1948 with actual arguments of calls from within NODE. */
062c604f
MJ
1949
1950void
1951ipa_analyze_node (struct cgraph_node *node)
1952{
57dbdc5a 1953 struct ipa_node_params *info;
c419671c 1954 struct param_analysis_info *parms_ainfo;
2c9561b5 1955 int param_count;
062c604f 1956
57dbdc5a
MJ
1957 ipa_check_create_node_params ();
1958 ipa_check_create_edge_args ();
1959 info = IPA_NODE_REF (node);
960bfb69 1960 push_cfun (DECL_STRUCT_FUNCTION (node->symbol.decl));
062c604f
MJ
1961 ipa_initialize_node_params (node);
1962
1963 param_count = ipa_get_param_count (info);
c419671c
MJ
1964 parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
1965 memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
062c604f 1966
c419671c
MJ
1967 ipa_analyze_params_uses (node, parms_ainfo);
1968 ipa_compute_jump_functions (node, parms_ainfo);
062c604f 1969
2c9561b5 1970 free_parms_ainfo (parms_ainfo, param_count);
f65cf2b7 1971 pop_cfun ();
062c604f
MJ
1972}
1973
e248d83f
MJ
1974/* Given a statement CALL which must be a GIMPLE_CALL calling an OBJ_TYPE_REF
1975 attempt a type-based devirtualization. If successful, return the
1976 target function declaration, otherwise return NULL. */
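/* A hedged usage sketch (the surrounding pass is hypothetical; only the
   functions named below actually exist):

     tree fndecl = ipa_intraprocedural_devirtualization (call_stmt);
     if (fndecl)
       gimple_call_set_fndecl (call_stmt, fndecl);

   i.e. when a known-type jump function can be computed for the
   OBJ_TYPE_REF object, the virtual call may be turned into a direct call
   to the returned declaration; updating and re-folding the statement is
   the caller's responsibility.  */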
1977
1978tree
1979ipa_intraprocedural_devirtualization (gimple call)
1980{
1981 tree binfo, token, fndecl;
1982 struct ipa_jump_func jfunc;
1983 tree otr = gimple_call_fn (call);
1984
1985 jfunc.type = IPA_JF_UNKNOWN;
1986 compute_known_type_jump_func (OBJ_TYPE_REF_OBJECT (otr), &jfunc,
1987 call);
1988 if (jfunc.type != IPA_JF_KNOWN_TYPE)
1989 return NULL_TREE;
1990 binfo = ipa_binfo_from_known_type_jfunc (&jfunc);
1991 if (!binfo)
1992 return NULL_TREE;
1993 token = OBJ_TYPE_REF_TOKEN (otr);
1994 fndecl = gimple_get_virt_method_for_binfo (tree_low_cst (token, 1),
1995 binfo);
1996 return fndecl;
1997}
062c604f 1998
61502ca8 1999/* Update the jump function DST when the call graph edge corresponding to SRC
b258210c
MJ
 2000 is being inlined, knowing that DST is of type ancestor and SRC of known
2001 type. */
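/* A small worked illustration (the offsets are made up): if SRC says "the
   argument is known to be an object of type B located 32 bits into some
   containing object" and DST is an ancestor jump function that adds
   another 64 bits and converts to type A, the result is a known-type jump
   function with offset 32 + 64 = 96 bits, the base type taken from SRC and
   the component type A taken from DST, which is what the code below
   computes.  */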
2002
2003static void
2004combine_known_type_and_ancestor_jfs (struct ipa_jump_func *src,
2005 struct ipa_jump_func *dst)
2006{
c7573249
MJ
2007 HOST_WIDE_INT combined_offset;
2008 tree combined_type;
b258210c 2009
7b872d9e
MJ
2010 combined_offset = ipa_get_jf_known_type_offset (src)
2011 + ipa_get_jf_ancestor_offset (dst);
2012 combined_type = ipa_get_jf_ancestor_type (dst);
c7573249 2013
7b872d9e
MJ
2014 ipa_set_jf_known_type (dst, combined_offset,
2015 ipa_get_jf_known_type_base_type (src),
2016 combined_type);
b258210c
MJ
2017}
2018
be95e2b9 2019/* Update the jump functions associated with call graph edge E when the call
3e293154 2020 graph edge CS is being inlined, assuming that E->caller is already (possibly
b258210c 2021 indirectly) inlined into CS->callee and that E has not been inlined. */
be95e2b9 2022
3e293154
MJ
2023static void
2024update_jump_functions_after_inlining (struct cgraph_edge *cs,
2025 struct cgraph_edge *e)
2026{
2027 struct ipa_edge_args *top = IPA_EDGE_REF (cs);
2028 struct ipa_edge_args *args = IPA_EDGE_REF (e);
2029 int count = ipa_get_cs_argument_count (args);
2030 int i;
2031
2032 for (i = 0; i < count; i++)
2033 {
b258210c 2034 struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
3e293154 2035
685b0d13
MJ
2036 if (dst->type == IPA_JF_ANCESTOR)
2037 {
b258210c 2038 struct ipa_jump_func *src;
8b7773a4 2039 int dst_fid = dst->value.ancestor.formal_id;
685b0d13 2040
b258210c
MJ
2041 /* Variable number of arguments can cause havoc if we try to access
2042 one that does not exist in the inlined edge. So make sure we
2043 don't. */
8b7773a4 2044 if (dst_fid >= ipa_get_cs_argument_count (top))
b258210c
MJ
2045 {
2046 dst->type = IPA_JF_UNKNOWN;
2047 continue;
2048 }
2049
8b7773a4
MJ
2050 src = ipa_get_ith_jump_func (top, dst_fid);
2051
2052 if (src->agg.items
2053 && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
2054 {
2055 struct ipa_agg_jf_item *item;
2056 int j;
2057
2058 /* Currently we do not produce clobber aggregate jump functions,
2059 replace with merging when we do. */
2060 gcc_assert (!dst->agg.items);
2061
9771b263 2062 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4 2063 dst->agg.by_ref = src->agg.by_ref;
9771b263 2064 FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
8b7773a4
MJ
2065 item->offset -= dst->value.ancestor.offset;
2066 }
2067
b258210c
MJ
2068 if (src->type == IPA_JF_KNOWN_TYPE)
2069 combine_known_type_and_ancestor_jfs (src, dst);
b258210c
MJ
2070 else if (src->type == IPA_JF_PASS_THROUGH
2071 && src->value.pass_through.operation == NOP_EXPR)
8b7773a4
MJ
2072 {
2073 dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
2074 dst->value.ancestor.agg_preserved &=
2075 src->value.pass_through.agg_preserved;
2076 }
b258210c
MJ
2077 else if (src->type == IPA_JF_ANCESTOR)
2078 {
2079 dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
2080 dst->value.ancestor.offset += src->value.ancestor.offset;
8b7773a4
MJ
2081 dst->value.ancestor.agg_preserved &=
2082 src->value.ancestor.agg_preserved;
b258210c
MJ
2083 }
2084 else
2085 dst->type = IPA_JF_UNKNOWN;
2086 }
2087 else if (dst->type == IPA_JF_PASS_THROUGH)
3e293154 2088 {
b258210c
MJ
2089 struct ipa_jump_func *src;
2090 /* We must check range due to calls with variable number of arguments
2091 and we cannot combine jump functions with operations. */
2092 if (dst->value.pass_through.operation == NOP_EXPR
2093 && (dst->value.pass_through.formal_id
2094 < ipa_get_cs_argument_count (top)))
2095 {
8b7773a4
MJ
2096 bool agg_p;
2097 int dst_fid = dst->value.pass_through.formal_id;
2098 src = ipa_get_ith_jump_func (top, dst_fid);
2099 agg_p = dst->value.pass_through.agg_preserved;
2100
2101 dst->type = src->type;
2102 dst->value = src->value;
2103
2104 if (src->agg.items
2105 && (agg_p || !src->agg.by_ref))
2106 {
2107 /* Currently we do not produce clobber aggregate jump
2108 functions, replace with merging when we do. */
2109 gcc_assert (!dst->agg.items);
2110
2111 dst->agg.by_ref = src->agg.by_ref;
9771b263 2112 dst->agg.items = vec_safe_copy (src->agg.items);
8b7773a4
MJ
2113 }
2114
2115 if (!agg_p)
2116 {
2117 if (dst->type == IPA_JF_PASS_THROUGH)
2118 dst->value.pass_through.agg_preserved = false;
2119 else if (dst->type == IPA_JF_ANCESTOR)
2120 dst->value.ancestor.agg_preserved = false;
2121 }
b258210c
MJ
2122 }
2123 else
2124 dst->type = IPA_JF_UNKNOWN;
3e293154 2125 }
b258210c
MJ
2126 }
2127}
2128
2129/* If TARGET is an addr_expr of a function declaration, make it the destination
81fa35bd 2130 of an indirect edge IE and return the edge. Otherwise, return NULL. */
b258210c 2131
3949c4a7 2132struct cgraph_edge *
81fa35bd 2133ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target)
b258210c
MJ
2134{
2135 struct cgraph_node *callee;
0f378cb5 2136 struct inline_edge_summary *es = inline_edge_summary (ie);
b258210c 2137
ceeffab0
MJ
2138 if (TREE_CODE (target) == ADDR_EXPR)
2139 target = TREE_OPERAND (target, 0);
b258210c 2140 if (TREE_CODE (target) != FUNCTION_DECL)
a0a7b611
JH
2141 {
2142 target = canonicalize_constructor_val (target, NULL);
2143 if (!target || TREE_CODE (target) != FUNCTION_DECL)
2144 {
2145 if (dump_file)
2146 fprintf (dump_file, "ipa-prop: Discovered direct call to non-function"
2147 " in (%s/%i).\n",
2148 cgraph_node_name (ie->caller), ie->caller->uid);
2149 return NULL;
2150 }
2151 }
581985d7 2152 callee = cgraph_get_node (target);
a0a7b611
JH
2153
 2154 /* Because may-edges are not explicitly represented and vtable may be external,
2155 we may create the first reference to the object in the unit. */
2156 if (!callee || callee->global.inlined_to)
2157 {
a0a7b611
JH
2158
 2159 /* We had better ensure we can refer to it.
2160 In the case of static functions we are out of luck, since we already
2161 removed its body. In the case of public functions we may or may
2162 not introduce the reference. */
2163 if (!canonicalize_constructor_val (target, NULL)
2164 || !TREE_PUBLIC (target))
2165 {
2166 if (dump_file)
2167 fprintf (dump_file, "ipa-prop: Discovered call to a known target "
 2168 "(%s/%i -> %s/%i) but cannot refer to it. Giving up.\n",
2169 xstrdup (cgraph_node_name (ie->caller)), ie->caller->uid,
2170 xstrdup (cgraph_node_name (ie->callee)), ie->callee->uid);
2171 return NULL;
2172 }
48f4a6fa 2173 callee = cgraph_get_create_real_symbol_node (target);
a0a7b611 2174 }
1dbee8c9 2175 ipa_check_create_node_params ();
ceeffab0 2176
81fa35bd
MJ
 2177 /* We cannot make edges to inline clones. It is a bug that someone removed
2178 the cgraph node too early. */
17afc0fe
JH
2179 gcc_assert (!callee->global.inlined_to);
2180
81fa35bd 2181 cgraph_make_edge_direct (ie, callee);
0f378cb5
JH
2182 es = inline_edge_summary (ie);
2183 es->call_stmt_size -= (eni_size_weights.indirect_call_cost
2184 - eni_size_weights.call_cost);
2185 es->call_stmt_time -= (eni_time_weights.indirect_call_cost
2186 - eni_time_weights.call_cost);
b258210c
MJ
2187 if (dump_file)
2188 {
2189 fprintf (dump_file, "ipa-prop: Discovered %s call to a known target "
ceeffab0 2190 "(%s/%i -> %s/%i), for stmt ",
b258210c 2191 ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
036c0102
UB
2192 xstrdup (cgraph_node_name (ie->caller)), ie->caller->uid,
2193 xstrdup (cgraph_node_name (ie->callee)), ie->callee->uid);
b258210c
MJ
2194 if (ie->call_stmt)
2195 print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
2196 else
2197 fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
3e293154 2198 }
380ed5ed 2199 callee = cgraph_function_or_thunk_node (callee, NULL);
749aa96d 2200
b258210c 2201 return ie;
3e293154
MJ
2202}
2203
8b7773a4
MJ
2204/* Retrieve value from aggregate jump function AGG for the given OFFSET or
 2205 return NULL if there is none. BY_REF specifies whether the value has to
2206 be passed by reference or by value. */
2207
2208tree
2209ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
2210 HOST_WIDE_INT offset, bool by_ref)
2211{
2212 struct ipa_agg_jf_item *item;
2213 int i;
2214
2215 if (by_ref != agg->by_ref)
2216 return NULL;
2217
9771b263 2218 FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
2c9561b5
MJ
2219 if (item->offset == offset)
2220 {
2221 /* Currently we do not have clobber values, return NULL for them once
2222 we do. */
2223 gcc_checking_assert (is_gimple_ip_invariant (item->value));
2224 return item->value;
2225 }
8b7773a4
MJ
2226 return NULL;
2227}
2228
b258210c
MJ
2229/* Try to find a destination for indirect edge IE that corresponds to a simple
2230 call or a call of a member function pointer and where the destination is a
2231 pointer formal parameter described by jump function JFUNC. If it can be
d250540a
MJ
2232 determined, return the newly direct edge, otherwise return NULL.
2233 NEW_ROOT_INFO is the node info that JFUNC lattices are relative to. */
be95e2b9 2234
b258210c
MJ
2235static struct cgraph_edge *
2236try_make_edge_direct_simple_call (struct cgraph_edge *ie,
d250540a
MJ
2237 struct ipa_jump_func *jfunc,
2238 struct ipa_node_params *new_root_info)
b258210c
MJ
2239{
2240 tree target;
2241
8b7773a4 2242 if (ie->indirect_info->agg_contents)
d250540a
MJ
2243 target = ipa_find_agg_cst_for_param (&jfunc->agg,
2244 ie->indirect_info->offset,
2245 ie->indirect_info->by_ref);
b258210c 2246 else
d250540a
MJ
2247 target = ipa_value_from_jfunc (new_root_info, jfunc);
2248 if (!target)
2249 return NULL;
81fa35bd 2250 return ipa_make_edge_direct_to_target (ie, target);
b258210c
MJ
2251}
2252
d250540a
MJ
2253/* Try to find a destination for indirect edge IE that corresponds to a virtual
2254 call based on a formal parameter which is described by jump function JFUNC
2255 and if it can be determined, make it direct and return the direct edge.
2256 Otherwise, return NULL. NEW_ROOT_INFO is the node info that JFUNC lattices
2257 are relative to. */
b258210c
MJ
2258
2259static struct cgraph_edge *
2260try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
d250540a
MJ
2261 struct ipa_jump_func *jfunc,
2262 struct ipa_node_params *new_root_info)
3e293154 2263{
c7573249 2264 tree binfo, target;
b258210c 2265
d250540a
MJ
2266 binfo = ipa_value_from_jfunc (new_root_info, jfunc);
2267
da942ca0 2268 if (!binfo)
b258210c 2269 return NULL;
3e293154 2270
da942ca0
JH
2271 if (TREE_CODE (binfo) != TREE_BINFO)
2272 {
2273 binfo = gimple_extract_devirt_binfo_from_cst (binfo);
2274 if (!binfo)
2275 return NULL;
2276 }
2277
d250540a 2278 binfo = get_binfo_at_offset (binfo, ie->indirect_info->offset,
c7573249 2279 ie->indirect_info->otr_type);
b258210c 2280 if (binfo)
c7573249
MJ
2281 target = gimple_get_virt_method_for_binfo (ie->indirect_info->otr_token,
2282 binfo);
b258210c
MJ
2283 else
2284 return NULL;
2285
2286 if (target)
81fa35bd 2287 return ipa_make_edge_direct_to_target (ie, target);
b258210c
MJ
2288 else
2289 return NULL;
3e293154
MJ
2290}
2291
2292/* Update the param called notes associated with NODE when CS is being inlined,
2293 assuming NODE is (potentially indirectly) inlined into CS->callee.
2294 Moreover, if the callee is discovered to be constant, create a new cgraph
e56f5f3e 2295 edge for it. Newly discovered indirect edges will be added to *NEW_EDGES,
f8e2a1ed 2296 unless NEW_EDGES is NULL. Return true iff any new edges were created. */
be95e2b9 2297
f8e2a1ed 2298static bool
e33c6cd6
MJ
2299update_indirect_edges_after_inlining (struct cgraph_edge *cs,
2300 struct cgraph_node *node,
9771b263 2301 vec<cgraph_edge_p> *new_edges)
3e293154 2302{
9e97ff61 2303 struct ipa_edge_args *top;
b258210c 2304 struct cgraph_edge *ie, *next_ie, *new_direct_edge;
d250540a 2305 struct ipa_node_params *new_root_info;
f8e2a1ed 2306 bool res = false;
3e293154 2307
e33c6cd6 2308 ipa_check_create_edge_args ();
9e97ff61 2309 top = IPA_EDGE_REF (cs);
d250540a
MJ
2310 new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
2311 ? cs->caller->global.inlined_to
2312 : cs->caller);
e33c6cd6
MJ
2313
2314 for (ie = node->indirect_calls; ie; ie = next_ie)
3e293154 2315 {
e33c6cd6 2316 struct cgraph_indirect_call_info *ici = ie->indirect_info;
3e293154 2317 struct ipa_jump_func *jfunc;
8b7773a4 2318 int param_index;
3e293154 2319
e33c6cd6 2320 next_ie = ie->next_callee;
3e293154 2321
5f902d76
JH
2322 if (ici->param_index == -1)
2323 continue;
e33c6cd6 2324
3e293154 2325 /* We must check range due to calls with variable number of arguments: */
e33c6cd6 2326 if (ici->param_index >= ipa_get_cs_argument_count (top))
3e293154 2327 {
5ee53a06 2328 ici->param_index = -1;
3e293154
MJ
2329 continue;
2330 }
2331
8b7773a4
MJ
2332 param_index = ici->param_index;
2333 jfunc = ipa_get_ith_jump_func (top, param_index);
5ee53a06
JH
2334
2335 if (!flag_indirect_inlining)
36b72910
JH
2336 new_direct_edge = NULL;
2337 else if (ici->polymorphic)
d250540a
MJ
2338 new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc,
2339 new_root_info);
b258210c 2340 else
d250540a
MJ
2341 new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
2342 new_root_info);
b258210c 2343 if (new_direct_edge)
685b0d13 2344 {
b258210c 2345 new_direct_edge->indirect_inlining_edge = 1;
89faf322
RG
2346 if (new_direct_edge->call_stmt)
2347 new_direct_edge->call_stmt_cannot_inline_p
2348 = !gimple_check_call_matching_types (new_direct_edge->call_stmt,
960bfb69 2349 new_direct_edge->callee->symbol.decl);
b258210c
MJ
2350 if (new_edges)
2351 {
9771b263 2352 new_edges->safe_push (new_direct_edge);
b258210c
MJ
2353 top = IPA_EDGE_REF (cs);
2354 res = true;
2355 }
685b0d13 2356 }
36b72910
JH
2357 else if (jfunc->type == IPA_JF_PASS_THROUGH
2358 && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
2359 {
2360 if (ici->agg_contents
2361 && !ipa_get_jf_pass_through_agg_preserved (jfunc))
2362 ici->param_index = -1;
2363 else
2364 ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
2365 }
2366 else if (jfunc->type == IPA_JF_ANCESTOR)
2367 {
2368 if (ici->agg_contents
2369 && !ipa_get_jf_ancestor_agg_preserved (jfunc))
2370 ici->param_index = -1;
2371 else
2372 {
2373 ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
2374 ici->offset += ipa_get_jf_ancestor_offset (jfunc);
2375 }
2376 }
2377 else
2378 /* Either we can find a destination for this edge now or never. */
2379 ici->param_index = -1;
3e293154 2380 }
e33c6cd6 2381
f8e2a1ed 2382 return res;
3e293154
MJ
2383}
2384
2385/* Recursively traverse subtree of NODE (including node) made of inlined
2386 cgraph_edges when CS has been inlined and invoke
e33c6cd6 2387 update_indirect_edges_after_inlining on all nodes and
3e293154
MJ
2388 update_jump_functions_after_inlining on all non-inlined edges that lead out
2389 of this subtree. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
 2390 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
2391 created. */
be95e2b9 2392
f8e2a1ed 2393static bool
3e293154
MJ
2394propagate_info_to_inlined_callees (struct cgraph_edge *cs,
2395 struct cgraph_node *node,
9771b263 2396 vec<cgraph_edge_p> *new_edges)
3e293154
MJ
2397{
2398 struct cgraph_edge *e;
f8e2a1ed 2399 bool res;
3e293154 2400
e33c6cd6 2401 res = update_indirect_edges_after_inlining (cs, node, new_edges);
3e293154
MJ
2402
2403 for (e = node->callees; e; e = e->next_callee)
2404 if (!e->inline_failed)
f8e2a1ed 2405 res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
3e293154
MJ
2406 else
2407 update_jump_functions_after_inlining (cs, e);
5ee53a06
JH
2408 for (e = node->indirect_calls; e; e = e->next_callee)
2409 update_jump_functions_after_inlining (cs, e);
f8e2a1ed
MJ
2410
2411 return res;
3e293154
MJ
2412}
2413
2414/* Update jump functions and call note functions on inlining the call site CS.
2415 CS is expected to lead to a node already cloned by
2416 cgraph_clone_inline_nodes. Newly discovered indirect edges will be added to
f8e2a1ed
MJ
 2417 *NEW_EDGES, unless NEW_EDGES is NULL. Return true iff any new edges were
2418 created. */
be95e2b9 2419
f8e2a1ed 2420bool
3e293154 2421ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
9771b263 2422 vec<cgraph_edge_p> *new_edges)
3e293154 2423{
5ee53a06 2424 bool changed;
f8e2a1ed
MJ
2425 /* Do nothing if the preparation phase has not been carried out yet
2426 (i.e. during early inlining). */
9771b263 2427 if (!ipa_node_params_vector.exists ())
f8e2a1ed
MJ
2428 return false;
2429 gcc_assert (ipa_edge_args_vector);
2430
5ee53a06
JH
2431 changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);
2432
2433 /* We do not keep jump functions of inlined edges up to date. Better to free
2434 them so we do not access them accidentally. */
2435 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
2436 return changed;
518dc859
RL
2437}
2438
771578a0
MJ
2439/* Frees all dynamically allocated structures that the argument info points
2440 to. */
be95e2b9 2441
518dc859 2442void
771578a0 2443ipa_free_edge_args_substructures (struct ipa_edge_args *args)
518dc859 2444{
9771b263 2445 vec_free (args->jump_functions);
771578a0 2446 memset (args, 0, sizeof (*args));
518dc859
RL
2447}
2448
771578a0 2449/* Free all ipa_edge structures. */
be95e2b9 2450
518dc859 2451void
771578a0 2452ipa_free_all_edge_args (void)
518dc859 2453{
771578a0
MJ
2454 int i;
2455 struct ipa_edge_args *args;
518dc859 2456
9771b263
DN
2457 if (!ipa_edge_args_vector)
2458 return;
2459
2460 FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
771578a0
MJ
2461 ipa_free_edge_args_substructures (args);
2462
9771b263 2463 vec_free (ipa_edge_args_vector);
518dc859
RL
2464}
2465
771578a0
MJ
2466/* Frees all dynamically allocated structures that the param info points
2467 to. */
be95e2b9 2468
518dc859 2469void
771578a0 2470ipa_free_node_params_substructures (struct ipa_node_params *info)
518dc859 2471{
9771b263 2472 info->descriptors.release ();
310bc633
MJ
2473 free (info->lattices);
 2474 /* Lattice values and their sources are deallocated with their allocation
2475 pool. */
9771b263 2476 info->known_vals.release ();
771578a0 2477 memset (info, 0, sizeof (*info));
518dc859
RL
2478}
2479
771578a0 2480/* Free all ipa_node_params structures. */
be95e2b9 2481
518dc859 2482void
771578a0 2483ipa_free_all_node_params (void)
518dc859 2484{
771578a0
MJ
2485 int i;
2486 struct ipa_node_params *info;
518dc859 2487
9771b263 2488 FOR_EACH_VEC_ELT (ipa_node_params_vector, i, info)
771578a0
MJ
2489 ipa_free_node_params_substructures (info);
2490
9771b263 2491 ipa_node_params_vector.release ();
771578a0
MJ
2492}
2493
2c9561b5
MJ
2494/* Set the aggregate replacements of NODE to be AGGVALS. */
2495
2496void
2497ipa_set_node_agg_value_chain (struct cgraph_node *node,
2498 struct ipa_agg_replacement_value *aggvals)
2499{
9771b263
DN
2500 if (vec_safe_length (ipa_node_agg_replacements) <= (unsigned) cgraph_max_uid)
2501 vec_safe_grow_cleared (ipa_node_agg_replacements, cgraph_max_uid + 1);
2c9561b5 2502
9771b263 2503 (*ipa_node_agg_replacements)[node->uid] = aggvals;
2c9561b5
MJ
2504}
2505
771578a0 2506/* Hook that is called by cgraph.c when an edge is removed. */
be95e2b9 2507
771578a0 2508static void
5c0466b5 2509ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
771578a0 2510{
c6f7cfc1 2511 /* During IPA-CP updating we can be called on not-yet analyze clones. */
9771b263 2512 if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
c6f7cfc1 2513 return;
771578a0 2514 ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
518dc859
RL
2515}
2516
771578a0 2517/* Hook that is called by cgraph.c when a node is removed. */
be95e2b9 2518
771578a0 2519static void
5c0466b5 2520ipa_node_removal_hook (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
771578a0 2521{
dd6d1ad7 2522 /* During IPA-CP updating we can be called on not-yet analyzed clones. */
9771b263 2523 if (ipa_node_params_vector.length () > (unsigned)node->uid)
2c9561b5 2524 ipa_free_node_params_substructures (IPA_NODE_REF (node));
9771b263
DN
2525 if (vec_safe_length (ipa_node_agg_replacements) > (unsigned)node->uid)
2526 (*ipa_node_agg_replacements)[(unsigned)node->uid] = NULL;
771578a0
MJ
2527}
2528
8b7773a4 2529/* Hook that is called by cgraph.c when an edge is duplicated. */
be95e2b9 2530
771578a0
MJ
2531static void
2532ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
f8e2a1ed 2533 __attribute__((unused)) void *data)
771578a0
MJ
2534{
2535 struct ipa_edge_args *old_args, *new_args;
8b7773a4 2536 unsigned int i;
771578a0
MJ
2537
2538 ipa_check_create_edge_args ();
2539
2540 old_args = IPA_EDGE_REF (src);
2541 new_args = IPA_EDGE_REF (dst);
2542
9771b263 2543 new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
8b7773a4 2544
9771b263
DN
2545 for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
2546 (*new_args->jump_functions)[i].agg.items
2547 = vec_safe_copy ((*old_args->jump_functions)[i].agg.items);
771578a0
MJ
2548}
2549
2550/* Hook that is called by cgraph.c when a node is duplicated. */
be95e2b9 2551
771578a0
MJ
2552static void
2553ipa_node_duplication_hook (struct cgraph_node *src, struct cgraph_node *dst,
10a5dd5d 2554 ATTRIBUTE_UNUSED void *data)
771578a0
MJ
2555{
2556 struct ipa_node_params *old_info, *new_info;
2c9561b5 2557 struct ipa_agg_replacement_value *old_av, *new_av;
771578a0
MJ
2558
2559 ipa_check_create_node_params ();
2560 old_info = IPA_NODE_REF (src);
2561 new_info = IPA_NODE_REF (dst);
771578a0 2562
9771b263 2563 new_info->descriptors = old_info->descriptors.copy ();
310bc633 2564 new_info->lattices = NULL;
771578a0 2565 new_info->ipcp_orig_node = old_info->ipcp_orig_node;
3949c4a7 2566
3949c4a7
MJ
2567 new_info->uses_analysis_done = old_info->uses_analysis_done;
2568 new_info->node_enqueued = old_info->node_enqueued;
2c9561b5
MJ
2569
2570 old_av = ipa_get_agg_replacements_for_node (src);
2571 if (!old_av)
2572 return;
2573
2574 new_av = NULL;
2575 while (old_av)
2576 {
2577 struct ipa_agg_replacement_value *v;
2578
2579 v = ggc_alloc_ipa_agg_replacement_value ();
2580 memcpy (v, old_av, sizeof (*v));
2581 v->next = new_av;
2582 new_av = v;
2583 old_av = old_av->next;
2584 }
2585 ipa_set_node_agg_value_chain (dst, new_av);
771578a0
MJ
2586}
2587
40982661
JH
2588
2589/* Analyze newly added function into callgraph. */
2590
2591static void
2592ipa_add_new_function (struct cgraph_node *node, void *data ATTRIBUTE_UNUSED)
2593{
2594 ipa_analyze_node (node);
2595}
2596
771578a0 2597/* Register our cgraph hooks if they are not already there. */
be95e2b9 2598
518dc859 2599void
771578a0 2600ipa_register_cgraph_hooks (void)
518dc859 2601{
771578a0
MJ
2602 if (!edge_removal_hook_holder)
2603 edge_removal_hook_holder =
2604 cgraph_add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
2605 if (!node_removal_hook_holder)
2606 node_removal_hook_holder =
2607 cgraph_add_node_removal_hook (&ipa_node_removal_hook, NULL);
2608 if (!edge_duplication_hook_holder)
2609 edge_duplication_hook_holder =
2610 cgraph_add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
2611 if (!node_duplication_hook_holder)
2612 node_duplication_hook_holder =
2613 cgraph_add_node_duplication_hook (&ipa_node_duplication_hook, NULL);
40982661
JH
2614 function_insertion_hook_holder =
2615 cgraph_add_function_insertion_hook (&ipa_add_new_function, NULL);
771578a0 2616}
518dc859 2617
771578a0 2618/* Unregister our cgraph hooks if they are not already there. */
be95e2b9 2619
771578a0
MJ
2620static void
2621ipa_unregister_cgraph_hooks (void)
2622{
2623 cgraph_remove_edge_removal_hook (edge_removal_hook_holder);
2624 edge_removal_hook_holder = NULL;
2625 cgraph_remove_node_removal_hook (node_removal_hook_holder);
2626 node_removal_hook_holder = NULL;
2627 cgraph_remove_edge_duplication_hook (edge_duplication_hook_holder);
2628 edge_duplication_hook_holder = NULL;
2629 cgraph_remove_node_duplication_hook (node_duplication_hook_holder);
2630 node_duplication_hook_holder = NULL;
40982661
JH
2631 cgraph_remove_function_insertion_hook (function_insertion_hook_holder);
2632 function_insertion_hook_holder = NULL;
771578a0
MJ
2633}
2634
2635/* Free all ipa_node_params and all ipa_edge_args structures if they are no
2636 longer needed after ipa-cp. */
be95e2b9 2637
771578a0 2638void
e33c6cd6 2639ipa_free_all_structures_after_ipa_cp (void)
3e293154 2640{
5ee53a06 2641 if (!optimize)
3e293154
MJ
2642 {
2643 ipa_free_all_edge_args ();
2644 ipa_free_all_node_params ();
310bc633
MJ
2645 free_alloc_pool (ipcp_sources_pool);
2646 free_alloc_pool (ipcp_values_pool);
2c9561b5 2647 free_alloc_pool (ipcp_agg_lattice_pool);
3e293154
MJ
2648 ipa_unregister_cgraph_hooks ();
2649 }
2650}
2651
2652/* Free all ipa_node_params and all ipa_edge_args structures if they are no
2653 longer needed after indirect inlining. */
be95e2b9 2654
3e293154 2655void
e33c6cd6 2656ipa_free_all_structures_after_iinln (void)
771578a0
MJ
2657{
2658 ipa_free_all_edge_args ();
2659 ipa_free_all_node_params ();
2660 ipa_unregister_cgraph_hooks ();
310bc633
MJ
2661 if (ipcp_sources_pool)
2662 free_alloc_pool (ipcp_sources_pool);
2663 if (ipcp_values_pool)
2664 free_alloc_pool (ipcp_values_pool);
2c9561b5
MJ
2665 if (ipcp_agg_lattice_pool)
2666 free_alloc_pool (ipcp_agg_lattice_pool);
518dc859
RL
2667}
2668
dcd416e3 2669/* Print the ipa_node_params data (parameter descriptors) of function
 518dc859 2670 NODE to F. */
be95e2b9 2671
518dc859 2672void
2c9561b5 2673ipa_print_node_params (FILE *f, struct cgraph_node *node)
518dc859
RL
2674{
2675 int i, count;
2676 tree temp;
3e293154 2677 struct ipa_node_params *info;
518dc859 2678
3e293154
MJ
2679 if (!node->analyzed)
2680 return;
2681 info = IPA_NODE_REF (node);
b258210c
MJ
2682 fprintf (f, " function %s parameter descriptors:\n",
2683 cgraph_node_name (node));
3e293154
MJ
2684 count = ipa_get_param_count (info);
2685 for (i = 0; i < count; i++)
518dc859 2686 {
f8e2a1ed 2687 temp = ipa_get_param (info, i);
ca30a539
JH
2688 if (TREE_CODE (temp) == PARM_DECL)
2689 fprintf (f, " param %d : %s", i,
90e1a349
MH
2690 (DECL_NAME (temp)
2691 ? (*lang_hooks.decl_printable_name) (temp, 2)
2692 : "(unnamed)"));
339f49ec
JH
2693 if (ipa_is_param_used (info, i))
2694 fprintf (f, " used");
3e293154 2695 fprintf (f, "\n");
518dc859
RL
2696 }
2697}
dcd416e3 2698
ca30a539 2699/* Print ipa_node_params data structures of all functions in the
3e293154 2700 callgraph to F. */
be95e2b9 2701
3e293154 2702void
ca30a539 2703ipa_print_all_params (FILE * f)
3e293154
MJ
2704{
2705 struct cgraph_node *node;
2706
ca30a539 2707 fprintf (f, "\nFunction parameters:\n");
65c70e6b 2708 FOR_EACH_FUNCTION (node)
ca30a539 2709 ipa_print_node_params (f, node);
3e293154 2710}
3f84bf08
MJ
2711
2712/* Return a heap allocated vector containing formal parameters of FNDECL. */
2713
9771b263 2714vec<tree>
3f84bf08
MJ
2715ipa_get_vector_of_formal_parms (tree fndecl)
2716{
9771b263 2717 vec<tree> args;
3f84bf08
MJ
2718 int count;
2719 tree parm;
2720
310bc633 2721 count = count_formal_params (fndecl);
9771b263 2722 args.create (count);
910ad8de 2723 for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
9771b263 2724 args.quick_push (parm);
3f84bf08
MJ
2725
2726 return args;
2727}
2728
2729/* Return a heap allocated vector containing types of formal parameters of
2730 function type FNTYPE. */
2731
9771b263 2732static inline vec<tree>
3f84bf08
MJ
2733get_vector_of_formal_parm_types (tree fntype)
2734{
9771b263 2735 vec<tree> types;
3f84bf08
MJ
2736 int count = 0;
2737 tree t;
2738
2739 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
2740 count++;
2741
9771b263 2742 types.create (count);
3f84bf08 2743 for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
9771b263 2744 types.quick_push (TREE_VALUE (t));
3f84bf08
MJ
2745
2746 return types;
2747}
2748
2749/* Modify the function declaration FNDECL and its type according to the plan in
2750 ADJUSTMENTS. It also sets base fields of individual adjustments structures
2751 to reflect the actual parameters being modified which are determined by the
2752 base_index field. */
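/* An invented example of the effect (the declaration below does not come
   from any real testcase): given adjustments that copy parameter 0, remove
   parameter 1 and replace parameter 2 with a piece of type int passed by
   value, a declaration such as

     void foo (int a, long unused, struct big *p);

   ends up with a type roughly equivalent to

     void foo (int a, int <synthesized>);

   where the second parameter is a freshly built PARM_DECL whose name is
   derived from SYNTH_PARM_PREFIX ("SYNTH" by default).  */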
2753
2754void
2755ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments,
2756 const char *synth_parm_prefix)
2757{
9771b263 2758 vec<tree> oparms, otypes;
3f84bf08
MJ
2759 tree orig_type, new_type = NULL;
2760 tree old_arg_types, t, new_arg_types = NULL;
2761 tree parm, *link = &DECL_ARGUMENTS (fndecl);
9771b263 2762 int i, len = adjustments.length ();
3f84bf08
MJ
2763 tree new_reversed = NULL;
2764 bool care_for_types, last_parm_void;
2765
2766 if (!synth_parm_prefix)
2767 synth_parm_prefix = "SYNTH";
2768
2769 oparms = ipa_get_vector_of_formal_parms (fndecl);
2770 orig_type = TREE_TYPE (fndecl);
2771 old_arg_types = TYPE_ARG_TYPES (orig_type);
2772
 2773 /* The following test is an ugly hack; some functions simply don't have any
2774 arguments in their type. This is probably a bug but well... */
2775 care_for_types = (old_arg_types != NULL_TREE);
2776 if (care_for_types)
2777 {
2778 last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
2779 == void_type_node);
2780 otypes = get_vector_of_formal_parm_types (orig_type);
2781 if (last_parm_void)
9771b263 2782 gcc_assert (oparms.length () + 1 == otypes.length ());
3f84bf08 2783 else
9771b263 2784 gcc_assert (oparms.length () == otypes.length ());
3f84bf08
MJ
2785 }
2786 else
2787 {
2788 last_parm_void = false;
9771b263 2789 otypes.create (0);
3f84bf08
MJ
2790 }
2791
2792 for (i = 0; i < len; i++)
2793 {
2794 struct ipa_parm_adjustment *adj;
2795 gcc_assert (link);
2796
9771b263
DN
2797 adj = &adjustments[i];
2798 parm = oparms[adj->base_index];
3f84bf08
MJ
2799 adj->base = parm;
2800
2801 if (adj->copy_param)
2802 {
2803 if (care_for_types)
9771b263 2804 new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
3f84bf08
MJ
2805 new_arg_types);
2806 *link = parm;
910ad8de 2807 link = &DECL_CHAIN (parm);
3f84bf08
MJ
2808 }
2809 else if (!adj->remove_param)
2810 {
2811 tree new_parm;
2812 tree ptype;
2813
2814 if (adj->by_ref)
2815 ptype = build_pointer_type (adj->type);
2816 else
2817 ptype = adj->type;
2818
2819 if (care_for_types)
2820 new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);
2821
2822 new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
2823 ptype);
2824 DECL_NAME (new_parm) = create_tmp_var_name (synth_parm_prefix);
2825
2826 DECL_ARTIFICIAL (new_parm) = 1;
2827 DECL_ARG_TYPE (new_parm) = ptype;
2828 DECL_CONTEXT (new_parm) = fndecl;
2829 TREE_USED (new_parm) = 1;
2830 DECL_IGNORED_P (new_parm) = 1;
2831 layout_decl (new_parm, 0);
2832
3f84bf08
MJ
2833 adj->base = parm;
2834 adj->reduction = new_parm;
2835
2836 *link = new_parm;
2837
910ad8de 2838 link = &DECL_CHAIN (new_parm);
3f84bf08
MJ
2839 }
2840 }
2841
2842 *link = NULL_TREE;
2843
2844 if (care_for_types)
2845 {
2846 new_reversed = nreverse (new_arg_types);
2847 if (last_parm_void)
2848 {
2849 if (new_reversed)
2850 TREE_CHAIN (new_arg_types) = void_list_node;
2851 else
2852 new_reversed = void_list_node;
2853 }
2854 }
2855
2856 /* Use copy_node to preserve as much as possible from original type
2857 (debug info, attribute lists etc.)
 2858 The exception is METHOD_TYPEs, which must have a THIS argument;
 2859 when we are asked to remove it, we need to build a new FUNCTION_TYPE
 2860 instead. */
2861 if (TREE_CODE (orig_type) != METHOD_TYPE
9771b263
DN
2862 || (adjustments[0].copy_param
2863 && adjustments[0].base_index == 0))
3f84bf08 2864 {
4eb3f32c 2865 new_type = build_distinct_type_copy (orig_type);
3f84bf08
MJ
2866 TYPE_ARG_TYPES (new_type) = new_reversed;
2867 }
2868 else
2869 {
2870 new_type
2871 = build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
2872 new_reversed));
2873 TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
2874 DECL_VINDEX (fndecl) = NULL_TREE;
2875 }
2876
d402c33d
JH
 2877 /* When the signature changes, we need to clear the builtin info. */
2878 if (DECL_BUILT_IN (fndecl))
2879 {
2880 DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
2881 DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
2882 }
2883
3f84bf08
MJ
2884 /* This is a new type, not a copy of an old type. Need to reassociate
2885 variants. We can handle everything except the main variant lazily. */
2886 t = TYPE_MAIN_VARIANT (orig_type);
2887 if (orig_type != t)
2888 {
2889 TYPE_MAIN_VARIANT (new_type) = t;
2890 TYPE_NEXT_VARIANT (new_type) = TYPE_NEXT_VARIANT (t);
2891 TYPE_NEXT_VARIANT (t) = new_type;
2892 }
2893 else
2894 {
2895 TYPE_MAIN_VARIANT (new_type) = new_type;
2896 TYPE_NEXT_VARIANT (new_type) = NULL;
2897 }
2898
2899 TREE_TYPE (fndecl) = new_type;
9b389a5e 2900 DECL_VIRTUAL_P (fndecl) = 0;
9771b263
DN
2901 otypes.release ();
2902 oparms.release ();
3f84bf08
MJ
2903}
2904
2905/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
2906 If this is a directly recursive call, CS must be NULL. Otherwise it must
2907 contain the corresponding call graph edge. */
2908
2909void
2910ipa_modify_call_arguments (struct cgraph_edge *cs, gimple stmt,
2911 ipa_parm_adjustment_vec adjustments)
2912{
9771b263
DN
2913 vec<tree> vargs;
2914 vec<tree, va_gc> **debug_args = NULL;
3f84bf08
MJ
2915 gimple new_stmt;
2916 gimple_stmt_iterator gsi;
2917 tree callee_decl;
2918 int i, len;
2919
9771b263
DN
2920 len = adjustments.length ();
2921 vargs.create (len);
960bfb69 2922 callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->symbol.decl;
3f84bf08
MJ
2923
2924 gsi = gsi_for_stmt (stmt);
2925 for (i = 0; i < len; i++)
2926 {
2927 struct ipa_parm_adjustment *adj;
2928
9771b263 2929 adj = &adjustments[i];
3f84bf08
MJ
2930
2931 if (adj->copy_param)
2932 {
2933 tree arg = gimple_call_arg (stmt, adj->base_index);
2934
9771b263 2935 vargs.quick_push (arg);
3f84bf08
MJ
2936 }
2937 else if (!adj->remove_param)
2938 {
fffe1e40
MJ
2939 tree expr, base, off;
2940 location_t loc;
c1ed6a01
MJ
2941 unsigned int deref_align;
2942 bool deref_base = false;
fffe1e40
MJ
2943
2944 /* We create a new parameter out of the value of the old one, we can
2945 do the following kind of transformations:
2946
2947 - A scalar passed by reference is converted to a scalar passed by
2948 value. (adj->by_ref is false and the type of the original
2949 actual argument is a pointer to a scalar).
2950
2951 - A part of an aggregate is passed instead of the whole aggregate.
2952 The part can be passed either by value or by reference, this is
2953 determined by value of adj->by_ref. Moreover, the code below
2954 handles both situations when the original aggregate is passed by
2955 value (its type is not a pointer) and when it is passed by
2956 reference (it is a pointer to an aggregate).
2957
2958 When the new argument is passed by reference (adj->by_ref is true)
2959 it must be a part of an aggregate and therefore we form it by
2960 simply taking the address of a reference inside the original
2961 aggregate. */
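	      /* An invented example of the first transformation above: if
		 the original callee took "int *p" and only read *p, the
		 adjusted callee takes the int by value, so for a call
		 foo (&i) the code below builds the MEM_REF *(&i + 0), i.e.
		 simply i, and passes that value (through a temporary if
		 necessary) instead of the address.  */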
2962
2963 gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
2964 base = gimple_call_arg (stmt, adj->base_index);
3a50da34
DC
2965 loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
2966 : EXPR_LOCATION (base);
fffe1e40 2967
82d49829
MJ
2968 if (TREE_CODE (base) != ADDR_EXPR
2969 && POINTER_TYPE_P (TREE_TYPE (base)))
2970 off = build_int_cst (adj->alias_ptr_type,
fffe1e40 2971 adj->offset / BITS_PER_UNIT);
3f84bf08 2972 else
3f84bf08 2973 {
fffe1e40
MJ
2974 HOST_WIDE_INT base_offset;
2975 tree prev_base;
c1ed6a01 2976 bool addrof;
fffe1e40
MJ
2977
2978 if (TREE_CODE (base) == ADDR_EXPR)
c1ed6a01
MJ
2979 {
2980 base = TREE_OPERAND (base, 0);
2981 addrof = true;
2982 }
2983 else
2984 addrof = false;
fffe1e40
MJ
2985 prev_base = base;
2986 base = get_addr_base_and_unit_offset (base, &base_offset);
2987 /* Aggregate arguments can have non-invariant addresses. */
2988 if (!base)
2989 {
2990 base = build_fold_addr_expr (prev_base);
82d49829 2991 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
2992 adj->offset / BITS_PER_UNIT);
2993 }
2994 else if (TREE_CODE (base) == MEM_REF)
2995 {
c1ed6a01
MJ
2996 if (!addrof)
2997 {
2998 deref_base = true;
2999 deref_align = TYPE_ALIGN (TREE_TYPE (base));
3000 }
82d49829 3001 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
3002 base_offset
3003 + adj->offset / BITS_PER_UNIT);
3004 off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
d35936ab 3005 off);
fffe1e40
MJ
3006 base = TREE_OPERAND (base, 0);
3007 }
3008 else
3009 {
82d49829 3010 off = build_int_cst (adj->alias_ptr_type,
fffe1e40
MJ
3011 base_offset
3012 + adj->offset / BITS_PER_UNIT);
3013 base = build_fold_addr_expr (base);
3014 }
3f84bf08 3015 }
fffe1e40 3016
3a5a825a
RG
3017 if (!adj->by_ref)
3018 {
3019 tree type = adj->type;
3020 unsigned int align;
3021 unsigned HOST_WIDE_INT misalign;
644ffefd 3022
c1ed6a01
MJ
3023 if (deref_base)
3024 {
3025 align = deref_align;
3026 misalign = 0;
3027 }
3028 else
3029 {
3030 get_pointer_alignment_1 (base, &align, &misalign);
3031 if (TYPE_ALIGN (type) > align)
3032 align = TYPE_ALIGN (type);
3033 }
27bcd47c
LC
3034 misalign += (tree_to_double_int (off)
3035 .sext (TYPE_PRECISION (TREE_TYPE (off))).low
3a5a825a
RG
3036 * BITS_PER_UNIT);
3037 misalign = misalign & (align - 1);
3038 if (misalign != 0)
3039 align = (misalign & -misalign);
3040 if (align < TYPE_ALIGN (type))
3041 type = build_aligned_type (type, align);
3042 expr = fold_build2_loc (loc, MEM_REF, type, base, off);
3043 }
3044 else
3045 {
3046 expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
3047 expr = build_fold_addr_expr (expr);
3048 }
fffe1e40 3049
3f84bf08
MJ
3050 expr = force_gimple_operand_gsi (&gsi, expr,
3051 adj->by_ref
3052 || is_gimple_reg_type (adj->type),
3053 NULL, true, GSI_SAME_STMT);
9771b263 3054 vargs.quick_push (expr);
3f84bf08 3055 }
ddb555ed
JJ
3056 if (!adj->copy_param && MAY_HAVE_DEBUG_STMTS)
3057 {
3058 unsigned int ix;
3059 tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
3060 gimple def_temp;
3061
3062 arg = gimple_call_arg (stmt, adj->base_index);
3063 if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
3064 {
3065 if (!fold_convertible_p (TREE_TYPE (origin), arg))
3066 continue;
3067 arg = fold_convert_loc (gimple_location (stmt),
3068 TREE_TYPE (origin), arg);
3069 }
3070 if (debug_args == NULL)
3071 debug_args = decl_debug_args_insert (callee_decl);
9771b263 3072 for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
ddb555ed
JJ
3073 if (ddecl == origin)
3074 {
9771b263 3075 ddecl = (**debug_args)[ix + 1];
ddb555ed
JJ
3076 break;
3077 }
3078 if (ddecl == NULL)
3079 {
3080 ddecl = make_node (DEBUG_EXPR_DECL);
3081 DECL_ARTIFICIAL (ddecl) = 1;
3082 TREE_TYPE (ddecl) = TREE_TYPE (origin);
3083 DECL_MODE (ddecl) = DECL_MODE (origin);
3084
9771b263
DN
3085 vec_safe_push (*debug_args, origin);
3086 vec_safe_push (*debug_args, ddecl);
ddb555ed 3087 }
9771b263 3088 def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
ddb555ed
JJ
3089 gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
3090 }
3f84bf08
MJ
3091 }
3092
3093 if (dump_file && (dump_flags & TDF_DETAILS))
3094 {
3095 fprintf (dump_file, "replacing stmt:");
3096 print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
3097 }
3098
3f84bf08 3099 new_stmt = gimple_build_call_vec (callee_decl, vargs);
9771b263 3100 vargs.release ();
3f84bf08
MJ
3101 if (gimple_call_lhs (stmt))
3102 gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));
3103
3104 gimple_set_block (new_stmt, gimple_block (stmt));
3105 if (gimple_has_location (stmt))
3106 gimple_set_location (new_stmt, gimple_location (stmt));
3f84bf08 3107 gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
a7a296ab 3108 gimple_call_copy_flags (new_stmt, stmt);
3f84bf08
MJ
3109
3110 if (dump_file && (dump_flags & TDF_DETAILS))
3111 {
3112 fprintf (dump_file, "with stmt:");
3113 print_gimple_stmt (dump_file, new_stmt, 0, 0);
3114 fprintf (dump_file, "\n");
3115 }
3116 gsi_replace (&gsi, new_stmt, true);
3117 if (cs)
3118 cgraph_set_call_stmt (cs, new_stmt);
3119 update_ssa (TODO_update_ssa);
3120 free_dominance_info (CDI_DOMINATORS);
3121}
3122
3123/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once. */
3124
3125static bool
3126index_in_adjustments_multiple_times_p (int base_index,
3127 ipa_parm_adjustment_vec adjustments)
3128{
9771b263 3129 int i, len = adjustments.length ();
3f84bf08
MJ
3130 bool one = false;
3131
3132 for (i = 0; i < len; i++)
3133 {
3134 struct ipa_parm_adjustment *adj;
9771b263 3135 adj = &adjustments[i];
3f84bf08
MJ
3136
3137 if (adj->base_index == base_index)
3138 {
3139 if (one)
3140 return true;
3141 else
3142 one = true;
3143 }
3144 }
3145 return false;
3146}
3147
3148
3149/* Return adjustments that should have the same effect on function parameters
3150 and call arguments as if they were first changed according to adjustments in
3151 INNER and then by adjustments in OUTER. */
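/* A hedged worked illustration (the parameter names are invented): if INNER
   removed the second of three original parameters (a, b, c), OUTER only
   sees (a, c), so an OUTER entry with base_index 1 refers to c.  The code
   below translates that back to the original index recorded in the
   corresponding INNER adjustment, and the removals requested by INNER are
   appended to the resulting vector at the end.  */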
3152
3153ipa_parm_adjustment_vec
3154ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
3155 ipa_parm_adjustment_vec outer)
3156{
9771b263
DN
3157 int i, outlen = outer.length ();
3158 int inlen = inner.length ();
3f84bf08
MJ
3159 int removals = 0;
3160 ipa_parm_adjustment_vec adjustments, tmp;
3161
9771b263 3162 tmp.create (inlen);
3f84bf08
MJ
3163 for (i = 0; i < inlen; i++)
3164 {
3165 struct ipa_parm_adjustment *n;
9771b263 3166 n = &inner[i];
3f84bf08
MJ
3167
3168 if (n->remove_param)
3169 removals++;
3170 else
9771b263 3171 tmp.quick_push (*n);
3f84bf08
MJ
3172 }
3173
9771b263 3174 adjustments.create (outlen + removals);
3f84bf08
MJ
3175 for (i = 0; i < outlen; i++)
3176 {
f32682ca 3177 struct ipa_parm_adjustment r;
9771b263
DN
3178 struct ipa_parm_adjustment *out = &outer[i];
3179 struct ipa_parm_adjustment *in = &tmp[out->base_index];
3f84bf08 3180
f32682ca 3181 memset (&r, 0, sizeof (r));
3f84bf08
MJ
3182 gcc_assert (!in->remove_param);
3183 if (out->remove_param)
3184 {
3185 if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
3186 {
f32682ca 3187 r.remove_param = true;
9771b263 3188 adjustments.quick_push (r);
3f84bf08
MJ
3189 }
3190 continue;
3191 }
3192
f32682ca
DN
3193 r.base_index = in->base_index;
3194 r.type = out->type;
3f84bf08
MJ
3195
3196 /* FIXME: Create nonlocal value too. */
3197
3198 if (in->copy_param && out->copy_param)
f32682ca 3199 r.copy_param = true;
3f84bf08 3200 else if (in->copy_param)
f32682ca 3201 r.offset = out->offset;
3f84bf08 3202 else if (out->copy_param)
f32682ca 3203 r.offset = in->offset;
3f84bf08 3204 else
f32682ca 3205 r.offset = in->offset + out->offset;
9771b263 3206 adjustments.quick_push (r);
3f84bf08
MJ
3207 }
3208
3209 for (i = 0; i < inlen; i++)
3210 {
9771b263 3211 struct ipa_parm_adjustment *n = &inner[i];
3f84bf08
MJ
3212
3213 if (n->remove_param)
9771b263 3214 adjustments.quick_push (*n);
3f84bf08
MJ
3215 }
3216
9771b263 3217 tmp.release ();
3f84bf08
MJ
3218 return adjustments;
3219}
3220
 3221/* Dump the adjustments in the vector ADJUSTMENTS to FILE in a human
3222 friendly way, assuming they are meant to be applied to FNDECL. */
3223
3224void
3225ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
3226 tree fndecl)
3227{
9771b263 3228 int i, len = adjustments.length ();
3f84bf08 3229 bool first = true;
9771b263 3230 vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
3f84bf08
MJ
3231
3232 fprintf (file, "IPA param adjustments: ");
3233 for (i = 0; i < len; i++)
3234 {
3235 struct ipa_parm_adjustment *adj;
9771b263 3236 adj = &adjustments[i];
3f84bf08
MJ
3237
3238 if (!first)
3239 fprintf (file, " ");
3240 else
3241 first = false;
3242
3243 fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
9771b263 3244 print_generic_expr (file, parms[adj->base_index], 0);
3f84bf08
MJ
3245 if (adj->base)
3246 {
3247 fprintf (file, ", base: ");
3248 print_generic_expr (file, adj->base, 0);
3249 }
3250 if (adj->reduction)
3251 {
3252 fprintf (file, ", reduction: ");
3253 print_generic_expr (file, adj->reduction, 0);
3254 }
3255 if (adj->new_ssa_base)
3256 {
3257 fprintf (file, ", new_ssa_base: ");
3258 print_generic_expr (file, adj->new_ssa_base, 0);
3259 }
3260
3261 if (adj->copy_param)
3262 fprintf (file, ", copy_param");
3263 else if (adj->remove_param)
3264 fprintf (file, ", remove_param");
3265 else
3266 fprintf (file, ", offset %li", (long) adj->offset);
3267 if (adj->by_ref)
3268 fprintf (file, ", by_ref");
3269 print_node_brief (file, ", type: ", adj->type, 0);
3270 fprintf (file, "\n");
3271 }
9771b263 3272 parms.release ();
3f84bf08
MJ
3273}
3274
2c9561b5
MJ
 3275/* Dump the linked list of aggregate replacement values AV to F. */
3276
3277void
3278ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
3279{
3280 bool comma = false;
3281 fprintf (f, " Aggregate replacements:");
3282 for (; av; av = av->next)
3283 {
3284 fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
3285 av->index, av->offset);
3286 print_generic_expr (f, av->value, 0);
3287 comma = true;
3288 }
3289 fprintf (f, "\n");
3290}
3291
fb3f88cc
JH
/* Stream out jump function JUMP_FUNC to OB.  */
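/* The streamed representation is: the jump function type as a uhwi, followed
   by the type-specific payload (known-type: offset and two trees; constant:
   one tree; pass-through: the operation, then either formal_id plus an
   agg_preserved bit or an operand tree plus formal_id; ancestor: offset,
   type, formal_id and an agg_preserved bit), followed by the number of
   aggregate items, an optional by_ref bit and the items themselves.
   ipa_read_jump_function below must consume the fields in exactly this
   order.  */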

static void
ipa_write_jump_function (struct output_block *ob,
                         struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_KNOWN_TYPE:
      streamer_write_uhwi (ob, jump_func->value.known_type.offset);
      stream_write_tree (ob, jump_func->value.known_type.base_type, true);
      stream_write_tree (ob, jump_func->value.known_type.component_type, true);
      break;
    case IPA_JF_CONST:
      gcc_assert (
          EXPR_LOCATION (jump_func->value.constant) == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
        {
          streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
          bp = bitpack_create (ob->main_stream);
          bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
          streamer_write_bitpack (&bp);
        }
      else
        {
          stream_write_tree (ob, jump_func->value.pass_through.operand, true);
          streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
        }
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      stream_write_tree (ob, jump_func->value.ancestor.type, true);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }
}

/* Read in jump function JUMP_FUNC from IB.  */
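/* The fields are read back in the same order in which ipa_write_jump_function
   emitted them; any change on the writer side has to be mirrored here,
   otherwise the LTO stream gets out of sync.  */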

static void
ipa_read_jump_function (struct lto_input_block *ib,
                        struct ipa_jump_func *jump_func,
                        struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      jump_func->type = IPA_JF_UNKNOWN;
      break;
    case IPA_JF_KNOWN_TYPE:
      {
        HOST_WIDE_INT offset = streamer_read_uhwi (ib);
        tree base_type = stream_read_tree (ib, data_in);
        tree component_type = stream_read_tree (ib, data_in);

        ipa_set_jf_known_type (jump_func, offset, base_type, component_type);
        break;
      }
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in));
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
        {
          int formal_id = streamer_read_uhwi (ib);
          struct bitpack_d bp = streamer_read_bitpack (ib);
          bool agg_preserved = bp_unpack_value (&bp, 1);
          ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
        }
      else
        {
          tree operand = stream_read_tree (ib, data_in);
          int formal_id = streamer_read_uhwi (ib);
          ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
                                         operation);
        }
      break;
    case IPA_JF_ANCESTOR:
      {
        HOST_WIDE_INT offset = streamer_read_uhwi (ib);
        tree type = stream_read_tree (ib, data_in);
        int formal_id = streamer_read_uhwi (ib);
        struct bitpack_d bp = streamer_read_bitpack (ib);
        bool agg_preserved = bp_unpack_value (&bp, 1);

        ipa_set_ancestor_jf (jump_func, offset, type, formal_id, agg_preserved);
        break;
      }
    }

  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }
}

/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */
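/* The three flags (polymorphic, agg_contents and by_ref) are packed into a
   single bitpack word emitted after param_index and offset;
   ipa_read_indirect_edge_info unpacks them in the same order.  */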

static void
ipa_write_indirect_edge_info (struct output_block *ob,
                              struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  streamer_write_hwi (ob, ii->offset);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  streamer_write_bitpack (&bp);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
    }
}

/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
                             struct data_in *data_in ATTRIBUTE_UNUSED,
                             struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
    }
}

/* Stream out NODE info to OB.  */
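/* Per node we emit a reference into the symtab encoder, one "parameter used"
   bit per formal parameter, and then, for every outgoing direct and indirect
   call, the argument count followed by one jump function per argument;
   indirect edges additionally carry their cgraph_indirect_call_info.  */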

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
  streamer_write_uhwi (ob, node_ref);

  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->uses_analysis_done
              || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
        ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob, ipa_get_cs_argument_count (args));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
        ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
      ipa_write_indirect_edge_info (ob, e);
    }
}

/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
                    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_initialize_node_params (node);

  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->uses_analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (!count)
        continue;
      vec_safe_grow_cleared (args->jump_functions, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
        ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), data_in);
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);

      if (count)
        {
          vec_safe_grow_cleared (args->jump_functions, count);
          for (k = 0; k < ipa_get_cs_argument_count (args); k++)
            ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k),
                                    data_in);
        }
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}

/* Write jump functions for all functions in the current partition.  */
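/* The partition is walked twice: the first pass only counts the nodes that
   have gimple bodies and IPA information, so that the count can be emitted
   before the per-node records that the second pass streams out.  */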

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_vector.exists ())
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->cgraph_node = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
          && IPA_NODE_REF (node) != NULL)
        count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
          && IPA_NODE_REF (node) != NULL)
        ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}

/* Read section in file FILE_DATA of length LEN with data DATA.  */
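/* A jump-function section starts with an lto_function_header; the CFG area it
   describes is not used here, so only the main stream (the per-node records
   written above) and the trailing string table are set up for reading.  */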

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
                       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
                        header->main_size);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
                        header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = cgraph (lto_symtab_encoder_deref (encoder, index));
      gcc_assert (node->analyzed);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
                         len);
  lto_data_in_delete (data_in);
}

/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data, LTO_section_jump_functions, NULL, &len);

      if (data)
        ipa_prop_read_section (file_data, data, len);
    }
}

/* After merging units, we can get a mismatch in argument counts.
   Decl merging might also have rendered parameter lists obsolete, so
   re-create the parameter information for every defined and analyzed
   function.  */

void
ipa_update_after_lto_read (void)
{
  struct cgraph_node *node;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();

  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->analyzed)
      ipa_initialize_node_params (node);
}
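
/* Stream out the aggregate value replacement chain of NODE to OB.  */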
void
write_agg_replacement_chain (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, (symtab_node) node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }
}

/* Stream in the aggregate value replacement chain for NODE from IB.  */
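/* Values are prepended to the chain as they are read, so the resulting list
   ends up in the reverse of the order in which write_agg_replacement_chain
   emitted them.  */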

static void
read_agg_replacement_chain (struct lto_input_block *ib,
                            struct cgraph_node *node,
                            struct data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc_ipa_agg_replacement_value ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);
}

/* Write all aggregate replacements for nodes in the current partition.  */

void
ipa_prop_write_all_agg_replacement (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_agg_replacements)
    return;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->cgraph_node = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
          && ipa_get_agg_replacements_for_node (node) != NULL)
        count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (cgraph_function_with_gimple_body_p (node)
          && ipa_get_agg_replacements_for_node (node) != NULL)
        write_agg_replacement_chain (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}

/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
                           const char *data,
                           size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  struct lto_input_block ib_main;
  unsigned int i;
  unsigned int count;

  LTO_INIT_INPUT_BLOCK (ib_main, (const char *) data + main_offset, 0,
                        header->main_size);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
                                header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = cgraph (lto_symtab_encoder_deref (encoder, index));
      gcc_assert (node->analyzed);
      read_agg_replacement_chain (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_ipcp_transform, NULL, data,
                         len);
  lto_data_in_delete (data_in);
}

/* Read IPA-CP aggregate replacements.  */

void
ipa_prop_read_all_agg_replacement (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
                                               LTO_section_ipcp_transform,
                                               NULL, &len);
      if (data)
        read_replacements_section (file_data, data, len);
    }
}

/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped in
   NODE.  */
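/* For example, if NODE was cloned with combined_args_to_skip = {1}, the
   mapping computed below is adj = {0, -1, 1, 2, ...}: replacements that
   referred to the removed parameter get index -1 and all later replacements
   shift down by one to match the clone's shorter parameter list.  */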

static void
adjust_agg_replacement_values (struct cgraph_node *node,
                               struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  if (!node->clone.combined_args_to_skip)
    return;

  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
        c = v->index;
    }
  c++;

  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
        adj[i] = -1;
        d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}

/* Function body transformation phase.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor_t> descriptors = vNULL;
  struct param_analysis_info *parms_ainfo;
  struct ipa_agg_replacement_value *aggval;
  gimple_stmt_iterator gsi;
  basic_block bb;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
             cgraph_node_name (node), node->uid);

  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->symbol.decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);
  parms_ainfo = XALLOCAVEC (struct param_analysis_info, param_count);
  memset (parms_ainfo, 0, sizeof (struct param_analysis_info) * param_count);
  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);

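  /* Walk all statements and replace loads from aggregate parameters (or from
     memory they point to) with the constants recorded in AGGVAL whenever the
     load's parameter index, offset and by-reference flag match a recorded
     replacement and the types can be converted.  */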
  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        struct ipa_agg_replacement_value *v;
        gimple stmt = gsi_stmt (gsi);
        tree rhs, val, t;
        HOST_WIDE_INT offset;
        int index;
        bool by_ref, vce;

        if (!gimple_assign_load_p (stmt))
          continue;
        rhs = gimple_assign_rhs1 (stmt);
        if (!is_gimple_reg_type (TREE_TYPE (rhs)))
          continue;

        vce = false;
        t = rhs;
        while (handled_component_p (t))
          {
            /* V_C_E can do things like convert an array of integers to one
               bigger integer and similar things we do not handle below.  */
            if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
              {
                vce = true;
                break;
              }
            t = TREE_OPERAND (t, 0);
          }
        if (vce)
          continue;

        if (!ipa_load_from_parm_agg_1 (descriptors, parms_ainfo, stmt,
                                       rhs, &index, &offset, &by_ref))
          continue;
        for (v = aggval; v; v = v->next)
          if (v->index == index
              && v->offset == offset)
            break;
        if (!v || v->by_ref != by_ref)
          continue;

        gcc_checking_assert (is_gimple_ip_invariant (v->value));
        if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
          {
            if (fold_convertible_p (TREE_TYPE (rhs), v->value))
              val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
            else if (TYPE_SIZE (TREE_TYPE (rhs))
                     == TYPE_SIZE (TREE_TYPE (v->value)))
              val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
            else
              {
                if (dump_file)
                  {
                    fprintf (dump_file, " const ");
                    print_generic_expr (dump_file, v->value, 0);
                    fprintf (dump_file, " can't be converted to type of ");
                    print_generic_expr (dump_file, rhs, 0);
                    fprintf (dump_file, "\n");
                  }
                continue;
              }
          }
        else
          val = v->value;

        if (dump_file && (dump_flags & TDF_DETAILS))
          {
            fprintf (dump_file, "Modifying stmt:\n ");
            print_gimple_stmt (dump_file, stmt, 0, 0);
          }
        gimple_assign_set_rhs_from_tree (&gsi, val);
        update_stmt (stmt);

        if (dump_file && (dump_flags & TDF_DETAILS))
          {
            fprintf (dump_file, "into:\n ");
            print_gimple_stmt (dump_file, stmt, 0, 0);
            fprintf (dump_file, "\n");
          }

        something_changed = true;
        if (maybe_clean_eh_stmt (stmt)
            && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
          cfg_changed = true;
      }

  (*ipa_node_agg_replacements)[node->uid] = NULL;
  free_parms_ainfo (parms_ainfo, param_count);
  descriptors.release ();

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}