/* Interprocedural analyses.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "coretypes.h"
#include "fold-const.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "insn-config.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "gimple-iterator.h"
#include "gimplify-me.h"
#include "gimple-walk.h"
#include "langhooks.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
#include "tree-into-ssa.h"
#include "tree-pass.h"
#include "tree-inline.h"
#include "ipa-inline.h"
#include "diagnostic.h"
#include "gimple-pretty-print.h"
#include "lto-streamer.h"
#include "data-streamer.h"
#include "tree-streamer.h"
#include "ipa-utils.h"
/* Intermediate information that we get from alias analysis about a particular
   parameter in a particular basic_block.  When a parameter or the memory it
   references is marked modified, we use that information in all dominated
   blocks without consulting the alias analysis oracle.  */

struct param_aa_status
{
  /* Set when this structure contains meaningful information.  If not, the
     structure describing a dominating BB should be used instead.  */
  bool valid;

  /* Whether we have seen something which might have modified the data in
     question.  PARM is for the parameter itself, REF is for data it points to
     but using the alias type of individual accesses and PT is the same thing
     but for computing aggregate pass-through functions using a very inclusive
     ao_ref.  */
  bool parm_modified, ref_modified, pt_modified;
};
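
/* For example, in a hypothetical function such as

     void
     foo (int *p)
     {
       use (*p);
       *p = 5;
       if (bar ())
	 use (*p);
       else
	 use (*p);
     }

   once the store to *p marks the parameter as modified in the block
   containing it, both arms of the condition are dominated by that block
   and simply reuse the recorded status instead of walking the virtual
   operand web again (use and bar are of course placeholder names).  */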

/* Information related to a given BB that is used only when looking at a
   function body.  */

struct ipa_bb_info
{
  /* Call graph edges going out of this BB.  */
  vec<cgraph_edge *> cg_edges;

  /* Alias analysis statuses of each formal parameter at this bb.  */
  vec<param_aa_status> param_aa_statuses;
};

/* Structure with global information that is only used when looking at a
   function body.  */

struct func_body_info
{
  /* The node that is being analyzed.  */
  cgraph_node *node;

  /* Its info.  */
  struct ipa_node_params *info;

  /* Information about individual BBs.  */
  vec<ipa_bb_info> bb_infos;

  /* Number of parameters.  */
  int param_count;

  /* Number of statements already walked when analyzing this function.  */
  unsigned int aa_walked;
};

/* Function summary where the parameter infos are actually stored.  */
ipa_node_params_t *ipa_node_params_sum = NULL;
/* Vector of IPA-CP transformation data for each clone.  */
vec<ipcp_transformation_summary, va_gc> *ipcp_transformations;
/* Vector where the argument infos are actually stored.  */
vec<ipa_edge_args, va_gc> *ipa_edge_args_vector;

/* Holders of ipa cgraph hooks: */
static struct cgraph_edge_hook_list *edge_removal_hook_holder;
static struct cgraph_2edge_hook_list *edge_duplication_hook_holder;
static struct cgraph_node_hook_list *function_insertion_hook_holder;

/* Description of a reference to an IPA constant.  */
struct ipa_cst_ref_desc
{
  /* Edge that corresponds to the statement which took the reference.  */
  struct cgraph_edge *cs;
  /* Linked list of duplicates created when call graph edges are cloned.  */
  struct ipa_cst_ref_desc *next_duplicate;
  /* Number of references in IPA structures, IPA_UNDESCRIBED_USE if the value
     is out of control.  */
  int refcount;
};

/* Allocation pool for reference descriptions.  */

static pool_allocator<ipa_cst_ref_desc> ipa_refdesc_pool
  ("IPA-PROP ref descriptions", 32);

/* Return true if DECL_FUNCTION_SPECIFIC_OPTIMIZATION of the decl associated
   with NODE should prevent us from analyzing it for the purposes of IPA-CP.  */

static bool
ipa_func_spec_opts_forbid_analysis_p (struct cgraph_node *node)
{
  tree fs_opts = DECL_FUNCTION_SPECIFIC_OPTIMIZATION (node->decl);

  if (!fs_opts)
    return false;
  return !opt_for_fn (node->decl, optimize)
	 || !opt_for_fn (node->decl, flag_ipa_cp);
}

/* Return index of the formal whose tree is PTREE in the function whose
   parameters are described by DESCRIPTORS.  */

static int
ipa_get_param_decl_index_1 (vec<ipa_param_descriptor> descriptors, tree ptree)
{
  int i, count;

  count = descriptors.length ();
  for (i = 0; i < count; i++)
    if (descriptors[i].decl == ptree)
      return i;

  return -1;
}

/* Return index of the formal whose tree is PTREE in function which corresponds
   to INFO.  */

int
ipa_get_param_decl_index (struct ipa_node_params *info, tree ptree)
{
  return ipa_get_param_decl_index_1 (info->descriptors, ptree);
}

/* Populate the param_decl field in parameter DESCRIPTORS that correspond to
   NODE.  */

static void
ipa_populate_param_decls (struct cgraph_node *node,
			  vec<ipa_param_descriptor> &descriptors)
{
  tree fndecl;
  tree fnargs;
  tree parm;
  int param_num;

  fndecl = node->decl;
  gcc_assert (gimple_has_body_p (fndecl));
  fnargs = DECL_ARGUMENTS (fndecl);
  param_num = 0;
  for (parm = fnargs; parm; parm = DECL_CHAIN (parm))
    {
      descriptors[param_num].decl = parm;
      descriptors[param_num].move_cost = estimate_move_cost (TREE_TYPE (parm),
							     true);
      param_num++;
    }
}

/* Return how many formal parameters FNDECL has.  */

int
count_formal_params (tree fndecl)
{
  tree parm;
  int count = 0;
  gcc_assert (gimple_has_body_p (fndecl));

  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    count++;

  return count;
}

/* Dump the declaration of the Ith formal parameter of the function
   corresponding to INFO to FILE.  Note there is no setter function as the
   underlying array is built just once using ipa_initialize_node_params.  */

void
ipa_dump_param (FILE *file, struct ipa_node_params *info, int i)
{
  fprintf (file, "param #%i", i);
  if (info->descriptors[i].decl)
    {
      fprintf (file, " ");
      print_generic_expr (file, info->descriptors[i].decl, 0);
    }
}

/* Initialize the ipa_node_params structure associated with NODE
   to hold PARAM_COUNT parameters.  */

void
ipa_alloc_node_params (struct cgraph_node *node, int param_count)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists () && param_count)
    info->descriptors.safe_grow_cleared (param_count);
}

/* Initialize the ipa_node_params structure associated with NODE by counting
   the function parameters, creating the descriptors and populating their
   param_decls.  */

void
ipa_initialize_node_params (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  if (!info->descriptors.exists ())
    {
      ipa_alloc_node_params (node, count_formal_params (node->decl));
      ipa_populate_param_decls (node, info->descriptors);
    }
}

/* Print the jump functions associated with call graph edge CS to file F.  */

static void
ipa_print_node_jump_functions_for_edge (FILE *f, struct cgraph_edge *cs)
{
  int i, count;

  count = ipa_get_cs_argument_count (IPA_EDGE_REF (cs));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jump_func;
      enum jump_func_type type;

      jump_func = ipa_get_ith_jump_func (IPA_EDGE_REF (cs), i);
      type = jump_func->type;

      fprintf (f, "       param %d: ", i);
      if (type == IPA_JF_UNKNOWN)
	fprintf (f, "UNKNOWN\n");
      else if (type == IPA_JF_CONST)
	{
	  tree val = jump_func->value.constant.value;
	  fprintf (f, "CONST: ");
	  print_generic_expr (f, val, 0);
	  if (TREE_CODE (val) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (val, 0)) == CONST_DECL)
	    {
	      fprintf (f, " -> ");
	      print_generic_expr (f, DECL_INITIAL (TREE_OPERAND (val, 0)), 0);
	    }
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_PASS_THROUGH)
	{
	  fprintf (f, "PASS THROUGH: ");
	  fprintf (f, "%d, op %s",
		   jump_func->value.pass_through.formal_id,
		   get_tree_code_name (jump_func->value.pass_through.operation));
	  if (jump_func->value.pass_through.operation != NOP_EXPR)
	    {
	      fprintf (f, " ");
	      print_generic_expr (f,
				  jump_func->value.pass_through.operand, 0);
	    }
	  if (jump_func->value.pass_through.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}
      else if (type == IPA_JF_ANCESTOR)
	{
	  fprintf (f, "ANCESTOR: ");
	  fprintf (f, "%d, offset " HOST_WIDE_INT_PRINT_DEC,
		   jump_func->value.ancestor.formal_id,
		   jump_func->value.ancestor.offset);
	  if (jump_func->value.ancestor.agg_preserved)
	    fprintf (f, ", agg_preserved");
	  fprintf (f, "\n");
	}

      if (jump_func->agg.items)
	{
	  struct ipa_agg_jf_item *item;
	  int j;

	  fprintf (f, "         Aggregate passed by %s:\n",
		   jump_func->agg.by_ref ? "reference" : "value");
	  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, j, item)
	    {
	      fprintf (f, "           offset: " HOST_WIDE_INT_PRINT_DEC ", ",
		       item->offset);
	      if (TYPE_P (item->value))
		fprintf (f, "clobber of " HOST_WIDE_INT_PRINT_DEC " bits",
			 tree_to_uhwi (TYPE_SIZE (item->value)));
	      else
		{
		  fprintf (f, "cst: ");
		  print_generic_expr (f, item->value, 0);
		}
	      fprintf (f, "\n");
	    }
	}

      struct ipa_polymorphic_call_context *ctx
	= ipa_get_ith_polymorhic_call_context (IPA_EDGE_REF (cs), i);
      if (ctx && !ctx->useless_p ())
	{
	  fprintf (f, "         Context: ");
	  ctx->dump (dump_file);
	}

      if (jump_func->alignment.known)
	{
	  fprintf (f, "         Alignment: %u, misalignment: %u\n",
		   jump_func->alignment.align,
		   jump_func->alignment.misalign);
	}
      else
	fprintf (f, "         Unknown alignment\n");
    }
}

/* Print the jump functions of all arguments on all call graph edges going from
   NODE to file F.  */

void
ipa_print_node_jump_functions (FILE *f, struct cgraph_node *node)
{
  struct cgraph_edge *cs;

  fprintf (f, "  Jump functions of caller  %s/%i:\n", node->name (),
	   node->order);
  for (cs = node->callees; cs; cs = cs->next_callee)
    {
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      fprintf (f, "    callsite  %s/%i -> %s/%i : \n",
	       xstrdup_for_dump (node->name ()), node->order,
	       xstrdup_for_dump (cs->callee->name ()),
	       cs->callee->order);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }

  for (cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      struct cgraph_indirect_call_info *ii;
      if (!ipa_edge_args_info_available_for_edge_p (cs))
	continue;

      ii = cs->indirect_info;
      if (ii->agg_contents)
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC ", %s",
		 ii->member_ptr ? "member ptr" : "aggregate",
		 ii->param_index, ii->offset,
		 ii->by_ref ? "by reference" : "by_value");
      else
	fprintf (f, "    indirect %s callsite, calling param %i, "
		 "offset " HOST_WIDE_INT_PRINT_DEC,
		 ii->polymorphic ? "polymorphic" : "simple", ii->param_index,
		 ii->offset);

      if (cs->call_stmt)
	{
	  fprintf (f, ", for stmt ");
	  print_gimple_stmt (f, cs->call_stmt, 0, TDF_SLIM);
	}
      else
	fprintf (f, "\n");
      if (ii->polymorphic)
	ii->context.dump (f);
      ipa_print_node_jump_functions_for_edge (f, cs);
    }
}

/* Print ipa_jump_func data structures of all nodes in the call graph to F.  */

void
ipa_print_all_jump_functions (FILE *f)
{
  struct cgraph_node *node;

  fprintf (f, "\nJump functions:\n");
  FOR_EACH_FUNCTION (node)
    {
      ipa_print_node_jump_functions (f, node);
    }
}

/* Set JFUNC to be a jump function that carries no information at all
   (IPA_JF_UNKNOWN).  */

static void
ipa_set_jf_unknown (struct ipa_jump_func *jfunc)
{
  jfunc->type = IPA_JF_UNKNOWN;
  jfunc->alignment.known = false;
}

/* Set DST to be a copy of another jump function SRC (to be used by jump
   function combination code).  The two functions will share their rdesc.  */

static void
ipa_set_jf_cst_copy (struct ipa_jump_func *dst,
		     struct ipa_jump_func *src)
{
  gcc_checking_assert (src->type == IPA_JF_CONST);
  dst->type = IPA_JF_CONST;
  dst->value.constant = src->value.constant;
}

/* Set JFUNC to be a constant jump function.  */

static void
ipa_set_jf_constant (struct ipa_jump_func *jfunc, tree constant,
		     struct cgraph_edge *cs)
{
  constant = unshare_expr (constant);
  if (constant && EXPR_P (constant))
    SET_EXPR_LOCATION (constant, UNKNOWN_LOCATION);
  jfunc->type = IPA_JF_CONST;
  jfunc->value.constant.value = unshare_expr_without_location (constant);

  if (TREE_CODE (constant) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (constant, 0)) == FUNCTION_DECL)
    {
      struct ipa_cst_ref_desc *rdesc;

      rdesc = ipa_refdesc_pool.allocate ();
      rdesc->cs = cs;
      rdesc->next_duplicate = NULL;
      rdesc->refcount = 1;
      jfunc->value.constant.rdesc = rdesc;
    }
  else
    jfunc->value.constant.rdesc = NULL;
}
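
/* To illustrate when the reference description gets allocated above: for a
   hypothetical call site such as

     extern void handler (int);
     register_handler (handler);

   the argument is an ADDR_EXPR of a FUNCTION_DECL, so the constant jump
   function gets an ipa_cst_ref_desc whose refcount tracks the IPA
   reference to handler while edges carrying this jump function are cloned
   or resolved (register_handler is just an example name).  */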

/* Set JFUNC to be a simple pass-through jump function.  */

static void
ipa_set_jf_simple_pass_through (struct ipa_jump_func *jfunc, int formal_id,
				bool agg_preserved)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = NULL_TREE;
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = NOP_EXPR;
  jfunc->value.pass_through.agg_preserved = agg_preserved;
}

/* Set JFUNC to be an arithmetic pass through jump function.  */

static void
ipa_set_jf_arith_pass_through (struct ipa_jump_func *jfunc, int formal_id,
			       tree operand, enum tree_code operation)
{
  jfunc->type = IPA_JF_PASS_THROUGH;
  jfunc->value.pass_through.operand = unshare_expr_without_location (operand);
  jfunc->value.pass_through.formal_id = formal_id;
  jfunc->value.pass_through.operation = operation;
  jfunc->value.pass_through.agg_preserved = false;
}

/* Set JFUNC to be an ancestor jump function.  */

static void
ipa_set_ancestor_jf (struct ipa_jump_func *jfunc, HOST_WIDE_INT offset,
		     int formal_id, bool agg_preserved)
{
  jfunc->type = IPA_JF_ANCESTOR;
  jfunc->value.ancestor.formal_id = formal_id;
  jfunc->value.ancestor.offset = offset;
  jfunc->value.ancestor.agg_preserved = agg_preserved;
}
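
/* A small example of what the ancestor jump function built above describes
   (hypothetical, for illustration):

     struct A { int x; };
     struct B { int pad; struct A a; };

     void
     caller (struct B *b)
     {
       callee (&b->a);
     }

   The argument of callee is parameter 0 of caller displaced by the bit
   offset of field a, so it can be summarized as an ancestor jump function
   with formal_id 0 and that offset.  */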

/* Get IPA BB information about the given BB.  FBI is the context of analysis
   of this function body.  */

static struct ipa_bb_info *
ipa_get_bb_info (struct func_body_info *fbi, basic_block bb)
{
  gcc_checking_assert (fbi);
  return &fbi->bb_infos[bb->index];
}

/* Structure to be passed in between detect_type_change and
   check_stmt_for_type_change.  */

struct prop_type_change_info
{
  /* Offset into the object where there is the virtual method pointer we are
     looking for.  */
  HOST_WIDE_INT offset;
  /* The declaration or SSA_NAME pointer of the base that we are checking for
     type change.  */
  tree object;
  /* Set to true if dynamic type change has been detected.  */
  bool type_maybe_changed;
};

/* Return true if STMT can modify a virtual method table pointer.

   This function makes special assumptions about both constructors and
   destructors which are all the functions that are allowed to alter the VMT
   pointers.  It assumes that destructors begin with assignment into all VMT
   pointers and that constructors essentially look in the following way:

   1) The very first thing they do is that they call constructors of ancestor
   sub-objects that have them.

   2) Then VMT pointers of this and all its ancestors are set to new values
   corresponding to the type corresponding to the constructor.

   3) Only afterwards, other stuff such as constructor of member sub-objects
   and the code written by the user is run.  Only this may include calling
   virtual functions, directly or indirectly.

   There is no way to call a constructor of an ancestor sub-object in any
   other way.

   This means that we do not have to care whether constructors get the correct
   type information because they will always change it (in fact, if we define
   the type to be given by the VMT pointer, it is undefined).

   The most important fact to derive from the above is that if, for some
   statement in the section 3, we try to detect whether the dynamic type has
   changed, we can safely ignore all calls as we examine the function body
   backwards until we reach statements in section 2 because these calls cannot
   be ancestor constructors or destructors (if the input is not bogus) and so
   do not change the dynamic type (this holds true only for automatically
   allocated objects but at the moment we devirtualize only these).  We then
   must detect that statements in section 2 change the dynamic type and can try
   to derive the new type.  That is enough and we can stop, we will never see
   the calls into constructors of sub-objects in this code.  Therefore we can
   safely ignore all call statements that we traverse.  */
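
/* The three sections above can be illustrated on a hypothetical C++
   constructor (this example is not part of the original sources):

     struct B : A
     {
       M m;
       B () : A (), m ()	// 1) ancestor constructors run first
       {
	 // 2) by this point the VMT pointers of the object and all its
	 //    ancestors have been stored to point to the tables of B
	 virt ();		// 3) only user code may call virtual methods
       }
     };

   so when walking backwards from a statement in section 3, any call seen
   before the VMT stores of section 2 cannot be an ancestor constructor or
   destructor.  */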

static bool
stmt_may_be_vtbl_ptr_store (gimple stmt)
{
  if (is_gimple_call (stmt))
    return false;
  if (gimple_clobber_p (stmt))
    return false;
  else if (is_gimple_assign (stmt))
    {
      tree lhs = gimple_assign_lhs (stmt);

      if (!AGGREGATE_TYPE_P (TREE_TYPE (lhs)))
	{
	  if (flag_strict_aliasing
	      && !POINTER_TYPE_P (TREE_TYPE (lhs)))
	    return false;

	  if (TREE_CODE (lhs) == COMPONENT_REF
	      && !DECL_VIRTUAL_P (TREE_OPERAND (lhs, 1)))
	    return false;
	  /* In the future we might want to use get_base_ref_and_offset to find
	     if there is a field corresponding to the offset and if so, proceed
	     almost like if it was a component ref.  */
	}
    }
  return true;
}

/* Callback of walk_aliased_vdefs and a helper function for detect_type_change
   to check whether a particular statement may modify the virtual table
   pointer.  It stores its result into DATA, which points to a
   prop_type_change_info structure.  */

static bool
check_stmt_for_type_change (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef, void *data)
{
  gimple stmt = SSA_NAME_DEF_STMT (vdef);
  struct prop_type_change_info *tci = (struct prop_type_change_info *) data;

  if (stmt_may_be_vtbl_ptr_store (stmt))
    {
      tci->type_maybe_changed = true;
      return true;
    }
  else
    return false;
}

/* See if ARG is a PARM_DECL describing an instance passed by pointer
   or reference in FUNCTION.  Return false if the dynamic type may change
   in between the beginning of the function and the point where CALL is
   invoked.

   Generally functions are not allowed to change type of such instances,
   but they call destructors.  We assume that methods can not destroy the THIS
   pointer.  Also as special cases, constructors and destructors may change
   type of the THIS pointer.  */

static bool
param_type_may_change_p (tree function, tree arg, gimple call)
{
  /* Pure functions can not do any changes on the dynamic type;
     that require writing to memory.  */
  if (flags_from_decl_or_type (function) & (ECF_PURE | ECF_CONST))
    return false;
  /* We need to check if we are within an inlined constructor
     or destructor (ideally we would have a way to check that the
     inline cdtor is actually working on ARG, but we don't have an
     easy tie on this, so punt on all non-pure cdtors.
     We may also record the types of cdtors and once we know the type
     of the instance match them.

     Also code unification optimizations may merge calls from
     different blocks making return values unreliable.  So
     do nothing during late optimization.  */
  if (DECL_STRUCT_FUNCTION (function)->after_inlining)
    return true;
  if (TREE_CODE (arg) == SSA_NAME
      && SSA_NAME_IS_DEFAULT_DEF (arg)
      && TREE_CODE (SSA_NAME_VAR (arg)) == PARM_DECL)
    {
      /* Normal (non-THIS) argument.  */
      if ((SSA_NAME_VAR (arg) != DECL_ARGUMENTS (function)
	   || TREE_CODE (TREE_TYPE (function)) != METHOD_TYPE)
	  /* THIS pointer of a method - here we want to watch constructors
	     and destructors as those definitely may change the dynamic
	     type.  */
	  || (TREE_CODE (TREE_TYPE (function)) == METHOD_TYPE
	      && !DECL_CXX_CONSTRUCTOR_P (function)
	      && !DECL_CXX_DESTRUCTOR_P (function)
	      && (SSA_NAME_VAR (arg) == DECL_ARGUMENTS (function))))
	{
	  /* Walk the inline stack and watch out for ctors/dtors.  */
	  for (tree block = gimple_block (call);
	       block && TREE_CODE (block) == BLOCK;
	       block = BLOCK_SUPERCONTEXT (block))
	    if (inlined_polymorphic_ctor_dtor_block_p (block, false))
	      return true;
	  return false;
	}
    }
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE has changed (before
   callsite CALL) by looking for assignments to its virtual table pointer.  If
   it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.

   This is a helper function for detect_type_change and detect_type_change_ssa
   that does the heavy work which is usually unnecessary.  */

static bool
detect_type_change_from_memory_writes (tree arg, tree base, tree comp_type,
				       gcall *call, struct ipa_jump_func *jfunc,
				       HOST_WIDE_INT offset)
{
  struct prop_type_change_info tci;
  ao_ref ao;
  bool entry_reached = false;

  gcc_checking_assert (DECL_P (arg)
		       || TREE_CODE (arg) == MEM_REF
		       || handled_component_p (arg));

  comp_type = TYPE_MAIN_VARIANT (comp_type);

  /* Const calls cannot call virtual methods through VMT and so type changes do
     not matter.  */
  if (!flag_devirtualize || !gimple_vuse (call)
      /* Be sure expected_type is polymorphic.  */
      || !comp_type
      || TREE_CODE (comp_type) != RECORD_TYPE
      || !TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))
      || !BINFO_VTABLE (TYPE_BINFO (TYPE_MAIN_VARIANT (comp_type))))
    return true;

  ao_ref_init (&ao, arg);
  ao.base = base;
  ao.offset = offset;
  ao.size = POINTER_SIZE;
  ao.max_size = ao.size;

  tci.offset = offset;
  tci.object = get_base_address (arg);
  tci.type_maybe_changed = false;

  walk_aliased_vdefs (&ao, gimple_vuse (call), check_stmt_for_type_change,
		      &tci, NULL, &entry_reached);
  if (!tci.type_maybe_changed)
    return false;

  ipa_set_jf_unknown (jfunc);
  return true;
}

/* Detect whether the dynamic type of ARG of COMP_TYPE may have changed.
   If it is, return true and fill in the jump function JFUNC with relevant type
   information or set it to unknown.  ARG is the object itself (not a pointer
   to it, unless dereferenced).  BASE is the base of the memory access as
   returned by get_ref_base_and_extent, as is the offset.  */

static bool
detect_type_change (tree arg, tree base, tree comp_type, gcall *call,
		    struct ipa_jump_func *jfunc, HOST_WIDE_INT offset)
{
  if (!flag_devirtualize)
    return false;

  if (TREE_CODE (base) == MEM_REF
      && !param_type_may_change_p (current_function_decl,
				   TREE_OPERAND (base, 0),
				   call))
    return false;
  return detect_type_change_from_memory_writes (arg, base, comp_type,
						 call, jfunc, offset);
}

/* Like detect_type_change but ARG is supposed to be a non-dereferenced pointer
   SSA name (its dereference will become the base and the offset is assumed to
   be zero).  */

static bool
detect_type_change_ssa (tree arg, tree comp_type,
			gcall *call, struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (TREE_CODE (arg) == SSA_NAME);
  if (!flag_devirtualize
      || !POINTER_TYPE_P (TREE_TYPE (arg)))
    return false;

  if (!param_type_may_change_p (current_function_decl, arg, call))
    return false;

  arg = build2 (MEM_REF, ptr_type_node, arg,
		build_int_cst (ptr_type_node, 0));

  return detect_type_change_from_memory_writes (arg, arg, comp_type,
						 call, jfunc, 0);
}

/* Callback of walk_aliased_vdefs.  Flags that it has been invoked to the
   boolean variable pointed to by DATA.  */

static bool
mark_modified (ao_ref *ao ATTRIBUTE_UNUSED, tree vdef ATTRIBUTE_UNUSED,
	       void *data)
{
  bool *b = (bool *) data;
  *b = true;
  return true;
}

/* Return true if we have already walked so many statements in AA that we
   should really just start giving up.  */

static bool
aa_overwalked (struct func_body_info *fbi)
{
  gcc_checking_assert (fbi);
  return fbi->aa_walked > (unsigned) PARAM_VALUE (PARAM_IPA_MAX_AA_STEPS);
}

/* Find the nearest valid aa status for parameter specified by INDEX that
   dominates BB.  */

static struct param_aa_status *
find_dominating_aa_status (struct func_body_info *fbi, basic_block bb,
			   int index)
{
  while (true)
    {
      bb = get_immediate_dominator (CDI_DOMINATORS, bb);
      if (!bb)
	return NULL;
      struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
      if (!bi->param_aa_statuses.is_empty ()
	  && bi->param_aa_statuses[index].valid)
	return &bi->param_aa_statuses[index];
    }
}

/* Get AA status structure for the given BB and parameter with INDEX.  Allocate
   structures and/or initialize the result with a dominating description as
   necessary.  */

static struct param_aa_status *
parm_bb_aa_status_for_bb (struct func_body_info *fbi, basic_block bb,
			  int index)
{
  gcc_checking_assert (fbi);
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  if (bi->param_aa_statuses.is_empty ())
    bi->param_aa_statuses.safe_grow_cleared (fbi->param_count);
  struct param_aa_status *paa = &bi->param_aa_statuses[index];
  if (!paa->valid)
    {
      gcc_checking_assert (!paa->parm_modified
			   && !paa->ref_modified
			   && !paa->pt_modified);
      struct param_aa_status *dom_paa;
      dom_paa = find_dominating_aa_status (fbi, bb, index);
      if (dom_paa)
	*paa = *dom_paa;
      else
	paa->valid = true;
    }

  return paa;
}

/* Return true if a load from a formal parameter PARM_LOAD is known to retrieve
   a value known not to be modified in this function before reaching the
   statement STMT.  FBI holds information about the function we have so far
   gathered but do not survive the summary building stage.  */

static bool
parm_preserved_before_stmt_p (struct func_body_info *fbi, int index,
			      gimple stmt, tree parm_load)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->parm_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt) != NULL_TREE);
  ao_ref_init (&refd, parm_load);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->parm_modified = true;
  return !modified;
}

/* If STMT is an assignment that loads a value from a parameter declaration,
   return the index of the parameter in ipa_node_params which has not been
   modified.  Otherwise return -1.  */

static int
load_from_unmodified_param (struct func_body_info *fbi,
			    vec<ipa_param_descriptor> descriptors,
			    gimple stmt)
{
  int index;
  tree op1;

  if (!gimple_assign_single_p (stmt))
    return -1;

  op1 = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (op1) != PARM_DECL)
    return -1;

  index = ipa_get_param_decl_index_1 (descriptors, op1);
  if (index < 0
      || !parm_preserved_before_stmt_p (fbi, index, stmt, op1))
    return -1;

  return index;
}
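
/* The pattern being matched above is, for instance (a hypothetical dump):

     foo (int a)
     {
       int a.0;
       ...
       a.0_2 = a;	// STMT: a single assignment loading PARM_DECL a
       ...

   and the index of a is returned provided nothing between the beginning of
   the function and STMT may have modified a.  */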

/* Return true if memory reference REF (which must be a load through parameter
   with INDEX) loads data that are known to be unmodified in this function
   before reaching statement STMT.  */

static bool
parm_ref_data_preserved_p (struct func_body_info *fbi,
			   int index, gimple stmt, tree ref)
{
  struct param_aa_status *paa;
  bool modified = false;
  ao_ref refd;

  /* FIXME: FBI can be NULL if we are being called from outside
     ipa_node_analysis or ipcp_transform_function, which currently happens
     during inlining analysis.  It would be great to extend fbi's lifetime and
     always have it.  Currently, we are just not afraid of too much walking in
     that case.  */
  if (fbi)
    {
      if (aa_overwalked (fbi))
	return false;
      paa = parm_bb_aa_status_for_bb (fbi, gimple_bb (stmt), index);
      if (paa->ref_modified)
	return false;
    }
  else
    paa = NULL;

  gcc_checking_assert (gimple_vuse (stmt));
  ao_ref_init (&refd, ref);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (stmt), mark_modified,
				   &modified, NULL);
  if (fbi)
    fbi->aa_walked += walked;
  if (paa && modified)
    paa->ref_modified = true;
  return !modified;
}

/* Return true if the data pointed to by PARM (which is a parameter with INDEX)
   is known to be unmodified in this function before reaching call statement
   CALL into which it is passed.  FBI describes the function body.  */

static bool
parm_ref_data_pass_through_p (struct func_body_info *fbi, int index,
			      gimple call, tree parm)
{
  bool modified = false;
  ao_ref refd;

  /* It's unnecessary to calculate anything about memory contents for a const
     function because it is not going to use it.  But do not cache the result
     either.  Also, no such calculations for non-pointers.  */
  if (!gimple_vuse (call)
      || !POINTER_TYPE_P (TREE_TYPE (parm))
      || aa_overwalked (fbi))
    return false;

  struct param_aa_status *paa = parm_bb_aa_status_for_bb (fbi,
							  gimple_bb (call),
							  index);
  if (paa->pt_modified)
    return false;

  ao_ref_init_from_ptr_and_size (&refd, parm, NULL_TREE);
  int walked = walk_aliased_vdefs (&refd, gimple_vuse (call), mark_modified,
				   &modified, NULL);
  fbi->aa_walked += walked;
  if (modified)
    paa->pt_modified = true;
  return !modified;
}

/* Return true if we can prove that OP is a memory reference loading unmodified
   data from an aggregate passed as a parameter and if the aggregate is passed
   by reference, that the alias type of the load corresponds to the type of the
   formal parameter (so that we can rely on this type for TBAA in callers).
   INFO and PARMS_AINFO describe parameters of the current function (but the
   latter can be NULL), STMT is the load statement.  If function returns true,
   *INDEX_P, *OFFSET_P and *BY_REF is filled with the parameter index, offset
   within the aggregate and whether it is a load from a value passed by
   reference respectively.  */

static bool
ipa_load_from_parm_agg_1 (struct func_body_info *fbi,
			  vec<ipa_param_descriptor> descriptors,
			  gimple stmt, tree op, int *index_p,
			  HOST_WIDE_INT *offset_p, HOST_WIDE_INT *size_p,
			  bool *by_ref_p)
{
  int index;
  HOST_WIDE_INT size, max_size;
  tree base = get_ref_base_and_extent (op, offset_p, &size, &max_size);

  if (max_size == -1 || max_size != size || *offset_p < 0)
    return false;

  if (DECL_P (base))
    {
      int index = ipa_get_param_decl_index_1 (descriptors, base);
      if (index >= 0
	  && parm_preserved_before_stmt_p (fbi, index, stmt, op))
	{
	  *index_p = index;
	  *by_ref_p = false;
	  if (size_p)
	    *size_p = size;
	  return true;
	}
      return false;
    }

  if (TREE_CODE (base) != MEM_REF
      || TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME
      || !integer_zerop (TREE_OPERAND (base, 1)))
    return false;

  if (SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0)))
    {
      tree parm = SSA_NAME_VAR (TREE_OPERAND (base, 0));
      index = ipa_get_param_decl_index_1 (descriptors, parm);
    }
  else
    {
      /* This branch catches situations where a pointer parameter is not a
	 gimple register, for example:

	 void hip7(S*) (struct S * p)
	 {
	   void (*<T2e4>) (struct S *) D.1867;
	   ...
	   D.1867_2 = p.1_1->f;
	   ...
	 }  */

      gimple def = SSA_NAME_DEF_STMT (TREE_OPERAND (base, 0));
      index = load_from_unmodified_param (fbi, descriptors, def);
    }

  if (index >= 0
      && parm_ref_data_preserved_p (fbi, index, stmt, op))
    {
      *index_p = index;
      *by_ref_p = true;
      if (size_p)
	*size_p = size;
      return true;
    }
  return false;
}

/* Just like the previous function, just without the param_analysis_info
   pointer, for users outside of this file.  */

bool
ipa_load_from_parm_agg (struct ipa_node_params *info, gimple stmt,
			tree op, int *index_p, HOST_WIDE_INT *offset_p,
			bool *by_ref_p)
{
  return ipa_load_from_parm_agg_1 (NULL, info->descriptors, stmt, op, index_p,
				   offset_p, NULL, by_ref_p);
}

/* Given that an actual argument is an SSA_NAME (given in NAME) and is a result
   of an assignment statement STMT, try to determine whether we are actually
   handling any of the following cases and construct an appropriate jump
   function into JFUNC if so:

   1) The passed value is loaded from a formal parameter which is not a gimple
   register (most probably because it is addressable, the value has to be
   scalar) and we can guarantee the value has not changed.  This case can
   therefore be described by a simple pass-through jump function.  For example:

      foo (int a)
      {
	int a.0;
	...
	a.0_2 = a;
	bar (a.0_2);

   2) The passed value can be described by a simple arithmetic pass-through
   jump function.  E.g.

      foo (int a)
      {
	...
	D.2064_4 = a.1(D) + 4;
	bar (D.2064_4);

   This case can also occur in combination of the previous one, e.g.:

      foo (int a, int z)
      {
	...
	a.0_3 = a;
	D.2064_4 = a.0_3 + 4;
	foo (D.2064_4);

   3) The passed value is an address of an object within another one (which
   also passed by reference).  Such situations are described by an ancestor
   jump function and describe situations such as:

     B::foo() (struct B * const this)
     {
       ...
       D.1845_2 = &this_1(D)->D.1748;
       ...

   INFO is the structure describing individual parameters as accessed at
   different stages of IPA optimizations.  PARMS_AINFO contains the
   information that is only needed for intraprocedural analysis.  */

static void
compute_complex_assign_jump_func (struct func_body_info *fbi,
				  struct ipa_node_params *info,
				  struct ipa_jump_func *jfunc,
				  gcall *call, gimple stmt, tree name,
				  tree param_type)
{
  HOST_WIDE_INT offset, size, max_size;
  tree op1, tc_ssa, base, ssa;
  int index;

  op1 = gimple_assign_rhs1 (stmt);

  if (TREE_CODE (op1) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (op1))
	index = ipa_get_param_decl_index (info, SSA_NAME_VAR (op1));
      else
	index = load_from_unmodified_param (fbi, info->descriptors,
					    SSA_NAME_DEF_STMT (op1));
      tc_ssa = op1;
    }
  else
    {
      index = load_from_unmodified_param (fbi, info->descriptors, stmt);
      tc_ssa = gimple_assign_lhs (stmt);
    }

  if (index >= 0)
    {
      tree op2 = gimple_assign_rhs2 (stmt);

      if (op2)
	{
	  if (!is_gimple_ip_invariant (op2)
	      || (TREE_CODE_CLASS (gimple_expr_code (stmt)) != tcc_comparison
		  && !useless_type_conversion_p (TREE_TYPE (name),
						 TREE_TYPE (op1))))
	    return;

	  ipa_set_jf_arith_pass_through (jfunc, index, op2,
					 gimple_assign_rhs_code (stmt));
	}
      else if (gimple_assign_single_p (stmt))
	{
	  bool agg_p = parm_ref_data_pass_through_p (fbi, index, call, tc_ssa);
	  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
	}
      return;
    }

  if (TREE_CODE (op1) != ADDR_EXPR)
    return;
  op1 = TREE_OPERAND (op1, 0);
  if (TREE_CODE (TREE_TYPE (op1)) != RECORD_TYPE)
    return;
  base = get_ref_base_and_extent (op1, &offset, &size, &max_size);
  if (TREE_CODE (base) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size)
    return;
  offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
  ssa = TREE_OPERAND (base, 0);
  if (TREE_CODE (ssa) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (ssa)
      || offset < 0)
    return;

  /* Dynamic types are changed in constructors and destructors.  */
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (ssa));
  if (index >= 0 && param_type && POINTER_TYPE_P (param_type))
    ipa_set_ancestor_jf (jfunc, offset, index,
			 parm_ref_data_pass_through_p (fbi, index, call, ssa));
}

/* Extract the base, offset and MEM_REF expression from a statement ASSIGN if
   it looks like:

   iftmp.1_3 = &obj_2(D)->D.1762;

   The base of the MEM_REF must be a default definition SSA NAME of a
   parameter.  Return NULL_TREE if it looks otherwise.  In case of success, the
   whole MEM_REF expression is returned and the offset calculated from any
   handled components and the MEM_REF itself is stored into *OFFSET.  The whole
   RHS stripped off the ADDR_EXPR is stored into *OBJ_P.  */

static tree
get_ancestor_addr_info (gimple assign, tree *obj_p, HOST_WIDE_INT *offset)
{
  HOST_WIDE_INT size, max_size;
  tree expr, parm, obj;

  if (!gimple_assign_single_p (assign))
    return NULL_TREE;
  expr = gimple_assign_rhs1 (assign);

  if (TREE_CODE (expr) != ADDR_EXPR)
    return NULL_TREE;
  expr = TREE_OPERAND (expr, 0);
  obj = expr;
  expr = get_ref_base_and_extent (expr, offset, &size, &max_size);

  if (TREE_CODE (expr) != MEM_REF
      /* If this is a varying address, punt.  */
      || max_size == -1
      || max_size != size
      || *offset < 0)
    return NULL_TREE;
  parm = TREE_OPERAND (expr, 0);
  if (TREE_CODE (parm) != SSA_NAME
      || !SSA_NAME_IS_DEFAULT_DEF (parm)
      || TREE_CODE (SSA_NAME_VAR (parm)) != PARM_DECL)
    return NULL_TREE;

  *offset += mem_ref_offset (expr).to_short_addr () * BITS_PER_UNIT;
  *obj_p = obj;
  return expr;
}

/* Given that an actual argument is an SSA_NAME that is a result of a phi
   statement PHI, try to find out whether NAME is in fact a
   multiple-inheritance typecast from a descendant into an ancestor of a formal
   parameter and thus can be described by an ancestor jump function and if so,
   write the appropriate function into JFUNC.

   Essentially we want to match the following pattern:

     if (obj_2(D) != 0B)
       goto <bb 3>;
     else
       goto <bb 4>;

     <bb 3>:
     iftmp.1_3 = &obj_2(D)->D.1762;

     <bb 4>:
     # iftmp.1_1 = PHI <iftmp.1_3(3), 0B(2)>
     D.1879_6 = middleman_1 (iftmp.1_1, i_5(D));
     return D.1879_6;  */

static void
compute_complex_ancestor_jump_func (struct func_body_info *fbi,
				    struct ipa_node_params *info,
				    struct ipa_jump_func *jfunc,
				    gcall *call, gphi *phi)
{
  HOST_WIDE_INT offset;
  gimple assign, cond;
  basic_block phi_bb, assign_bb, cond_bb;
  tree tmp, parm, expr, obj;
  int index, i;

  if (gimple_phi_num_args (phi) != 2)
    return;

  if (integer_zerop (PHI_ARG_DEF (phi, 1)))
    tmp = PHI_ARG_DEF (phi, 0);
  else if (integer_zerop (PHI_ARG_DEF (phi, 0)))
    tmp = PHI_ARG_DEF (phi, 1);
  else
    return;
  if (TREE_CODE (tmp) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (tmp)
      || !POINTER_TYPE_P (TREE_TYPE (tmp))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (tmp))) != RECORD_TYPE)
    return;

  assign = SSA_NAME_DEF_STMT (tmp);
  assign_bb = gimple_bb (assign);
  if (!single_pred_p (assign_bb))
    return;
  expr = get_ancestor_addr_info (assign, &obj, &offset);
  if (!expr)
    return;
  parm = TREE_OPERAND (expr, 0);
  index = ipa_get_param_decl_index (info, SSA_NAME_VAR (parm));
  if (index < 0)
    return;

  cond_bb = single_pred (assign_bb);
  cond = last_stmt (cond_bb);
  if (!cond
      || gimple_code (cond) != GIMPLE_COND
      || gimple_cond_code (cond) != NE_EXPR
      || gimple_cond_lhs (cond) != parm
      || !integer_zerop (gimple_cond_rhs (cond)))
    return;

  phi_bb = gimple_bb (phi);
  for (i = 0; i < 2; i++)
    {
      basic_block pred = EDGE_PRED (phi_bb, i)->src;
      if (pred != assign_bb && pred != cond_bb)
	return;
    }

  ipa_set_ancestor_jf (jfunc, offset, index,
		       parm_ref_data_pass_through_p (fbi, index, call, parm));
}

/* Inspect the given TYPE and return true iff it has the same structure (the
   same number of fields of the same types) as a C++ member pointer.  If
   METHOD_PTR and DELTA are non-NULL, store the trees representing the
   corresponding fields there.  */

static bool
type_like_member_ptr_p (tree type, tree *method_ptr, tree *delta)
{
  tree fld;

  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  fld = TYPE_FIELDS (type);
  if (!fld || !POINTER_TYPE_P (TREE_TYPE (fld))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (fld))) != METHOD_TYPE
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;

  if (method_ptr)
    *method_ptr = fld;

  fld = DECL_CHAIN (fld);
  if (!fld || INTEGRAL_TYPE_P (fld)
      || !tree_fits_uhwi_p (DECL_FIELD_OFFSET (fld)))
    return false;
  if (delta)
    *delta = fld;

  if (DECL_CHAIN (fld))
    return false;

  return true;
}
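
/* The layout being recognized corresponds to the common representation of
   a C++ pointer to member function, roughly (the field names here are only
   illustrative):

     struct
     {
       void (T::*__pfn) ();	// method address or vtable index
       ptrdiff_t __delta;	// adjustment of the this pointer
     };

   i.e. a record whose first field has a POINTER_TYPE to a METHOD_TYPE and
   whose second and final field is an integer.  */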

/* If RHS is an SSA_NAME and it is defined by a simple copy assign statement,
   return the rhs of its defining statement.  Otherwise return RHS as it
   is.  */

static tree
get_ssa_def_if_simple_copy (tree rhs)
{
  while (TREE_CODE (rhs) == SSA_NAME && !SSA_NAME_IS_DEFAULT_DEF (rhs))
    {
      gimple def_stmt = SSA_NAME_DEF_STMT (rhs);

      if (gimple_assign_single_p (def_stmt))
	rhs = gimple_assign_rhs1 (def_stmt);
      else
	break;
    }
  return rhs;
}

/* Simple linked list, describing known contents of an aggregate before
   call.  */

struct ipa_known_agg_contents_list
{
  /* Offset and size of the described part of the aggregate.  */
  HOST_WIDE_INT offset, size;
  /* Known constant value or NULL if the contents is known to be unknown.  */
  tree constant;
  /* Pointer to the next structure in the list.  */
  struct ipa_known_agg_contents_list *next;
};

/* Find the proper place in linked list of ipa_known_agg_contents_list
   structures where to put a new one with the given LHS_OFFSET and LHS_SIZE,
   unless there is a partial overlap, in which case return NULL, or such
   element is already there, in which case set *ALREADY_THERE to true.  */

static struct ipa_known_agg_contents_list **
get_place_in_agg_contents_list (struct ipa_known_agg_contents_list **list,
				HOST_WIDE_INT lhs_offset,
				HOST_WIDE_INT lhs_size,
				bool *already_there)
{
  struct ipa_known_agg_contents_list **p = list;
  while (*p && (*p)->offset < lhs_offset)
    {
      if ((*p)->offset + (*p)->size > lhs_offset)
	return NULL;
      p = &(*p)->next;
    }

  if (*p && (*p)->offset < lhs_offset + lhs_size)
    {
      if ((*p)->offset == lhs_offset && (*p)->size == lhs_size)
	/* We already know this value is subsequently overwritten with
	   something else.  */
	*already_there = true;
      else
	/* Otherwise this is a partial overlap which we cannot
	   represent.  */
	return NULL;
    }
  return p;
}
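
/* A short worked example of the lookup above: if the list already describes
   stores at offsets 0 and 64, both of size 32, then

     - LHS_OFFSET 32, LHS_SIZE 32 yields the link between the two entries,
     - LHS_OFFSET 0, LHS_SIZE 32 sets *ALREADY_THERE, because the aggregate
       is walked backwards from the call and the entry already recorded is
       the one that prevails, and
     - LHS_OFFSET 16, LHS_SIZE 32 overlaps the first entry only partially,
       so NULL is returned and the caller stops scanning.  */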

/* Build aggregate jump function from LIST, assuming there are exactly
   CONST_COUNT constant entries there and that the offset of the passed
   argument is ARG_OFFSET and store it into JFUNC.  */

static void
build_agg_jump_func_from_list (struct ipa_known_agg_contents_list *list,
			       int const_count, HOST_WIDE_INT arg_offset,
			       struct ipa_jump_func *jfunc)
{
  vec_alloc (jfunc->agg.items, const_count);
  while (list)
    {
      if (list->constant)
	{
	  struct ipa_agg_jf_item item;
	  item.offset = list->offset - arg_offset;
	  gcc_assert ((item.offset % BITS_PER_UNIT) == 0);
	  item.value = unshare_expr_without_location (list->constant);
	  jfunc->agg.items->quick_push (item);
	}
      list = list->next;
    }
}

/* Traverse statements from CALL backwards, scanning whether an aggregate given
   in ARG is filled in with constant values.  ARG can either be an aggregate
   expression or a pointer to an aggregate.  ARG_TYPE is the type of the
   aggregate.  JFUNC is the jump function into which the constants are
   subsequently stored.  */

static void
determine_locally_known_aggregate_parts (gcall *call, tree arg,
					 tree arg_type,
					 struct ipa_jump_func *jfunc)
{
  struct ipa_known_agg_contents_list *list = NULL;
  int item_count = 0, const_count = 0;
  HOST_WIDE_INT arg_offset, arg_size;
  gimple_stmt_iterator gsi;
  tree arg_base;
  bool check_ref, by_ref;
  ao_ref r;

  /* The function operates in three stages.  First, we prepare check_ref, r,
     arg_base and arg_offset based on what is actually passed as an actual
     argument.  */

  if (POINTER_TYPE_P (arg_type))
    {
      by_ref = true;
      if (TREE_CODE (arg) == SSA_NAME)
	{
	  tree type_size;
	  if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (arg_type))))
	    return;
	  check_ref = true;
	  arg_base = arg;
	  arg_offset = 0;
	  type_size = TYPE_SIZE (TREE_TYPE (arg_type));
	  arg_size = tree_to_uhwi (type_size);
	  ao_ref_init_from_ptr_and_size (&r, arg_base, NULL_TREE);
	}
      else if (TREE_CODE (arg) == ADDR_EXPR)
	{
	  HOST_WIDE_INT arg_max_size;

	  arg = TREE_OPERAND (arg, 0);
	  arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					      &arg_max_size);
	  if (arg_max_size == -1
	      || arg_max_size != arg_size
	      || arg_offset < 0)
	    return;
	  if (DECL_P (arg_base))
	    {
	      check_ref = false;
	      ao_ref_init (&r, arg_base);
	    }
	  else
	    return;
	}
      else
	return;
    }
  else
    {
      HOST_WIDE_INT arg_max_size;

      gcc_checking_assert (AGGREGATE_TYPE_P (TREE_TYPE (arg)));

      by_ref = false;
      check_ref = false;
      arg_base = get_ref_base_and_extent (arg, &arg_offset, &arg_size,
					  &arg_max_size);
      if (arg_max_size == -1
	  || arg_max_size != arg_size
	  || arg_offset < 0)
	return;

      ao_ref_init (&r, arg);
    }

  /* Second stage walks back the BB, looks at individual statements and as long
     as it is confident of how the statements affect contents of the
     aggregates, it builds a sorted linked list of ipa_agg_jf_list structures
     describing it.  */
  gsi = gsi_for_stmt (call);
  gsi_prev (&gsi);
  for (; !gsi_end_p (gsi); gsi_prev (&gsi))
    {
      struct ipa_known_agg_contents_list *n, **p;
      gimple stmt = gsi_stmt (gsi);
      HOST_WIDE_INT lhs_offset, lhs_size, lhs_max_size;
      tree lhs, rhs, lhs_base;

      if (!stmt_may_clobber_ref_p_1 (stmt, &r))
	continue;
      if (!gimple_assign_single_p (stmt))
	break;

      lhs = gimple_assign_lhs (stmt);
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs))
	  || TREE_CODE (lhs) == BIT_FIELD_REF
	  || contains_bitfld_component_ref_p (lhs))
	break;

      lhs_base = get_ref_base_and_extent (lhs, &lhs_offset, &lhs_size,
					  &lhs_max_size);
      if (lhs_max_size == -1
	  || lhs_max_size != lhs_size)
	break;

      if (check_ref)
	{
	  if (TREE_CODE (lhs_base) != MEM_REF
	      || TREE_OPERAND (lhs_base, 0) != arg_base
	      || !integer_zerop (TREE_OPERAND (lhs_base, 1)))
	    break;
	}
      else if (lhs_base != arg_base)
	{
	  if (DECL_P (lhs_base))
	    continue;
	  else
	    break;
	}

      bool already_there = false;
      p = get_place_in_agg_contents_list (&list, lhs_offset, lhs_size,
					  &already_there);
      if (!p)
	break;
      if (already_there)
	continue;

      rhs = get_ssa_def_if_simple_copy (rhs);
      n = XALLOCA (struct ipa_known_agg_contents_list);
      n->size = lhs_size;
      n->offset = lhs_offset;
      if (is_gimple_ip_invariant (rhs))
	{
	  n->constant = rhs;
	  const_count++;
	}
      else
	n->constant = NULL_TREE;
      n->next = *p;
      *p = n;

      item_count++;
      if (const_count == PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS)
	  || item_count == 2 * PARAM_VALUE (PARAM_IPA_MAX_AGG_ITEMS))
	break;
    }

  /* Third stage just goes over the list and creates an appropriate vector of
     ipa_agg_jf_item structures out of it, of course only if there are
     any known constants to begin with.  */

  if (const_count)
    {
      jfunc->agg.by_ref = by_ref;
      build_agg_jump_func_from_list (list, const_count, arg_offset, jfunc);
    }
}
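
/* All three stages can be seen on a hypothetical caller such as

     struct S { int a; int b; };

     void
     caller (void)
     {
       struct S s;
       s.a = 1;
       s.b = 2;
       callee (&s);
     }

   Walking backwards from the call finds the two stores, both constant, so
   the resulting jump function describes an aggregate passed by reference
   whose items are roughly {offset 0: 1, offset 32: 2} with 32-bit ints.  */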

/* Return the Ith param type of the callee associated with call graph
   edge E.  */

static tree
ipa_get_callee_param_type (struct cgraph_edge *e, int i)
{
  int n;
  tree type = (e->callee
	       ? TREE_TYPE (e->callee->decl)
	       : gimple_call_fntype (e->call_stmt));
  tree t = TYPE_ARG_TYPES (type);

  for (n = 0; n < i; n++)
    {
      if (!t)
	break;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_VALUE (t);
  if (!e->callee)
    return NULL;
  t = DECL_ARGUMENTS (e->callee->decl);
  for (n = 0; n < i; n++)
    {
      if (!t)
	return NULL;
      t = TREE_CHAIN (t);
    }
  if (t)
    return TREE_TYPE (t);
  return NULL;
}

/* Compute jump function for all arguments of callsite CS and insert the
   information in the jump_functions array in the ipa_edge_args corresponding
   to this callsite.  */

static void
ipa_compute_jump_functions_for_edge (struct func_body_info *fbi,
				     struct cgraph_edge *cs)
{
  struct ipa_node_params *info = IPA_NODE_REF (cs->caller);
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  gcall *call = cs->call_stmt;
  int n, arg_num = gimple_call_num_args (call);
  bool useful_context = false;

  if (arg_num == 0 || args->jump_functions)
    return;
  vec_safe_grow_cleared (args->jump_functions, arg_num);
  if (flag_devirtualize)
    vec_safe_grow_cleared (args->polymorphic_call_contexts, arg_num);

  if (gimple_call_internal_p (call))
    return;
  if (ipa_func_spec_opts_forbid_analysis_p (cs->caller))
    return;

  for (n = 0; n < arg_num; n++)
    {
      struct ipa_jump_func *jfunc = ipa_get_ith_jump_func (args, n);
      tree arg = gimple_call_arg (call, n);
      tree param_type = ipa_get_callee_param_type (cs, n);
      if (flag_devirtualize && POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  tree instance;
	  struct ipa_polymorphic_call_context context (cs->caller->decl,
						       arg, cs->call_stmt,
						       &instance);
	  context.get_dynamic_type (instance, arg, NULL, cs->call_stmt);
	  *ipa_get_ith_polymorhic_call_context (args, n) = context;
	  if (!context.useless_p ())
	    useful_context = true;
	}

      if (POINTER_TYPE_P (TREE_TYPE (arg)))
	{
	  unsigned HOST_WIDE_INT hwi_bitpos;
	  unsigned align;

	  if (get_pointer_alignment_1 (arg, &align, &hwi_bitpos)
	      && align % BITS_PER_UNIT == 0
	      && hwi_bitpos % BITS_PER_UNIT == 0)
	    {
	      jfunc->alignment.known = true;
	      jfunc->alignment.align = align / BITS_PER_UNIT;
	      jfunc->alignment.misalign = hwi_bitpos / BITS_PER_UNIT;
	    }
	  else
	    gcc_assert (!jfunc->alignment.known);
	}
      else
	gcc_assert (!jfunc->alignment.known);

      if (is_gimple_ip_invariant (arg))
	ipa_set_jf_constant (jfunc, arg, cs);
      else if (!is_gimple_reg_type (TREE_TYPE (arg))
	       && TREE_CODE (arg) == PARM_DECL)
	{
	  int index = ipa_get_param_decl_index (info, arg);

	  gcc_assert (index >= 0);
	  /* Aggregate passed by value, check for pass-through, otherwise we
	     will attempt to fill in aggregate contents later in this
	     for cycle.  */
	  if (parm_preserved_before_stmt_p (fbi, index, call, arg))
	    {
	      ipa_set_jf_simple_pass_through (jfunc, index, false);
	      continue;
	    }
	}
      else if (TREE_CODE (arg) == SSA_NAME)
	{
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      int index = ipa_get_param_decl_index (info, SSA_NAME_VAR (arg));
	      if (index >= 0)
		{
		  bool agg_p;
		  agg_p = parm_ref_data_pass_through_p (fbi, index, call, arg);
		  ipa_set_jf_simple_pass_through (jfunc, index, agg_p);
		}
	    }
	  else
	    {
	      gimple stmt = SSA_NAME_DEF_STMT (arg);
	      if (is_gimple_assign (stmt))
		compute_complex_assign_jump_func (fbi, info, jfunc,
						  call, stmt, arg, param_type);
	      else if (gimple_code (stmt) == GIMPLE_PHI)
		compute_complex_ancestor_jump_func (fbi, info, jfunc,
						    call,
						    as_a <gphi *> (stmt));
	    }
	}

      /* If ARG is pointer, we can not use its type to determine the type of aggregate
	 passed (because type conversions are ignored in gimple).  Usually we can
	 safely get type from function declaration, but in case of K&R prototypes or
	 variadic functions we can try our luck with type of the pointer passed.
	 TODO: Since we look for actual initialization of the memory object, we may better
	 work out the type based on the memory stores we find.  */
      if (!param_type)
	param_type = TREE_TYPE (arg);

      if ((jfunc->type != IPA_JF_PASS_THROUGH
	   || !ipa_get_jf_pass_through_agg_preserved (jfunc))
	  && (jfunc->type != IPA_JF_ANCESTOR
	      || !ipa_get_jf_ancestor_agg_preserved (jfunc))
	  && (AGGREGATE_TYPE_P (TREE_TYPE (arg))
	      || POINTER_TYPE_P (param_type)))
	determine_locally_known_aggregate_parts (call, arg, param_type, jfunc);
    }
  if (!useful_context)
    vec_free (args->polymorphic_call_contexts);
}

/* Compute jump functions for all edges - both direct and indirect - outgoing
   from BB.  */

static void
ipa_compute_jump_functions_for_bb (struct func_body_info *fbi, basic_block bb)
{
  struct ipa_bb_info *bi = ipa_get_bb_info (fbi, bb);
  int i;
  struct cgraph_edge *cs;

  FOR_EACH_VEC_ELT_REVERSE (bi->cg_edges, i, cs)
    {
      struct cgraph_node *callee = cs->callee;

      if (callee)
	{
	  callee->ultimate_alias_target ();
	  /* We do not need to bother analyzing calls to unknown functions
	     unless they may become known during lto/whopr.  */
	  if (!callee->definition && !flag_lto)
	    continue;
	}
      ipa_compute_jump_functions_for_edge (fbi, cs);
    }
}

/* If STMT looks like a statement loading a value from a member pointer formal
   parameter, return that parameter and store the offset of the field to
   *OFFSET_P, if it is non-NULL.  Otherwise return NULL (but *OFFSET_P still
   might be clobbered).  If USE_DELTA, then we look for a use of the delta
   field rather than the pfn.  */

static tree
ipa_get_stmt_member_ptr_load_param (gimple stmt, bool use_delta,
				    HOST_WIDE_INT *offset_p)
{
  tree rhs, rec, ref_field, ref_offset, fld, ptr_field, delta_field;

  if (!gimple_assign_single_p (stmt))
    return NULL_TREE;

  rhs = gimple_assign_rhs1 (stmt);
  if (TREE_CODE (rhs) == COMPONENT_REF)
    {
      ref_field = TREE_OPERAND (rhs, 1);
      rhs = TREE_OPERAND (rhs, 0);
    }
  else
    ref_field = NULL_TREE;
  if (TREE_CODE (rhs) != MEM_REF)
    return NULL_TREE;
  rec = TREE_OPERAND (rhs, 0);
  if (TREE_CODE (rec) != ADDR_EXPR)
    return NULL_TREE;
  rec = TREE_OPERAND (rec, 0);
  if (TREE_CODE (rec) != PARM_DECL
      || !type_like_member_ptr_p (TREE_TYPE (rec), &ptr_field, &delta_field))
    return NULL_TREE;
  ref_offset = TREE_OPERAND (rhs, 1);

  if (use_delta)
    fld = delta_field;
  else
    fld = ptr_field;
  if (offset_p)
    *offset_p = int_bit_position (fld);

  if (ref_field)
    {
      if (integer_nonzerop (ref_offset))
	return NULL_TREE;
      return ref_field == fld ? rec : NULL_TREE;
    }
  else
    return tree_int_cst_equal (byte_position (fld), ref_offset) ? rec
      : NULL_TREE;
}

/* Returns true iff T is an SSA_NAME defined by a statement.  */

static bool
ipa_is_ssa_with_stmt_def (tree t)
{
  if (TREE_CODE (t) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (t))
    return true;
  else
    return false;
}

/* Find the indirect call graph edge corresponding to STMT and mark it as a
   call to a parameter number PARAM_INDEX.  NODE is the caller.  Return the
   indirect call graph edge.  */

static struct cgraph_edge *
ipa_note_param_call (struct cgraph_node *node, int param_index,
		     gcall *stmt)
{
  struct cgraph_edge *cs;

  cs = node->get_edge (stmt);
  cs->indirect_info->param_index = param_index;
  cs->indirect_info->agg_contents = 0;
  cs->indirect_info->member_ptr = 0;
  return cs;
}

/* Analyze the CALL and examine uses of formal parameters of the caller NODE
   (described by INFO).  PARMS_AINFO is a pointer to a vector containing
   intermediate information about each formal parameter.  Currently it checks
   whether the call calls a pointer that is a formal parameter and if so, the
   parameter is marked with the called flag and an indirect call graph edge
   describing the call is created.  This is very simple for ordinary pointers
   represented in SSA but not-so-nice when it comes to member pointers.  The
   ugly part of this function does nothing more than trying to match the
   pattern of such a call.  An example of such a pattern is the gimple dump
   below, the call is on the last line:

     <bb 2>:
       f$__delta_5 = f.__delta;
       f$__pfn_24 = f.__pfn;

   or
     <bb 2>:
       f$__delta_5 = MEM[(struct  *)&f];
       f$__pfn_24 = MEM[(struct  *)&f + 4B];

   and a few lines below:

     <bb 5>
       D.2496_3 = (int) f$__pfn_24;
       D.2497_4 = D.2496_3 & 1;
       ...

     <bb 6>:
       D.2500_7 = (unsigned int) f$__delta_5;
       D.2501_8 = &S + D.2500_7;
       D.2502_9 = (int (*__vtbl_ptr_type) (void) * *) D.2501_8;
       D.2503_10 = *D.2502_9;
       D.2504_12 = f$__pfn_24 + -1;
       D.2505_13 = (unsigned int) D.2504_12;
       D.2506_14 = D.2503_10 + D.2505_13;
       D.2507_15 = *D.2506_14;
       iftmp.11_16 = (String:: *) D.2507_15;

     <bb 7>:
       # iftmp.11_1 = PHI <iftmp.11_16(3), f$__pfn_24(2)>
       D.2500_19 = (unsigned int) f$__delta_5;
       D.2508_20 = &S + D.2500_19;
       D.2493_21 = iftmp.11_1 (D.2508_20, 4);

   Such patterns are results of simple calls to a member pointer:

     int doprinting (int (MyString::* f)(int) const)
     {
       MyString S ("somestring");
       ...
     }

   Moreover, the function also looks for called pointers loaded from aggregates
   passed by value or reference.  */

static void
ipa_analyze_indirect_call_uses (struct func_body_info *fbi, gcall *call,
				tree target)
{
  struct ipa_node_params *info = fbi->info;
  HOST_WIDE_INT offset;
  bool by_ref;

  if (SSA_NAME_IS_DEFAULT_DEF (target))
    {
      tree var = SSA_NAME_VAR (target);
      int index = ipa_get_param_decl_index (info, var);

      if (index >= 0)
	ipa_note_param_call (fbi->node, index, call);
      return;
    }

  int index;
  gimple def = SSA_NAME_DEF_STMT (target);
  if (gimple_assign_single_p (def)
      && ipa_load_from_parm_agg_1 (fbi, info->descriptors, def,
				   gimple_assign_rhs1 (def), &index, &offset,
				   NULL, &by_ref))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->by_ref = by_ref;
      return;
    }

  /* Now we need to try to match the complex pattern of calling a member
     pointer.  */
  if (gimple_code (def) != GIMPLE_PHI
      || gimple_phi_num_args (def) != 2
      || !POINTER_TYPE_P (TREE_TYPE (target))
      || TREE_CODE (TREE_TYPE (TREE_TYPE (target))) != METHOD_TYPE)
    return;

  /* First, we need to check whether one of these is a load from a member
     pointer that is a parameter to this function.  */
  tree n1 = PHI_ARG_DEF (def, 0);
  tree n2 = PHI_ARG_DEF (def, 1);
  if (!ipa_is_ssa_with_stmt_def (n1) || !ipa_is_ssa_with_stmt_def (n2))
    return;
  gimple d1 = SSA_NAME_DEF_STMT (n1);
  gimple d2 = SSA_NAME_DEF_STMT (n2);

  tree rec;
  basic_block bb, virt_bb;
  basic_block join = gimple_bb (def);
  if ((rec = ipa_get_stmt_member_ptr_load_param (d1, false, &offset)))
    {
      if (ipa_get_stmt_member_ptr_load_param (d2, false, NULL))
	return;

      bb = EDGE_PRED (join, 0)->src;
      virt_bb = gimple_bb (d2);
    }
  else if ((rec = ipa_get_stmt_member_ptr_load_param (d2, false, &offset)))
    {
      bb = EDGE_PRED (join, 1)->src;
      virt_bb = gimple_bb (d1);
    }
  else
    return;

  /* Second, we need to check that the basic blocks are laid out in the way
     corresponding to the pattern.  */

  if (!single_pred_p (virt_bb) || !single_succ_p (virt_bb)
      || single_pred (virt_bb) != bb
      || single_succ (virt_bb) != join)
    return;

  /* Third, let's see that the branching is done depending on the least
     significant bit of the pfn.  */

  gimple branch = last_stmt (bb);
  if (!branch || gimple_code (branch) != GIMPLE_COND)
    return;

  if ((gimple_cond_code (branch) != NE_EXPR
       && gimple_cond_code (branch) != EQ_EXPR)
      || !integer_zerop (gimple_cond_rhs (branch)))
    return;

  tree cond = gimple_cond_lhs (branch);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);
  if (!is_gimple_assign (def)
      || gimple_assign_rhs_code (def) != BIT_AND_EXPR
      || !integer_onep (gimple_assign_rhs2 (def)))
    return;

  cond = gimple_assign_rhs1 (def);
  if (!ipa_is_ssa_with_stmt_def (cond))
    return;

  def = SSA_NAME_DEF_STMT (cond);

  if (is_gimple_assign (def)
      && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
    {
      cond = gimple_assign_rhs1 (def);
      if (!ipa_is_ssa_with_stmt_def (cond))
	return;
      def = SSA_NAME_DEF_STMT (cond);
    }

  tree rec2;
  rec2 = ipa_get_stmt_member_ptr_load_param (def,
					     (TARGET_PTRMEMFUNC_VBIT_LOCATION
					      == ptrmemfunc_vbit_in_delta),
					     NULL);
  if (rec != rec2)
    return;

  index = ipa_get_param_decl_index (info, rec);
  if (index >= 0
      && parm_preserved_before_stmt_p (fbi, index, call, rec))
    {
      struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
      cs->indirect_info->offset = offset;
      cs->indirect_info->agg_contents = 1;
      cs->indirect_info->member_ptr = 1;
    }
}
/* Analyze a CALL to an OBJ_TYPE_REF which is passed in TARGET and if the
   object referenced in the expression is a formal parameter of the caller
   FBI->node (described by FBI->info), create a call note for the
   statement.  */

static void
ipa_analyze_virtual_call_uses (struct func_body_info *fbi,
			       gcall *call, tree target)
{
  tree obj = OBJ_TYPE_REF_OBJECT (target);
  int index;
  HOST_WIDE_INT anc_offset;

  if (!flag_devirtualize)
    return;

  if (TREE_CODE (obj) != SSA_NAME)
    return;

  struct ipa_node_params *info = fbi->info;
  if (SSA_NAME_IS_DEFAULT_DEF (obj))
    {
      struct ipa_jump_func jfunc;
      if (TREE_CODE (SSA_NAME_VAR (obj)) != PARM_DECL)
	return;

      anc_offset = 0;
      index = ipa_get_param_decl_index (info, SSA_NAME_VAR (obj));
      gcc_assert (index >= 0);
      if (detect_type_change_ssa (obj, obj_type_ref_class (target),
				  call, &jfunc))
	return;
    }
  else
    {
      struct ipa_jump_func jfunc;
      gimple stmt = SSA_NAME_DEF_STMT (obj);
      tree expr;

      expr = get_ancestor_addr_info (stmt, &obj, &anc_offset);
      if (!expr)
	return;
      index = ipa_get_param_decl_index (info,
					SSA_NAME_VAR (TREE_OPERAND (expr, 0)));
      gcc_assert (index >= 0);
      if (detect_type_change (obj, expr, obj_type_ref_class (target),
			      call, &jfunc, anc_offset))
	return;
    }

  struct cgraph_edge *cs = ipa_note_param_call (fbi->node, index, call);
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  ii->offset = anc_offset;
  ii->otr_token = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (target));
  ii->otr_type = obj_type_ref_class (target);
  ii->polymorphic = 1;
}
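
/* Illustrative sketch (hypothetical types, not part of the pass): a virtual
   call through a formal parameter for which the function above creates a
   polymorphic call note.  */
#if 0
struct B { virtual int foo (); };

int
dispatch (B *b)
{
  /* The OBJ_TYPE_REF built for this call references the parameter B,
     so an indirect-call note with polymorphic == 1 is recorded.  */
  return b->foo ();
}
#endif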
/* Analyze a call statement CALL whether and how it utilizes formal parameters
   of the caller (described by FBI->info).  FBI also holds intermediate
   information about each formal parameter.  */

static void
ipa_analyze_call_uses (struct func_body_info *fbi, gcall *call)
{
  tree target = gimple_call_fn (call);

  if (!target
      || (TREE_CODE (target) != SSA_NAME
	  && !virtual_method_call_p (target)))
    return;

  struct cgraph_edge *cs = fbi->node->get_edge (call);
  /* If we previously turned the call into a direct call, there is
     no need to analyze.  */
  if (cs && !cs->indirect_unknown_callee)
    return;

  if (cs->indirect_info->polymorphic && flag_devirtualize)
    {
      tree instance;
      tree target = gimple_call_fn (call);
      ipa_polymorphic_call_context context (current_function_decl,
					    target, call, &instance);

      gcc_checking_assert (cs->indirect_info->otr_type
			   == obj_type_ref_class (target));
      gcc_checking_assert (cs->indirect_info->otr_token
			   == tree_to_shwi (OBJ_TYPE_REF_TOKEN (target)));

      cs->indirect_info->vptr_changed
	= !context.get_dynamic_type (instance,
				     OBJ_TYPE_REF_OBJECT (target),
				     obj_type_ref_class (target), call);
      cs->indirect_info->context = context;
    }

  if (TREE_CODE (target) == SSA_NAME)
    ipa_analyze_indirect_call_uses (fbi, call, target);
  else if (virtual_method_call_p (target))
    ipa_analyze_virtual_call_uses (fbi, call, target);
}
/* Analyze the call statement STMT with respect to formal parameters (described
   in INFO) of caller given by FBI->NODE.  Currently it only checks whether
   formal parameters are called.  */

static void
ipa_analyze_stmt_uses (struct func_body_info *fbi, gimple stmt)
{
  if (is_gimple_call (stmt))
    ipa_analyze_call_uses (fbi, as_a <gcall *> (stmt));
}
/* Callback of walk_stmt_load_store_addr_ops for the visit_load.
   If OP is a parameter declaration, mark it as used in the info structure
   passed in DATA.  */

static bool
visit_ref_for_mod_analysis (gimple, tree op, tree, void *data)
{
  struct ipa_node_params *info = (struct ipa_node_params *) data;

  op = get_base_address (op);
  if (op
      && TREE_CODE (op) == PARM_DECL)
    {
      int index = ipa_get_param_decl_index (info, op);
      gcc_assert (index >= 0);
      ipa_set_param_used (info, index, true);
    }

  return false;
}
/* Scan the statements in BB and inspect the uses of formal parameters.  Store
   the findings in various structures of the associated ipa_node_params
   structure, such as parameter flags, notes etc.  FBI holds various data about
   the function being analyzed.  */

static void
ipa_analyze_params_uses_in_bb (struct func_body_info *fbi, basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);

      if (is_gimple_debug (stmt))
	continue;

      ipa_analyze_stmt_uses (fbi, stmt);
      walk_stmt_load_store_addr_ops (stmt, fbi->info,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis,
				     visit_ref_for_mod_analysis);
    }
  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    walk_stmt_load_store_addr_ops (gsi_stmt (gsi), fbi->info,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis,
				   visit_ref_for_mod_analysis);
}
/* Calculate controlled uses of parameters of NODE.  */

static void
ipa_analyze_controlled_uses (struct cgraph_node *node)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);

  for (int i = 0; i < ipa_get_param_count (info); i++)
    {
      tree parm = ipa_get_param (info, i);
      int controlled_uses = 0;

      /* For SSA regs see if parameter is used.  For non-SSA we compute
	 the flag during modification analysis.  */
      if (is_gimple_reg (parm))
	{
	  tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl),
				       parm);
	  if (ddef && !has_zero_uses (ddef))
	    {
	      imm_use_iterator imm_iter;
	      use_operand_p use_p;

	      ipa_set_param_used (info, i, true);
	      FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ddef)
		if (!is_gimple_call (USE_STMT (use_p)))
		  {
		    if (!is_gimple_debug (USE_STMT (use_p)))
		      {
			controlled_uses = IPA_UNDESCRIBED_USE;
			break;
		      }
		  }
		else
		  controlled_uses++;
	    }
	  else
	    controlled_uses = 0;
	}
      else
	controlled_uses = IPA_UNDESCRIBED_USE;
      ipa_set_controlled_uses (info, i, controlled_uses);
    }
}
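
/* Illustrative sketch (hypothetical functions, not part of the pass): in the
   function below the default def of F has exactly two uses and both are
   arguments of call statements, so its controlled-uses count is 2; storing F
   into memory would instead force IPA_UNDESCRIBED_USE.  */
#if 0
extern void consume (void (*) (void));

void
forward (void (*f) (void))
{
  consume (f);
  consume (f);
}
#endif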
/* Free stuff in BI.  */

static void
free_ipa_bb_info (struct ipa_bb_info *bi)
{
  bi->cg_edges.release ();
  bi->param_aa_statuses.release ();
}
/* Dominator walker driving the analysis.  */

class analysis_dom_walker : public dom_walker
{
public:
  analysis_dom_walker (struct func_body_info *fbi)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi) {}

  virtual void before_dom_children (basic_block);

private:
  struct func_body_info *m_fbi;
};

void
analysis_dom_walker::before_dom_children (basic_block bb)
{
  ipa_analyze_params_uses_in_bb (m_fbi, bb);
  ipa_compute_jump_functions_for_bb (m_fbi, bb);
}
/* Initialize the array describing properties of formal parameters
   of NODE, analyze their uses and compute jump functions associated
   with actual arguments of calls from within NODE.  */

void
ipa_analyze_node (struct cgraph_node *node)
{
  struct func_body_info fbi;
  struct ipa_node_params *info;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  info = IPA_NODE_REF (node);

  if (info->analysis_done)
    return;
  info->analysis_done = 1;

  if (ipa_func_spec_opts_forbid_analysis_p (node))
    {
      for (int i = 0; i < ipa_get_param_count (info); i++)
	{
	  ipa_set_param_used (info, i, true);
	  ipa_set_controlled_uses (info, i, IPA_UNDESCRIBED_USE);
	}
      return;
    }

  struct function *func = DECL_STRUCT_FUNCTION (node->decl);
  push_cfun (func);
  calculate_dominance_info (CDI_DOMINATORS);
  ipa_initialize_node_params (node);
  ipa_analyze_controlled_uses (node);

  fbi.node = node;
  fbi.info = IPA_NODE_REF (node);
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = ipa_get_param_count (info);
  fbi.aa_walked = 0;

  for (struct cgraph_edge *cs = node->callees; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  for (struct cgraph_edge *cs = node->indirect_calls; cs; cs = cs->next_callee)
    {
      ipa_bb_info *bi = ipa_get_bb_info (&fbi, gimple_bb (cs->call_stmt));
      bi->cg_edges.safe_push (cs);
    }

  analysis_dom_walker (&fbi).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  pop_cfun ();
}
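
/* Minimal sketch of a driver (assumed, not part of this file): a pass that
   wanted to run this analysis over the whole unit could do so like this.  */
#if 0
static void
analyze_all_function_bodies (void)
{
  struct cgraph_node *node;

  ipa_register_cgraph_hooks ();
  FOR_EACH_DEFINED_FUNCTION (node)
    if (node->has_gimple_body_p ())
      ipa_analyze_node (node);
}
#endif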
/* Update the jump functions associated with call graph edge E when the call
   graph edge CS is being inlined, assuming that E->caller is already (possibly
   indirectly) inlined into CS->callee and that E has not been inlined.  */

static void
update_jump_functions_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_edge *e)
{
  struct ipa_edge_args *top = IPA_EDGE_REF (cs);
  struct ipa_edge_args *args = IPA_EDGE_REF (e);
  int count = ipa_get_cs_argument_count (args);
  int i;

  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *dst = ipa_get_ith_jump_func (args, i);
      struct ipa_polymorphic_call_context *dst_ctx
	= ipa_get_ith_polymorhic_call_context (args, i);

      if (dst->type == IPA_JF_ANCESTOR)
	{
	  struct ipa_jump_func *src;
	  int dst_fid = dst->value.ancestor.formal_id;
	  struct ipa_polymorphic_call_context *src_ctx
	    = ipa_get_ith_polymorhic_call_context (top, dst_fid);

	  /* Variable number of arguments can cause havoc if we try to access
	     one that does not exist in the inlined edge.  So make sure we
	     don't.  */
	  if (dst_fid >= ipa_get_cs_argument_count (top))
	    {
	      ipa_set_jf_unknown (dst);
	      continue;
	    }

	  src = ipa_get_ith_jump_func (top, dst_fid);

	  if (src_ctx && !src_ctx->useless_p ())
	    {
	      struct ipa_polymorphic_call_context ctx = *src_ctx;

	      /* TODO: Make type preserved safe WRT contexts.  */
	      if (!ipa_get_jf_ancestor_type_preserved (dst))
		ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
	      ctx.offset_by (dst->value.ancestor.offset);
	      if (!ctx.useless_p ())
		{
		  vec_safe_grow_cleared (args->polymorphic_call_contexts,
					 count);
		  dst_ctx = ipa_get_ith_polymorhic_call_context (args, i);

		  dst_ctx->combine_with (ctx);
		}
	    }

	  if (src->agg.items
	      && (dst->value.ancestor.agg_preserved || !src->agg.by_ref))
	    {
	      struct ipa_agg_jf_item *item;
	      int j;

	      /* Currently we do not produce clobber aggregate jump functions,
		 replace with merging when we do.  */
	      gcc_assert (!dst->agg.items);

	      dst->agg.items = vec_safe_copy (src->agg.items);
	      dst->agg.by_ref = src->agg.by_ref;
	      FOR_EACH_VEC_SAFE_ELT (dst->agg.items, j, item)
		item->offset -= dst->value.ancestor.offset;
	    }

	  if (src->type == IPA_JF_PASS_THROUGH
	      && src->value.pass_through.operation == NOP_EXPR)
	    {
	      dst->value.ancestor.formal_id = src->value.pass_through.formal_id;
	      dst->value.ancestor.agg_preserved &=
		src->value.pass_through.agg_preserved;
	    }
	  else if (src->type == IPA_JF_ANCESTOR)
	    {
	      dst->value.ancestor.formal_id = src->value.ancestor.formal_id;
	      dst->value.ancestor.offset += src->value.ancestor.offset;
	      dst->value.ancestor.agg_preserved &=
		src->value.ancestor.agg_preserved;
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
      else if (dst->type == IPA_JF_PASS_THROUGH)
	{
	  struct ipa_jump_func *src;
	  /* We must check range due to calls with variable number of arguments
	     and we cannot combine jump functions with operations.  */
	  if (dst->value.pass_through.operation == NOP_EXPR
	      && (dst->value.pass_through.formal_id
		  < ipa_get_cs_argument_count (top)))
	    {
	      int dst_fid = dst->value.pass_through.formal_id;
	      src = ipa_get_ith_jump_func (top, dst_fid);
	      bool dst_agg_p = ipa_get_jf_pass_through_agg_preserved (dst);
	      struct ipa_polymorphic_call_context *src_ctx
		= ipa_get_ith_polymorhic_call_context (top, dst_fid);

	      if (src_ctx && !src_ctx->useless_p ())
		{
		  struct ipa_polymorphic_call_context ctx = *src_ctx;

		  /* TODO: Make type preserved safe WRT contexts.  */
		  if (!ipa_get_jf_pass_through_type_preserved (dst))
		    ctx.possible_dynamic_type_change (e->in_polymorphic_cdtor);
		  if (!ctx.useless_p ())
		    {
		      if (!dst_ctx)
			{
			  vec_safe_grow_cleared
			    (args->polymorphic_call_contexts, count);
			  dst_ctx
			    = ipa_get_ith_polymorhic_call_context (args, i);
			}
		      dst_ctx->combine_with (ctx);
		    }
		}
	      switch (src->type)
		{
		case IPA_JF_UNKNOWN:
		  ipa_set_jf_unknown (dst);
		  break;
		case IPA_JF_CONST:
		  ipa_set_jf_cst_copy (dst, src);
		  break;

		case IPA_JF_PASS_THROUGH:
		  {
		    int formal_id = ipa_get_jf_pass_through_formal_id (src);
		    enum tree_code operation;
		    operation = ipa_get_jf_pass_through_operation (src);

		    if (operation == NOP_EXPR)
		      {
			bool agg_p;
			agg_p = dst_agg_p
			  && ipa_get_jf_pass_through_agg_preserved (src);
			ipa_set_jf_simple_pass_through (dst, formal_id, agg_p);
		      }
		    else
		      {
			tree operand = ipa_get_jf_pass_through_operand (src);
			ipa_set_jf_arith_pass_through (dst, formal_id, operand,
						       operation);
		      }
		    break;
		  }
		case IPA_JF_ANCESTOR:
		  {
		    bool agg_p;
		    agg_p = dst_agg_p
		      && ipa_get_jf_ancestor_agg_preserved (src);
		    ipa_set_ancestor_jf (dst,
					 ipa_get_jf_ancestor_offset (src),
					 ipa_get_jf_ancestor_formal_id (src),
					 agg_p);
		    break;
		  }
		default:
		  gcc_unreachable ();
		}

	      if (src->agg.items
		  && (dst_agg_p || !src->agg.by_ref))
		{
		  /* Currently we do not produce clobber aggregate jump
		     functions, replace with merging when we do.  */
		  gcc_assert (!dst->agg.items);

		  dst->agg.by_ref = src->agg.by_ref;
		  dst->agg.items = vec_safe_copy (src->agg.items);
		}
	    }
	  else
	    ipa_set_jf_unknown (dst);
	}
    }
}
/* If TARGET is an addr_expr of a function declaration, make it the
   (SPECULATIVE) destination of an indirect edge IE and return the edge.
   Otherwise, return NULL.  */

struct cgraph_edge *
ipa_make_edge_direct_to_target (struct cgraph_edge *ie, tree target,
				bool speculative)
{
  struct cgraph_node *callee;
  struct inline_edge_summary *es = inline_edge_summary (ie);
  bool unreachable = false;

  if (TREE_CODE (target) == ADDR_EXPR)
    target = TREE_OPERAND (target, 0);
  if (TREE_CODE (target) != FUNCTION_DECL)
    {
      target = canonicalize_constructor_val (target, NULL);
      if (!target || TREE_CODE (target) != FUNCTION_DECL)
	{
	  /* Member pointer call that goes through a VMT lookup.  */
	  if (ie->indirect_info->member_ptr
	      /* Or if target is not an invariant expression and we do not
		 know if it will evaluate to a function at runtime.
		 This can happen when folding through &VAR, where &VAR
		 is IP invariant, but VAR itself is not.

		 TODO: Revisit this when GCC 5 is branched.  It seems that
		 member_ptr check is not needed and that we may try to fold
		 the expression and see if VAR is readonly.  */
	      || !is_gimple_ip_invariant (target))
	    {
	      if (dump_enabled_p ())
		{
		  location_t loc = gimple_location_safe (ie->call_stmt);
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
				   "discovered direct call non-invariant "
				   "%s/%i\n",
				   ie->caller->name (), ie->caller->order);
		}
	      return NULL;
	    }

	  if (dump_enabled_p ())
	    {
	      location_t loc = gimple_location_safe (ie->call_stmt);
	      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
			       "discovered direct call to non-function in %s/%i, "
			       "making it __builtin_unreachable\n",
			       ie->caller->name (), ie->caller->order);
	    }

	  target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	  callee = cgraph_node::get_create (target);
	  unreachable = true;
	}
      else
	callee = cgraph_node::get (target);
    }
  else
    callee = cgraph_node::get (target);

  /* Because may-edges are not explicitly represented and vtable may be
     external, we may create the first reference to the object in the unit.  */
  if (!callee || callee->global.inlined_to)
    {
      /* We are better to ensure we can refer to it.
	 In the case of static functions we are out of luck, since we already
	 removed its body.  In the case of public functions we may or may
	 not introduce the reference.  */
      if (!canonicalize_constructor_val (target, NULL)
	  || !TREE_PUBLIC (target))
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a known target "
		     "(%s/%i -> %s/%i) but cannot refer to it.  Giving up.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (ie->callee->name ()),
		     ie->callee->order);
	  return NULL;
	}
      callee = cgraph_node::get_create (target);
    }

  /* If the edge is already speculated.  */
  if (speculative && ie->speculative)
    {
      struct cgraph_edge *e2;
      struct ipa_ref *ref;
      ie->speculative_call_info (e2, ie, ref);
      if (e2->callee->ultimate_alias_target ()
	  != callee->ultimate_alias_target ())
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
		     "target (%s/%i -> %s/%i) but the call is already "
		     "speculated to %s/%i.  Giving up.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (callee->name ()),
		     callee->order,
		     xstrdup_for_dump (e2->callee->name ()),
		     e2->callee->order);
	}
      else
	{
	  if (dump_file)
	    fprintf (dump_file, "ipa-prop: Discovered call to a speculative "
		     "target (%s/%i -> %s/%i); this agrees with previous "
		     "speculation.\n",
		     xstrdup_for_dump (ie->caller->name ()),
		     ie->caller->order,
		     xstrdup_for_dump (callee->name ()),
		     callee->order);
	}
      return NULL;
    }

  if (!dbg_cnt (devirt))
    return NULL;

  ipa_check_create_node_params ();

  /* We cannot make edges to inline clones.  It is a bug if someone removed
     the cgraph node too early.  */
  gcc_assert (!callee->global.inlined_to);

  if (dump_file && !unreachable)
    {
      fprintf (dump_file, "ipa-prop: Discovered %s call to a %s target "
	       "(%s/%i -> %s/%i), for stmt ",
	       ie->indirect_info->polymorphic ? "a virtual" : "an indirect",
	       speculative ? "speculative" : "known",
	       xstrdup_for_dump (ie->caller->name ()),
	       ie->caller->order,
	       xstrdup_for_dump (callee->name ()),
	       callee->order);
      if (ie->call_stmt)
	print_gimple_stmt (dump_file, ie->call_stmt, 2, TDF_SLIM);
      else
	fprintf (dump_file, "with uid %i\n", ie->lto_stmt_uid);
    }
  if (dump_enabled_p ())
    {
      location_t loc = gimple_location_safe (ie->call_stmt);

      dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, loc,
		       "converting indirect call in %s to direct call to %s\n",
		       ie->caller->name (), callee->name ());
    }
  if (!speculative)
    {
      struct cgraph_edge *orig = ie;
      ie = ie->make_direct (callee);
      /* If we resolved speculative edge the cost is already up to date
	 for direct call (adjusted by inline_edge_duplication_hook).  */
      if (ie == orig)
	{
	  es = inline_edge_summary (ie);
	  es->call_stmt_size -= (eni_size_weights.indirect_call_cost
				 - eni_size_weights.call_cost);
	  es->call_stmt_time -= (eni_time_weights.indirect_call_cost
				 - eni_time_weights.call_cost);
	}
    }
  else
    {
      if (!callee->can_be_discarded_p ())
	{
	  cgraph_node *alias;
	  alias = dyn_cast<cgraph_node *> (callee->noninterposable_alias ());
	  if (alias)
	    callee = alias;
	}
      /* make_speculative will update ie's cost to direct call cost.  */
      ie = ie->make_speculative
	     (callee, ie->count * 8 / 10, ie->frequency * 8 / 10);
    }

  return ie;
}
/* Retrieve value from aggregate jump function AGG for the given OFFSET or
   return NULL if there is not any.  BY_REF specifies whether the value has to
   be passed by reference or by value.  */

tree
ipa_find_agg_cst_for_param (struct ipa_agg_jump_function *agg,
			    HOST_WIDE_INT offset, bool by_ref)
{
  struct ipa_agg_jf_item *item;
  int i;

  if (by_ref != agg->by_ref)
    return NULL;

  FOR_EACH_VEC_SAFE_ELT (agg->items, i, item)
    if (item->offset == offset)
      {
	/* Currently we do not have clobber values, return NULL for them once
	   we do.  */
	gcc_checking_assert (is_gimple_ip_invariant (item->value));
	return item->value;
      }
  return NULL;
}
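
/* Usage sketch (JFUNC is assumed to come from the caller's context; not part
   of the pass): look up the constant known to live at byte 8, i.e. bit
   offset 64, of the aggregate described by JFUNC, passed by reference.  */
#if 0
tree known = ipa_find_agg_cst_for_param (&jfunc->agg, 64, true);
if (known)
  /* KNOWN is a gimple IP invariant describing the aggregate content.  */
  gcc_checking_assert (is_gimple_ip_invariant (known));
#endif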
/* Remove a reference to SYMBOL from the list of references of a node given by
   reference description RDESC.  Return true if the reference has been
   successfully found and removed.  */

static bool
remove_described_reference (symtab_node *symbol, struct ipa_cst_ref_desc *rdesc)
{
  struct ipa_ref *to_del;
  struct cgraph_edge *origin;

  origin = rdesc->cs;
  if (!origin)
    return false;
  to_del = origin->caller->find_reference (symbol, origin->call_stmt,
					   origin->lto_stmt_uid);
  if (!to_del)
    return false;

  to_del->remove_reference ();
  if (dump_file)
    fprintf (dump_file, "ipa-prop: Removed a reference from %s/%i to %s.\n",
	     xstrdup_for_dump (origin->caller->name ()),
	     origin->caller->order, xstrdup_for_dump (symbol->name ()));
  return true;
}
/* If JFUNC has a reference description with refcount different from
   IPA_UNDESCRIBED_USE, return the reference description, otherwise return
   NULL.  JFUNC must be a constant jump function.  */

static struct ipa_cst_ref_desc *
jfunc_rdesc_usable (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc = ipa_get_jf_constant_rdesc (jfunc);
  if (rdesc && rdesc->refcount != IPA_UNDESCRIBED_USE)
    return rdesc;
  else
    return NULL;
}
/* If the value of constant jump function JFUNC is an address of a function
   declaration, return the associated call graph node.  Otherwise return
   NULL.  */

static cgraph_node *
cgraph_node_for_jfunc (struct ipa_jump_func *jfunc)
{
  gcc_checking_assert (jfunc->type == IPA_JF_CONST);
  tree cst = ipa_get_jf_constant (jfunc);
  if (TREE_CODE (cst) != ADDR_EXPR
      || TREE_CODE (TREE_OPERAND (cst, 0)) != FUNCTION_DECL)
    return NULL;

  return cgraph_node::get (TREE_OPERAND (cst, 0));
}
/* If JFUNC is a constant jump function with a usable rdesc, decrement its
   refcount and if it hits zero, remove reference to SYMBOL from the caller of
   the edge specified in the rdesc.  Return false if either the symbol or the
   reference could not be found, otherwise return true.  */

static bool
try_decrement_rdesc_refcount (struct ipa_jump_func *jfunc)
{
  struct ipa_cst_ref_desc *rdesc;
  if (jfunc->type == IPA_JF_CONST
      && (rdesc = jfunc_rdesc_usable (jfunc))
      && --rdesc->refcount == 0)
    {
      symtab_node *symbol = cgraph_node_for_jfunc (jfunc);

      if (!symbol)
	return false;

      return remove_described_reference (symbol, rdesc);
    }
  return true;
}
/* Try to find a destination for indirect edge IE that corresponds to a simple
   call or a call of a member function pointer and where the destination is a
   pointer formal parameter described by jump function JFUNC.  If it can be
   determined, return the newly direct edge, otherwise return NULL.
   NEW_ROOT_INFO is the node info that JFUNC lattices are relative to.  */

static struct cgraph_edge *
try_make_edge_direct_simple_call (struct cgraph_edge *ie,
				  struct ipa_jump_func *jfunc,
				  struct ipa_node_params *new_root_info)
{
  struct cgraph_edge *cs;
  tree target;
  bool agg_contents = ie->indirect_info->agg_contents;

  if (ie->indirect_info->agg_contents)
    target = ipa_find_agg_cst_for_param (&jfunc->agg,
					 ie->indirect_info->offset,
					 ie->indirect_info->by_ref);
  else
    target = ipa_value_from_jfunc (new_root_info, jfunc);
  if (!target)
    return NULL;
  cs = ipa_make_edge_direct_to_target (ie, target);

  if (cs && !agg_contents)
    {
      bool ok;
      gcc_checking_assert (cs->callee
			   && (cs != ie
			       || jfunc->type != IPA_JF_CONST
			       || !cgraph_node_for_jfunc (jfunc)
			       || cs->callee == cgraph_node_for_jfunc (jfunc)));
      ok = try_decrement_rdesc_refcount (jfunc);
      gcc_checking_assert (ok);
    }

  return cs;
}
/* Return the target to be used in cases of impossible devirtualization.  IE
   and target (the latter can be NULL) are dumped when dumping is enabled.  */

tree
ipa_impossible_devirt_target (struct cgraph_edge *ie, tree target)
{
  if (dump_file)
    {
      if (target)
	fprintf (dump_file,
		 "Type inconsistent devirtualization: %s/%i->%s\n",
		 ie->caller->name (), ie->caller->order,
		 IDENTIFIER_POINTER (DECL_ASSEMBLER_NAME (target)));
      else
	fprintf (dump_file,
		 "No devirtualization target in %s/%i\n",
		 ie->caller->name (), ie->caller->order);
    }
  tree new_target = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
  cgraph_node::get_create (new_target);
  return new_target;
}
/* Try to find a destination for indirect edge IE that corresponds to a virtual
   call based on a formal parameter which is described by jump function JFUNC
   and if it can be determined, make it direct and return the direct edge.
   Otherwise, return NULL.  CTX describes the polymorphic context that the
   parameter the call is based on brings along with it.  */

static struct cgraph_edge *
try_make_edge_direct_virtual_call (struct cgraph_edge *ie,
				   struct ipa_jump_func *jfunc,
				   struct ipa_polymorphic_call_context ctx)
{
  tree target = NULL;
  bool speculative = false;

  if (!opt_for_fn (ie->caller->decl, flag_devirtualize))
    return NULL;

  gcc_assert (!ie->indirect_info->by_ref);

  /* Try to do lookup via known virtual table pointer value.  */
  if (!ie->indirect_info->vptr_changed
      || opt_for_fn (ie->caller->decl, flag_devirtualize_speculatively))
    {
      tree vtable;
      unsigned HOST_WIDE_INT offset;
      tree t = ipa_find_agg_cst_for_param (&jfunc->agg,
					   ie->indirect_info->offset,
					   true);
      if (t && vtable_pointer_value_to_vtable (t, &vtable, &offset))
	{
	  t = gimple_get_virt_method_for_vtable (ie->indirect_info->otr_token,
						 vtable, offset);
	  if (t)
	    {
	      if ((TREE_CODE (TREE_TYPE (t)) == FUNCTION_TYPE
		   && DECL_FUNCTION_CODE (t) == BUILT_IN_UNREACHABLE)
		  || !possible_polymorphic_call_target_p
		       (ie, cgraph_node::get (t)))
		{
		  /* Do not speculate builtin_unreachable, it is stupid!  */
		  if (!ie->indirect_info->vptr_changed)
		    target = ipa_impossible_devirt_target (ie, target);
		}
	      else
		{
		  target = t;
		  speculative = ie->indirect_info->vptr_changed;
		}
	    }
	}
    }

  ipa_polymorphic_call_context ie_context (ie);
  vec <cgraph_node *>targets;
  bool final;

  ctx.offset_by (ie->indirect_info->offset);
  if (ie->indirect_info->vptr_changed)
    ctx.possible_dynamic_type_change (ie->in_polymorphic_cdtor,
				      ie->indirect_info->otr_type);
  ctx.combine_with (ie_context, ie->indirect_info->otr_type);
  targets = possible_polymorphic_call_targets
    (ie->indirect_info->otr_type,
     ie->indirect_info->otr_token,
     ctx, &final);
  if (final && targets.length () <= 1)
    {
      speculative = false;
      if (targets.length () == 1)
	target = targets[0]->decl;
      else
	target = ipa_impossible_devirt_target (ie, NULL_TREE);
    }
  else if (!target && opt_for_fn (ie->caller->decl,
				  flag_devirtualize_speculatively)
	   && !ie->speculative && ie->maybe_hot_p ())
    {
      cgraph_node *n;
      n = try_speculative_devirtualization (ie->indirect_info->otr_type,
					    ie->indirect_info->otr_token,
					    ie->indirect_info->context);
      if (n)
	{
	  target = n->decl;
	  speculative = true;
	}
    }

  if (target)
    {
      if (!possible_polymorphic_call_target_p
	  (ie, cgraph_node::get_create (target)))
	{
	  if (speculative)
	    return NULL;
	  target = ipa_impossible_devirt_target (ie, target);
	}
      return ipa_make_edge_direct_to_target (ie, target, speculative);
    }
  else
    return NULL;
}
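
/* Illustrative sketch (hypothetical hierarchy, not part of the pass): the
   situation the function above resolves.  When propagation proves that the
   pointer bound to B is always an instance of D, the virtual call can be
   made direct (or speculative when the type is only likely).  */
#if 0
struct B { virtual int foo (); };
struct D : B { int foo (); };

int
call (B *b)
{
  return b->foo ();	/* Can become a direct call to D::foo.  */
}
#endif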
/* Update the param called notes associated with NODE when CS is being inlined,
   assuming NODE is (potentially indirectly) inlined into CS->callee.
   Moreover, if the callee is discovered to be constant, create a new cgraph
   edge for it.  Newly discovered indirect edges will be added to *NEW_EDGES,
   unless NEW_EDGES is NULL.  Return true iff a new edge(s) were created.  */

static bool
update_indirect_edges_after_inlining (struct cgraph_edge *cs,
				      struct cgraph_node *node,
				      vec<cgraph_edge *> *new_edges)
{
  struct ipa_edge_args *top;
  struct cgraph_edge *ie, *next_ie, *new_direct_edge;
  struct ipa_node_params *new_root_info;
  bool res = false;

  ipa_check_create_edge_args ();
  top = IPA_EDGE_REF (cs);
  new_root_info = IPA_NODE_REF (cs->caller->global.inlined_to
				? cs->caller->global.inlined_to
				: cs->caller);

  for (ie = node->indirect_calls; ie; ie = next_ie)
    {
      struct cgraph_indirect_call_info *ici = ie->indirect_info;
      struct ipa_jump_func *jfunc;
      int param_index;
      cgraph_node *spec_target = NULL;

      next_ie = ie->next_callee;

      if (ici->param_index == -1)
	continue;

      /* We must check range due to calls with variable number of arguments:  */
      if (ici->param_index >= ipa_get_cs_argument_count (top))
	{
	  ici->param_index = -1;
	  continue;
	}

      param_index = ici->param_index;
      jfunc = ipa_get_ith_jump_func (top, param_index);

      if (ie->speculative)
	{
	  struct cgraph_edge *de;
	  struct ipa_ref *ref;
	  ie->speculative_call_info (de, ie, ref);
	  spec_target = de->callee;
	}

      if (!opt_for_fn (node->decl, flag_indirect_inlining))
	new_direct_edge = NULL;
      else if (ici->polymorphic)
	{
	  ipa_polymorphic_call_context ctx;
	  ctx = ipa_context_from_jfunc (new_root_info, cs, param_index, jfunc);
	  new_direct_edge = try_make_edge_direct_virtual_call (ie, jfunc, ctx);
	}
      else
	new_direct_edge = try_make_edge_direct_simple_call (ie, jfunc,
							    new_root_info);
      /* If speculation was removed, then we need to do nothing.  */
      if (new_direct_edge && new_direct_edge != ie
	  && new_direct_edge->callee == spec_target)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  top = IPA_EDGE_REF (cs);
	  res = true;
	  if (!new_direct_edge->speculative)
	    continue;
	}
      else if (new_direct_edge)
	{
	  new_direct_edge->indirect_inlining_edge = 1;
	  if (new_direct_edge->call_stmt)
	    new_direct_edge->call_stmt_cannot_inline_p
	      = !gimple_check_call_matching_types (
		  new_direct_edge->call_stmt,
		  new_direct_edge->callee->decl, false);
	  if (new_edges)
	    {
	      new_edges->safe_push (new_direct_edge);
	      res = true;
	    }
	  top = IPA_EDGE_REF (cs);
	  /* If speculative edge was introduced we still need to update
	     call info of the indirect edge.  */
	  if (!new_direct_edge->speculative)
	    continue;
	}
      if (jfunc->type == IPA_JF_PASS_THROUGH
	  && ipa_get_jf_pass_through_operation (jfunc) == NOP_EXPR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_pass_through_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_pass_through_formal_id (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_pass_through_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else if (jfunc->type == IPA_JF_ANCESTOR)
	{
	  if (ici->agg_contents
	      && !ipa_get_jf_ancestor_agg_preserved (jfunc)
	      && !ici->polymorphic)
	    ici->param_index = -1;
	  else
	    {
	      ici->param_index = ipa_get_jf_ancestor_formal_id (jfunc);
	      ici->offset += ipa_get_jf_ancestor_offset (jfunc);
	      if (ici->polymorphic
		  && !ipa_get_jf_ancestor_type_preserved (jfunc))
		ici->vptr_changed = true;
	    }
	}
      else
	/* Either we can find a destination for this edge now or never.  */
	ici->param_index = -1;
    }

  return res;
}
/* Recursively traverse subtree of NODE (including node) made of inlined
   cgraph_edges when CS has been inlined and invoke
   update_indirect_edges_after_inlining on all nodes and
   update_jump_functions_after_inlining on all non-inlined edges that lead out
   of this subtree.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

static bool
propagate_info_to_inlined_callees (struct cgraph_edge *cs,
				   struct cgraph_node *node,
				   vec<cgraph_edge *> *new_edges)
{
  struct cgraph_edge *e;
  bool res;

  res = update_indirect_edges_after_inlining (cs, node, new_edges);

  for (e = node->callees; e; e = e->next_callee)
    if (!e->inline_failed)
      res |= propagate_info_to_inlined_callees (cs, e->callee, new_edges);
    else
      update_jump_functions_after_inlining (cs, e);
  for (e = node->indirect_calls; e; e = e->next_callee)
    update_jump_functions_after_inlining (cs, e);

  return res;
}
/* Combine two controlled uses counts as done during inlining.  */

static int
combine_controlled_uses_counters (int c, int d)
{
  if (c == IPA_UNDESCRIBED_USE || d == IPA_UNDESCRIBED_USE)
    return IPA_UNDESCRIBED_USE;
  else
    return c + d - 1;
}
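
/* Worked example with hypothetical counts: if the caller described C = 2
   controlled uses of a parameter and the inlined callee described D = 3 uses
   of the argument fed by it, the call statement that disappeared by inlining
   accounted for one of them, hence C + D - 1 = 4.  An undescribed count on
   either side makes the result undescribed.  */
#if 0
gcc_checking_assert (combine_controlled_uses_counters (2, 3) == 4);
gcc_checking_assert (combine_controlled_uses_counters (IPA_UNDESCRIBED_USE, 3)
		     == IPA_UNDESCRIBED_USE);
#endif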
/* Propagate number of controlled users from CS->callee to the new root of the
   tree of inlined nodes.  */

static void
propagate_controlled_uses (struct cgraph_edge *cs)
{
  struct ipa_edge_args *args = IPA_EDGE_REF (cs);
  struct cgraph_node *new_root = cs->caller->global.inlined_to
    ? cs->caller->global.inlined_to : cs->caller;
  struct ipa_node_params *new_root_info = IPA_NODE_REF (new_root);
  struct ipa_node_params *old_root_info = IPA_NODE_REF (cs->callee);
  int count, i;

  count = MIN (ipa_get_cs_argument_count (args),
	       ipa_get_param_count (old_root_info));
  for (i = 0; i < count; i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);
      struct ipa_cst_ref_desc *rdesc;

      if (jf->type == IPA_JF_PASS_THROUGH)
	{
	  int src_idx, c, d;
	  src_idx = ipa_get_jf_pass_through_formal_id (jf);
	  c = ipa_get_controlled_uses (new_root_info, src_idx);
	  d = ipa_get_controlled_uses (old_root_info, i);

	  gcc_checking_assert (ipa_get_jf_pass_through_operation (jf)
			       == NOP_EXPR || c == IPA_UNDESCRIBED_USE);
	  c = combine_controlled_uses_counters (c, d);
	  ipa_set_controlled_uses (new_root_info, src_idx, c);
	  if (c == 0 && new_root_info->ipcp_orig_node)
	    {
	      struct cgraph_node *n;
	      struct ipa_ref *ref;
	      tree t = new_root_info->known_csts[src_idx];

	      if (t && TREE_CODE (t) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (t, 0)) == FUNCTION_DECL
		  && (n = cgraph_node::get (TREE_OPERAND (t, 0)))
		  && (ref = new_root->find_reference (n, NULL, 0)))
		{
		  if (dump_file)
		    fprintf (dump_file, "ipa-prop: Removing cloning-created "
			     "reference from %s/%i to %s/%i.\n",
			     xstrdup_for_dump (new_root->name ()),
			     new_root->order,
			     xstrdup_for_dump (n->name ()), n->order);
		  ref->remove_reference ();
		}
	    }
	}
      else if (jf->type == IPA_JF_CONST
	       && (rdesc = jfunc_rdesc_usable (jf)))
	{
	  int d = ipa_get_controlled_uses (old_root_info, i);
	  int c = rdesc->refcount;
	  rdesc->refcount = combine_controlled_uses_counters (c, d);
	  if (rdesc->refcount == 0)
	    {
	      tree cst = ipa_get_jf_constant (jf);
	      struct cgraph_node *n;
	      gcc_checking_assert (TREE_CODE (cst) == ADDR_EXPR
				   && TREE_CODE (TREE_OPERAND (cst, 0))
				   == FUNCTION_DECL);
	      n = cgraph_node::get (TREE_OPERAND (cst, 0));
	      if (n)
		{
		  struct cgraph_node *clone;
		  bool ok;
		  ok = remove_described_reference (n, rdesc);
		  gcc_checking_assert (ok);

		  clone = cs->caller;
		  while (clone->global.inlined_to
			 && clone != rdesc->cs->caller
			 && IPA_NODE_REF (clone)->ipcp_orig_node)
		    {
		      struct ipa_ref *ref;
		      ref = clone->find_reference (n, NULL, 0);
		      if (ref)
			{
			  if (dump_file)
			    fprintf (dump_file, "ipa-prop: Removing "
				     "cloning-created reference "
				     "from %s/%i to %s/%i.\n",
				     xstrdup_for_dump (clone->name ()),
				     clone->order,
				     xstrdup_for_dump (n->name ()),
				     n->order);
			  ref->remove_reference ();
			}
		      clone = clone->callers->caller;
		    }
		}
	    }
	}
    }

  for (i = ipa_get_param_count (old_root_info);
       i < ipa_get_cs_argument_count (args);
       i++)
    {
      struct ipa_jump_func *jf = ipa_get_ith_jump_func (args, i);

      if (jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *rdesc = jfunc_rdesc_usable (jf);
	  if (rdesc)
	    rdesc->refcount = IPA_UNDESCRIBED_USE;
	}
      else if (jf->type == IPA_JF_PASS_THROUGH)
	ipa_set_controlled_uses (new_root_info,
				 jf->value.pass_through.formal_id,
				 IPA_UNDESCRIBED_USE);
    }
}
/* Update jump functions and call note functions on inlining the call site CS.
   CS is expected to lead to a node already cloned by
   cgraph_clone_inline_nodes.  Newly discovered indirect edges will be added to
   *NEW_EDGES, unless NEW_EDGES is NULL.  Return true iff a new edge(s) were
   created.  */

bool
ipa_propagate_indirect_call_infos (struct cgraph_edge *cs,
				   vec<cgraph_edge *> *new_edges)
{
  bool changed;
  /* Do nothing if the preparation phase has not been carried out yet
     (i.e. during early inlining).  */
  if (!ipa_node_params_sum)
    return false;
  gcc_assert (ipa_edge_args_vector);

  propagate_controlled_uses (cs);
  changed = propagate_info_to_inlined_callees (cs, cs->callee, new_edges);

  return changed;
}
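
/* Sketch of the expected use from the inliner (assumed caller, not part of
   this file): after edge CS has been inlined, propagate the information and
   collect any newly discovered direct edges.  */
#if 0
vec<cgraph_edge *> new_edges = vNULL;

if (ipa_propagate_indirect_call_infos (cs, &new_edges))
  {
    /* Newly discovered direct edges could now be considered for further
       inlining.  */
  }
new_edges.release ();
#endif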
/* Frees all dynamically allocated structures that the argument info points
   to.  */

void
ipa_free_edge_args_substructures (struct ipa_edge_args *args)
{
  vec_free (args->jump_functions);
  memset (args, 0, sizeof (*args));
}

/* Free all ipa_edge structures.  */

void
ipa_free_all_edge_args (void)
{
  int i;
  struct ipa_edge_args *args;

  if (!ipa_edge_args_vector)
    return;

  FOR_EACH_VEC_ELT (*ipa_edge_args_vector, i, args)
    ipa_free_edge_args_substructures (args);

  vec_free (ipa_edge_args_vector);
}

/* Frees all dynamically allocated structures that the param info points
   to.  */

ipa_node_params::~ipa_node_params ()
{
  descriptors.release ();
  free (lattices);
  /* Lattice values and their sources are deallocated with their allocation
     pool.  */
  known_contexts.release ();

  lattices = NULL;
  ipcp_orig_node = NULL;
  analysis_done = 0;
  node_enqueued = 0;
  do_clone_for_all_contexts = 0;
  is_all_contexts_clone = 0;
  node_dead = 0;
}
/* Free all ipa_node_params structures.  */

void
ipa_free_all_node_params (void)
{
  delete ipa_node_params_sum;
  ipa_node_params_sum = NULL;
}

/* Grow ipcp_transformations if necessary.  */

void
ipcp_grow_transformations_if_necessary (void)
{
  if (vec_safe_length (ipcp_transformations)
      <= (unsigned) symtab->cgraph_max_uid)
    vec_safe_grow_cleared (ipcp_transformations, symtab->cgraph_max_uid + 1);
}

/* Set the aggregate replacements of NODE to be AGGVALS.  */

void
ipa_set_node_agg_value_chain (struct cgraph_node *node,
			      struct ipa_agg_replacement_value *aggvals)
{
  ipcp_grow_transformations_if_necessary ();
  (*ipcp_transformations)[node->uid].agg_values = aggvals;
}
/* Hook that is called by cgraph.c when an edge is removed.  */

static void
ipa_edge_removal_hook (struct cgraph_edge *cs, void *data ATTRIBUTE_UNUSED)
{
  struct ipa_edge_args *args;

  /* During IPA-CP updating we can be called on not-yet analyzed clones.  */
  if (vec_safe_length (ipa_edge_args_vector) <= (unsigned)cs->uid)
    return;

  args = IPA_EDGE_REF (cs);
  if (args->jump_functions)
    {
      struct ipa_jump_func *jf;
      int i;
      FOR_EACH_VEC_ELT (*args->jump_functions, i, jf)
	{
	  struct ipa_cst_ref_desc *rdesc;
	  try_decrement_rdesc_refcount (jf);
	  if (jf->type == IPA_JF_CONST
	      && (rdesc = ipa_get_jf_constant_rdesc (jf))
	      && rdesc->cs == cs)
	    rdesc->cs = NULL;
	}
    }

  ipa_free_edge_args_substructures (IPA_EDGE_REF (cs));
}
/* Hook that is called by cgraph.c when an edge is duplicated.  */

static void
ipa_edge_duplication_hook (struct cgraph_edge *src, struct cgraph_edge *dst,
			   void *)
{
  struct ipa_edge_args *old_args, *new_args;
  unsigned int i;

  ipa_check_create_edge_args ();

  old_args = IPA_EDGE_REF (src);
  new_args = IPA_EDGE_REF (dst);

  new_args->jump_functions = vec_safe_copy (old_args->jump_functions);
  if (old_args->polymorphic_call_contexts)
    new_args->polymorphic_call_contexts
      = vec_safe_copy (old_args->polymorphic_call_contexts);

  for (i = 0; i < vec_safe_length (old_args->jump_functions); i++)
    {
      struct ipa_jump_func *src_jf = ipa_get_ith_jump_func (old_args, i);
      struct ipa_jump_func *dst_jf = ipa_get_ith_jump_func (new_args, i);

      dst_jf->agg.items = vec_safe_copy (dst_jf->agg.items);

      if (src_jf->type == IPA_JF_CONST)
	{
	  struct ipa_cst_ref_desc *src_rdesc = jfunc_rdesc_usable (src_jf);

	  if (!src_rdesc)
	    dst_jf->value.constant.rdesc = NULL;
	  else if (src->caller == dst->caller)
	    {
	      struct ipa_ref *ref;
	      symtab_node *n = cgraph_node_for_jfunc (src_jf);
	      gcc_checking_assert (n);
	      ref = src->caller->find_reference (n, src->call_stmt,
						 src->lto_stmt_uid);
	      gcc_checking_assert (ref);
	      dst->caller->clone_reference (ref, ref->stmt);

	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = NULL;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else if (src_rdesc->cs == src)
	    {
	      struct ipa_cst_ref_desc *dst_rdesc = ipa_refdesc_pool.allocate ();
	      dst_rdesc->cs = dst;
	      dst_rdesc->refcount = src_rdesc->refcount;
	      dst_rdesc->next_duplicate = src_rdesc->next_duplicate;
	      src_rdesc->next_duplicate = dst_rdesc;
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	  else
	    {
	      struct ipa_cst_ref_desc *dst_rdesc;
	      /* This can happen during inlining, when a JFUNC can refer to a
		 reference taken in a function up in the tree of inline clones.
		 We need to find the duplicate that refers to our tree of
		 inline clones.  */

	      gcc_assert (dst->caller->global.inlined_to);
	      for (dst_rdesc = src_rdesc->next_duplicate;
		   dst_rdesc;
		   dst_rdesc = dst_rdesc->next_duplicate)
		{
		  struct cgraph_node *top;
		  top = dst_rdesc->cs->caller->global.inlined_to
		    ? dst_rdesc->cs->caller->global.inlined_to
		    : dst_rdesc->cs->caller;
		  if (dst->caller->global.inlined_to == top)
		    break;
		}
	      gcc_assert (dst_rdesc);
	      dst_jf->value.constant.rdesc = dst_rdesc;
	    }
	}
      else if (dst_jf->type == IPA_JF_PASS_THROUGH
	       && src->caller == dst->caller)
	{
	  struct cgraph_node *inline_root = dst->caller->global.inlined_to
	    ? dst->caller->global.inlined_to : dst->caller;
	  struct ipa_node_params *root_info = IPA_NODE_REF (inline_root);
	  int idx = ipa_get_jf_pass_through_formal_id (dst_jf);

	  int c = ipa_get_controlled_uses (root_info, idx);
	  if (c != IPA_UNDESCRIBED_USE)
	    {
	      c++;
	      ipa_set_controlled_uses (root_info, idx, c);
	    }
	}
    }
}
/* Analyze newly added function into callgraph.  */

static void
ipa_add_new_function (cgraph_node *node, void *data ATTRIBUTE_UNUSED)
{
  if (node->has_gimple_body_p ())
    ipa_analyze_node (node);
}
/* Hook that is called by summary when a node is duplicated.  */

void
ipa_node_params_t::duplicate(cgraph_node *src, cgraph_node *dst,
			     ipa_node_params *old_info,
			     ipa_node_params *new_info)
{
  ipa_agg_replacement_value *old_av, *new_av;

  new_info->descriptors = old_info->descriptors.copy ();
  new_info->lattices = NULL;
  new_info->ipcp_orig_node = old_info->ipcp_orig_node;

  new_info->analysis_done = old_info->analysis_done;
  new_info->node_enqueued = old_info->node_enqueued;

  old_av = ipa_get_agg_replacements_for_node (src);
  if (old_av)
    {
      new_av = NULL;
      while (old_av)
	{
	  struct ipa_agg_replacement_value *v;

	  v = ggc_alloc<ipa_agg_replacement_value> ();
	  memcpy (v, old_av, sizeof (*v));
	  v->next = new_av;
	  new_av = v;
	  old_av = old_av->next;
	}
      ipa_set_node_agg_value_chain (dst, new_av);
    }

  ipcp_transformation_summary *src_trans = ipcp_get_transformation_summary (src);

  if (src_trans && vec_safe_length (src_trans->alignments) > 0)
    {
      ipcp_grow_transformations_if_necessary ();
      src_trans = ipcp_get_transformation_summary (src);
      const vec<ipa_alignment, va_gc> *src_alignments = src_trans->alignments;
      vec<ipa_alignment, va_gc> *&dst_alignments
	= ipcp_get_transformation_summary (dst)->alignments;
      vec_safe_reserve_exact (dst_alignments, src_alignments->length ());
      for (unsigned i = 0; i < src_alignments->length (); ++i)
	dst_alignments->quick_push ((*src_alignments)[i]);
    }
}
/* Register our cgraph hooks if they are not already there.  */

void
ipa_register_cgraph_hooks (void)
{
  ipa_check_create_node_params ();

  if (!edge_removal_hook_holder)
    edge_removal_hook_holder =
      symtab->add_edge_removal_hook (&ipa_edge_removal_hook, NULL);
  if (!edge_duplication_hook_holder)
    edge_duplication_hook_holder =
      symtab->add_edge_duplication_hook (&ipa_edge_duplication_hook, NULL);
  function_insertion_hook_holder =
    symtab->add_cgraph_insertion_hook (&ipa_add_new_function, NULL);
}

/* Unregister our cgraph hooks if they are not already there.  */

static void
ipa_unregister_cgraph_hooks (void)
{
  symtab->remove_edge_removal_hook (edge_removal_hook_holder);
  edge_removal_hook_holder = NULL;
  symtab->remove_edge_duplication_hook (edge_duplication_hook_holder);
  edge_duplication_hook_holder = NULL;
  symtab->remove_cgraph_insertion_hook (function_insertion_hook_holder);
  function_insertion_hook_holder = NULL;
}
/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after ipa-cp.  */

void
ipa_free_all_structures_after_ipa_cp (void)
{
  if (!optimize && !in_lto_p)
    {
      ipa_free_all_edge_args ();
      ipa_free_all_node_params ();
      ipcp_sources_pool.release ();
      ipcp_cst_values_pool.release ();
      ipcp_poly_ctx_values_pool.release ();
      ipcp_agg_lattice_pool.release ();
      ipa_unregister_cgraph_hooks ();
      ipa_refdesc_pool.release ();
    }
}

/* Free all ipa_node_params and all ipa_edge_args structures if they are no
   longer needed after indirect inlining.  */

void
ipa_free_all_structures_after_iinln (void)
{
  ipa_free_all_edge_args ();
  ipa_free_all_node_params ();
  ipa_unregister_cgraph_hooks ();
  ipcp_sources_pool.release ();
  ipcp_cst_values_pool.release ();
  ipcp_poly_ctx_values_pool.release ();
  ipcp_agg_lattice_pool.release ();
  ipa_refdesc_pool.release ();
}
/* Print ipa_tree_map data structures of function NODE to F.  */

void
ipa_print_node_params (FILE *f, struct cgraph_node *node)
{
  int i, count;
  struct ipa_node_params *info;

  if (!node->definition)
    return;
  info = IPA_NODE_REF (node);
  fprintf (f, "  function  %s/%i parameter descriptors:\n",
	   node->name (), node->order);
  count = ipa_get_param_count (info);
  for (i = 0; i < count; i++)
    {
      int c;

      fprintf (f, "    ");
      ipa_dump_param (f, info, i);
      if (ipa_is_param_used (info, i))
	fprintf (f, " used");
      c = ipa_get_controlled_uses (info, i);
      if (c == IPA_UNDESCRIBED_USE)
	fprintf (f, " undescribed_use");
      else
	fprintf (f, " controlled_uses=%i", c);
      fprintf (f, "\n");
    }
}

/* Print ipa_tree_map data structures of all functions in the
   callgraph to F.  */

void
ipa_print_all_params (FILE * f)
{
  struct cgraph_node *node;

  fprintf (f, "\nFunction parameters:\n");
  FOR_EACH_FUNCTION (node)
    ipa_print_node_params (f, node);
}
/* Return a heap allocated vector containing formal parameters of FNDECL.  */

vec<tree>
ipa_get_vector_of_formal_parms (tree fndecl)
{
  vec<tree> args;
  int count;
  tree parm;

  gcc_assert (!flag_wpa);
  count = count_formal_params (fndecl);
  args.create (count);
  for (parm = DECL_ARGUMENTS (fndecl); parm; parm = DECL_CHAIN (parm))
    args.quick_push (parm);

  return args;
}

/* Return a heap allocated vector containing types of formal parameters of
   function type FNTYPE.  */

vec<tree>
ipa_get_vector_of_formal_parm_types (tree fntype)
{
  vec<tree> types;
  int count = 0;
  tree t;

  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    count++;

  types.create (count);
  for (t = TYPE_ARG_TYPES (fntype); t; t = TREE_CHAIN (t))
    types.quick_push (TREE_VALUE (t));

  return types;
}
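
/* Usage sketch (FNDECL is assumed; not part of the pass): the two vectors
   returned by the helpers above walk over the same parameters.  */
#if 0
vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);
vec<tree> types = ipa_get_vector_of_formal_parm_types (TREE_TYPE (fndecl));
/* For a prototyped non-varargs function, TYPES has one extra trailing
   entry for the terminating void in TYPE_ARG_TYPES.  */
parms.release ();
types.release ();
#endif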
/* Modify the function declaration FNDECL and its type according to the plan in
   ADJUSTMENTS.  It also sets base fields of individual adjustments structures
   to reflect the actual parameters being modified which are determined by the
   base_index field.  */

void
ipa_modify_formal_parameters (tree fndecl, ipa_parm_adjustment_vec adjustments)
{
  vec<tree> oparms = ipa_get_vector_of_formal_parms (fndecl);
  tree orig_type = TREE_TYPE (fndecl);
  tree old_arg_types = TYPE_ARG_TYPES (orig_type);

  /* The following test is an ugly hack, some functions simply don't have any
     arguments in their type.  This is probably a bug but well... */
  bool care_for_types = (old_arg_types != NULL_TREE);
  bool last_parm_void;
  vec<tree> otypes;
  if (care_for_types)
    {
      last_parm_void = (TREE_VALUE (tree_last (old_arg_types))
			== void_type_node);
      otypes = ipa_get_vector_of_formal_parm_types (orig_type);
      if (last_parm_void)
	gcc_assert (oparms.length () + 1 == otypes.length ());
      else
	gcc_assert (oparms.length () == otypes.length ());
    }
  else
    {
      last_parm_void = false;
      otypes.create (0);
    }

  int len = adjustments.length ();
  tree *link = &DECL_ARGUMENTS (fndecl);
  tree new_arg_types = NULL;
  for (int i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      tree parm;

      adj = &adjustments[i];

      if (adj->op == IPA_PARM_OP_NEW)
	parm = NULL;
      else
	parm = oparms[adj->base_index];
      adj->base = parm;

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, otypes[adj->base_index],
				       new_arg_types);
	  *link = parm;
	  link = &DECL_CHAIN (parm);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree new_parm;
	  tree ptype;

	  if (adj->by_ref)
	    ptype = build_pointer_type (adj->type);
	  else
	    {
	      ptype = adj->type;
	      if (is_gimple_reg_type (ptype))
		{
		  unsigned malign = GET_MODE_ALIGNMENT (TYPE_MODE (ptype));
		  if (TYPE_ALIGN (ptype) < malign)
		    ptype = build_aligned_type (ptype, malign);
		}
	    }

	  if (care_for_types)
	    new_arg_types = tree_cons (NULL_TREE, ptype, new_arg_types);

	  new_parm = build_decl (UNKNOWN_LOCATION, PARM_DECL, NULL_TREE,
				 ptype);
	  const char *prefix = adj->arg_prefix ? adj->arg_prefix : "SYNTH";
	  DECL_NAME (new_parm) = create_tmp_var_name (prefix);
	  DECL_ARTIFICIAL (new_parm) = 1;
	  DECL_ARG_TYPE (new_parm) = ptype;
	  DECL_CONTEXT (new_parm) = fndecl;
	  TREE_USED (new_parm) = 1;
	  DECL_IGNORED_P (new_parm) = 1;
	  layout_decl (new_parm, 0);

	  if (adj->op == IPA_PARM_OP_NEW)
	    adj->base = NULL;
	  else
	    adj->base = parm;
	  adj->new_decl = new_parm;

	  *link = new_parm;
	  link = &DECL_CHAIN (new_parm);
	}
    }

  *link = NULL_TREE;

  tree new_reversed = NULL;
  if (care_for_types)
    {
      new_reversed = nreverse (new_arg_types);
      if (last_parm_void)
	{
	  if (new_reversed)
	    TREE_CHAIN (new_arg_types) = void_list_node;
	  else
	    new_reversed = void_list_node;
	}
    }

  /* Use copy_node to preserve as much as possible from original type
     (debug info, attribute lists etc.)
     Exception is METHOD_TYPEs must have THIS argument.
     When we are asked to remove it, we need to build new FUNCTION_TYPE
     instead.  */
  tree new_type = NULL;
  if (TREE_CODE (orig_type) != METHOD_TYPE
      || (adjustments[0].op == IPA_PARM_OP_COPY
	  && adjustments[0].base_index == 0))
    {
      new_type = build_distinct_type_copy (orig_type);
      TYPE_ARG_TYPES (new_type) = new_reversed;
    }
  else
    {
      new_type
	= build_distinct_type_copy (build_function_type (TREE_TYPE (orig_type),
							 new_reversed));
      TYPE_CONTEXT (new_type) = TYPE_CONTEXT (orig_type);
      DECL_VINDEX (fndecl) = NULL_TREE;
    }

  /* When signature changes, we need to clear builtin info.  */
  if (DECL_BUILT_IN (fndecl))
    {
      DECL_BUILT_IN_CLASS (fndecl) = NOT_BUILT_IN;
      DECL_FUNCTION_CODE (fndecl) = (enum built_in_function) 0;
    }

  TREE_TYPE (fndecl) = new_type;
  DECL_VIRTUAL_P (fndecl) = 0;
  DECL_LANG_SPECIFIC (fndecl) = NULL;

  otypes.release ();
  oparms.release ();
}
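
/* Minimal sketch (FNDECL is assumed; not part of the pass): drop the second
   parameter of a two-parameter function by applying an adjustment vector
   that copies parameter 0 and removes parameter 1.  */
#if 0
ipa_parm_adjustment_vec adjustments;
struct ipa_parm_adjustment keep, drop;

adjustments.create (2);
memset (&keep, 0, sizeof (keep));
keep.op = IPA_PARM_OP_COPY;
keep.base_index = 0;
adjustments.quick_push (keep);
memset (&drop, 0, sizeof (drop));
drop.op = IPA_PARM_OP_REMOVE;
drop.base_index = 1;
adjustments.quick_push (drop);
ipa_modify_formal_parameters (fndecl, adjustments);
adjustments.release ();
#endif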
/* Modify actual arguments of a function call CS as indicated in ADJUSTMENTS.
   If this is a directly recursive call, CS must be NULL.  Otherwise it must
   contain the corresponding call graph edge.  */

void
ipa_modify_call_arguments (struct cgraph_edge *cs, gcall *stmt,
			   ipa_parm_adjustment_vec adjustments)
{
  struct cgraph_node *current_node = cgraph_node::get (current_function_decl);
  vec<tree> vargs;
  vec<tree, va_gc> **debug_args = NULL;
  gcall *new_stmt;
  gimple_stmt_iterator gsi, prev_gsi;
  tree callee_decl;
  int i, len;

  len = adjustments.length ();
  vargs.create (len);
  callee_decl = !cs ? gimple_call_fndecl (stmt) : cs->callee->decl;
  current_node->remove_stmt_references (stmt);

  gsi = gsi_for_stmt (stmt);
  prev_gsi = gsi;
  gsi_prev (&prev_gsi);
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;

      adj = &adjustments[i];

      if (adj->op == IPA_PARM_OP_COPY)
	{
	  tree arg = gimple_call_arg (stmt, adj->base_index);

	  vargs.quick_push (arg);
	}
      else if (adj->op != IPA_PARM_OP_REMOVE)
	{
	  tree expr, base, off;
	  location_t loc;
	  unsigned int deref_align = 0;
	  bool deref_base = false;

	  /* We create a new parameter out of the value of the old one, we can
	     do the following kind of transformations:

	     - A scalar passed by reference is converted to a scalar passed by
	       value.  (adj->by_ref is false and the type of the original
	       actual argument is a pointer to a scalar).

	     - A part of an aggregate is passed instead of the whole aggregate.
	       The part can be passed either by value or by reference, this is
	       determined by value of adj->by_ref.  Moreover, the code below
	       handles both situations when the original aggregate is passed by
	       value (its type is not a pointer) and when it is passed by
	       reference (it is a pointer to an aggregate).

	     When the new argument is passed by reference (adj->by_ref is true)
	     it must be a part of an aggregate and therefore we form it by
	     simply taking the address of a reference inside the original
	     aggregate.  */

	  gcc_checking_assert (adj->offset % BITS_PER_UNIT == 0);
	  base = gimple_call_arg (stmt, adj->base_index);
	  loc = DECL_P (base) ? DECL_SOURCE_LOCATION (base)
			      : EXPR_LOCATION (base);

	  if (TREE_CODE (base) != ADDR_EXPR
	      && POINTER_TYPE_P (TREE_TYPE (base)))
	    off = build_int_cst (adj->alias_ptr_type,
				 adj->offset / BITS_PER_UNIT);
	  else
	    {
	      HOST_WIDE_INT base_offset;
	      tree prev_base;
	      bool addrof;

	      if (TREE_CODE (base) == ADDR_EXPR)
		{
		  base = TREE_OPERAND (base, 0);
		  addrof = true;
		}
	      else
		addrof = false;
	      prev_base = base;
	      base = get_addr_base_and_unit_offset (base, &base_offset);
	      /* Aggregate arguments can have non-invariant addresses.  */
	      if (!base)
		{
		  base = build_fold_addr_expr (prev_base);
		  off = build_int_cst (adj->alias_ptr_type,
				       adj->offset / BITS_PER_UNIT);
		}
	      else if (TREE_CODE (base) == MEM_REF)
		{
		  if (!addrof)
		    {
		      deref_base = true;
		      deref_align = TYPE_ALIGN (TREE_TYPE (base));
		    }
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  off = int_const_binop (PLUS_EXPR, TREE_OPERAND (base, 1),
					 off);
		  base = TREE_OPERAND (base, 0);
		}
	      else
		{
		  off = build_int_cst (adj->alias_ptr_type,
				       base_offset
				       + adj->offset / BITS_PER_UNIT);
		  base = build_fold_addr_expr (base);
		}
	    }

	  if (!adj->by_ref)
	    {
	      tree type = adj->type;
	      unsigned int align;
	      unsigned HOST_WIDE_INT misalign;

	      if (deref_base)
		{
		  align = deref_align;
		  misalign = 0;
		}
	      else
		{
		  get_pointer_alignment_1 (base, &align, &misalign);
		  if (TYPE_ALIGN (type) > align)
		    align = TYPE_ALIGN (type);
		}
	      misalign += (offset_int::from (off, SIGNED).to_short_addr ()
			   * BITS_PER_UNIT);
	      misalign = misalign & (align - 1);
	      if (misalign != 0)
		align = (misalign & -misalign);
	      if (align < TYPE_ALIGN (type))
		type = build_aligned_type (type, align);
	      base = force_gimple_operand_gsi (&gsi, base,
					       true, NULL, true, GSI_SAME_STMT);
	      expr = fold_build2_loc (loc, MEM_REF, type, base, off);
	      /* If expr is not a valid gimple call argument emit
	         a load into a temporary.  */
	      if (is_gimple_reg_type (TREE_TYPE (expr)))
		{
		  gimple tem = gimple_build_assign (NULL_TREE, expr);
		  if (gimple_in_ssa_p (cfun))
		    {
		      gimple_set_vuse (tem, gimple_vuse (stmt));
		      expr = make_ssa_name (TREE_TYPE (expr), tem);
		    }
		  else
		    expr = create_tmp_reg (TREE_TYPE (expr));
		  gimple_assign_set_lhs (tem, expr);
		  gsi_insert_before (&gsi, tem, GSI_SAME_STMT);
		}
	    }
	  else
	    {
	      expr = fold_build2_loc (loc, MEM_REF, adj->type, base, off);
	      expr = build_fold_addr_expr (expr);
	      expr = force_gimple_operand_gsi (&gsi, expr,
					       true, NULL, true, GSI_SAME_STMT);
	    }
	  vargs.quick_push (expr);
	}
      if (adj->op != IPA_PARM_OP_COPY && MAY_HAVE_DEBUG_STMTS)
	{
	  unsigned int ix;
	  tree ddecl = NULL_TREE, origin = DECL_ORIGIN (adj->base), arg;
	  gimple def_temp;

	  arg = gimple_call_arg (stmt, adj->base_index);
	  if (!useless_type_conversion_p (TREE_TYPE (origin), TREE_TYPE (arg)))
	    {
	      if (!fold_convertible_p (TREE_TYPE (origin), arg))
		continue;
	      arg = fold_convert_loc (gimple_location (stmt),
				      TREE_TYPE (origin), arg);
	    }
	  if (debug_args == NULL)
	    debug_args = decl_debug_args_insert (callee_decl);
	  for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl); ix += 2)
	    if (ddecl == origin)
	      {
		ddecl = (**debug_args)[ix + 1];
		break;
	      }
	  if (ddecl == NULL)
	    {
	      ddecl = make_node (DEBUG_EXPR_DECL);
	      DECL_ARTIFICIAL (ddecl) = 1;
	      TREE_TYPE (ddecl) = TREE_TYPE (origin);
	      DECL_MODE (ddecl) = DECL_MODE (origin);

	      vec_safe_push (*debug_args, origin);
	      vec_safe_push (*debug_args, ddecl);
	    }
	  def_temp = gimple_build_debug_bind (ddecl, unshare_expr (arg), stmt);
	  gsi_insert_before (&gsi, def_temp, GSI_SAME_STMT);
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "replacing stmt:");
      print_gimple_stmt (dump_file, gsi_stmt (gsi), 0, 0);
    }

  new_stmt = gimple_build_call_vec (callee_decl, vargs);
  vargs.release ();
  if (gimple_call_lhs (stmt))
    gimple_call_set_lhs (new_stmt, gimple_call_lhs (stmt));

  gimple_set_block (new_stmt, gimple_block (stmt));
  if (gimple_has_location (stmt))
    gimple_set_location (new_stmt, gimple_location (stmt));
  gimple_call_set_chain (new_stmt, gimple_call_chain (stmt));
  gimple_call_copy_flags (new_stmt, stmt);
  if (gimple_in_ssa_p (cfun))
    {
      gimple_set_vuse (new_stmt, gimple_vuse (stmt));
      if (gimple_vdef (stmt))
	{
	  gimple_set_vdef (new_stmt, gimple_vdef (stmt));
	  SSA_NAME_DEF_STMT (gimple_vdef (new_stmt)) = new_stmt;
	}
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "with stmt:");
      print_gimple_stmt (dump_file, new_stmt, 0, 0);
      fprintf (dump_file, "\n");
    }
  gsi_replace (&gsi, new_stmt, true);
  if (cs)
    cs->set_call_stmt (new_stmt);
  do
    {
      current_node->record_stmt_references (gsi_stmt (gsi));
      gsi_prev (&gsi);
    }
  while (gsi_stmt (gsi) != gsi_stmt (prev_gsi));
}
/* If the expression *EXPR should be replaced by a reduction of a parameter, do
   so.  ADJUSTMENTS is a pointer to a vector of adjustments.  CONVERT
   specifies whether the function should care about type incompatibility
   between the current and new expressions.  If it is false, the function will
   leave incompatibility issues to the caller.  Return true iff the expression
   was modified.  */

bool
ipa_modify_expr (tree *expr, bool convert,
		 ipa_parm_adjustment_vec adjustments)
{
  struct ipa_parm_adjustment *cand
    = ipa_get_adjustment_candidate (&expr, &convert, adjustments, false);
  if (!cand)
    return false;

  tree src;
  if (cand->by_ref)
    src = build_simple_mem_ref (cand->new_decl);
  else
    src = cand->new_decl;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "About to replace expr ");
      print_generic_expr (dump_file, *expr, 0);
      fprintf (dump_file, " with ");
      print_generic_expr (dump_file, src, 0);
      fprintf (dump_file, "\n");
    }

  if (convert && !useless_type_conversion_p (TREE_TYPE (*expr), cand->type))
    {
      tree vce = build1 (VIEW_CONVERT_EXPR, TREE_TYPE (*expr), src);
      *expr = vce;
    }
  else
    *expr = src;
  return true;
}
/* If T is an SSA_NAME, return NULL if it is not a default def or
   return its base variable if it is.  If IGNORE_DEFAULT_DEF is true,
   the base variable is always returned, regardless if it is a default
   def.  Return T if it is not an SSA_NAME.  */

static tree
get_ssa_base_param (tree t, bool ignore_default_def)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      if (ignore_default_def || SSA_NAME_IS_DEFAULT_DEF (t))
	return SSA_NAME_VAR (t);
      else
	return NULL_TREE;
    }
  return t;
}
/* Given an expression, return an adjustment entry specifying the
   transformation to be done on EXPR.  If no suitable adjustment entry
   was found, returns NULL.

   If IGNORE_DEFAULT_DEF is set, consider SSA_NAMEs which are not a
   default def, otherwise bail on them.

   If CONVERT is non-NULL, this function will set *CONVERT if the
   expression provided is a component reference.  ADJUSTMENTS is the
   adjustments vector.  */

ipa_parm_adjustment *
ipa_get_adjustment_candidate (tree **expr, bool *convert,
			      ipa_parm_adjustment_vec adjustments,
			      bool ignore_default_def)
{
  if (TREE_CODE (**expr) == BIT_FIELD_REF
      || TREE_CODE (**expr) == IMAGPART_EXPR
      || TREE_CODE (**expr) == REALPART_EXPR)
    {
      *expr = &TREE_OPERAND (**expr, 0);
      if (convert)
	*convert = true;
    }

  HOST_WIDE_INT offset, size, max_size;
  tree base = get_ref_base_and_extent (**expr, &offset, &size, &max_size);
  if (!base || size == -1 || max_size == -1)
    return NULL;

  if (TREE_CODE (base) == MEM_REF)
    {
      offset += mem_ref_offset (base).to_short_addr () * BITS_PER_UNIT;
      base = TREE_OPERAND (base, 0);
    }

  base = get_ssa_base_param (base, ignore_default_def);
  if (!base || TREE_CODE (base) != PARM_DECL)
    return NULL;

  struct ipa_parm_adjustment *cand = NULL;
  unsigned int len = adjustments.length ();
  for (unsigned i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj = &adjustments[i];

      if (adj->base == base
	  && (adj->offset == offset || adj->op == IPA_PARM_OP_REMOVE))
	{
	  cand = adj;
	  break;
	}
    }

  if (!cand || cand->op == IPA_PARM_OP_COPY || cand->op == IPA_PARM_OP_REMOVE)
    return NULL;
  return cand;
}
/* Return true iff BASE_INDEX is in ADJUSTMENTS more than once.  */

static bool
index_in_adjustments_multiple_times_p (int base_index,
				       ipa_parm_adjustment_vec adjustments)
{
  int i, len = adjustments.length ();
  bool one = false;

  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (adj->base_index == base_index)
	{
	  if (one)
	    return true;
	  else
	    one = true;
	}
    }
  return false;
}
/* Return adjustments that should have the same effect on function parameters
   and call arguments as if they were first changed according to adjustments
   in INNER and then by adjustments in OUTER.  */

ipa_parm_adjustment_vec
ipa_combine_adjustments (ipa_parm_adjustment_vec inner,
			 ipa_parm_adjustment_vec outer)
{
  int i, outlen = outer.length ();
  int inlen = inner.length ();
  int removals = 0;
  ipa_parm_adjustment_vec adjustments, tmp;

  tmp.create (inlen);
  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n;
      n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	removals++;
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (n->op != IPA_PARM_OP_NEW);
	  tmp.quick_push (*n);
	}
    }

  adjustments.create (outlen + removals);
  for (i = 0; i < outlen; i++)
    {
      struct ipa_parm_adjustment r;
      struct ipa_parm_adjustment *out = &outer[i];
      struct ipa_parm_adjustment *in = &tmp[out->base_index];

      memset (&r, 0, sizeof (r));
      gcc_assert (in->op != IPA_PARM_OP_REMOVE);
      if (out->op == IPA_PARM_OP_REMOVE)
	{
	  if (!index_in_adjustments_multiple_times_p (in->base_index, tmp))
	    {
	      r.op = IPA_PARM_OP_REMOVE;
	      adjustments.quick_push (r);
	    }
	  continue;
	}
      else
	{
	  /* FIXME: Handling of new arguments is not implemented yet.  */
	  gcc_assert (out->op != IPA_PARM_OP_NEW);
	}

      r.base_index = in->base_index;
      r.type = out->type;

      /* FIXME:  Create nonlocal value too.  */

      if (in->op == IPA_PARM_OP_COPY && out->op == IPA_PARM_OP_COPY)
	r.op = IPA_PARM_OP_COPY;
      else if (in->op == IPA_PARM_OP_COPY)
	r.offset = out->offset;
      else if (out->op == IPA_PARM_OP_COPY)
	r.offset = in->offset;
      else
	r.offset = in->offset + out->offset;
      adjustments.quick_push (r);
    }

  for (i = 0; i < inlen; i++)
    {
      struct ipa_parm_adjustment *n = &inner[i];

      if (n->op == IPA_PARM_OP_REMOVE)
	adjustments.quick_push (*n);
    }

  tmp.release ();
  return adjustments;
}
/* Dump the adjustments in the vector ADJUSTMENTS to dump_file in a
   human-friendly way, assuming they are meant to be applied to FNDECL.  */

void
ipa_dump_param_adjustments (FILE *file, ipa_parm_adjustment_vec adjustments,
			    tree fndecl)
{
  int i, len = adjustments.length ();
  bool first = true;
  vec<tree> parms = ipa_get_vector_of_formal_parms (fndecl);

  fprintf (file, "IPA param adjustments: ");
  for (i = 0; i < len; i++)
    {
      struct ipa_parm_adjustment *adj;
      adj = &adjustments[i];

      if (!first)
	fprintf (file, "                 ");
      else
	first = false;

      fprintf (file, "%i. base_index: %i - ", i, adj->base_index);
      print_generic_expr (file, parms[adj->base_index], 0);
      if (adj->base)
	{
	  fprintf (file, ", base: ");
	  print_generic_expr (file, adj->base, 0);
	}
      if (adj->new_decl)
	{
	  fprintf (file, ", new_decl: ");
	  print_generic_expr (file, adj->new_decl, 0);
	}
      if (adj->new_ssa_base)
	{
	  fprintf (file, ", new_ssa_base: ");
	  print_generic_expr (file, adj->new_ssa_base, 0);
	}

      if (adj->op == IPA_PARM_OP_COPY)
	fprintf (file, ", copy_param");
      else if (adj->op == IPA_PARM_OP_REMOVE)
	fprintf (file, ", remove_param");
      else
	fprintf (file, ", offset %li", (long) adj->offset);
      if (adj->by_ref)
	fprintf (file, ", by_ref");
      print_node_brief (file, ", type: ", adj->type, 0);
      fprintf (file, "\n");
    }
  parms.release ();
}
/* Dump the AV linked list.  */

void
ipa_dump_agg_replacement_values (FILE *f, struct ipa_agg_replacement_value *av)
{
  bool comma = false;
  fprintf (f, "     Aggregate replacements:");
  for (; av; av = av->next)
    {
      fprintf (f, "%s %i[" HOST_WIDE_INT_PRINT_DEC "]=", comma ? "," : "",
	       av->index, av->offset);
      print_generic_expr (f, av->value, 0);
      comma = true;
    }
  fprintf (f, "\n");
}
/* Stream out jump function JUMP_FUNC to OB.  */

static void
ipa_write_jump_function (struct output_block *ob,
			 struct ipa_jump_func *jump_func)
{
  struct ipa_agg_jf_item *item;
  struct bitpack_d bp;
  int i, count;

  streamer_write_uhwi (ob, jump_func->type);
  switch (jump_func->type)
    {
    case IPA_JF_UNKNOWN:
      break;
    case IPA_JF_CONST:
      gcc_assert (EXPR_LOCATION (jump_func->value.constant.value)
		  == UNKNOWN_LOCATION);
      stream_write_tree (ob, jump_func->value.constant.value, true);
      break;
    case IPA_JF_PASS_THROUGH:
      streamer_write_uhwi (ob, jump_func->value.pass_through.operation);
      if (jump_func->value.pass_through.operation == NOP_EXPR)
	{
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, jump_func->value.pass_through.agg_preserved, 1);
	  streamer_write_bitpack (&bp);
	}
      else
	{
	  stream_write_tree (ob, jump_func->value.pass_through.operand, true);
	  streamer_write_uhwi (ob, jump_func->value.pass_through.formal_id);
	}
      break;
    case IPA_JF_ANCESTOR:
      streamer_write_uhwi (ob, jump_func->value.ancestor.offset);
      streamer_write_uhwi (ob, jump_func->value.ancestor.formal_id);
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->value.ancestor.agg_preserved, 1);
      streamer_write_bitpack (&bp);
      break;
    }

  count = vec_safe_length (jump_func->agg.items);
  streamer_write_uhwi (ob, count);
  if (count)
    {
      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, jump_func->agg.by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  FOR_EACH_VEC_SAFE_ELT (jump_func->agg.items, i, item)
    {
      streamer_write_uhwi (ob, item->offset);
      stream_write_tree (ob, item->value, true);
    }

  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, jump_func->alignment.known, 1);
  streamer_write_bitpack (&bp);
  if (jump_func->alignment.known)
    {
      streamer_write_uhwi (ob, jump_func->alignment.align);
      streamer_write_uhwi (ob, jump_func->alignment.misalign);
    }
}
/* Read in jump function JUMP_FUNC from IB.  */

static void
ipa_read_jump_function (struct lto_input_block *ib,
			struct ipa_jump_func *jump_func,
			struct cgraph_edge *cs,
			struct data_in *data_in)
{
  enum jump_func_type jftype;
  enum tree_code operation;
  int i, count;

  jftype = (enum jump_func_type) streamer_read_uhwi (ib);
  switch (jftype)
    {
    case IPA_JF_UNKNOWN:
      ipa_set_jf_unknown (jump_func);
      break;
    case IPA_JF_CONST:
      ipa_set_jf_constant (jump_func, stream_read_tree (ib, data_in), cs);
      break;
    case IPA_JF_PASS_THROUGH:
      operation = (enum tree_code) streamer_read_uhwi (ib);
      if (operation == NOP_EXPR)
	{
	  int formal_id = streamer_read_uhwi (ib);
	  struct bitpack_d bp = streamer_read_bitpack (ib);
	  bool agg_preserved = bp_unpack_value (&bp, 1);
	  ipa_set_jf_simple_pass_through (jump_func, formal_id, agg_preserved);
	}
      else
	{
	  tree operand = stream_read_tree (ib, data_in);
	  int formal_id = streamer_read_uhwi (ib);
	  ipa_set_jf_arith_pass_through (jump_func, formal_id, operand,
					 operation);
	}
      break;
    case IPA_JF_ANCESTOR:
      {
	HOST_WIDE_INT offset = streamer_read_uhwi (ib);
	int formal_id = streamer_read_uhwi (ib);
	struct bitpack_d bp = streamer_read_bitpack (ib);
	bool agg_preserved = bp_unpack_value (&bp, 1);
	ipa_set_ancestor_jf (jump_func, offset, formal_id, agg_preserved);
	break;
      }
    }

  count = streamer_read_uhwi (ib);
  vec_alloc (jump_func->agg.items, count);
  if (count)
    {
      struct bitpack_d bp = streamer_read_bitpack (ib);
      jump_func->agg.by_ref = bp_unpack_value (&bp, 1);
    }
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_jf_item item;
      item.offset = streamer_read_uhwi (ib);
      item.value = stream_read_tree (ib, data_in);
      jump_func->agg.items->quick_push (item);
    }

  struct bitpack_d bp = streamer_read_bitpack (ib);
  bool alignment_known = bp_unpack_value (&bp, 1);
  if (alignment_known)
    {
      jump_func->alignment.known = true;
      jump_func->alignment.align = streamer_read_uhwi (ib);
      jump_func->alignment.misalign = streamer_read_uhwi (ib);
    }
  else
    jump_func->alignment.known = false;
}
/* Stream out parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining to OB.  */

static void
ipa_write_indirect_edge_info (struct output_block *ob,
			      struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  streamer_write_hwi (ob, ii->param_index);
  bp = bitpack_create (ob->main_stream);
  bp_pack_value (&bp, ii->polymorphic, 1);
  bp_pack_value (&bp, ii->agg_contents, 1);
  bp_pack_value (&bp, ii->member_ptr, 1);
  bp_pack_value (&bp, ii->by_ref, 1);
  bp_pack_value (&bp, ii->vptr_changed, 1);
  streamer_write_bitpack (&bp);
  if (ii->agg_contents || ii->polymorphic)
    streamer_write_hwi (ob, ii->offset);
  else
    gcc_assert (ii->offset == 0);

  if (ii->polymorphic)
    {
      streamer_write_hwi (ob, ii->otr_token);
      stream_write_tree (ob, ii->otr_type, true);
      ii->context.stream_out (ob);
    }
}
/* Read in parts of cgraph_indirect_call_info corresponding to CS that are
   relevant to indirect inlining from IB.  */

static void
ipa_read_indirect_edge_info (struct lto_input_block *ib,
			     struct data_in *data_in,
			     struct cgraph_edge *cs)
{
  struct cgraph_indirect_call_info *ii = cs->indirect_info;
  struct bitpack_d bp;

  ii->param_index = (int) streamer_read_hwi (ib);
  bp = streamer_read_bitpack (ib);
  ii->polymorphic = bp_unpack_value (&bp, 1);
  ii->agg_contents = bp_unpack_value (&bp, 1);
  ii->member_ptr = bp_unpack_value (&bp, 1);
  ii->by_ref = bp_unpack_value (&bp, 1);
  ii->vptr_changed = bp_unpack_value (&bp, 1);
  if (ii->agg_contents || ii->polymorphic)
    ii->offset = (HOST_WIDE_INT) streamer_read_hwi (ib);
  else
    ii->offset = 0;
  if (ii->polymorphic)
    {
      ii->otr_token = (HOST_WIDE_INT) streamer_read_hwi (ib);
      ii->otr_type = stream_read_tree (ib, data_in);
      ii->context.stream_in (ib, data_in);
    }
}
/* Stream out NODE info to OB.  */

static void
ipa_write_node_info (struct output_block *ob, struct cgraph_node *node)
{
  int node_ref;
  lto_symtab_encoder_t encoder;
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int j;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  streamer_write_uhwi (ob, ipa_get_param_count (info));
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_uhwi (ob, ipa_get_param_move_cost (info, j));
  bp = bitpack_create (ob->main_stream);
  gcc_assert (info->analysis_done
	      || ipa_get_param_count (info) == 0);
  gcc_assert (!info->node_enqueued);
  gcc_assert (!info->ipcp_orig_node);
  for (j = 0; j < ipa_get_param_count (info); j++)
    bp_pack_value (&bp, ipa_is_param_used (info, j), 1);
  streamer_write_bitpack (&bp);
  for (j = 0; j < ipa_get_param_count (info); j++)
    streamer_write_hwi (ob, ipa_get_controlled_uses (info, j));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);

      streamer_write_uhwi (ob,
			   ipa_get_cs_argument_count (args) * 2
			   + (args->polymorphic_call_contexts != NULL));
      for (j = 0; j < ipa_get_cs_argument_count (args); j++)
	{
	  ipa_write_jump_function (ob, ipa_get_ith_jump_func (args, j));
	  if (args->polymorphic_call_contexts != NULL)
	    ipa_get_ith_polymorhic_call_context (args, j)->stream_out (ob);
	}
      ipa_write_indirect_edge_info (ob, e);
    }
}
/* Stream in NODE info from IB.  */

static void
ipa_read_node_info (struct lto_input_block *ib, struct cgraph_node *node,
		    struct data_in *data_in)
{
  struct ipa_node_params *info = IPA_NODE_REF (node);
  int k;
  struct cgraph_edge *e;
  struct bitpack_d bp;

  ipa_alloc_node_params (node, streamer_read_uhwi (ib));

  for (k = 0; k < ipa_get_param_count (info); k++)
    info->descriptors[k].move_cost = streamer_read_uhwi (ib);

  bp = streamer_read_bitpack (ib);
  if (ipa_get_param_count (info) != 0)
    info->analysis_done = true;
  info->node_enqueued = false;
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_param_used (info, k, bp_unpack_value (&bp, 1));
  for (k = 0; k < ipa_get_param_count (info); k++)
    ipa_set_controlled_uses (info, k, streamer_read_hwi (ib));
  for (e = node->callees; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (!count)
	continue;
      vec_safe_grow_cleared (args->jump_functions, count);
      if (contexts_computed)
	vec_safe_grow_cleared (args->polymorphic_call_contexts, count);

      for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	{
	  ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				  data_in);
	  if (contexts_computed)
	    ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
								      data_in);
	}
    }
  for (e = node->indirect_calls; e; e = e->next_callee)
    {
      struct ipa_edge_args *args = IPA_EDGE_REF (e);
      int count = streamer_read_uhwi (ib);
      bool contexts_computed = count & 1;
      count /= 2;

      if (count)
	{
	  vec_safe_grow_cleared (args->jump_functions, count);
	  if (contexts_computed)
	    vec_safe_grow_cleared (args->polymorphic_call_contexts, count);
	  for (k = 0; k < ipa_get_cs_argument_count (args); k++)
	    {
	      ipa_read_jump_function (ib, ipa_get_ith_jump_func (args, k), e,
				      data_in);
	      if (contexts_computed)
		ipa_get_ith_polymorhic_call_context (args, k)->stream_in (ib,
									  data_in);
	    }
	}
      ipa_read_indirect_edge_info (ib, data_in, e);
    }
}
/* Write jump functions for nodes in SET.  */

void
ipa_prop_write_jump_functions (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  if (!ipa_node_params_sum)
    return;

  ob = create_output_block (LTO_section_jump_functions);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	count++;
    }

  streamer_write_uhwi (ob, count);

  /* Process all of the functions.  */
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ()
	  && IPA_NODE_REF (node) != NULL)
	ipa_write_node_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read section in file FILE_DATA of length LEN with data DATA.  */

static void
ipa_prop_read_section (struct lto_file_decl_data *file_data, const char *data,
		       size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in =
    lto_data_in_create (file_data, (const char *) data + string_offset,
			header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      ipa_read_node_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
/* Read ipcp jump functions.  */

void
ipa_prop_read_jump_functions (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
  ipa_register_cgraph_hooks ();

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_jump_functions,
					       NULL, &len);
      if (data)
	ipa_prop_read_section (file_data, data, len);
    }
}
/* After merging units, we can get a mismatch in argument counts.
   Also decl merging might've rendered parameter lists obsolete.
   Also compute called_with_variable_arg info.  */

void
ipa_update_after_lto_read (void)
{
  ipa_check_create_node_params ();
  ipa_check_create_edge_args ();
}
static void
write_ipcp_transformation_info (output_block *ob, cgraph_node *node)
{
  int node_ref;
  unsigned int count = 0;
  lto_symtab_encoder_t encoder;
  struct ipa_agg_replacement_value *aggvals, *av;

  aggvals = ipa_get_agg_replacements_for_node (node);
  encoder = ob->decl_state->symtab_node_encoder;
  node_ref = lto_symtab_encoder_encode (encoder, node);
  streamer_write_uhwi (ob, node_ref);

  for (av = aggvals; av; av = av->next)
    count++;
  streamer_write_uhwi (ob, count);

  for (av = aggvals; av; av = av->next)
    {
      struct bitpack_d bp;

      streamer_write_uhwi (ob, av->offset);
      streamer_write_uhwi (ob, av->index);
      stream_write_tree (ob, av->value, true);

      bp = bitpack_create (ob->main_stream);
      bp_pack_value (&bp, av->by_ref, 1);
      streamer_write_bitpack (&bp);
    }

  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (ts && vec_safe_length (ts->alignments) > 0)
    {
      count = ts->alignments->length ();

      streamer_write_uhwi (ob, count);
      for (unsigned i = 0; i < count; ++i)
	{
	  ipa_alignment *parm_al = &(*ts->alignments)[i];

	  struct bitpack_d bp;
	  bp = bitpack_create (ob->main_stream);
	  bp_pack_value (&bp, parm_al->known, 1);
	  streamer_write_bitpack (&bp);
	  if (parm_al->known)
	    {
	      streamer_write_uhwi (ob, parm_al->align);
	      streamer_write_hwi_in_range (ob->main_stream, 0, parm_al->align,
					   parm_al->misalign);
	    }
	}
    }
  else
    streamer_write_uhwi (ob, 0);
}
/* Stream in the aggregate value replacement chain for NODE from IB.  */

static void
read_ipcp_transformation_info (lto_input_block *ib, cgraph_node *node,
			       data_in *data_in)
{
  struct ipa_agg_replacement_value *aggvals = NULL;
  unsigned int count, i;

  count = streamer_read_uhwi (ib);
  for (i = 0; i < count; i++)
    {
      struct ipa_agg_replacement_value *av;
      struct bitpack_d bp;

      av = ggc_alloc<ipa_agg_replacement_value> ();
      av->offset = streamer_read_uhwi (ib);
      av->index = streamer_read_uhwi (ib);
      av->value = stream_read_tree (ib, data_in);
      bp = streamer_read_bitpack (ib);
      av->by_ref = bp_unpack_value (&bp, 1);
      av->next = aggvals;
      aggvals = av;
    }
  ipa_set_node_agg_value_chain (node, aggvals);

  count = streamer_read_uhwi (ib);
  if (count > 0)
    {
      ipcp_grow_transformations_if_necessary ();

      ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
      vec_safe_grow_cleared (ts->alignments, count);

      for (i = 0; i < count; i++)
	{
	  ipa_alignment *parm_al;
	  parm_al = &(*ts->alignments)[i];
	  struct bitpack_d bp;
	  bp = streamer_read_bitpack (ib);
	  parm_al->known = bp_unpack_value (&bp, 1);
	  if (parm_al->known)
	    {
	      parm_al->align = streamer_read_uhwi (ib);
	      parm_al->misalign
		= streamer_read_hwi_in_range (ib, "ipa-prop misalign",
					      0, parm_al->align);
	    }
	}
    }
}
/* Write all aggregate replacements for nodes in set.  */

void
ipcp_write_transformation_summaries (void)
{
  struct cgraph_node *node;
  struct output_block *ob;
  unsigned int count = 0;
  lto_symtab_encoder_iterator lsei;
  lto_symtab_encoder_t encoder;

  ob = create_output_block (LTO_section_ipcp_transform);
  encoder = ob->decl_state->symtab_node_encoder;
  ob->symbol = NULL;
  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	count++;
    }

  streamer_write_uhwi (ob, count);

  for (lsei = lsei_start_function_in_partition (encoder); !lsei_end_p (lsei);
       lsei_next_function_in_partition (&lsei))
    {
      node = lsei_cgraph_node (lsei);
      if (node->has_gimple_body_p ())
	write_ipcp_transformation_info (ob, node);
    }
  streamer_write_char_stream (ob->main_stream, 0);
  produce_asm (ob, NULL);
  destroy_output_block (ob);
}
/* Read replacements section in file FILE_DATA of length LEN with data
   DATA.  */

static void
read_replacements_section (struct lto_file_decl_data *file_data,
			   const char *data,
			   size_t len)
{
  const struct lto_function_header *header =
    (const struct lto_function_header *) data;
  const int cfg_offset = sizeof (struct lto_function_header);
  const int main_offset = cfg_offset + header->cfg_size;
  const int string_offset = main_offset + header->main_size;
  struct data_in *data_in;
  unsigned int i;
  unsigned int count;

  lto_input_block ib_main ((const char *) data + main_offset,
			   header->main_size, file_data->mode_table);

  data_in = lto_data_in_create (file_data, (const char *) data + string_offset,
				header->string_size, vNULL);
  count = streamer_read_uhwi (&ib_main);

  for (i = 0; i < count; i++)
    {
      unsigned int index;
      struct cgraph_node *node;
      lto_symtab_encoder_t encoder;

      index = streamer_read_uhwi (&ib_main);
      encoder = file_data->symtab_node_encoder;
      node = dyn_cast<cgraph_node *> (lto_symtab_encoder_deref (encoder,
								index));
      gcc_assert (node->definition);
      read_ipcp_transformation_info (&ib_main, node, data_in);
    }
  lto_free_section_data (file_data, LTO_section_jump_functions, NULL, data,
			 len);
  lto_data_in_delete (data_in);
}
/* Read IPA-CP aggregate replacements.  */

void
ipcp_read_transformation_summaries (void)
{
  struct lto_file_decl_data **file_data_vec = lto_get_file_decl_data ();
  struct lto_file_decl_data *file_data;
  unsigned int j = 0;

  while ((file_data = file_data_vec[j++]))
    {
      size_t len;
      const char *data = lto_get_section_data (file_data,
					       LTO_section_ipcp_transform,
					       NULL, &len);
      if (data)
	read_replacements_section (file_data, data, len);
    }
}
/* Adjust the aggregate replacements in AGGVAL to reflect parameters skipped
   in the modified function body of NODE.  */

static void
adjust_agg_replacement_values (struct cgraph_node *node,
			       struct ipa_agg_replacement_value *aggval)
{
  struct ipa_agg_replacement_value *v;
  int i, c = 0, d = 0, *adj;

  if (!node->clone.combined_args_to_skip)
    return;

  for (v = aggval; v; v = v->next)
    {
      gcc_assert (v->index >= 0);
      if (c < v->index)
	c = v->index;
    }
  c++;

  adj = XALLOCAVEC (int, c);
  for (i = 0; i < c; i++)
    if (bitmap_bit_p (node->clone.combined_args_to_skip, i))
      {
	adj[i] = -1;
	d++;
      }
    else
      adj[i] = i - d;

  for (v = aggval; v; v = v->next)
    v->index = adj[v->index];
}
/* Dominator walker driving the ipcp modification phase.  */

class ipcp_modif_dom_walker : public dom_walker
{
public:
  ipcp_modif_dom_walker (struct func_body_info *fbi,
			 vec<ipa_param_descriptor> descs,
			 struct ipa_agg_replacement_value *av,
			 bool *sc, bool *cc)
    : dom_walker (CDI_DOMINATORS), m_fbi (fbi), m_descriptors (descs),
      m_aggval (av), m_something_changed (sc), m_cfg_changed (cc) {}

  virtual void before_dom_children (basic_block);

private:
  struct func_body_info *m_fbi;
  vec<ipa_param_descriptor> m_descriptors;
  struct ipa_agg_replacement_value *m_aggval;
  bool *m_something_changed, *m_cfg_changed;
};
void
ipcp_modif_dom_walker::before_dom_children (basic_block bb)
{
  gimple_stmt_iterator gsi;
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      struct ipa_agg_replacement_value *v;
      gimple stmt = gsi_stmt (gsi);
      tree rhs, val, t;
      HOST_WIDE_INT offset, size;
      int index;
      bool by_ref, vce;

      if (!gimple_assign_load_p (stmt))
	continue;
      rhs = gimple_assign_rhs1 (stmt);
      if (!is_gimple_reg_type (TREE_TYPE (rhs)))
	continue;

      vce = false;
      t = rhs;
      while (handled_component_p (t))
	{
	  /* V_C_E can do things like convert an array of integers to one
	     bigger integer and similar things we do not handle below.  */
	  if (TREE_CODE (rhs) == VIEW_CONVERT_EXPR)
	    {
	      vce = true;
	      break;
	    }
	  t = TREE_OPERAND (t, 0);
	}
      if (vce)
	continue;

      if (!ipa_load_from_parm_agg_1 (m_fbi, m_descriptors, stmt, rhs, &index,
				     &offset, &size, &by_ref))
	continue;
      for (v = m_aggval; v; v = v->next)
	if (v->index == index
	    && v->offset == offset)
	  break;
      if (!v
	  || v->by_ref != by_ref
	  || tree_to_shwi (TYPE_SIZE (TREE_TYPE (v->value))) != size)
	continue;

      gcc_checking_assert (is_gimple_ip_invariant (v->value));
      if (!useless_type_conversion_p (TREE_TYPE (rhs), TREE_TYPE (v->value)))
	{
	  if (fold_convertible_p (TREE_TYPE (rhs), v->value))
	    val = fold_build1 (NOP_EXPR, TREE_TYPE (rhs), v->value);
	  else if (TYPE_SIZE (TREE_TYPE (rhs))
		   == TYPE_SIZE (TREE_TYPE (v->value)))
	    val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), v->value);
	  else
	    {
	      if (dump_file)
		{
		  fprintf (dump_file, "    const ");
		  print_generic_expr (dump_file, v->value, 0);
		  fprintf (dump_file, "  can't be converted to type of ");
		  print_generic_expr (dump_file, rhs, 0);
		  fprintf (dump_file, "\n");
		}
	      continue;
	    }
	}
      else
	val = v->value;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Modifying stmt:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	}
      gimple_assign_set_rhs_from_tree (&gsi, val);
      update_stmt (stmt);

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "into:\n  ");
	  print_gimple_stmt (dump_file, stmt, 0, 0);
	  fprintf (dump_file, "\n");
	}

      *m_something_changed = true;
      if (maybe_clean_eh_stmt (stmt)
	  && gimple_purge_dead_eh_edges (gimple_bb (stmt)))
	*m_cfg_changed = true;
    }
}
/* Update alignment of formal parameters as described in
   ipcp_transformation_summary.  */

static void
ipcp_update_alignments (struct cgraph_node *node)
{
  tree fndecl = node->decl;
  tree parm = DECL_ARGUMENTS (fndecl);
  tree next_parm = parm;
  ipcp_transformation_summary *ts = ipcp_get_transformation_summary (node);
  if (!ts || vec_safe_length (ts->alignments) == 0)
    return;
  const vec<ipa_alignment, va_gc> &alignments = *ts->alignments;
  unsigned count = alignments.length ();

  for (unsigned i = 0; i < count; ++i, parm = next_parm)
    {
      if (node->clone.combined_args_to_skip
	  && bitmap_bit_p (node->clone.combined_args_to_skip, i))
	continue;
      gcc_checking_assert (parm);
      next_parm = DECL_CHAIN (parm);

      if (!alignments[i].known || !is_gimple_reg (parm))
	continue;
      tree ddef = ssa_default_def (DECL_STRUCT_FUNCTION (node->decl), parm);
      if (!ddef)
	continue;

      if (dump_file)
	fprintf (dump_file, "  Adjusting alignment of param %u to %u, "
		 "misalignment to %u\n", i, alignments[i].align,
		 alignments[i].misalign);

      struct ptr_info_def *pi = get_ptr_info (ddef);
      gcc_checking_assert (pi);
      unsigned old_align;
      unsigned old_misalign;
      bool old_known = get_ptr_info_alignment (pi, &old_align, &old_misalign);
      if (old_known
	  && old_align >= alignments[i].align)
	{
	  if (dump_file)
	    fprintf (dump_file, "    But the alignment was already %u.\n",
		     old_align);
	  continue;
	}
      set_ptr_info_alignment (pi, alignments[i].align, alignments[i].misalign);
    }
}
/* IPCP transformation phase doing propagation of aggregate values.  */

unsigned int
ipcp_transform_function (struct cgraph_node *node)
{
  vec<ipa_param_descriptor> descriptors = vNULL;
  struct func_body_info fbi;
  struct ipa_agg_replacement_value *aggval;
  int param_count;
  bool cfg_changed = false, something_changed = false;

  gcc_checking_assert (cfun);
  gcc_checking_assert (current_function_decl);

  if (dump_file)
    fprintf (dump_file, "Modification phase of node %s/%i\n",
	     node->name (), node->order);

  ipcp_update_alignments (node);
  aggval = ipa_get_agg_replacements_for_node (node);
  if (!aggval)
    return 0;
  param_count = count_formal_params (node->decl);
  if (param_count == 0)
    return 0;
  adjust_agg_replacement_values (node, aggval);
  if (dump_file)
    ipa_dump_agg_replacement_values (dump_file, aggval);

  fbi.node = node;
  fbi.info = NULL;
  fbi.bb_infos = vNULL;
  fbi.bb_infos.safe_grow_cleared (last_basic_block_for_fn (cfun));
  fbi.param_count = param_count;
  fbi.aa_walked = 0;

  descriptors.safe_grow_cleared (param_count);
  ipa_populate_param_decls (node, descriptors);
  calculate_dominance_info (CDI_DOMINATORS);
  ipcp_modif_dom_walker (&fbi, descriptors, aggval, &something_changed,
			 &cfg_changed).walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));

  int i;
  struct ipa_bb_info *bi;
  FOR_EACH_VEC_ELT (fbi.bb_infos, i, bi)
    free_ipa_bb_info (bi);
  fbi.bb_infos.release ();
  free_dominance_info (CDI_DOMINATORS);
  (*ipcp_transformations)[node->uid].agg_values = NULL;
  (*ipcp_transformations)[node->uid].alignments = NULL;
  descriptors.release ();

  if (!something_changed)
    return 0;
  else if (cfg_changed)
    return TODO_update_ssa_only_virtuals | TODO_cleanup_cfg;
  else
    return TODO_update_ssa_only_virtuals;
}