This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]
Other format: [Raw text]

Re: [RFC, PATCH 1/n] IPA C++ refactoring


> Hi,
>    this first patch continues with rafactoring of IPA infrastructure so that we will have C++ API. In the patch, I transformed many global functions to members of symtab_node and cgraph_node.
> 
> Example:
> cgraph_remove_node (struct cgraph_node *node) -> cgraph_node::remove (void)
> symtab_unregister_node (symtab_node *node) -> symtab_node::unregister (void)
> 
> The patch is being consulted with Honza and will iterate. We want to inform folks that we plan to make the following changes.
> 
> After the patch is applied, I would like to transform varpool_node and cgraph_edge in the following patch.
> 
> Thank you for your comments,
> Martin

 
 /* Remove the node from cgraph.  */

Perhaps "Remove function from symbol table."
(Similarly for varpool; perhaps a few other block comments need revisiting.
We may do that incrementally.)
 
+  /* Add node into symbol table.  This function is not used directly, but via
+     cgraph/varpool node creation routines.  */
+  void register_symbol (void);
+
+  /* Remove symtab node from the symbol table.  */
+  void remove (void);
+
+  /* Dump symtab node to F.  */
+  void dump (FILE *f);
+
+  /* Dump symtab node to stderr.  */
+  void DEBUG_FUNCTION debug (void);
+
+  /* Verify consistency of node.  */
+  void DEBUG_FUNCTION verify (void);
+
+  /* Return ipa reference from this symtab_node to
+     REFERED_NODE or REFERED_VARPOOL_NODE. USE_TYPE specify type
+     of the use and STMT the statement (if it exists).  */
+  struct ipa_ref *add_reference (symtab_node *referred_node,
+				enum ipa_ref_use use_type);
+
+  /* Return ipa reference from this symtab_node to
+     REFERED_NODE or REFERED_VARPOOL_NODE. USE_TYPE specify type
+     of the use and STMT the statement (if it exists).  */
+  struct ipa_ref *add_reference (symtab_node *referred_node,
+				 enum ipa_ref_use use_type, gimple stmt);
+
+  /* If VAL is a reference to a function or a variable, add a reference from
+     this symtab_node to the corresponding symbol table node.  USE_TYPE specify
+     type of the use and STMT the statement (if it exists).  Return the new
+     reference or NULL if none was created.  */
+  struct ipa_ref *maybe_add_reference (tree val, enum ipa_ref_use use_type,
+				       gimple stmt);
+
+  /* Clone all references from symtab NODE to this symtab_node.  */
+  void clone_references (symtab_node *node);
+
+  /* Remove all stmt references in non-speculative references.
+     Those are not maintained during inlining & clonning.
+     The exception are speculative references that are updated along
+     with callgraph edges associated with them.  */
+  void clone_referring (symtab_node *node);
+
+  /* Clone reference REF to this symtab_node and set its stmt to STMT.  */
+  struct ipa_ref *clone_reference (struct ipa_ref *ref, gimple stmt);
+
+  /* Find the structure describing a reference to REFERRED_NODE
+     and associated with statement STMT.  */
+  struct ipa_ref *find_reference (symtab_node *, gimple, unsigned int);
+
+  /* Remove all references that are associated with statement STMT.  */
+  void remove_stmt_references (gimple stmt);
+
+  /* Remove all stmt references in non-speculative references.
+     Those are not maintained during inlining & clonning.
+     The exception are speculative references that are updated along
+     with callgraph edges associated with them.  */
+  void clear_stmts_in_references (void);
+
+  /* Remove all references in ref list.  */
+  void remove_all_references (void);
+
+  /* Remove all referring items in ref list.  */
+  void remove_all_referring (void);
+
+  /* Dump references in ref list to FILE.  */
+  void dump_references (FILE *file);
+
+  /* Dump referring in list to FILE.  */
+  void dump_referring (FILE *);
+
+  /* Return true if symtab node and TARGET represents
+     semantically equivalent symbols.  */
+  bool semantically_equivalent_p (symtab_node *target);
+
+  /* Classify symbol symtab node for partitioning.  */
+  enum symbol_partitioning_class get_partitioning_class (void);
+
+  /* Return comdat group.  */
+  tree get_comdat_group ()
+    {
+      return x_comdat_group;
+    }
+
+  /* Return comdat group as identifier_node.  */
+  tree get_comdat_group_id ()
+    {
+      if (x_comdat_group && TREE_CODE (x_comdat_group) != IDENTIFIER_NODE)
+	x_comdat_group = DECL_ASSEMBLER_NAME (x_comdat_group);
+      return x_comdat_group;
+    }
+
+  /* Set comdat group.  */
+  void set_comdat_group (tree group)
+    {
+      gcc_checking_assert (!group || TREE_CODE (group) == IDENTIFIER_NODE
+			   || DECL_P (group));
+      x_comdat_group = group;
+    }
+
+  /* Return section as string.  */
+  const char * get_section ()
+    {
+      if (!x_section)
+	return NULL;
+      return x_section->name;
+    }
+
+  /* Remove node from same comdat group.   */
+  void remove_from_same_comdat_group (void);
+
+  /* Add this symtab_node to the same comdat group that OLD is in.  */
+  void add_to_same_comdat_group (symtab_node *old_node);
+
+  /* Dissolve the same_comdat_group list in which NODE resides.  */
+  void dissolve_same_comdat_group_list (void);
+
+  /* Return true when symtab_node is known to be used from other (non-LTO)
+     object file. Known only when doing LTO via linker plugin.  */
+  bool used_from_object_file_p (void);
+
+  /* Walk the alias chain to return the symbol NODE is alias of.
+     If NODE is not an alias, return NODE.
+     When AVAILABILITY is non-NULL, get minimal availability in the chain.  */
+  symtab_node *ultimate_alias_target (enum availability *avail = NULL);
+
+  /* Return next reachable static symbol with initializer after NODE.  */
+  inline symtab_node *next_defined_symbol (void);
+
+  /* Add reference recording that symtab node is alias of TARGET.
+     The function can fail in the case of aliasing cycles; in this case
+     it returns false.  */
+  bool resolve_alias (symtab_node *target);
+
+  /* C++ FE sometimes change linkage flags after producing same
+     body aliases.  */
+  void fixup_same_cpp_alias_visibility (symtab_node *target);
+
+  /* Call calback on symtab node and aliases associated to this node.
+     When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
+     skipped. */
+  bool call_for_symbol_and_aliases (bool (*callback) (symtab_node *, void *),
+				  void *data,
+				  bool include_overwrite);
+
+  /* If node can not be interposable by static or dynamic linker to point to
+     different definition, return this symbol. Otherwise look for alias with
+     such property and if none exists, introduce new one.  */
+  symtab_node *noninterposable_alias (void);
+
+  /* Return node that alias N is aliasing.  */
+  inline symtab_node *get_alias_target (void);
+
+  /* Iterates I-th reference in the list, REF is also set.  */
+  struct ipa_ref *iterate_reference (unsigned i, struct ipa_ref *&ref);
+
+  /* Iterates I-th referring item in the list, REF is also set.  */
+  struct ipa_ref *iterate_referring (unsigned i, struct ipa_ref *&ref);

The iterators probably should go along with other reference API methods
earlier.
+
+  /* Iterates I-th referring alias item in the list, REF is also set.  */
+  struct ipa_ref *iterate_direct_aliases (unsigned i, struct ipa_ref *&ref);
+
+  /* Set section for symbol and its aliases.  */
+  void set_section (const char *section);
+
+  /* Set section, do not recurse into aliases.
+     When one wants to change section of symbol and its aliases,
+     use set_section.  */
+  void set_section_for_node (const char *section);
+
+  /* Set initialization priority to PRIORITY.  */
+  void set_init_priority (priority_type priority);
+
+  /* Return the initialization priority.  */
+  priority_type get_init_priority ();
+
+  /* Return availability of NODE.  */
+  enum availability get_availability (void);
+
+  /* Make DECL local.  */
+  void make_decl_local (void);
+
+  /* Return true if list contains an alias.  */
+  bool has_aliases_p (void);
+
+  /* Return true when the symbol is real symbol, i.e. it is not inline clone
+     or abstract function kept for debug info purposes only.  */
+  bool real_symbol_p (void);
+
+  /* Return true if NODE can be discarded by linker from the binary.  */
+  inline bool
+  can_be_discarded_p (void)
+  {
+    return (DECL_EXTERNAL (decl)
+	    || (get_comdat_group ()
+		&& resolution != LDPR_PREVAILING_DEF
+		&& resolution != LDPR_PREVAILING_DEF_IRONLY
+		&& resolution != LDPR_PREVAILING_DEF_IRONLY_EXP));
+  }
+
+  /* Return true if NODE is local to a particular COMDAT group, and must not
+     be named from outside the COMDAT.  This is used for C++ decloned
+     constructors.  */
+  inline bool
+  comdat_local_p (void)
+  {
+    return (same_comdat_group && !TREE_PUBLIC (decl));
+  }
+
+  /* Return true if ONE and TWO are part of the same COMDAT group.  */
+  inline bool in_same_comdat_group_p (symtab_node *target);
+
+  /* Return true when there is a reference to node and it is not vtable.  */
+  bool address_taken_from_non_vtable_p (void);
+
+  /* Return symbol table node associated with DECL, if any,
+     and NULL otherwise.  */
+  static inline symtab_node *get (const_tree decl)
+  {
+#ifdef ENABLE_CHECKING
+    /* Check that we are called for sane type of object - functions
+       and static or external variables.  */
+    gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL
+			 || (TREE_CODE (decl) == VAR_DECL
+			     && (TREE_STATIC (decl) || DECL_EXTERNAL (decl)
+				 || in_lto_p)));
+    /* Check that the mapping is sane - perhaps this check can go away,
+       but at the moment frontends tends to corrupt the mapping by calling
+       memcpy/memset on the tree nodes.  */
+    gcc_checking_assert (!decl->decl_with_vis.symtab_node
+			 || decl->decl_with_vis.symtab_node->decl == decl);
+#endif
+    return decl->decl_with_vis.symtab_node;
+  }
+
+  /* Dump symbol table to F.  */
+  static void dump_table (FILE *);
+
+  /* Dump symbol table to stderr.  */
+  static inline DEBUG_FUNCTION void debug_symtab (void)
+  {
+    dump_table (stderr);
+  }
+
+  /* Verify symbol table for internal consistency.  */
+  static DEBUG_FUNCTION void verify_symtab_nodes (void);
+
+  /* Return true when NODE is known to be used from other (non-LTO)
+     object file. Known only when doing LTO via linker plugin.  */
+  static bool used_from_object_file_p_worker (symtab_node *node);
+
   /* Type of the symbol.  */
   ENUM_BITFIELD (symtab_type) type : 8;
 
...

+  /* Given function symbol, walk the alias chain to return the function node
+     is alias of. Do not walk through thunks.
+     When AVAILABILITY is non-NULL, get minimal availability in the chain.  */
+
+  cgraph_node *function_or_thunk (enum availability *availability = NULL);

function_or_thunk_symbol I guess
+
+  /* Walk the alias chain to return the function cgraph_node is alias of.
+     Walk through thunk, too.
+     When AVAILABILITY is non-NULL, get minimal availability in the chain.  */
+  cgraph_node *function_node (enum availability *avail = NULL);

function_symbol
+
+  /* Add thunk alias into callgraph.  The alias declaration is ALIAS and it
+     aliases DECL with an adjustments made into the first parameter.
+     See comments in thunk_adjust for detail on the parameters.  */
+  cgraph_node * create_thunk (tree alias, tree, bool this_adjusting,
+			      HOST_WIDE_INT fixed_offset,
+			      HOST_WIDE_INT virtual_value,
+			      tree virtual_offset,
+			      tree real_alias);

Here I would put basic manipulation first (create/get/remove), clones next
and thunks last.
+
+  /* Create node representing clone of N executed COUNT times.  Decrease
+     the execution counts from original node too.
+     The new clone will have decl set to DECL that may or may not be the same
+     as decl of N.
+
+     When UPDATE_ORIGINAL is true, the counts are subtracted from the original
+     function's profile to reflect the fact that part of execution is handled
+     by node.
+     When CALL_DUPLICATOIN_HOOK is true, the ipa passes are acknowledged about
+     the new clone. Otherwise the caller is responsible for doing so later.
+
+     If the new node is being inlined into another one, NEW_INLINED_TO should be
+     the outline function the new one is (even indirectly) inlined to.
+     All hooks will see this in node's global.inlined_to, when invoked.
+     Can be NULL if the node is not inlined.  */
+  cgraph_node *clone_node (tree decl, gcov_type count, int freq,

create_clone
+			   bool update_original,
+			   vec<cgraph_edge *> redirect_callers,
+			   bool call_duplication_hook,
+			   struct cgraph_node *new_inlined_to,
+			   bitmap args_to_skip);
+
+  /* Create callgraph node clone with new declaration.  The actual body will
+     be copied later at compilation stage.  */
+  cgraph_node *create_virtual_clone (vec<cgraph_edge *> redirect_callers,
+				     vec<ipa_replace_map *, va_gc> *tree_map,
+				     bitmap args_to_skip, const char * suffix);
+
+  /* cgraph node being removed from symbol table; see if its entry can be
+   replaced by other inline clone.  */
+  cgraph_node *find_replacement (void);

Shouldn't this be protected?
+
+  /* Create a new cgraph node which is the new version of
+     callgraph node.  REDIRECT_CALLERS holds the callers
+     edges which should be redirected to point to
+     NEW_VERSION.  ALL the callees edges of the node
+     are cloned to the new version node.  Return the new
+     version node.
+
+     If non-NULL BLOCK_TO_COPY determine what basic blocks
+     was copied to prevent duplications of calls that are dead
+     in the clone.  */
+
+  cgraph_node *copy_for_versioning (tree new_decl,
+				    vec<cgraph_edge *> redirect_callers,
+				    bitmap bbs_to_copy);

Hmm, perhaps better as create_version_clone (still bad name, but it should be
merged with clone)
+
+  /* Perform function versioning.
+     Function versioning includes copying of the tree and
+     a callgraph update (creating a new cgraph node and updating
+     its callees and callers).
+
+     REDIRECT_CALLERS varray includes the edges to be redirected
+     to the new version.
+
+     TREE_MAP is a mapping of tree nodes we want to replace with
+     new ones (according to results of prior analysis).
+
+     If non-NULL ARGS_TO_SKIP determine function parameters to remove
+     from new version.
+     If SKIP_RETURN is true, the new version will return void.
+     If non-NULL BLOCK_TO_COPY determine what basic blocks to copy.
+     If non_NULL NEW_ENTRY determine new entry BB of the clone.
+
+     Return the new version's cgraph node.  */
+  cgraph_node *function_versioning (vec<cgraph_edge *> redirect_callers,
+				    vec<ipa_replace_map *, va_gc> *tree_map,
+				    bitmap args_to_skip,
+				    bool skip_return,
+				    bitmap bbs_to_copy,
+				    basic_block new_entry_block,
+				    const char *clone_name);

create_version_clone_with_body
+
+  /* Insert a new cgraph_function_version_info node into cgraph_fnver_htab
+     corresponding to cgraph_node.  */
+  struct cgraph_function_version_info *insert_new_function_version (void);

It is unfortunate that function_version here has nothing to do with the function
versioning above, but I guess I will need to clean that up incrementally.

Perhaps create_new_function_version, as we use "create" for edges/symbols and others.
+
+  /* Discover all functions and variables that are trivially needed, analyze
+     them as well as all functions and variables referred by them  */
+  void analyze (void);
+
+  /* Expand thunk NODE to gimple if possible.
+     When FORCE_GIMPLE_THUNK is true, gimple thunk is created and
+     no assembler is produced.
+     When OUTPUT_ASM_THUNK is true, also produce assembler for
+     thunks that are not lowered.  */
+  bool expand_thunk (bool output_asm_thunks, bool force_gimple_thunk);
+
+  /* As an GCC extension we allow redefinition of the function.  The
+     semantics when both copies of bodies differ is not well defined.
+     We replace the old body with new body so in unit at a time mode
+     we always use new body, while in normal mode we may end up with
+     old body inlined into some functions and new body expanded and
+     inlined in others.  */
+  void reset (void);

These three would probably be better made protected if we reorganize cgraphunit
a bit more, but incrementally.
+
+  /* Creates a wrapper from cgraph_node to TARGET node. Thunk is used for this
+     kind of wrapper method.  */
+  void make_wrapper (cgraph_node *target);

create_wrapper
+
+  /* Verify cgraph nodes of the cgraph node.  */
+  void DEBUG_FUNCTION verify_node (void);
+
+  /* Remove the node from cgraph.  */
+  void remove (void);
+
+  /* Dump call graph node to file F.  */
+  void dump (FILE *f);
+
+  /* Dump call graph node to stderr.  */
+  void DEBUG_FUNCTION debug (void);
+
+  /* When doing LTO, read cgraph_node's body from disk if it is not already
+     present.  */
+  bool get_body (void);
+
+  /* Release memory used to represent body of function.
+     Use this only for functions that are released before being translated to
+     target code (i.e. RTL).  Functions that are compiled to RTL and beyond
+     are free'd in final.c via free_after_compilation().  */
+  void release_body (void);

Those probably should go to start
+
+  /* Remove the node from cgraph and all inline clones inlined into it.
+     Skip however removal of FORBIDDEN_NODE and return true if it needs to be
+     removed.  This allows to call the function from outer loop walking clone
+     tree.  */
+  bool remove_symbol_and_inline_clones (cgraph_node *forbidden_node = NULL);
+
+  /* Record all references from cgraph_node that are taken
+     in statement STMT.  */
+  void record_stmt_references (gimple stmt);
+
+  /* Like cgraph_set_call_stmt but walk the clone tree and update all
+     clones sharing the same function body.
+     When WHOLE_SPECULATIVE_EDGES is true, all three components of
+     speculative edge gets updated.  Otherwise we update only direct
+     call.  */
+  void set_call_stmt_including_clones (gimple old_stmt, gimple new_stmt,
+				       bool update_speculative = true);

Those performing operations on function and its clones should be grouped together.
+
+  /* cgraph_node is no longer nested function; update cgraph accordingly.  */
+  void unnest (void);
+
+  /* Bring cgraph node local.  */
+  void make_local (void);
+
+  /* Likewise indicate that a node is having address taken.  */
+  void mark_address_taken (void);
+
+  /* Set fialization priority to PRIORITY.  */
+  void set_fini_priority (priority_type priority);
+
+  /* Return the finalization priority.  */
+  priority_type get_fini_priority ();
+
+  /* Remove all callees from the node.  */
+  void remove_callees (void);
+
+  /* Create edge from a given function to CALLEE in the cgraph.  */
+  struct cgraph_edge *create_edge (cgraph_node *callee,
+				   gimple call_stmt, gcov_type count,
+				   int freq);
+  /* Create an indirect edge with a yet-undetermined callee where the call
+     statement destination is a formal parameter of the caller with index
+     PARAM_INDEX. */
+  struct cgraph_edge *
+  create_indirect_edge (gimple call_stmt, int ecf_flags, gcov_type count,
+			int freq);
+
+  /* Like cgraph_create_edge walk the clone tree and update all clones sharing
+   same function body.  If clones already have edge for OLD_STMT; only
+   update the edge same way as cgraph_set_call_stmt_including_clones does.  */
+  void create_edge_including_clones (struct cgraph_node *callee,
+				     gimple old_stmt, gimple stmt,
+				     gcov_type count,
+				     int freq,
+				     cgraph_inline_failed_t reason);
+
+  /* Return the callgraph edge representing the GIMPLE_CALL statement
+     CALL_STMT.  */
+  cgraph_edge *get_edge (gimple call_stmt);
+
+  /* Collect all callers of cgraph_node and its aliases that are known to lead
+     to NODE (i.e. are not overwritable).  */
+  vec<cgraph_edge *> collect_callers (void);
+
+  /* Return function availability.  See cgraph.h for description of individual
+     return values.  */
+  enum availability get_body_availability (void);

This should be get_availability and shared with symtab/varpool equivalent.

+
+  /* Set TREE_NOTHROW on cgraph_node's decl and on aliases of the node
+     if any to NOTHROW.  */
+  void set_nothrow_flag (bool nothrow);
+
+  /* Set TREE_READONLY on cgraph_node's decl and on aliases of the node
+     if any to READONLY.  */
+  void set_const_flag (bool readonly, bool looping);
+
+  /* Set DECL_PURE_P on cgraph_node's decl and on aliases of the node
+     if any to PURE.  */
+  void set_pure_flag (bool pure, bool looping);
+
+  /* Call all node duplication hooks.  */
+  void call_duplication_hooks (cgraph_node *node2);

Can't it be private?
+
+  /* Call calback on function and aliases associated to the function.
+     When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
+     skipped. */
+
+  bool call_for_symbol_and_aliases (bool (*callback) (cgraph_node *,
+						      void *),
+				    void *data, bool include_overwritable);
+
+  /* Call calback on cgraph_node, thunks and aliases associated to NODE.
+     When INCLUDE_OVERWRITABLE is false, overwritable aliases and thunks are
+     skipped.  */
+  bool call_for_symbol_thunks_and_aliases (bool (*callback) (cgraph_node *node,
+							   void *data),
+					 void *data,
+					 bool include_overwritable);
+
+  /* Call all node insertion hooks.  */
+  void call_function_insertion_hooks (void);

Likewise
+
+  /* Return true when function can be marked local.  */
+  bool local_p (void);

Probably also private? Because users should use local flag.
+
+  /* Return true if cgraph_node can be made local for API change.
+     Extern inline functions and C++ COMDAT functions can be made local
+     at the expense of possible code size growth if function is used in multiple
+     compilation units.  */
+  bool can_be_local_p (void);
+
+  /* Return true when cgraph_node can not return or throw and thus
+     it is safe to ignore its side effects for IPA analysis.  */
+  bool cannot_return_p (void);
+
+  /* Return true when function cgraph_node and all its aliases are only called
+     directly.
+     i.e. it is not externally visible, address was not taken and
+     it is not used in any other non-standard way.  */
+  bool only_called_directly_p (void);
+
+  /* Return true when function cgraph_node can be expected to be removed
+     from program when direct calls in this compilation unit are removed.
+
+     As a special case COMDAT functions are
+     cgraph_can_remove_if_no_direct_calls_p while the are not
+     cgraph_only_called_directly_p (it is possible they are called from other
+     unit)
+
+     This function behaves as cgraph_only_called_directly_p because eliminating
+     all uses of COMDAT function does not make it necessarily disappear from
+     the program unless we are compiling whole program or we do LTO.  In this
+     case we know we win since dynamic linking will not really discard the
+     linkonce section.  */
+
+  bool will_be_removed_from_program_if_no_direct_calls_p (void);
+
+  /* Return true when function can be removed from callgraph
+     if all direct calls are eliminated.  */
+  bool can_remove_if_no_direct_calls_and_refs_p (void);
+
+  /* Return true when function cgraph_node and its aliases can be removed from
+     callgraph if all direct calls are eliminated.  */
+  bool can_remove_if_no_direct_calls_p (void);
+
+  /* Get the cgraph_function_version_info node corresponding to node.  */
+  struct cgraph_function_version_info *function_version (void);

Group it with the version creation.
+
+  /* Dump the callgraph to file F.  */
+  static void dump_cgraph (FILE *f);
+
+  /* Dump the call graph to stderr.  */
+  static inline void debug_cgraph (void)
+  {
+    dump_cgraph (stderr);
+  }

Dumping and debug should go first into the basic manipulation.
+
+  /* Record that DECL1 and DECL2 are semantically identical function
+     versions.  */
+  static void record_function_versions (tree decl1, tree decl2);
+
+  /* Remove the cgraph_function_version_info and cgraph_node for DECL.  This
+     DECL is a duplicate declaration.  */
+  static void delete_function_version (tree decl);

also these two
+
+  /* Add the function FNDECL to the call graph.
+     Unlike cgraph_finalize_function, this function is intended to be used
+     by middle end and allows insertion of new function at arbitrary point
+     of compilation.  The function can be either in high, low or SSA form
+     GIMPLE.
+
+     The function is assumed to be reachable and have address taken (so no
+     API breaking optimizations are performed on it).
+
+     Main work done by this function is to enqueue the function for later
+     processing to avoid need the passes to be re-entrant.  */
+  static void add_new_function (tree fndecl, bool lowered);
+
+  /* Return callgraph node for given symbol and check it is a function. */
+  static inline cgraph_node *get (const_tree decl)
+  {
+    gcc_checking_assert (TREE_CODE (decl) == FUNCTION_DECL);
+    return dyn_cast <cgraph_node *> (symtab_node::get (decl));
+  }
+
+  /* Return cgraph node assigned to DECL.  Create new one when needed.  */
+  static cgraph_node * create (tree decl);

So should these three
+
+  /* Allocate new callgraph node and insert it into basic data structures.  */
+  static cgraph_node * create_empty (void);

private/protected?
+
+  /* Try to find a call graph node for declaration DECL and if it does not
+     exist or if it corresponds to an inline clone, create a new one.  */
+  static cgraph_node * get_create (tree);

Also move to the beginning, next to get and create.
+
+  /* Return the cgraph node that has ASMNAME for its DECL_ASSEMBLER_NAME.
+     Return NULL if there's no such node.  */
+  static cgraph_node *get_for_asmname (tree asmname);

Likewise
+
+  /* Attempt to mark ALIAS as an alias to DECL.  Return alias node if
+     successful and NULL otherwise.
+     Same body aliases are output whenever the body of DECL is output,
+     and cgraph_node::get (ALIAS) transparently
+     returns cgraph_node::get (DECL).  */
+  static cgraph_node * create_same_body_alias (tree alias, tree decl);

To alias API
+
+  /* Worker for cgraph_can_remove_if_no_direct_calls_p.  */
+  static bool used_from_object_file_p_worker (cgraph_node *node,
+					      void *data ATTRIBUTE_UNUSED)
+  {
+    return node->used_from_object_file_p ();
+  }
+
+  /* Return true when cgraph_node can not be local.
+     Worker for cgraph_local_node_p.  */
cgraph_local_node_p was probably renamed, but we should clean up the predicates here.
Please group all functions dealing with local functions together, too.
+  static bool non_local_p (cgraph_node *node, void *data ATTRIBUTE_UNUSED);
+
+  /* Verify whole cgraph structure.  */
+  static void DEBUG_FUNCTION verify_cgraph_nodes (void);
+
+  /* Worker to bring NODE local.  */
+  static bool make_local (cgraph_node *node, void *data ATTRIBUTE_UNUSED);
+
+  /* Mark ALIAS as an alias to DECL.  DECL_NODE is cgraph node representing
+     the function body is associated
+     with (not necessarily cgraph_node (DECL).  */
+  static cgraph_node *create_alias (tree alias, tree target);
+
+  static cgraph_edge * create_edge (cgraph_node *caller, cgraph_node *callee,
+				    gimple call_stmt, gcov_type count,
+				    int freq,
+				    bool indir_unknown_callee);

Also edges and aliases should be grouped.

With these changes the patch looks good. We will probably need one additional cleanup pass, but it would be better
to do it incrementally.  Please write a changelog that carefully records what was renamed to what.

Honza


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]