[patch] gcc/*: Fix comment typos.

Kazu Hirata kazu@codesourcery.com
Wed Jan 31 04:00:00 GMT 2007


Hi,

Committed as obvious.

Kazu Hirata

2007-01-31  Kazu Hirata  <kazu@codesourcery.com>

	* cgraphunit.c, config/arm/arm.c, config/m68k/m68k.c,
	ipa-inline.c, tree-profile.c, tree-ssa-live.c,
	tree-ssa-math-opts.c, tree-ssanames.c, tree-vect-analyze.c,
	value-prof.c: Fix comment typos.

Index: cgraphunit.c
===================================================================
--- cgraphunit.c	(revision 121370)
+++ cgraphunit.c	(working copy)
@@ -783,7 +783,7 @@ process_function_and_variable_attributes
 /* Process CGRAPH_NODES_NEEDED queue, analyze each function (and transitively
    each reachable functions) and build cgraph.
    The function can be called multiple times after inserting new nodes
-   into beggining of queue.  Just the new part of queue is re-scanned then.  */
+   into beginning of queue.  Just the new part of queue is re-scanned then.  */
 
 static void
 cgraph_analyze_functions (void)
Index: config/arm/arm.c
===================================================================
--- config/arm/arm.c	(revision 121370)
+++ config/arm/arm.c	(working copy)
@@ -7710,7 +7710,7 @@ get_jump_table_size (rtx insn)
       switch (modesize)
 	{
 	case 1:
-	  /* Round up size of TBB table to a haflword boundary.  */
+	  /* Round up size of TBB table to a halfword boundary.  */
 	  size = (size + 1) & ~(HOST_WIDE_INT)1;
 	  break;
 	case 2:
Index: config/m68k/m68k.c
===================================================================
--- config/m68k/m68k.c	(revision 121370)
+++ config/m68k/m68k.c	(working copy)
@@ -1957,7 +1957,7 @@ m68k_rtx_costs (rtx x, int code, int out
     }
 }
 
-/* Return an instruction to move CONST_INT OPERANDS[1] into data regsiter
+/* Return an instruction to move CONST_INT OPERANDS[1] into data register
    OPERANDS[0].  */
 
 static const char *
@@ -2838,7 +2838,7 @@ notice_update_cc (rtx exp, rtx insn)
 	 codes.  Normal moves _do_ set the condition codes, but not in
 	 a way that is appropriate for comparison with 0, because -0.0
 	 would be treated as a negative nonzero number.  Note that it
-	 isn't appropriate to conditionalize this restiction on
+	 isn't appropriate to conditionalize this restriction on
 	 HONOR_SIGNED_ZEROS because that macro merely indicates whether
 	 we care about the difference between -0.0 and +0.0.  */
       else if (!FP_REG_P (SET_DEST (exp))
Index: ipa-inline.c
===================================================================
--- ipa-inline.c	(revision 121370)
+++ ipa-inline.c	(working copy)
@@ -91,11 +91,11 @@ Software Foundation, 51 Franklin Street,
      maintained by pass manager).  The functions after inlining are early
      optimized so the early inliner sees unoptimized function itself, but
      all considered callees are already optimized allowing it to unfold
-     abstraction penalty on C++ effectivly and cheaply.
+     abstraction penalty on C++ effectively and cheaply.
 
    pass_ipa_early_inlining
 
-     With profiling, the early inlining is also neccesary to reduce
+     With profiling, the early inlining is also necessary to reduce
      instrumentation costs on program with high abstraction penalty (doing
      many redundant calls).  This can't happen in parallel with early
      optimization and profile instrumentation, because we would end up
@@ -751,7 +751,7 @@ cgraph_set_inline_failed (struct cgraph_
       e->inline_failed = reason;
 }
 
-/* Given whole compilation unit esitmate of INSNS, compute how large we can
+/* Given whole compilation unit estimate of INSNS, compute how large we can
    allow the unit to grow.  */
 static int
 compute_max_insns (int insns)
@@ -1043,7 +1043,7 @@ cgraph_decide_inlining (void)
 		     e->caller->global.insns);
 	}
       /* Inlining self recursive function might introduce new calls to
-	 thsemselves we didn't see in the loop above.  Fill in the proper
+	 themselves we didn't see in the loop above.  Fill in the proper
 	 reason why inline failed.  */
       for (e = node->callers; e; e = e->next_caller)
 	if (e->inline_failed)
@@ -1126,7 +1126,7 @@ cgraph_decide_inlining (void)
    recursive inlining, but as an special case, we want to try harder inline
    ALWAYS_INLINE functions: consider callgraph a->b->c->b, with a being
    flatten, b being always inline.  Flattening 'a' will collapse
-   a->b->c before hitting cycle.  To accomondate always inline, we however
+   a->b->c before hitting cycle.  To accommodate always inline, we however
    need to inline a->b->c->b.
 
    So after hitting cycle first time, we switch into ALWAYS_INLINE mode and
@@ -1145,7 +1145,7 @@ try_inline (struct cgraph_edge *e, enum 
 	 mode yet.  and the function in question is always_inline.  */
       if (always_inline && mode != INLINE_ALWAYS_INLINE)
 	mode = INLINE_ALWAYS_INLINE;
-      /* Otheriwse it is time to give up.  */
+      /* Otherwise it is time to give up.  */
       else
 	{
 	  if (dump_file)
Index: tree-profile.c
===================================================================
--- tree-profile.c	(revision 121370)
+++ tree-profile.c	(working copy)
@@ -64,7 +64,7 @@ static GTY(()) tree ptr_void;
 
 /* Add code:
    static gcov*	__gcov_indirect_call_counters; // pointer to actual counter
-   static void*	__gcov_indirect_call_callee; // actual callie addres
+   static void*	__gcov_indirect_call_callee; // actual callee address
 */
 static void
 tree_init_ic_make_global_vars (void)
@@ -269,7 +269,7 @@ tree_gen_one_value_profiler (histogram_v
 
 /* Output instructions as GIMPLE trees for code to find the most
    common called function in indirect call.  
-   VALUE is the call expression whose indirect callie is profiled.
+   VALUE is the call expression whose indirect callee is profiled.
    TAG is the tag of the section for counters, BASE is offset of the
    counter position.  */
 
@@ -308,7 +308,7 @@ tree_gen_ic_profiler (histogram_value va
 
 /* Output instructions as GIMPLE trees for code to find the most
    common called function in indirect call. Insert instructions at the
-   begining of every possible called function.
+   beginning of every possible called function.
   */
 
 static void
Index: tree-ssa-live.c
===================================================================
--- tree-ssa-live.c	(revision 121370)
+++ tree-ssa-live.c	(working copy)
@@ -504,7 +504,7 @@ remove_unused_locals (void)
 
   /* Remove unused variables from REFERENCED_VARs.  As an special exception
      keep the variables that are believed to be aliased.  Those can't be
-     easilly removed from the alias sets and and operand caches.
+     easily removed from the alias sets and operand caches.
      They will be removed shortly after next may_alias pass is performed.  */
   FOR_EACH_REFERENCED_VAR (t, rvi)
     if (!is_global_var (t)
Index: tree-ssa-math-opts.c
===================================================================
--- tree-ssa-math-opts.c	(revision 121370)
+++ tree-ssa-math-opts.c	(working copy)
@@ -521,9 +521,9 @@ struct tree_opt_pass pass_cse_reciprocal
   0				        /* letter */
 };
 
-/* Records an occurance at statement USE_STMT in the vector of trees
+/* Records an occurrence at statement USE_STMT in the vector of trees
    STMTS if it is dominated by *TOP_BB or dominates it or this basic block
-   is not yet initialized.  Returns true if the occurance was pushed on
+   is not yet initialized.  Returns true if the occurrence was pushed on
    the vector.  Adjusts *TOP_BB to be the basic block dominating all
    statements in the vector.  */
 
Index: tree-ssanames.c
===================================================================
--- tree-ssanames.c	(revision 121370)
+++ tree-ssanames.c	(working copy)
@@ -318,7 +318,7 @@ release_dead_ssa_names (void)
   referenced_var_iterator rvi;
 
   /* Current defs point to various dead SSA names that in turn points to dead
-     statements so bunch of dead memory is holded from releasing.  */
+     statements so bunch of dead memory is held from releasing.  */
   FOR_EACH_REFERENCED_VAR (t, rvi)
     set_current_def (t, NULL);
   /* Now release the freelist.  */
@@ -328,7 +328,7 @@ release_dead_ssa_names (void)
       /* Dangling pointers might make GGC to still see dead SSA names, so it is
  	 important to unlink the list and avoid GGC from seeing all subsequent
 	 SSA names.  In longer run we want to have all dangling pointers here
-	 removed (since they usually go trhough dead statements that consume
+	 removed (since they usually go through dead statements that consume
 	 considerable amounts of memory).  */
       TREE_CHAIN (t) = NULL_TREE;
       n++;
Index: tree-vect-analyze.c
===================================================================
--- tree-vect-analyze.c	(revision 121370)
+++ tree-vect-analyze.c	(working copy)
@@ -164,7 +164,7 @@ vect_determine_vectorization_factor (loo
 		 arguments (e.g. demotion, promotion), vectype will be reset 
 		 appropriately (later).  Note that we have to visit the smallest 
 		 datatype in this function, because that determines the VF.  
-		 If the samallest datatype in the loop is present only as the 
+		 If the smallest datatype in the loop is present only as the 
 		 rhs of a promotion operation - we'd miss it here.
 		 However, in such a case, that a variable of this datatype
 		 does not appear in the lhs anywhere in the loop, it shouldn't
@@ -1752,7 +1752,7 @@ vect_analyze_data_ref_access (struct dat
                   return false; 
                 }
 
-              /* Check that there is no load-store dependecies for this loads 
+              /* Check that there are no load-store dependencies for these loads 
                  to prevent a case of load-store-load to the same location.  */
               if (DR_GROUP_READ_WRITE_DEPENDENCE (vinfo_for_stmt (next))
                   || DR_GROUP_READ_WRITE_DEPENDENCE (vinfo_for_stmt (prev)))
Index: value-prof.c
===================================================================
--- value-prof.c	(revision 121370)
+++ value-prof.c	(working copy)
@@ -63,7 +63,7 @@ static struct value_prof_hooks *value_pr
 
    3) Indirect/virtual call specialization. If we can determine most
       common function callee in indirect/virtual call. We can use this
-      information to improve code effectivity (espetialy info for
+      information to improve code effectiveness (especially info for
       inliner).
 
    Every such optimization should add its requirements for profiled values to



More information about the Gcc-patches mailing list