This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.



[tuples] Use is_gimple_assign and is_gimple_call


Hi!

Just a mechanical patch to use is_gimple_call and is_gimple_assign
helpers where possible.  Will commit after full bootstrap/regtest.
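
For reference, the two helpers are just thin inline predicates in gimple.h, roughly like this (a paraphrased sketch, not a verbatim quote of the header):

  /* Return true if GS is a GIMPLE_ASSIGN.  */

  static inline bool
  is_gimple_assign (const_gimple gs)
  {
    return gimple_code (gs) == GIMPLE_ASSIGN;
  }

  /* Return true if GS is a GIMPLE_CALL.  */

  static inline bool
  is_gimple_call (const_gimple gs)
  {
    return gimple_code (gs) == GIMPLE_CALL;
  }

so replacing gimple_code (stmt) == GIMPLE_ASSIGN resp. GIMPLE_CALL with the predicates is purely cosmetic and doesn't change behavior.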

2008-07-16  Jakub Jelinek  <jakub@redhat.com>

	* tree-ssa-loop-niter.c (infer_loop_bounds_from_array): Use
	is_gimple_assign instead of gimple_code () == GIMPLE_ASSIGN and
	is_gimple_call instead of gimple_code () == GIMPLE_CALL.
	* tree-ssa-propagate.c (update_call_from_tree, substitute_and_fold):
	Likewise.
	* tree-ssa-sccvn.c (visit_use): Likewise.
	* tree-eh.c (stmt_could_throw_p): Likewise.
	* tree-optimize.c (execute_fixup_cfg): Likewise.
	* omp-low.c (check_omp_nesting_restrictions, scan_omp_1_stmt,
	optimize_omp_library_calls): Likewise.
	* tree-ssa-loop-im.c (movement_possibility, stmt_cost,
	determine_invariantness_stmt): Likewise.
	* tree-ssa-phiprop.c (phivn_valid_p, phiprop_insert_phi,
	propagate_with_phi): Likewise.
	* tree-ssa-ccp.c (get_default_value, surely_varying_stmt_p,
	ccp_fold_builtin, gimplify_and_update_call_from_tree): Likewise.
	* ipa-struct-reorg.c (is_result_of_mult, create_general_new_stmt):
	Likewise.
	* tree-ssa-coalesce.c (build_ssa_conflict_graph): Likewise.
	* tree-object-size.c (alloc_object_size, call_object_size,
	check_for_plus_in_loops): Likewise.
	* tree-ssa.c (verify_ssa): Likewise.
	* predict.c (expr_expected_value_1, tree_bb_level_predictions,
	tree_estimate_probability): Likewise.
	* tree-cfg.c (verify_stmt): Likewise.
	* tree-ssa-loop-ch.c (should_duplicate_loop_header_p,
	copy_loop_headers): Likewise.
	* tree-ssa-ter.c (is_replaceable_p): Likewise.
	* ipa-prop.c (ipa_count_arguments, ipa_compute_jump_functions):
	Likewise.
	* tree-ssa-dom.c (eliminate_redundant_computations,
	gimple_assign_unary_useless_conversion_p,
	record_equivalences_from_stmt, optimize_stmt,
	get_lhs_or_phi_result): Likewise.
	* tree-ssa-sink.c (is_hidden_global_store): Likewise.
	* tree-nrv.c (tree_nrv, execute_return_slot_opt): Likewise.
	* value-prof.c (gimple_divmod_fixed_value,
	gimple_mod_pow2, gimple_mod_subtract): Likewise.
	* tree-predcom.c (name_for_ref, find_looparound_phi,
	replace_ref_with, remove_name_from_operation): Likewise.
	* tree-ssa-math-opts.c (is_division_by, execute_cse_reciprocals,
	execute_cse_sincos, execute_convert_to_rsqrt): Likewise.
	* tree-complex.c (expand_complex_move, expand_complex_operations_1):
	Likewise.
	* tree-outof-ssa.c (identical_copies_p): Likewise.
	* tree-ssa-pre.c (is_exception_related): Likewise.
	* tree-sra.c (scalarize_use, scalarize_copy): Likewise.
	* tree-ssa-alias.c (count_uses_and_derefs, update_alias_info_1,
	is_escape_site): Likewise.
	* lambda-code.c (can_put_in_inner_loop,
	cannot_convert_bb_to_perfect_nest): Likewise.
	* tree-tailcall.c (find_tail_calls, eliminate_tail_call): Likewise.
	* ipa-type-escape.c (look_for_casts_stmt, is_cast_from_non_pointer):
	Likewise.
	* tree-vect-transform.c (vectorizable_reduction): Likewise.
	* tree-ssa-threadedge.c (record_temporary_equivalences_from_stmts):
	Likewise.
	* tree-ssa-phiopt.c (nt_init_block): Likewise.
	* tree-ssa-structalias.c (find_func_aliases): Likewise.
	* tree-ssa-forwprop.c (can_propagate_from,
	forward_propagate_comparison, simplify_not_neg_expr,
	simplify_gimple_switch, tree_ssa_forward_propagate_single_use_vars):
	Likewise.
	* tree-ssa-dce.c (eliminate_unnecessary_stmts): Likewise.
	* tree-ssa-dse.c (get_kill_of_stmt_lhs, dse_possible_dead_store_p,
	dse_optimize_stmt, execute_simple_dse): Likewise.
	* tree-ssa-loop-ivopts.c (find_interesting_uses_op,
	find_interesting_uses_stmt, rewrite_use_nonlinear_expr): Likewise.
	* tree-vrp.c (stmt_overflow_infinity, vrp_stmt_computes_nonzero,
	register_edge_assert_for_2, register_edge_assert_for,
	find_assert_locations, check_all_array_refs,
	remove_range_assertions, stmt_interesting_for_vrp, vrp_visit_stmt,
	simplify_stmt_using_ranges): Likewise.
	* tree-ssa-loop-prefetch.c (gather_memory_references): Likewise.
	* tree-ssa-copy.c (may_propagate_copy_into_stmt,
	propagate_tree_value_into_stmt): Likewise.
	* tree-inline.c (copy_bb, expand_call_inline,
	gimple_expand_calls_inline, fold_marked_statements): Likewise.

--- gcc/tree-ssa-loop-niter.c.jj	2008-07-14 21:47:38.000000000 +0200
+++ gcc/tree-ssa-loop-niter.c	2008-07-16 12:31:12.000000000 +0200
@@ -2699,7 +2699,7 @@ infer_loop_bounds_from_ref (struct loop 
 static void
 infer_loop_bounds_from_array (struct loop *loop, gimple stmt, bool reliable)
 {
-  if (gimple_code (stmt) == GIMPLE_ASSIGN)
+  if (is_gimple_assign (stmt))
     {
       tree op0 = gimple_assign_lhs (stmt);
       tree op1 = gimple_assign_rhs1 (stmt);
@@ -2712,7 +2712,7 @@ infer_loop_bounds_from_array (struct loo
       if (REFERENCE_CLASS_P (op1))
 	infer_loop_bounds_from_ref (loop, stmt, op1, reliable);
     }
-  else if (gimple_code (stmt) == GIMPLE_CALL)
+  else if (is_gimple_call (stmt))
     {
       tree arg, lhs;
       unsigned i, n = gimple_call_num_args (stmt);
--- gcc/tree-ssa-propagate.c.jj	2008-07-15 11:38:20.000000000 +0200
+++ gcc/tree-ssa-propagate.c	2008-07-16 12:31:12.000000000 +0200
@@ -717,7 +717,7 @@ update_call_from_tree (gimple_stmt_itera
 
   gimple stmt = gsi_stmt (*si_p);
 
-  gcc_assert (gimple_code (stmt) == GIMPLE_CALL);
+  gcc_assert (is_gimple_call (stmt));
 
   lhs = gimple_call_lhs (stmt);
 
@@ -1357,7 +1357,7 @@ substitute_and_fold (prop_value_t *prop_
 	      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
 		gimple_purge_dead_eh_edges (bb);
 
-              if (gimple_code (stmt) == GIMPLE_ASSIGN
+              if (is_gimple_assign (stmt)
                   && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
                       == GIMPLE_SINGLE_RHS))
               {
--- gcc/tree-ssa-sccvn.c.jj	2008-07-16 09:22:27.000000000 +0200
+++ gcc/tree-ssa-sccvn.c	2008-07-16 12:31:12.000000000 +0200
@@ -2115,7 +2115,7 @@ visit_use (tree use)
 	       || gimple_has_volatile_ops (stmt)
 	       || stmt_could_throw_p (stmt))
 	changed = defs_to_varying (stmt);
-      else if (gimple_code (stmt) == GIMPLE_ASSIGN)
+      else if (is_gimple_assign (stmt))
 	{
 	  tree lhs = gimple_assign_lhs (stmt);
 	  tree simplified;
--- gcc/tree-eh.c.jj	2008-07-14 21:47:38.000000000 +0200
+++ gcc/tree-eh.c	2008-07-16 12:31:11.000000000 +0200
@@ -2383,7 +2383,7 @@ stmt_could_throw_p (gimple stmt)
 
   if (code == GIMPLE_ASSIGN || code == GIMPLE_COND)
     return stmt_could_throw_1_p (stmt);
-  else if (gimple_code (stmt) == GIMPLE_CALL)
+  else if (is_gimple_call (stmt))
     {
       tree t = gimple_call_fndecl (stmt);
 
--- gcc/tree-optimize.c.jj	2008-07-07 18:45:28.000000000 +0200
+++ gcc/tree-optimize.c	2008-07-16 12:31:11.000000000 +0200
@@ -300,7 +300,7 @@ execute_fixup_cfg (void)
 	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
 	  {
 	    gimple stmt = gsi_stmt (gsi);
-	    tree decl = gimple_code (stmt) == GIMPLE_CALL
+	    tree decl = is_gimple_call (stmt)
 	                ? gimple_call_fndecl (stmt)
 			: NULL;
 
--- gcc/omp-low.c.jj	2008-07-16 09:22:27.000000000 +0200
+++ gcc/omp-low.c	2008-07-16 12:31:11.000000000 +0200
@@ -1811,7 +1811,7 @@ check_omp_nesting_restrictions (gimple  
 	  case GIMPLE_OMP_ORDERED:
 	  case GIMPLE_OMP_MASTER:
 	  case GIMPLE_OMP_TASK:
-	    if (gimple_code (stmt) == GIMPLE_CALL)
+	    if (is_gimple_call (stmt))
 	      {
 		warning (0, "barrier region may not be closely nested inside "
 			    "of work-sharing, critical, ordered, master or "
@@ -1937,7 +1937,7 @@ scan_omp_1_stmt (gimple_stmt_iterator *g
     {
       if (is_gimple_omp (stmt))
 	check_omp_nesting_restrictions (stmt, ctx);
-      else if (gimple_code (stmt) == GIMPLE_CALL)
+      else if (is_gimple_call (stmt))
 	{
 	  tree fndecl = gimple_call_fndecl (stmt);
 	  if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
@@ -3188,7 +3188,7 @@ optimize_omp_library_calls (gimple entry
 	gimple call = gsi_stmt (gsi);
 	tree decl;
 
-	if (gimple_code (call) == GIMPLE_CALL
+	if (is_gimple_call (call)
 	    && (decl = gimple_call_fndecl (call))
 	    && DECL_EXTERNAL (decl)
 	    && TREE_PUBLIC (decl)
--- gcc/tree-ssa-loop-im.c.jj	2008-07-02 15:49:19.000000000 +0200
+++ gcc/tree-ssa-loop-im.c	2008-07-16 12:31:12.000000000 +0200
@@ -371,7 +371,7 @@ movement_possibility (gimple stmt)
       || stmt_could_throw_p (stmt))
     return MOVE_IMPOSSIBLE;
 
-  if (gimple_code (stmt) == GIMPLE_CALL)
+  if (is_gimple_call (stmt))
     {
       /* While pure or const call is guaranteed to have no side effects, we
 	 cannot move it arbitrarily.  Consider code like
@@ -394,7 +394,7 @@ movement_possibility (gimple stmt)
       ret = MOVE_PRESERVE_EXECUTION;
       lhs = gimple_call_lhs (stmt);
     }
-  else if (gimple_code (stmt) == GIMPLE_ASSIGN)
+  else if (is_gimple_assign (stmt))
     lhs = gimple_assign_lhs (stmt);
   else
     return MOVE_IMPOSSIBLE;
@@ -520,7 +520,7 @@ stmt_cost (gimple stmt)
   if (gimple_references_memory_p (stmt))
     cost += 20;
 
-  if (gimple_code (stmt) == GIMPLE_CALL)
+  if (is_gimple_call (stmt))
     {
       /* We should be hoisting calls if possible.  */
 
@@ -917,7 +917,7 @@ determine_invariantness_stmt (struct dom
 	  continue;
 	}
 
-      if (gimple_code (stmt) == GIMPLE_ASSIGN
+      if (is_gimple_assign (stmt)
 	  && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
 	      == GIMPLE_BINARY_RHS))
 	{
--- gcc/tree-ssa-phiprop.c.jj	2008-06-13 14:30:50.000000000 +0200
+++ gcc/tree-ssa-phiprop.c	2008-07-16 12:31:12.000000000 +0200
@@ -119,7 +119,7 @@ phivn_valid_p (struct phiprop_d *phivn, 
       FOR_EACH_IMM_USE_STMT (use_stmt, ui2, vuse)
 	{
 	  /* If BB does not dominate a VDEF, the value is invalid.  */
-	  if (((gimple_code (use_stmt) == GIMPLE_ASSIGN
+	  if (((is_gimple_assign (use_stmt)
 	        && !ZERO_SSA_OPERANDS (use_stmt, SSA_OP_VDEF))
 	       || gimple_code (use_stmt) == GIMPLE_PHI)
 	      && !dominated_by_p (CDI_DOMINATORS, gimple_bb (use_stmt), bb))
@@ -147,7 +147,7 @@ phiprop_insert_phi (basic_block bb, gimp
   edge_iterator ei;
   edge e;
 
-  gcc_assert (gimple_code (use_stmt) == GIMPLE_ASSIGN
+  gcc_assert (is_gimple_assign (use_stmt)
 	      && gimple_assign_rhs_code (use_stmt) == INDIRECT_REF);
 
   /* Build a new PHI node to replace the definition of
@@ -263,7 +263,7 @@ propagate_with_phi (basic_block bb, gimp
   /* Find a dereferencing use.  First follow (single use) ssa
      copy chains for ptr.  */
   while (single_imm_use (ptr, &use, &use_stmt)
-	 && gimple_code (use_stmt) == GIMPLE_ASSIGN
+	 && is_gimple_assign (use_stmt)
 	 && gimple_assign_rhs1 (use_stmt) == ptr
 	 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME)
     ptr = gimple_assign_lhs (use_stmt);
@@ -277,7 +277,7 @@ propagate_with_phi (basic_block bb, gimp
       tree vuse;
 
       /* Check whether this is a load of *ptr.  */
-      if (!(gimple_code (use_stmt) == GIMPLE_ASSIGN
+      if (!(is_gimple_assign (use_stmt)
 	    && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME 
 	    && gimple_assign_rhs_code (use_stmt) == INDIRECT_REF
 	    && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == ptr
--- gcc/tree-ssa-ccp.c.jj	2008-07-16 09:22:27.000000000 +0200
+++ gcc/tree-ssa-ccp.c	2008-07-16 12:31:12.000000000 +0200
@@ -353,10 +353,10 @@ get_default_value (tree var)
 	  else
 	    val.lattice_val = VARYING;
 	}
-      else if (gimple_code (stmt) == GIMPLE_ASSIGN
+      else if (is_gimple_assign (stmt)
                /* Value-returning GIMPLE_CALL statements assign to
                   a variable, and are treated similarly to GIMPLE_ASSIGN.  */
-               || (gimple_code (stmt) == GIMPLE_CALL
+               || (is_gimple_call (stmt)
                    && gimple_call_lhs (stmt) != NULL_TREE)
 	       || gimple_code (stmt) == GIMPLE_PHI)
         {
@@ -631,7 +631,7 @@ surely_varying_stmt_p (gimple stmt)
     }
 
   /* If it contains a call, it is varying.  */
-  if (gimple_code (stmt) == GIMPLE_CALL)
+  if (is_gimple_call (stmt))
     return true;
 
   /* Anything other than assignments and conditional jumps are not
@@ -2441,7 +2441,7 @@ ccp_fold_builtin (gimple stmt)
   bool ignore;
   int nargs;
 
-  gcc_assert (gimple_code (stmt) == GIMPLE_CALL);
+  gcc_assert (is_gimple_call (stmt));
 
   ignore = (gimple_call_lhs (stmt) == NULL);
 
@@ -3069,7 +3069,7 @@ gimplify_and_update_call_from_tree (gimp
 
   stmt = gsi_stmt (*si_p);
 
-  gcc_assert (gimple_code (stmt) == GIMPLE_CALL);
+  gcc_assert (is_gimple_call (stmt));
 
   lhs = gimple_call_lhs (stmt);
 
--- gcc/ipa-struct-reorg.c.jj	2008-07-02 11:39:00.000000000 +0200
+++ gcc/ipa-struct-reorg.c	2008-07-16 12:31:11.000000000 +0200
@@ -314,7 +314,7 @@ is_result_of_mult (tree arg, tree *num, 
      D.2229_10 = <alloc_func> (D.2228_9);
      then size_def_stmt can be D.2228_9 = num.3_8 * 8;  */
 
-  if (size_def_stmt && gimple_code (size_def_stmt) == GIMPLE_ASSIGN)
+  if (size_def_stmt && is_gimple_assign (size_def_stmt))
     {
       tree lhs = gimple_assign_lhs (size_def_stmt);
 
@@ -1259,7 +1259,7 @@ create_general_new_stmt (struct access_s
       gcc_assert (new_var);
       finalize_var_creation (new_var);
 
-      if (gimple_code (new_stmt) == GIMPLE_ASSIGN)
+      if (is_gimple_assign (new_stmt))
 	{
 	  lhs = gimple_assign_lhs (new_stmt);
 	  
--- gcc/tree-ssa-coalesce.c.jj	2008-07-02 11:38:21.000000000 +0200
+++ gcc/tree-ssa-coalesce.c	2008-07-16 12:31:12.000000000 +0200
@@ -858,7 +858,7 @@ build_ssa_conflict_graph (tree_live_info
 	     
 	     This is handled by simply removing the SRC of the copy from the 
 	     live list, and processing the stmt normally.  */
-	  if (gimple_code (stmt) == GIMPLE_ASSIGN)
+	  if (is_gimple_assign (stmt))
 	    {
 	      tree lhs = gimple_assign_lhs (stmt);
 	      tree rhs1 = gimple_assign_rhs1 (stmt);
--- gcc/tree-object-size.c.jj	2008-06-27 18:54:55.000000000 +0200
+++ gcc/tree-object-size.c	2008-07-16 12:31:11.000000000 +0200
@@ -231,7 +231,7 @@ alloc_object_size (const_gimple call, in
   tree alloc_size;
   int arg1 = -1, arg2 = -1;
 
-  gcc_assert (gimple_code (call) == GIMPLE_CALL);
+  gcc_assert (is_gimple_call (call));
 
   callee = gimple_call_fndecl (call);
   if (!callee)
@@ -502,7 +502,7 @@ call_object_size (struct object_size_inf
   unsigned int varno = SSA_NAME_VERSION (ptr);
   unsigned HOST_WIDE_INT bytes;
 
-  gcc_assert (gimple_code (call) == GIMPLE_CALL);
+  gcc_assert (is_gimple_call (call));
 
   gcc_assert (object_sizes[object_size_type][varno]
 	      != unknown[object_size_type]);
@@ -971,7 +971,7 @@ check_for_plus_in_loops (struct object_s
      argument, if any.  In GIMPLE, however, such an expression
      is not a valid call operand.  */
 
-  if (gimple_code (stmt) == GIMPLE_ASSIGN
+  if (is_gimple_assign (stmt)
       && gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
     {
       tree basevar = gimple_assign_rhs1 (stmt);
--- gcc/tree-ssa.c.jj	2008-07-14 21:47:38.000000000 +0200
+++ gcc/tree-ssa.c	2008-07-16 12:31:12.000000000 +0200
@@ -805,7 +805,7 @@ verify_ssa (bool check_modified_stmt)
 	      goto err;
 	    }
 
-	  if (gimple_code (stmt) == GIMPLE_ASSIGN
+	  if (is_gimple_assign (stmt)
 	      && TREE_CODE (gimple_assign_lhs (stmt)) != SSA_NAME)
 	    {
 	      tree lhs, base_address;
--- gcc/predict.c.jj	2008-07-08 18:41:26.000000000 +0200
+++ gcc/predict.c	2008-07-16 12:31:11.000000000 +0200
@@ -1001,7 +1001,7 @@ expr_expected_value_1 (tree type, tree o
 	    }
 	  return val;
 	}
-      if (gimple_code (def) == GIMPLE_ASSIGN)
+      if (is_gimple_assign (def))
 	{
 	  if (gimple_assign_lhs (def) != op0)
 	    return NULL;
@@ -1013,7 +1013,7 @@ expr_expected_value_1 (tree type, tree o
 					visited);
 	}
 
-      if (gimple_code (def) == GIMPLE_CALL)
+      if (is_gimple_call (def))
 	{
 	  tree decl = gimple_call_fndecl (def);
 	  if (!decl)
@@ -1349,7 +1349,7 @@ tree_bb_level_predictions (void)
 	  gimple stmt = gsi_stmt (gsi);
 	  tree decl;
 
-	  if (gimple_code (stmt) == GIMPLE_CALL)
+	  if (is_gimple_call (stmt))
 	    {
 	      if (gimple_call_flags (stmt) & ECF_NORETURN)
 		predict_paths_leading_to (bb, PRED_NORETURN,
@@ -1475,7 +1475,7 @@ tree_estimate_probability (void)
 		   gsi_next (&bi))
 		{
 		  gimple stmt = gsi_stmt (bi);
-		  if (gimple_code (stmt) == GIMPLE_CALL
+		  if (is_gimple_call (stmt)
 		      /* Constant and pure calls are hardly used to signalize
 			 something exceptional.  */
 		      && gimple_has_side_effects (stmt))
--- gcc/tree-cfg.c.jj	2008-07-14 21:47:38.000000000 +0200
+++ gcc/tree-cfg.c	2008-07-16 12:31:11.000000000 +0200
@@ -3808,7 +3808,7 @@ verify_stmt (gimple_stmt_iterator *gsi)
 
   /* FIXME.  The C frontend passes unpromoted arguments in case it
      didn't see a function declaration before the call.  */
-  if (gimple_code (stmt) == GIMPLE_CALL)
+  if (is_gimple_call (stmt))
     {
       tree decl = gimple_call_fn (stmt);
 
--- gcc/tree-ssa-loop-ch.c.jj	2008-06-13 14:30:50.000000000 +0200
+++ gcc/tree-ssa-loop-ch.c	2008-07-16 12:31:12.000000000 +0200
@@ -83,7 +83,7 @@ should_duplicate_loop_header_p (basic_bl
       if (gimple_code (last) == GIMPLE_LABEL)
 	continue;
 
-      if (gimple_code (last) == GIMPLE_CALL)
+      if (is_gimple_call (last))
 	return false;
 
       *limit -= estimate_num_insns (last, &eni_size_weights);
@@ -224,7 +224,7 @@ copy_loop_headers (void)
 		  gimple stmt = gsi_stmt (bsi);
 		  if (gimple_code (stmt) == GIMPLE_COND)
 		    gimple_set_no_warning (stmt, true);
-		  else if (gimple_code (stmt) == GIMPLE_ASSIGN)
+		  else if (is_gimple_assign (stmt))
 		    {
 		      enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
 		      if (TREE_CODE_CLASS (rhs_code) == tcc_comparison)
--- gcc/tree-ssa-ter.c.jj	2008-07-14 21:47:38.000000000 +0200
+++ gcc/tree-ssa-ter.c	2008-07-16 12:31:12.000000000 +0200
@@ -406,7 +406,7 @@ is_replaceable_p (gimple stmt)
     return false;
 
   /* No function calls can be replaced.  */
-  if (gimple_code (stmt) == GIMPLE_CALL)
+  if (is_gimple_call (stmt))
     return false;
 
   /* Leave any stmt with volatile operands alone as well.  */
--- gcc/ipa-prop.c.jj	2008-07-08 18:41:53.000000000 +0200
+++ gcc/ipa-prop.c	2008-07-16 12:31:11.000000000 +0200
@@ -238,7 +238,7 @@ ipa_count_arguments (struct cgraph_edge 
   int arg_num;
 
   stmt = cs->call_stmt;
-  gcc_assert (gimple_code (stmt) == GIMPLE_CALL);
+  gcc_assert (is_gimple_call (stmt));
   arg_num = gimple_call_num_args (stmt);
   ipa_set_cs_argument_count (IPA_EDGE_REF (cs), arg_num);
 }
@@ -263,7 +263,7 @@ ipa_compute_jump_functions (struct cgrap
 				   ipa_get_cs_argument_count (args));
 
   stmt = cs->call_stmt;
-  gcc_assert (gimple_code (stmt) == GIMPLE_CALL);
+  gcc_assert (is_gimple_call (stmt));
 
   for (arg_num = 0; arg_num < gimple_call_num_args (stmt); arg_num++)
     {
--- gcc/tree-ssa-dom.c.jj	2008-07-08 18:41:19.000000000 +0200
+++ gcc/tree-ssa-dom.c	2008-07-16 12:31:12.000000000 +0200
@@ -1848,14 +1848,14 @@ eliminate_redundant_computations (gimple
   opt_stats.num_exprs_considered++;
 
   /* Get the type of the expression we are trying to optimize.  */
-  if (gimple_code (stmt) == GIMPLE_ASSIGN)
+  if (is_gimple_assign (stmt))
     {
       expr_type = TREE_TYPE (gimple_assign_lhs (stmt));
       assigns_var_p = true;
     }
   else if (gimple_code (stmt) == GIMPLE_COND)
     expr_type = boolean_type_node;
-  else if (gimple_code (stmt) == GIMPLE_CALL)
+  else if (is_gimple_call (stmt))
     {
       gcc_assert (gimple_call_lhs (stmt));
       expr_type = TREE_TYPE (gimple_call_lhs (stmt));
@@ -1921,7 +1921,7 @@ eliminate_redundant_computations (gimple
 static bool
 gimple_assign_unary_useless_conversion_p (gimple gs)
 {
-  if (gimple_code (gs) == GIMPLE_ASSIGN
+  if (is_gimple_assign (gs)
       && (gimple_assign_rhs_code (gs) == NOP_EXPR
           || gimple_assign_rhs_code (gs) == CONVERT_EXPR
           || gimple_assign_rhs_code (gs) == VIEW_CONVERT_EXPR
@@ -1947,7 +1947,7 @@ record_equivalences_from_stmt (gimple st
   tree lhs;
   enum tree_code lhs_code;
 
-  gcc_assert (gimple_code (stmt) == GIMPLE_ASSIGN);
+  gcc_assert (is_gimple_assign (stmt));
 
   lhs = gimple_assign_lhs (stmt);
   lhs_code = TREE_CODE (lhs);
@@ -2238,9 +2238,9 @@ optimize_stmt (struct dom_walk_data *wal
   /* Check for redundant computations.  Do this optimization only
      for assignments that have no volatile ops and conditionals.  */
   may_optimize_p = (!gimple_has_volatile_ops (stmt)
-                    && ((gimple_code (stmt) == GIMPLE_ASSIGN
+                    && ((is_gimple_assign (stmt)
                          && !gimple_rhs_has_side_effects (stmt))
-                        || (gimple_code (stmt) == GIMPLE_CALL
+                        || (is_gimple_call (stmt)
                             && gimple_call_lhs (stmt) != NULL_TREE
                             && !gimple_rhs_has_side_effects (stmt))
                         || gimple_code (stmt) == GIMPLE_COND
@@ -2253,7 +2253,7 @@ optimize_stmt (struct dom_walk_data *wal
     }
 
   /* Record any additional equivalences created by this statement.  */
-  if (gimple_code (stmt) == GIMPLE_ASSIGN)
+  if (is_gimple_assign (stmt))
     record_equivalences_from_stmt (stmt, may_optimize_p);
 
   /* If STMT is a COND_EXPR and it was modified, then we may know
@@ -2552,7 +2552,7 @@ get_lhs_or_phi_result (gimple stmt)
 {
   if (gimple_code (stmt) == GIMPLE_PHI)
     return gimple_phi_result (stmt);
-  else if (gimple_code (stmt) == GIMPLE_ASSIGN)
+  else if (is_gimple_assign (stmt))
     return gimple_assign_lhs (stmt);
   else
     gcc_unreachable ();
--- gcc/tree-ssa-sink.c.jj	2008-06-13 17:28:43.000000000 +0200
+++ gcc/tree-ssa-sink.c	2008-07-16 12:34:02.000000000 +0200
@@ -143,8 +143,7 @@ is_hidden_global_store (gimple stmt)
     {
       tree lhs;
 
-      gcc_assert (gimple_code (stmt) == GIMPLE_ASSIGN
-                  || gimple_code (stmt) == GIMPLE_CALL);
+      gcc_assert (is_gimple_assign (stmt) || is_gimple_call (stmt));
 
       /* Note that we must not check the individual virtual operands
 	 here.  In particular, if this is an aliased store, we could
--- gcc/tree-nrv.c.jj	2008-07-14 21:47:38.000000000 +0200
+++ gcc/tree-nrv.c	2008-07-16 12:31:11.000000000 +0200
@@ -138,7 +138,7 @@ tree_nrv (void)
 	      if (ret_val)
 		gcc_assert (ret_val == result);
 	    }
-	  else if (gimple_code (stmt) == GIMPLE_ASSIGN
+	  else if (is_gimple_assign (stmt)
 		   && gimple_assign_lhs (stmt) == result)
 	    {
               tree rhs;
@@ -173,7 +173,7 @@ tree_nrv (void)
 					        TREE_TYPE (found)))
 		return 0;
 	    }
-	  else if (gimple_code (stmt) == GIMPLE_ASSIGN)
+	  else if (is_gimple_assign (stmt))
 	    {
 	      tree addr = get_base_address (gimple_assign_lhs (stmt));
 	       /* If there's any MODIFY of component of RESULT, 
@@ -305,7 +305,7 @@ execute_return_slot_opt (void)
 	  gimple stmt = gsi_stmt (gsi);
 	  bool slot_opt_p;
 
-	  if (gimple_code (stmt) == GIMPLE_CALL
+	  if (is_gimple_call (stmt)
 	      && gimple_call_lhs (stmt)
 	      && !gimple_call_return_slot_opt_p (stmt)
 	      && aggregate_value_p (TREE_TYPE (gimple_call_lhs (stmt)),
--- gcc/value-prof.c.jj	2008-07-02 14:57:07.000000000 +0200
+++ gcc/value-prof.c	2008-07-16 12:31:12.000000000 +0200
@@ -551,7 +551,7 @@ gimple_divmod_fixed_value (gimple stmt, 
   edge e12, e13, e23, e24, e34;
   gimple_stmt_iterator gsi;
 
-  gcc_assert (gimple_code (stmt) == GIMPLE_ASSIGN
+  gcc_assert (is_gimple_assign (stmt)
 	      && (gimple_assign_rhs_code (stmt) == TRUNC_DIV_EXPR
 		  || gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR));
 
@@ -712,7 +712,7 @@ gimple_mod_pow2 (gimple stmt, int prob, 
   gimple_stmt_iterator gsi;
   tree result;
 
-  gcc_assert (gimple_code (stmt) == GIMPLE_ASSIGN
+  gcc_assert (is_gimple_assign (stmt)
 	      && gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR);
 
   optype = TREE_TYPE (gimple_assign_lhs (stmt));
@@ -872,7 +872,7 @@ gimple_mod_subtract (gimple stmt, int pr
   gimple_stmt_iterator gsi;
   tree result;
 
-  gcc_assert (gimple_code (stmt) == GIMPLE_ASSIGN
+  gcc_assert (is_gimple_assign (stmt)
 	      && gimple_assign_rhs_code (stmt) == TRUNC_MOD_EXPR);
 
   optype = TREE_TYPE (gimple_assign_lhs (stmt));
--- gcc/tree-predcom.c.jj	2008-07-14 21:47:38.000000000 +0200
+++ gcc/tree-predcom.c	2008-07-16 12:31:11.000000000 +0200
@@ -995,7 +995,7 @@ name_for_ref (dref ref)
 {
   tree name;
 
-  if (gimple_code (ref->stmt) == GIMPLE_ASSIGN)
+  if (is_gimple_assign (ref->stmt))
     {
       if (!ref->ref || DR_IS_READ (ref->ref))
 	name = gimple_assign_lhs (ref->stmt);
@@ -1067,7 +1067,7 @@ find_looparound_phi (struct loop *loop, 
   struct data_reference init_dr;
   gimple_stmt_iterator psi;
 
-  if (gimple_code (ref->stmt) == GIMPLE_ASSIGN)
+  if (is_gimple_assign (ref->stmt))
     {
       if (DR_IS_READ (ref->ref))
 	name = gimple_assign_lhs (ref->stmt);
@@ -1252,7 +1252,7 @@ replace_ref_with (gimple stmt, tree new,
       
   /* Since the reference is of gimple_reg type, it should only
      appear as lhs or rhs of modify statement.  */
-  gcc_assert (gimple_code (stmt) == GIMPLE_ASSIGN);
+  gcc_assert (is_gimple_assign (stmt));
 
   bsi = gsi_for_stmt (stmt);
 
@@ -2183,7 +2183,7 @@ remove_name_from_operation (gimple stmt,
   tree other_op;
   gimple_stmt_iterator si;
 
-  gcc_assert (gimple_code (stmt) == GIMPLE_ASSIGN);
+  gcc_assert (is_gimple_assign (stmt));
 
   if (gimple_assign_rhs1 (stmt) == op)
     other_op = gimple_assign_rhs2 (stmt);
--- gcc/tree-ssa-math-opts.c.jj	2008-07-15 11:38:20.000000000 +0200
+++ gcc/tree-ssa-math-opts.c	2008-07-16 12:31:12.000000000 +0200
@@ -273,7 +273,7 @@ compute_merit (struct occurrence *occ)
 static inline bool
 is_division_by (gimple use_stmt, tree def)
 {
-  return gimple_code (use_stmt) == GIMPLE_ASSIGN
+  return is_gimple_assign (use_stmt)
 	 && gimple_assign_rhs_code (use_stmt) == RDIV_EXPR
 	 && gimple_assign_rhs2 (use_stmt) == def
 	 /* Do not recognize x / x as valid division, as we are getting
@@ -506,7 +506,7 @@ execute_cse_reciprocals (void)
 	  gimple stmt = gsi_stmt (gsi);
 	  tree fndecl;
 
-	  if (gimple_code (stmt) == GIMPLE_ASSIGN
+	  if (is_gimple_assign (stmt)
 	      && gimple_assign_rhs_code (stmt) == RDIV_EXPR)
 	    {
 	      tree arg1 = gimple_assign_rhs2 (stmt);
@@ -517,7 +517,7 @@ execute_cse_reciprocals (void)
 
 	      stmt1 = SSA_NAME_DEF_STMT (arg1);
 
-	      if (gimple_code (stmt1) == GIMPLE_CALL
+	      if (is_gimple_call (stmt1)
 		  && gimple_call_lhs (stmt1)
 		  && (fndecl = gimple_call_fndecl (stmt1))
 		  && (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
@@ -728,7 +728,7 @@ execute_cse_sincos (void)
 	  gimple stmt = gsi_stmt (gsi);
 	  tree fndecl;
 
-	  if (gimple_code (stmt) == GIMPLE_CALL
+	  if (is_gimple_call (stmt)
 	      && gimple_call_lhs (stmt)
 	      && (fndecl = gimple_call_fndecl (stmt))
 	      && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
@@ -801,7 +801,7 @@ execute_convert_to_rsqrt (void)
 	  gimple stmt = gsi_stmt (gsi);
 	  tree fndecl;
 
-	  if (gimple_code (stmt) == GIMPLE_CALL
+	  if (is_gimple_call (stmt)
 	      && gimple_call_lhs (stmt)
 	      && (fndecl = gimple_call_fndecl (stmt))
 	      && (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
@@ -826,7 +826,7 @@ execute_convert_to_rsqrt (void)
 
 	      stmt1 = SSA_NAME_DEF_STMT (arg1);
 
-	      if (gimple_code (stmt1) == GIMPLE_ASSIGN
+	      if (is_gimple_assign (stmt1)
 		  && gimple_assign_rhs_code (stmt1) == RDIV_EXPR)
 		{
 		  tree arg10, arg11;
--- gcc/tree-complex.c.jj	2008-07-02 11:35:29.000000000 +0200
+++ gcc/tree-complex.c	2008-07-16 12:31:11.000000000 +0200
@@ -766,7 +766,7 @@ expand_complex_move (gimple_stmt_iterato
   tree r, i, lhs, rhs;
   gimple stmt = gsi_stmt (*gsi);
 
-  if (gimple_code (stmt) == GIMPLE_ASSIGN)
+  if (is_gimple_assign (stmt))
     {
       lhs = gimple_assign_lhs (stmt);
       if (gimple_num_ops (stmt) == 2)
@@ -774,7 +774,7 @@ expand_complex_move (gimple_stmt_iterato
       else
 	rhs = NULL_TREE;
     }
-  else if (gimple_code (stmt) == GIMPLE_CALL)
+  else if (is_gimple_call (stmt))
     {
       lhs = gimple_call_lhs (stmt);
       rhs = NULL_TREE;
@@ -802,7 +802,7 @@ expand_complex_move (gimple_stmt_iterato
 	  i = build1 (IMAGPART_EXPR, inner_type, lhs);
 	  update_complex_components_on_edge (e, lhs, r, i);
 	}
-      else if (gimple_code (stmt) == GIMPLE_CALL
+      else if (is_gimple_call (stmt)
 	       || gimple_has_side_effects (stmt)
 	       || gimple_assign_rhs_code (stmt) == PAREN_EXPR)
 	{
@@ -1475,7 +1475,7 @@ expand_complex_operations_1 (gimple_stmt
 
   /* Extract the components of the two complex values.  Make sure and
      handle the common case of the same value used twice specially.  */
-  if (gimple_code (stmt) == GIMPLE_ASSIGN)
+  if (is_gimple_assign (stmt))
     {
       ac = gimple_assign_rhs1 (stmt);
       bc = (gimple_num_ops (stmt) > 2) ? gimple_assign_rhs2 (stmt) : NULL;
--- gcc/tree-outof-ssa.c.jj	2008-07-15 13:00:29.000000000 +0200
+++ gcc/tree-outof-ssa.c	2008-07-16 12:31:11.000000000 +0200
@@ -803,8 +803,8 @@ static inline bool
 identical_copies_p (const_gimple s1, const_gimple s2)
 {
 #ifdef ENABLE_CHECKING
-  gcc_assert (gimple_code (s1) == GIMPLE_ASSIGN);
-  gcc_assert (gimple_code (s2) == GIMPLE_ASSIGN);
+  gcc_assert (is_gimple_assign (s1));
+  gcc_assert (is_gimple_assign (s2));
   gcc_assert (DECL_P (gimple_assign_lhs (s1)));
   gcc_assert (DECL_P (gimple_assign_lhs (s2)));
 #endif
--- gcc/tree-ssa-pre.c.jj	2008-07-16 10:43:05.000000000 +0200
+++ gcc/tree-ssa-pre.c	2008-07-16 12:31:12.000000000 +0200
@@ -2341,7 +2341,7 @@ can_value_number_call (gimple stmt)
 static bool
 is_exception_related (gimple stmt)
 {
-  return (gimple_code (stmt) == GIMPLE_ASSIGN
+  return (is_gimple_assign (stmt)
 	  && (gimple_assign_rhs_code (stmt) == FILTER_EXPR
 	      || gimple_assign_rhs_code (stmt) == EXC_PTR_EXPR));
 }
--- gcc/tree-sra.c.jj	2008-07-09 16:58:01.000000000 +0200
+++ gcc/tree-sra.c	2008-07-16 12:31:11.000000000 +0200
@@ -3136,7 +3136,7 @@ scalarize_use (struct sra_elt *elt, tree
       if (is_output
 	  && TREE_CODE (elt->replacement) == BIT_FIELD_REF
 	  && is_gimple_reg (TREE_OPERAND (elt->replacement, 0))
-	  && gimple_code (stmt) == GIMPLE_ASSIGN
+	  && is_gimple_assign (stmt)
 	  && gimple_assign_lhs_ptr (stmt) == expr_p)
 	{
           gimple_seq newseq;
@@ -3148,7 +3148,7 @@ scalarize_use (struct sra_elt *elt, tree
 	}
       else if (!is_output
 	       && TREE_CODE (elt->replacement) == BIT_FIELD_REF
-	       && gimple_code (stmt) == GIMPLE_ASSIGN
+	       && is_gimple_assign (stmt)
 	       && gimple_assign_rhs1_ptr (stmt) == expr_p)
 	{
 	  tree tmp = make_rename_temp
@@ -3164,7 +3164,7 @@ scalarize_use (struct sra_elt *elt, tree
       update_stmt (stmt);
     }
   else if (use_all && is_output
-	   && gimple_code (stmt) == GIMPLE_ASSIGN
+	   && is_gimple_assign (stmt)
 	   && TREE_CODE (bfexpr
 			 = gimple_assign_lhs (stmt)) == BIT_FIELD_REF
 	   && &TREE_OPERAND (bfexpr, 0) == expr_p
@@ -3225,7 +3225,7 @@ scalarize_use (struct sra_elt *elt, tree
 	update_stmt (stmt);
     }
   else if (use_all && !is_output
-	   && gimple_code (stmt) == GIMPLE_ASSIGN
+	   && is_gimple_assign (stmt)
 	   && TREE_CODE (bfexpr
 			 = gimple_assign_rhs1 (stmt)) == BIT_FIELD_REF
 	   && &TREE_OPERAND (gimple_assign_rhs1 (stmt), 0) == expr_p
@@ -3328,7 +3328,7 @@ scalarize_copy (struct sra_elt *lhs_elt,
 
       /* See the commentary in sra_walk_function concerning
 	 RETURN_EXPR, and why we should never see one here.  */
-      gcc_assert (gimple_code (stmt) == GIMPLE_ASSIGN);
+      gcc_assert (is_gimple_assign (stmt));
       gcc_assert (gimple_assign_copy_p (stmt));
 
 
--- gcc/tree-ssa-alias.c.jj	2008-07-16 09:22:27.000000000 +0200
+++ gcc/tree-ssa-alias.c	2008-07-16 12:31:12.000000000 +0200
@@ -1915,10 +1915,10 @@ count_uses_and_derefs (tree ptr, gimple 
      find all the indirect and direct uses of x_1 inside.  The only
      shortcut we can take is the fact that GIMPLE only allows
      INDIRECT_REFs inside the expressions below.  */
-  if (gimple_code (stmt) == GIMPLE_ASSIGN
+  if (is_gimple_assign (stmt)
       || gimple_code (stmt) == GIMPLE_RETURN
       || gimple_code (stmt) == GIMPLE_ASM
-      || gimple_code (stmt) == GIMPLE_CALL)
+      || is_gimple_call (stmt))
     {
       struct walk_stmt_info wi;
       struct count_ptr_d count;
@@ -2644,7 +2644,7 @@ update_alias_info_1 (gimple stmt, struct
 	  /* If the statement makes a function call, assume
 	     that pointer OP will be dereferenced in a store
 	     operation inside the called function.  */
-	  if (gimple_code (stmt) == GIMPLE_CALL
+	  if (is_gimple_call (stmt)
 	      || stmt_escape_type == ESCAPE_STORED_IN_GLOBAL)
 	    {
 	      pointer_set_insert (ai->dereferenced_ptrs_store, var);
@@ -3161,7 +3161,7 @@ set_pt_anything (tree ptr)
 enum escape_type
 is_escape_site (gimple stmt)
 {
-  if (gimple_code (stmt) == GIMPLE_CALL)
+  if (is_gimple_call (stmt))
     {
       if (gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
 	return ESCAPE_TO_PURE_CONST;
@@ -3170,7 +3170,7 @@ is_escape_site (gimple stmt)
     }
   else if (gimple_code (stmt) == GIMPLE_ASM)
     return ESCAPE_TO_ASM;
-  else if (gimple_code (stmt) == GIMPLE_ASSIGN)
+  else if (is_gimple_assign (stmt))
     {
       tree lhs = gimple_assign_lhs (stmt);
 
--- gcc/lambda-code.c.jj	2008-06-13 14:30:50.000000000 +0200
+++ gcc/lambda-code.c	2008-07-16 12:31:11.000000000 +0200
@@ -2141,7 +2141,7 @@ can_put_in_inner_loop (struct loop *inne
   imm_use_iterator imm_iter;
   use_operand_p use_p;
   
-  gcc_assert (gimple_code (stmt) == GIMPLE_ASSIGN);
+  gcc_assert (is_gimple_assign (stmt));
   if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS)
       || !stmt_invariant_in_loop_p (inner, stmt))
     return false;
@@ -2279,7 +2279,7 @@ cannot_convert_bb_to_perfect_nest (basic
 	  || stmt_is_bumper_for_loop (loop, stmt))
 	continue;
 
-      if (gimple_code (stmt) == GIMPLE_ASSIGN)
+      if (is_gimple_assign (stmt))
 	{
 	  if (cannot_convert_modify_to_perfect_nest (stmt, loop))
 	    return true;
--- gcc/tree-tailcall.c.jj	2008-07-14 21:47:38.000000000 +0200
+++ gcc/tree-tailcall.c	2008-07-16 12:31:12.000000000 +0200
@@ -396,7 +396,7 @@ find_tail_calls (basic_block bb, struct 
 	continue;
 
       /* Check for a call.  */
-      if (gimple_code (stmt) == GIMPLE_CALL)
+      if (is_gimple_call (stmt))
 	{
 	  call = stmt;
 	  ass_var = gimple_call_lhs (stmt);
@@ -721,7 +721,7 @@ eliminate_tail_call (struct tailcall *t)
       fprintf (dump_file, "\n");
     }
 
-  gcc_assert (gimple_code (stmt) == GIMPLE_CALL);
+  gcc_assert (is_gimple_call (stmt));
 
   first = single_succ (ENTRY_BLOCK_PTR);
 
--- gcc/ipa-type-escape.c.jj	2008-06-27 18:54:54.000000000 +0200
+++ gcc/ipa-type-escape.c	2008-07-16 12:31:11.000000000 +0200
@@ -768,7 +768,7 @@ look_for_casts_stmt (gimple s)
 {
   unsigned int cast = 0;
 
-  gcc_assert (gimple_code (s) == GIMPLE_ASSIGN);
+  gcc_assert (is_gimple_assign (s));
 
   if (gimple_assign_cast_p (s))
     {
@@ -810,7 +810,7 @@ is_cast_from_non_pointer (tree var, gimp
   if (SSA_NAME_IS_DEFAULT_DEF (var))
       return false;
 
-  if (gimple_code (def_stmt) == GIMPLE_ASSIGN)
+  if (is_gimple_assign (def_stmt))
     {
       use_operand_p use_p; 
       ssa_op_iter iter;
--- gcc/tree-vect-transform.c.jj	2008-07-15 11:38:20.000000000 +0200
+++ gcc/tree-vect-transform.c	2008-07-16 12:31:12.000000000 +0200
@@ -2802,7 +2802,7 @@ vectorizable_reduction (gimple stmt, gim
         inside the loop body. The last operand is the reduction variable,
         which is defined by the loop-header-phi.  */
 
-  gcc_assert (gimple_code (stmt) == GIMPLE_ASSIGN);
+  gcc_assert (is_gimple_assign (stmt));
 
   /* Flatten RHS */
   switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
--- gcc/tree-ssa-threadedge.c.jj	2008-07-08 18:42:55.000000000 +0200
+++ gcc/tree-ssa-threadedge.c	2008-07-16 12:31:12.000000000 +0200
@@ -322,7 +322,7 @@ record_temporary_equivalences_from_stmts
 	 remaining bytes. If we use only one edge on the phi, the result will
 	 change to be the remaining bytes for the corresponding phi argument. */
 
-      if (gimple_code (stmt) == GIMPLE_CALL)
+      if (is_gimple_call (stmt))
 	{
 	  tree fndecl = gimple_call_fndecl (stmt);
 	  if (fndecl && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_OBJECT_SIZE)
@@ -371,7 +371,7 @@ record_temporary_equivalences_from_stmts
 
 	  /* Try to fold/lookup the new expression.  Inserting the
 	     expression into the hash table is unlikely to help.  */
-          if (gimple_code (stmt) == GIMPLE_CALL)
+          if (is_gimple_call (stmt))
             cached_lhs = fold_call_stmt (stmt, false);
 	  else
             cached_lhs = fold_assignment_stmt (stmt);
--- gcc/tree-ssa-phiopt.c.jj	2008-07-02 11:38:19.000000000 +0200
+++ gcc/tree-ssa-phiopt.c	2008-07-16 12:31:12.000000000 +0200
@@ -1101,7 +1101,7 @@ nt_init_block (struct dom_walk_data *dat
     {
       gimple stmt = gsi_stmt (gsi);
 
-      if (gimple_code (stmt) == GIMPLE_ASSIGN)
+      if (is_gimple_assign (stmt))
 	{
 	  add_or_mark_expr (bb, gimple_assign_lhs (stmt), nontrap_set, true);
 	  add_or_mark_expr (bb, gimple_assign_rhs1 (stmt), nontrap_set, false);
--- gcc/tree-ssa-structalias.c.jj	2008-07-11 00:39:58.000000000 +0200
+++ gcc/tree-ssa-structalias.c	2008-07-16 12:31:12.000000000 +0200
@@ -3747,7 +3747,7 @@ find_func_aliases (gimple origt)
 
      In non-ipa mode, we need to generate constraints for each
      pointer passed by address.  */
-  else if (gimple_code (t) == GIMPLE_CALL)
+  else if (is_gimple_call (t))
     {
       if (!in_ipa_mode)
 	{
@@ -3859,7 +3859,7 @@ find_func_aliases (gimple origt)
   /* Otherwise, just a regular assignment statement.  Only care about
      operations with pointer result, others are dealt with as escape
      points if they have pointer operands.  */
-  else if (gimple_code (t) == GIMPLE_ASSIGN
+  else if (is_gimple_assign (t)
 	   && could_have_pointers (gimple_assign_lhs (t)))
     {
       /* Otherwise, just a regular assignment statement.  */
--- gcc/tree-ssa-forwprop.c.jj	2008-07-16 11:16:57.000000000 +0200
+++ gcc/tree-ssa-forwprop.c	2008-07-16 12:31:12.000000000 +0200
@@ -257,7 +257,7 @@ can_propagate_from (gimple def_stmt)
   use_operand_p use_p;
   ssa_op_iter iter;
 
-  gcc_assert (gimple_code (def_stmt) == GIMPLE_ASSIGN);
+  gcc_assert (is_gimple_assign (def_stmt));
   /* If the rhs has side-effects we cannot propagate from it.  */
   if (gimple_has_volatile_ops (def_stmt))
     return false;
@@ -281,7 +281,7 @@ can_propagate_from (gimple def_stmt)
      then we can not apply optimizations as some targets require
      function pointers to be canonicalized and in this case this
      optimization could eliminate a necessary canonicalization.  */
-  if (gimple_code (def_stmt) == GIMPLE_ASSIGN
+  if (is_gimple_assign (def_stmt)
       && (IS_CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))))
     {
       tree rhs = gimple_assign_rhs1 (def_stmt);
@@ -938,7 +938,7 @@ forward_propagate_comparison (gimple stm
     return false;
 
   /* Conversion of the condition result to another integral type.  */
-  if (gimple_code (use_stmt) == GIMPLE_ASSIGN
+  if (is_gimple_assign (use_stmt)
       && (IS_CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (use_stmt))
 	  || TREE_CODE_CLASS (gimple_assign_rhs_code (use_stmt))
 	     == tcc_comparison
@@ -1041,7 +1041,7 @@ simplify_not_neg_expr (gimple_stmt_itera
   gimple rhs_def_stmt = SSA_NAME_DEF_STMT (rhs);
 
   /* See if the RHS_DEF_STMT has the same form as our statement.  */
-  if (gimple_code (rhs_def_stmt) == GIMPLE_ASSIGN
+  if (is_gimple_assign (rhs_def_stmt)
       && gimple_assign_rhs_code (rhs_def_stmt) == gimple_assign_rhs_code (stmt))
     {
       tree rhs_def_operand = gimple_assign_rhs1 (rhs_def_stmt);
@@ -1073,7 +1073,7 @@ simplify_gimple_switch (gimple stmt)
   if (TREE_CODE (cond) == SSA_NAME)
     {
       def_stmt = SSA_NAME_DEF_STMT (cond);
-      if (gimple_code (def_stmt) == GIMPLE_ASSIGN)
+      if (is_gimple_assign (def_stmt))
 	{
 	  if (gimple_assign_rhs_code (def_stmt) == NOP_EXPR)
 	    {
@@ -1135,7 +1135,7 @@ tree_ssa_forward_propagate_single_use_va
 
 	  /* If this statement sets an SSA_NAME to an address,
 	     try to propagate the address into the uses of the SSA_NAME.  */
-	  if (gimple_code (stmt) == GIMPLE_ASSIGN)
+	  if (is_gimple_assign (stmt))
 	    {
 	      tree lhs = gimple_assign_lhs (stmt);
 	      tree rhs = gimple_assign_rhs1 (stmt);
--- gcc/tree-ssa-dce.c.jj	2008-07-07 18:45:28.000000000 +0200
+++ gcc/tree-ssa-dce.c	2008-07-16 12:31:12.000000000 +0200
@@ -686,7 +686,7 @@ eliminate_unnecessary_stmts (void)
 	      remove_dead_stmt (&gsi, bb);
 	      something_changed = true;
 	    }
-	  else if (gimple_code (stmt) == GIMPLE_CALL)
+	  else if (is_gimple_call (stmt))
 	    {
 	      call = gimple_call_fndecl (stmt);
 	      if (call)
--- gcc/tree-ssa-dse.c.jj	2008-07-14 21:47:38.000000000 +0200
+++ gcc/tree-ssa-dse.c	2008-07-16 12:31:12.000000000 +0200
@@ -221,7 +221,7 @@ get_kill_of_stmt_lhs (gimple stmt,
 {
   tree lhs;
 
-  gcc_assert (gimple_code (stmt) == GIMPLE_ASSIGN);
+  gcc_assert (is_gimple_assign (stmt));
 
   lhs = gimple_assign_lhs (stmt);
 
@@ -249,10 +249,10 @@ get_kill_of_stmt_lhs (gimple stmt,
       if (gimple_code (stmt) != GIMPLE_ASSIGN)
 	return false;
 
-      if (gimple_code (stmt) == GIMPLE_CALL)
+      if (is_gimple_call (stmt))
 	return false;
 
-      if (gimple_code (stmt) == GIMPLE_ASSIGN)
+      if (is_gimple_assign (stmt))
         {
 	  use_rhs = gimple_assign_rhs1 (stmt);
 	  if (!is_gimple_min_invariant (use_rhs) 
@@ -334,7 +334,7 @@ dse_possible_dead_store_p (gimple stmt,
 
 	   So we must make sure we're talking about the same LHS.
       */
-      if (gimple_code (temp) == GIMPLE_ASSIGN)
+      if (is_gimple_assign (temp))
 	{
 	  tree base1 = get_base_address (gimple_assign_lhs (stmt));
 	  tree base2 = get_base_address (gimple_assign_lhs (temp));
@@ -406,13 +406,13 @@ dse_optimize_stmt (struct dom_walk_data 
 
   /* We know we have virtual definitions.  If this is a GIMPLE_ASSIGN
      that's not also a function call, then record it into our table.  */
-  if (gimple_code (stmt) == GIMPLE_CALL && gimple_call_fndecl (stmt))
+  if (is_gimple_call (stmt) && gimple_call_fndecl (stmt))
     return;
 
   if (gimple_has_volatile_ops (stmt))
     return;
 
-  if (gimple_code (stmt) == GIMPLE_ASSIGN)
+  if (is_gimple_assign (stmt))
     {
       use_operand_p first_use_p = NULL_USE_OPERAND_P;
       use_operand_p use_p = NULL;
@@ -672,8 +672,8 @@ execute_simple_dse (void)
 
 	if (gimple_stored_syms (stmt)
 	    && !bitmap_empty_p (gimple_stored_syms (stmt))
-            && (gimple_code (stmt) == GIMPLE_ASSIGN
-	        || (gimple_code (stmt) == GIMPLE_CALL
+            && (is_gimple_assign (stmt)
+	        || (is_gimple_call (stmt)
                     && gimple_call_lhs (stmt)))
 	    && !bitmap_intersect_p (gimple_stored_syms (stmt), variables_loaded))
 	  {
@@ -712,7 +712,7 @@ execute_simple_dse (void)
 	    /* Look for possible occurrence var = indirect_ref (...) where
 	       indirect_ref itself is volatile.  */
 
-	    if (dead && gimple_code (stmt) == GIMPLE_ASSIGN
+	    if (dead && is_gimple_assign (stmt)
 	        && TREE_THIS_VOLATILE (gimple_assign_rhs1 (stmt)))
 	      dead = false;
 
@@ -720,7 +720,7 @@ execute_simple_dse (void)
 	      {
 		/* When LHS of var = call (); is dead, simplify it into
 		   call (); saving one operand.  */
-                if (gimple_code (stmt) == GIMPLE_CALL
+                if (is_gimple_call (stmt)
                     && gimple_has_side_effects (stmt))
 		  {
 		    if (dump_file && (dump_flags & TDF_DETAILS))
--- gcc/tree-ssa-loop-ivopts.c.jj	2008-07-08 18:41:18.000000000 +0200
+++ gcc/tree-ssa-loop-ivopts.c	2008-07-16 12:31:12.000000000 +0200
@@ -1165,7 +1165,7 @@ find_interesting_uses_op (struct ivopts_
 
   stmt = SSA_NAME_DEF_STMT (op);
   gcc_assert (gimple_code (stmt) == GIMPLE_PHI
-	      || gimple_code (stmt) == GIMPLE_ASSIGN);
+	      || is_gimple_assign (stmt));
 
   use = record_use (data, NULL, civ, stmt, USE_NONLINEAR_EXPR);
   iv->use_id = use->id;
@@ -1713,7 +1713,7 @@ find_interesting_uses_stmt (struct ivopt
       return;
     }
 
-  if (gimple_code (stmt) == GIMPLE_ASSIGN)
+  if (is_gimple_assign (stmt))
     {
       lhs = gimple_assign_lhs_ptr (stmt);
       rhs = gimple_assign_rhs1_ptr (stmt);
@@ -5041,7 +5041,7 @@ rewrite_use_nonlinear_expr (struct ivopt
       tree step, ctype, utype;
       enum tree_code incr_code = PLUS_EXPR, old_code;
 
-      gcc_assert (gimple_code (use->stmt) == GIMPLE_ASSIGN);
+      gcc_assert (is_gimple_assign (use->stmt));
       gcc_assert (gimple_assign_lhs (use->stmt) == cand->var_after);
 
       step = cand->iv->step;
--- gcc/tree-vrp.c.jj	2008-07-07 18:45:28.000000000 +0200
+++ gcc/tree-vrp.c	2008-07-16 12:36:24.000000000 +0200
@@ -282,7 +282,7 @@ is_overflow_infinity (const_tree val)
 static inline bool
 stmt_overflow_infinity (gimple stmt)
 {
-  if (gimple_code (stmt) == GIMPLE_ASSIGN
+  if (is_gimple_assign (stmt)
       && get_gimple_rhs_class (gimple_assign_rhs_code (stmt)) ==
       GIMPLE_SINGLE_RHS)
     return is_overflow_infinity (gimple_assign_rhs1 (stmt));
@@ -923,7 +923,7 @@ vrp_stmt_computes_nonzero (gimple stmt, 
 
   /* If we have an expression of the form &X->a, then the expression
      is nonnull if X is nonnull.  */
-  if (gimple_code (stmt) == GIMPLE_ASSIGN
+  if (is_gimple_assign (stmt)
       && gimple_assign_rhs_code (stmt) == ADDR_EXPR)
     {
       tree expr = gimple_assign_rhs1 (stmt);
@@ -3929,7 +3929,7 @@ register_edge_assert_for_2 (tree name, e
       tree cst2 = NULL_TREE, name2 = NULL_TREE, name3 = NULL_TREE;
 
       /* Extract CST2 from the (optional) addition.  */
-      if (gimple_code (def_stmt) == GIMPLE_ASSIGN
+      if (is_gimple_assign (def_stmt)
 	  && gimple_assign_rhs_code (def_stmt) == PLUS_EXPR)
 	{
 	  name2 = gimple_assign_rhs1 (def_stmt);
@@ -4149,7 +4149,7 @@ register_edge_assert_for (tree name, edg
     {
       gimple def_stmt = SSA_NAME_DEF_STMT (name);
 
-      if (gimple_code (def_stmt) == GIMPLE_ASSIGN
+      if (is_gimple_assign (def_stmt)
 	  && (gimple_assign_rhs_code (def_stmt) == TRUTH_AND_EXPR
 	      || gimple_assign_rhs_code (def_stmt) == BIT_AND_EXPR))
 	{
@@ -4168,7 +4168,7 @@ register_edge_assert_for (tree name, edg
     {
       gimple def_stmt = SSA_NAME_DEF_STMT (name);
 
-      if (gimple_code (def_stmt) == GIMPLE_ASSIGN
+      if (is_gimple_assign (def_stmt)
 	  && (gimple_assign_rhs_code (def_stmt) == TRUTH_OR_EXPR
 	      /* For BIT_IOR_EXPR only if NAME == 0 both operands have
 		 necessarily zero value.  */
@@ -4532,7 +4532,7 @@ find_assert_locations (basic_block bb)
 		  tree t = op;
 		  gimple def_stmt = SSA_NAME_DEF_STMT (t);
 	
-		  while (gimple_code (def_stmt) == GIMPLE_ASSIGN
+		  while (is_gimple_assign (def_stmt)
 			 && gimple_assign_rhs_code (def_stmt)  == NOP_EXPR
 			 && TREE_CODE
 			     (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
@@ -4930,7 +4930,7 @@ check_all_array_refs (void)
 	  if (!gimple_has_location (stmt))
 	    continue;
 
-	  if (gimple_code (stmt) == GIMPLE_CALL)
+	  if (is_gimple_call (stmt))
 	    {
 	      size_t i;
 	      size_t n = gimple_call_num_args (stmt);
@@ -4992,7 +4992,7 @@ remove_range_assertions (void)
 	gimple stmt = gsi_stmt (si);
 	gimple use_stmt;
 
-	if (gimple_code (stmt) == GIMPLE_ASSIGN
+	if (is_gimple_assign (stmt)
 	    && gimple_assign_rhs_code (stmt) == ASSERT_EXPR)
 	  {
 	    tree rhs = gimple_assign_rhs1 (stmt);
@@ -5035,8 +5035,7 @@ stmt_interesting_for_vrp (gimple stmt)
       && (INTEGRAL_TYPE_P (TREE_TYPE (gimple_phi_result (stmt)))
 	  || POINTER_TYPE_P (TREE_TYPE (gimple_phi_result (stmt)))))
     return true;
-  else if (gimple_code (stmt) == GIMPLE_ASSIGN
-	   || gimple_code (stmt) == GIMPLE_CALL)
+  else if (is_gimple_assign (stmt) || is_gimple_call (stmt))
     {
       tree lhs = gimple_get_lhs (stmt);
 
@@ -5046,7 +5045,7 @@ stmt_interesting_for_vrp (gimple stmt)
       if (lhs && TREE_CODE (lhs) == SSA_NAME
 	  && (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
 	      || POINTER_TYPE_P (TREE_TYPE (lhs)))
-	  && ((gimple_code (stmt) == GIMPLE_CALL
+	  && ((is_gimple_call (stmt)
 	       && TREE_CODE (gimple_call_fn (stmt)) == ADDR_EXPR
 	       && DECL_P (TREE_OPERAND (gimple_call_fn (stmt), 0))
 	       && DECL_IS_BUILTIN (TREE_OPERAND (gimple_call_fn (stmt), 0)))
@@ -5839,14 +5838,13 @@ vrp_visit_stmt (gimple stmt, edge *taken
       fprintf (dump_file, "\n");
     }
 
-  if (gimple_code (stmt) == GIMPLE_ASSIGN
-      || gimple_code (stmt) == GIMPLE_CALL)
+  if (is_gimple_assign (stmt) || is_gimple_call (stmt))
     {
       /* In general, assignments with virtual operands are not useful
 	 for deriving ranges, with the obvious exception of calls to
 	 builtin functions.  */
 
-      if ((gimple_code (stmt) == GIMPLE_CALL
+      if ((is_gimple_call (stmt)
 	   && TREE_CODE (gimple_call_fn (stmt)) == ADDR_EXPR
 	   && DECL_P (TREE_OPERAND (gimple_call_fn (stmt), 0))
 	   && DECL_IS_BUILTIN (TREE_OPERAND (gimple_call_fn (stmt), 0)))
@@ -6525,7 +6523,7 @@ simplify_switch_using_ranges (gimple stm
 void
 simplify_stmt_using_ranges (gimple stmt)
 {
-  if (gimple_code (stmt) == GIMPLE_ASSIGN)
+  if (is_gimple_assign (stmt))
     {
       enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
 
--- gcc/tree-ssa-loop-prefetch.c.jj	2008-07-02 11:38:39.000000000 +0200
+++ gcc/tree-ssa-loop-prefetch.c	2008-07-16 12:31:12.000000000 +0200
@@ -502,7 +502,7 @@ gather_memory_references (struct loop *l
 	  if (gimple_code (stmt) != GIMPLE_ASSIGN)
 	    {
 	      if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS)
-		  || (gimple_code (stmt) == GIMPLE_CALL
+		  || (is_gimple_call (stmt)
 		      && !(gimple_call_flags (stmt) & ECF_CONST)))
 		*no_other_refs = false;
 	      continue;
--- gcc/tree-ssa-copy.c.jj	2008-07-11 00:39:58.000000000 +0200
+++ gcc/tree-ssa-copy.c	2008-07-16 12:31:12.000000000 +0200
@@ -215,11 +215,11 @@ may_propagate_copy_into_stmt (gimple des
           ||  TREE_CODE (SSA_NAME_VAR (orig)) == MEMORY_PARTITION_TAG))
     return false;
 
-  if (gimple_code (dest) == GIMPLE_ASSIGN)
+  if (is_gimple_assign (dest))
     type_d = TREE_TYPE (gimple_assign_lhs (dest));
   else if (gimple_code (dest) == GIMPLE_COND)
     type_d = boolean_type_node;
-  else if (gimple_code (dest) == GIMPLE_CALL
+  else if (is_gimple_call (dest)
            && gimple_call_lhs (dest) != NULL_TREE)
     type_d = TREE_TYPE (gimple_call_lhs (dest));
   else
@@ -439,7 +439,7 @@ propagate_tree_value_into_stmt (gimple_s
 {
   gimple stmt = gsi_stmt (*gsi);
 
-  if (gimple_code (stmt) == GIMPLE_ASSIGN)
+  if (is_gimple_assign (stmt))
     {
       tree expr = NULL_TREE;
       if (gimple_assign_single_p (stmt))
@@ -457,7 +457,7 @@ propagate_tree_value_into_stmt (gimple_s
       gimple_cond_set_lhs (stmt, lhs);
       gimple_cond_set_rhs (stmt, rhs);
     }
-  else if (gimple_code (stmt) == GIMPLE_CALL
+  else if (is_gimple_call (stmt)
            && gimple_call_lhs (stmt) != NULL_TREE)
     {
       gimple new_stmt;
--- gcc/tree-inline.c.jj	2008-07-16 09:22:27.000000000 +0200
+++ gcc/tree-inline.c	2008-07-16 12:31:11.000000000 +0200
@@ -1274,7 +1274,7 @@ copy_bb (copy_body_data *id, basic_block
 
       /* With return slot optimization we can end up with
 	 non-gimple (foo *)&this->m, fix that here.  */
-      if ((gimple_code (stmt) == GIMPLE_ASSIGN
+      if ((is_gimple_assign (stmt)
 	    && gimple_assign_rhs_code (stmt) == NOP_EXPR
 	    && !is_gimple_val (gimple_assign_rhs1 (stmt)))
 	  || id->regimplify)
@@ -1295,7 +1295,7 @@ copy_bb (copy_body_data *id, basic_block
 	 need to process all of them.  */
       while (!gsi_end_p (copy_gsi))
 	{
-	  if (gimple_code (stmt) == GIMPLE_CALL
+	  if (is_gimple_call (stmt)
 	      && gimple_call_va_arg_pack_p (stmt)
 	      && id->gimple_call)
 	    {
@@ -1342,7 +1342,7 @@ copy_bb (copy_body_data *id, basic_block
 	      gsi_replace (&copy_gsi, new_call, false);
 	      stmt = new_call;
 	    }
-	  else if (gimple_code (stmt) == GIMPLE_CALL
+	  else if (is_gimple_call (stmt)
 		   && id->gimple_call
 		   && (decl = gimple_call_fndecl (stmt))
 		   && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
@@ -3350,7 +3350,7 @@ expand_call_inline (basic_block bb, gimp
   /* If the value of the new expression is ignored, that's OK.  We
      don't warn about this for CALL_EXPRs, so we shouldn't warn about
      the equivalent inlined version either.  */
-  if (gimple_code (stmt) == GIMPLE_ASSIGN)
+  if (is_gimple_assign (stmt))
     {
       gcc_assert (gimple_assign_single_p (stmt));
       TREE_USED (gimple_assign_rhs1 (stmt)) = 1;
@@ -3388,7 +3388,7 @@ gimple_expand_calls_inline (basic_block 
     {
       gimple stmt = gsi_stmt (gsi);
 
-      if (gimple_code (stmt) == GIMPLE_CALL
+      if (is_gimple_call (stmt)
 	  && expand_call_inline (bb, stmt, id))
 	return true;
     }
@@ -3422,7 +3422,7 @@ fold_marked_statements (int first, struc
 		  gimple new_stmt = gsi_stmt (gsi);
 		  update_stmt (new_stmt);
 
-		  if (gimple_code (old_stmt) == GIMPLE_CALL)
+		  if (is_gimple_call (old_stmt))
 		    cgraph_update_edges_for_call_stmt (old_stmt, new_stmt);
 
 		  if (maybe_clean_or_replace_eh_stmt (old_stmt, new_stmt))

	Jakub

