This is the mail archive of the fortran@gcc.gnu.org mailing list for the GNU Fortran project.


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]
Other format: [Raw text]

Re: [PATCH] middle-end only piece of middle-end array expressions


On Tue, 21 Jun 2011, Richard Guenther wrote:

> 
> This is the middle-end pieces, without the C/C++ frontend changes.
> Enough to work on the Fortran parts.
> 
> Doesn't even break bootstrap.

And here is a variant that doesn't need the tree-cfg.c hunk for
the Fortran patch (thus all type problems fixed) and also does not
suffer from the -O2 miscompilation.  -O0 still doesn't work.

Richard.

2008-04-11  Richard Guenther  <rguenther@suse.de>

	* gimplify.c (internal_get_tmp_var): Mark array temporaries
	as gimple register.
	(gimple_add_tmp_var): Do not require constant size for array
	temporaries.
	(maybe_with_size_expr): Do not wrap VLA_VIEW_EXPR or VLA_RIDX_EXPR
	inside WITH_SIZE_EXPR.
	(get_array_arg_type): New helper function.
	(vla_to_vla_view_expr): Likewise.
	(lower_builtin_array_expr): Helper for gimplifying array builtins
	to VLA tree expressions.
	(gimplify_call_expr): Call it.
	(gimplify_modify_expr): Do not wrap copies of array temporaries
	inside WITH_SIZE_EXPR.  Do not lower copies of array temporaries
	to memcpy calls.
	(gimplify_expr): Handle VLA_VIEW_EXPR, VLA_IDX_EXPR, VLA_RIDX_EXPR
	and VLA_DELTA_EXPR.
	* tree-gimple.c (is_gimple_formal_tmp_rhs): Allow VLA_IDX_EXPR,
	VLA_RIDX_EXPR and VLA_DELTA_EXPR.
	(is_gimple_lvalue): Allow VLA_VIEW_EXPR.
	(is_gimple_reg_type): Allow arrays.
	(is_gimple_reg): Likewise.
	* cfgexpand.c (estimated_stack_frame_size): Gimple registers do
	not consume stack space.
	* tree-cfg.c (verify_gimple_expr): Handle WITH_SIZE_EXPR,
	VLA_VIEW_EXPR, VLA_IDX_EXPR, VLA_RIDX_EXPR and VLA_DELTA_EXPR.
	* tree-inline.c (estimate_num_insns_1): Handle VLA_IDX_EXPR
	and VLA_RIDX_EXPR.  Treat VLA_VIEW_EXPR and VLA_DELTA_EXPR
	as expensive as OMP stuff.
	* tree-pretty-print.c (dump_generic_node): Handle VLA_VIEW_EXPR,
	VLA_IDX_EXPR, VLA_RIDX_EXPR and VLA_DELTA_EXPR.
	* tree.def (VLA_VIEW_EXPR): New tree code.
	(VLA_IDX_EXPR): Likewise.
	(VLA_RIDX_EXPR): Likewise.
	(VLA_DELTA_EXPR): Likewise.
	* tree-ssa-operands.c (get_expr_operands): Handle them all.
	* tree-ssa-sccvn.c (visit_use): Handle only calls like calls.
	* tree-ssa-sink.c (is_hidden_global_store): VLA_VIEW_EXPR is one.
	* ipa-pure-const.c (check_tree): Look into VLA_VIEW_EXPR's VLA
	argument.
	* tree-dfa.c (refs_may_alias_p): Allow VLA_VIEW_EXPR.
	* tree-sra.c (maybe_lookup_element_for_expr): Make sure to
	not scalarize arrays used in VLA_VIEW_EXPR.
	(sra_walk_expr): Likewise.

	* Makefile.in (OBJS-common): Add tree-lower-arrays.o.
	(tree-lower-arrays.o): New target.
	* tree-lower-arrays.c: New file.
	* passes.c (init_optimization_passes): Schedule pass_arrlower
	before loop header copying.
	* timevar.def (TV_TREE_ARRLOWER): Add.
	* tree-pass.h (pass_arrlower): Declare.

Index: trunk/gcc/gimplify.c
===================================================================
*** trunk.orig/gcc/gimplify.c	2011-06-21 14:15:33.000000000 +0200
--- trunk/gcc/gimplify.c	2011-06-21 17:36:49.000000000 +0200
*************** internal_get_tmp_var (tree val, gimple_s
*** 583,589 ****
  
    if (is_formal
        && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
! 	  || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE))
      DECL_GIMPLE_REG_P (t) = 1;
  
    mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
--- 583,591 ----
  
    if (is_formal
        && (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
! 	  || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE
! 	  || TREE_CODE (val) == VLA_RIDX_EXPR
! 	  || TREE_CODE (val) == VLA_VIEW_EXPR))
      DECL_GIMPLE_REG_P (t) = 1;
  
    mod = build2 (INIT_EXPR, TREE_TYPE (t), t, unshare_expr (val));
*************** gimple_add_tmp_var (tree tmp)
*** 706,712 ****
    /* Later processing assumes that the object size is constant, which might
       not be true at this point.  Force the use of a constant upper bound in
       this case.  */
!   if (!host_integerp (DECL_SIZE_UNIT (tmp), 1))
      force_constant_size (tmp);
  
    DECL_CONTEXT (tmp) = current_function_decl;
--- 708,715 ----
    /* Later processing assumes that the object size is constant, which might
       not be true at this point.  Force the use of a constant upper bound in
       this case.  */
!   if (!host_integerp (DECL_SIZE_UNIT (tmp), 1)
!       && TREE_CODE (TREE_TYPE (tmp)) != ARRAY_TYPE)
      force_constant_size (tmp);
  
    DECL_CONTEXT (tmp) = current_function_decl;
*************** maybe_with_size_expr (tree *expr_p)
*** 2227,2232 ****
--- 2230,2241 ----
        || type == error_mark_node)
      return;
  
+   /* If this is already wrapped in a VLA_VIEW_EXPR, we don't need another
+      WITH_SIZE_EXPR here.  */
+   if (TREE_CODE (expr) == VLA_VIEW_EXPR
+       || TREE_CODE (expr) == VLA_RIDX_EXPR)
+     return;
+ 
    /* If the size isn't known or is a constant, we have nothing to do.  */
    size = TYPE_SIZE_UNIT (type);
    if (!size || TREE_CODE (size) == INTEGER_CST)
*************** gimplify_arg (tree *arg_p, gimple_seq *p
*** 2283,2288 ****
--- 2292,2305 ----
    return gimplify_expr (arg_p, pre_p, NULL, test, fb);
  }
  
+ /* Stricter than is_gimple_val, only allow a register or an invariant.  */
+ 
+ static bool
+ is_gimple_reg_or_invariant_rhs (tree t)
+ {
+   return is_gimple_reg (t) || is_gimple_min_invariant (t);
+ }
+ 
  /* Gimplify the CALL_EXPR node *EXPR_P into the GIMPLE sequence PRE_P.
     WANT_VALUE is true if the result of the call is desired.  */
  
*************** gimplify_init_ctor_eval (tree object, VE
*** 3585,3591 ****
  gimple_predicate
  rhs_predicate_for (tree lhs)
  {
!   if (is_gimple_reg (lhs))
      return is_gimple_reg_rhs_or_call;
    else
      return is_gimple_mem_rhs_or_call;
--- 3602,3610 ----
  gimple_predicate
  rhs_predicate_for (tree lhs)
  {
!   if (TREE_CODE (lhs) == VLA_VIEW_EXPR)
!     return is_gimple_reg_or_invariant_rhs;
!   else if (is_gimple_reg (lhs))
      return is_gimple_reg_rhs_or_call;
    else
      return is_gimple_mem_rhs_or_call;
*************** gimplify_modify_expr (tree *expr_p, gimp
*** 4536,4542 ****
       PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
       the size of the expression to be copied, not of the destination, so
       that is what we must do here.  */
!   maybe_with_size_expr (from_p);
  
    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
    if (ret == GS_ERROR)
--- 4555,4562 ----
       PLACEHOLDER_EXPRs in the size.  Also note that the RTL expander uses
       the size of the expression to be copied, not of the destination, so
       that is what we must do here.  */
!   if (TREE_CODE (*to_p) != VLA_VIEW_EXPR)
!     maybe_with_size_expr (from_p);
  
    ret = gimplify_expr (to_p, pre_p, post_p, is_gimple_lvalue, fb_lvalue);
    if (ret == GS_ERROR)
*************** gimplify_modify_expr (tree *expr_p, gimp
*** 4573,4582 ****
        tree from = TREE_OPERAND (*from_p, 0);
        tree size = TREE_OPERAND (*from_p, 1);
  
!       if (TREE_CODE (from) == CONSTRUCTOR)
  	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
  
!       if (is_gimple_addressable (from))
  	{
  	  *from_p = from;
  	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
--- 4593,4604 ----
        tree from = TREE_OPERAND (*from_p, 0);
        tree size = TREE_OPERAND (*from_p, 1);
  
!       if (DECL_P (*to_p) && DECL_GIMPLE_REG_P (*to_p))
! 	;
!       else if (TREE_CODE (from) == CONSTRUCTOR)
  	return gimplify_modify_expr_to_memset (expr_p, size, want_value, pre_p);
  
!       else if (is_gimple_addressable (from))
  	{
  	  *from_p = from;
  	  return gimplify_modify_expr_to_memcpy (expr_p, size, want_value,
*************** gimplify_expr (tree *expr_p, gimple_seq
*** 6592,6598 ****
             || gimple_test_f == is_gimple_reg_rhs
             || gimple_test_f == is_gimple_reg_rhs_or_call
             || gimple_test_f == is_gimple_asm_val
! 	   || gimple_test_f == is_gimple_mem_ref_addr)
      gcc_assert (fallback & fb_rvalue);
    else if (gimple_test_f == is_gimple_min_lval
  	   || gimple_test_f == is_gimple_lvalue)
--- 6614,6621 ----
             || gimple_test_f == is_gimple_reg_rhs
             || gimple_test_f == is_gimple_reg_rhs_or_call
             || gimple_test_f == is_gimple_asm_val
! 	   || gimple_test_f == is_gimple_mem_ref_addr
! 	   || gimple_test_f == is_gimple_reg_or_invariant_rhs)
      gcc_assert (fallback & fb_rvalue);
    else if (gimple_test_f == is_gimple_min_lval
  	   || gimple_test_f == is_gimple_lvalue)
*************** gimplify_expr (tree *expr_p, gimple_seq
*** 7140,7145 ****
--- 7163,7200 ----
  	  ret = GS_ALL_DONE;
  	  break;
  
+ 	case VLA_VIEW_EXPR:
+           {
+ 	    int i;
+ 	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
+ 				 is_gimple_lvalue, fb_lvalue);
+ 	    for (i = 2; i < VL_EXP_OPERAND_LENGTH (*expr_p); ++i)
+ 	      {
+ 		enum gimplify_status tret;
+ 		tret = gimplify_expr (&TREE_OPERAND (*expr_p, i), pre_p, post_p,
+ 				      is_gimple_val, fb_rvalue);
+ 		ret = MIN (ret, tret);
+ 	      }
+ 	    break;
+ 	  }
+ 
+ 	case VLA_IDX_EXPR:
+ 	case VLA_RIDX_EXPR:
+ 	case VLA_DELTA_EXPR:
+ 	  {
+ 	    int i;
+ 	    ret = gimplify_expr (&TREE_OPERAND (*expr_p, 1), pre_p, post_p,
+ 				 is_gimple_reg_or_invariant_rhs, fb_rvalue);
+ 	    for (i = 2; i < VL_EXP_OPERAND_LENGTH (*expr_p); ++i)
+ 	      {
+ 		enum gimplify_status tret;
+ 		tret = gimplify_expr (&TREE_OPERAND (*expr_p, i), pre_p, post_p,
+ 				      is_gimple_val, fb_rvalue);
+ 		ret = MIN (ret, tret);
+ 	      }
+ 	    break;
+ 	  }
+ 
  	case OMP_PARALLEL:
  	  gimplify_omp_parallel (expr_p, pre_p);
  	  ret = GS_ALL_DONE;
Index: trunk/gcc/Makefile.in
===================================================================
*** trunk.orig/gcc/Makefile.in	2011-06-21 14:15:33.000000000 +0200
--- trunk/gcc/Makefile.in	2011-06-21 14:15:49.000000000 +0200
*************** OBJS = \
*** 1423,1428 ****
--- 1423,1429 ----
  	tree-into-ssa.o \
  	tree-iterator.o \
  	tree-loop-distribution.o \
+ 	tree-lower-arrays.o \
  	tree-nested.o \
  	tree-nomudflap.o \
  	tree-nrv.o \
*************** tree-ssa-forwprop.o : tree-ssa-forwprop.
*** 2476,2481 ****
--- 2477,2486 ----
     $(TM_H) $(TREE_H) $(TM_P_H) $(BASIC_BLOCK_H) \
     $(TREE_FLOW_H) $(TREE_PASS_H) $(TREE_DUMP_H) $(DIAGNOSTIC_H) $(TIMEVAR_H) \
     langhooks.h $(FLAGS_H) $(GIMPLE_H) tree-pretty-print.h $(EXPR_H)
+ tree-lower-arrays.o : tree-lower-arrays.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
+    $(TM_H) $(TREE_H) $(TM_P_H) $(BASIC_BLOCK_H) \
+    $(TREE_FLOW_H) $(TREE_PASS_H) $(TREE_DUMP_H) $(DIAGNOSTIC_H) $(TIMEVAR_H) \
+    langhooks.h $(FLAGS_H) $(GIMPLE_H) tree-pretty-print.h $(EXPR_H)
  tree-ssa-phiprop.o : tree-ssa-phiprop.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
     $(TM_H) $(TREE_H) $(TM_P_H) $(BASIC_BLOCK_H) \
     $(TREE_FLOW_H) $(TREE_PASS_H) $(TREE_DUMP_H) $(DIAGNOSTIC_H) $(TIMEVAR_H) \
Index: trunk/gcc/passes.c
===================================================================
*** trunk.orig/gcc/passes.c	2011-06-21 14:15:33.000000000 +0200
--- trunk/gcc/passes.c	2011-06-21 17:31:26.000000000 +0200
*************** init_optimization_passes (void)
*** 1289,1294 ****
--- 1289,1295 ----
        NEXT_PASS (pass_tree_ifcombine);
        NEXT_PASS (pass_phiopt);
        NEXT_PASS (pass_tail_recursion);
+       NEXT_PASS (pass_arrlower);
        NEXT_PASS (pass_ch);
        NEXT_PASS (pass_stdarg);
        NEXT_PASS (pass_lower_complex);
*************** init_optimization_passes (void)
*** 1389,1394 ****
--- 1390,1396 ----
        NEXT_PASS (pass_uncprop);
        NEXT_PASS (pass_local_pure_const);
      }
+   NEXT_PASS (pass_arrlower_O0);
    NEXT_PASS (pass_lower_complex_O0);
    NEXT_PASS (pass_cleanup_eh);
    NEXT_PASS (pass_lower_resx);
Index: trunk/gcc/timevar.def
===================================================================
*** trunk.orig/gcc/timevar.def	2011-06-21 14:15:33.000000000 +0200
--- trunk/gcc/timevar.def	2011-06-21 14:15:49.000000000 +0200
*************** DEFTIMEVAR (TV_TREE_IFCOMBINE        , "
*** 250,255 ****
--- 250,256 ----
  DEFTIMEVAR (TV_TREE_UNINIT           , "uninit var analysis")
  DEFTIMEVAR (TV_PLUGIN_INIT           , "plugin initialization")
  DEFTIMEVAR (TV_PLUGIN_RUN            , "plugin execution")
+ DEFTIMEVAR (TV_TREE_ARRLOWER         , "array expression lowering")
  
  /* Everything else in rest_of_compilation not included above.  */
  DEFTIMEVAR (TV_EARLY_LOCAL	     , "early local passes")
Index: trunk/gcc/tree-lower-arrays.c
===================================================================
*** /dev/null	1970-01-01 00:00:00.000000000 +0000
--- trunk/gcc/tree-lower-arrays.c	2011-06-21 17:36:40.000000000 +0200
***************
*** 0 ****
--- 1,688 ----
+ /* Lowering of expressions on arrays to loops.
+    Copyright (C) 2008 Free Software Foundation, Inc.
+    Contributed by Richard Guenther <rguenther@suse.de>
+ 
+ This file is part of GCC.
+ 
+ GCC is free software; you can redistribute it and/or modify
+ it under the terms of the GNU General Public License as published by
+ the Free Software Foundation; either version 3, or (at your option)
+ any later version.
+ 
+ GCC is distributed in the hope that it will be useful,
+ but WITHOUT ANY WARRANTY; without even the implied warranty of
+ MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ GNU General Public License for more details.
+ 
+ You should have received a copy of the GNU General Public License
+ along with GCC; see the file COPYING3.  If not see
+ <http://www.gnu.org/licenses/>.  */
+ 
+ #include "config.h"
+ #include "system.h"
+ #include "coretypes.h"
+ #include "tm.h"
+ #include "ggc.h"
+ #include "tree.h"
+ #include "rtl.h"
+ #include "tm_p.h"
+ #include "basic-block.h"
+ #include "timevar.h"
+ #include "diagnostic.h"
+ #include "tree-flow.h"
+ #include "tree-pass.h"
+ #include "tree-dump.h"
+ #include "gimple-pretty-print.h"
+ #include "langhooks.h"
+ #include "flags.h"
+ 
+ 
+ /* Recursively walk NAMEs defs marking all defining statements that
+    need lowering (and thus are not loop invariant) in the bitmap
+    of SSA name versions NAMES.  */
+ 
+ static bool
+ need_lowering (tree name, bitmap names, bitmap indices)
+ {
+   gimple stmt = SSA_NAME_DEF_STMT (name);
+   ssa_op_iter iter;
+   use_operand_p op_p;
+   tree rhs;
+   bool res = false;
+ 
+   if (bitmap_bit_p (indices, SSA_NAME_VERSION (name)))
+     return true;
+ 
+   if (gimple_nop_p (stmt))
+     return false;
+ 
+   FOR_EACH_PHI_OR_STMT_USE (op_p, stmt, iter, SSA_OP_USE)
+     {
+       tree op = USE_FROM_PTR (op_p);
+       if (TREE_CODE (op) == SSA_NAME)
+ 	res |= need_lowering (op, names, indices);
+     }
+   if (res)
+     {
+       bitmap_set_bit (names, SSA_NAME_VERSION (name));
+       return true;
+     }
+ 
+   if (!gimple_assign_single_p (stmt))
+     return false;
+ 
+   rhs = gimple_assign_rhs1 (stmt);
+ 
+   /* Collect indices.  They need to be introduced via RIDX or DELTA
+      before they can be used in expressions or IDX.  */
+   if (TREE_CODE (rhs) == VLA_RIDX_EXPR)
+     {
+       int i;
+       for (i = 2; i < tree_operand_length (rhs); ++i)
+ 	bitmap_set_bit (indices, SSA_NAME_VERSION (TREE_OPERAND (rhs, i)));
+     }
+   else if (TREE_CODE (rhs) == VLA_DELTA_EXPR)
+     {
+       int i;
+       for (i = 3; i < tree_operand_length (rhs); ++i)
+ 	bitmap_set_bit (indices, SSA_NAME_VERSION (TREE_OPERAND (rhs, i)));
+     }
+ 
+   if (TREE_CODE (rhs) == VLA_VIEW_EXPR
+       || TREE_CODE (rhs) == VLA_IDX_EXPR
+       || TREE_CODE (rhs) == VLA_RIDX_EXPR
+       || TREE_CODE (rhs) == VLA_DELTA_EXPR)
+     {
+       bitmap_set_bit (names, SSA_NAME_VERSION (name));
+       return true;
+     }
+ 
+   return false;
+ }
+ 
+ /* Check if NAME is (indirectly) used in a VLA_RIDX_EXPR.  */
+ 
+ static bool
+ used_in_ridx_p (tree name)
+ {
+   imm_use_iterator iter;
+   gimple use_stmt;
+ 
+   if (TREE_CODE (name) != SSA_NAME)
+     return false;
+ 
+   FOR_EACH_IMM_USE_STMT (use_stmt, iter, name)
+     {
+       if (!gimple_has_lhs (use_stmt))
+ 	continue;
+ 
+       if ((gimple_assign_single_p (use_stmt)
+ 	   && (gimple_assign_rhs_code (use_stmt) == VLA_RIDX_EXPR
+ 	       || gimple_assign_rhs_code (use_stmt) == VLA_DELTA_EXPR))
+ 	  || used_in_ridx_p (gimple_get_lhs (use_stmt)))
+ 	{
+ 	  end_imm_use_stmt_traverse (&iter);
+ 	  return true;
+ 	}
+     }
+ 
+   return false;
+ }
+ 
+ /* Lower the single statement that defines NAME, prepending newly
+    created statements at BSI and adjusting TARGET's rhs to the lowered
+    value.  */
+ 
+ static void
+ lower_def (gimple_stmt_iterator *gsi, tree name, tree *target,
+ 	   VEC (tree, heap) *ivs, bitmap names, tree *idx_map,
+ 	   bitmap indices)
+ {
+   gimple def_stmt = SSA_NAME_DEF_STMT (name);
+   gimple stmt;
+   tree rhs;
+   unsigned i;
+ 
+   if (dump_file && (dump_flags & TDF_DETAILS))
+     {
+       tree idx;
+       fprintf (dump_file, "lowering ");
+       print_gimple_stmt (dump_file, def_stmt, 0, 0);
+       fprintf (dump_file, "\n  with ivs[] ");
+       for (i = 0; VEC_iterate (tree, ivs, i, idx); ++i)
+ 	{
+ 	  print_generic_expr (dump_file, idx, 0);
+ 	  fprintf (dump_file, " ");
+ 	}
+       fprintf (dump_file, "\n");
+     }
+ 
+   /* Lower bare indices to their associated induction variable.  */
+   if (idx_map[SSA_NAME_VERSION (name)])
+     {
+       *target = idx_map[SSA_NAME_VERSION (name)];
+       return;
+     }
+ 
+   /* Control flow handling is not implemented.  */
+   gcc_assert (gimple_code (def_stmt) != GIMPLE_PHI);
+ 
+   rhs = gimple_assign_rhs1 (def_stmt);
+ 
+   if (TREE_CODE (rhs) == VLA_VIEW_EXPR)
+     {
+       tree ref = unshare_expr (TREE_OPERAND (rhs, 1));
+       tree atype = TREE_TYPE (rhs);
+       int i;
+       int n = (tree_operand_length (rhs) - 2) / 2;
+       for (i = n - 1; i >= 0; --i)
+ 	{
+ 	  ref = build4 (ARRAY_REF, TREE_TYPE (atype),
+ 			ref, VEC_index (tree, ivs, i), NULL_TREE,
+ 			TREE_OPERAND (rhs, 2 + 2 * i + 1));
+ 	  atype = TREE_TYPE (atype);
+ 	}
+       *target = ref;
+       return;
+     }
+   else if (TREE_CODE (rhs) == VLA_IDX_EXPR)
+     {
+       int i, n = tree_operand_length (rhs) - 2;
+       VEC (tree, heap) *ivs2 = VEC_alloc (tree, heap, n);
+       for (i = 2; i < tree_operand_length (rhs); ++i)
+ 	{
+ 	  tree idx = TREE_OPERAND (rhs, i);
+ 	  if (TREE_CODE (idx) == SSA_NAME
+ 	      && !bitmap_bit_p (indices, SSA_NAME_VERSION (idx)))
+ 	    {
+ 	      tree newidx, tmp2;
+ 	      gimple stmt;
+ 	      tmp2 = NULL_TREE;
+ 	      lower_def (gsi, idx, &tmp2, ivs, names, idx_map, indices);
+ 	      stmt = gimple_build_assign_with_ops
+ 		       (useless_type_conversion_p (TREE_TYPE (idx),
+ 						   TREE_TYPE (tmp2))
+ 			? SSA_NAME : NOP_EXPR,
+ 			SSA_NAME_VAR (idx), tmp2, NULL_TREE);
+ 	      newidx = make_ssa_name (SSA_NAME_VAR (idx), stmt);
+ 	      gimple_assign_set_lhs (stmt, newidx);
+ 	      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
+ 	      idx_map[SSA_NAME_VERSION (idx)] = newidx;
+ 	    }
+ 	  if (dump_file && (dump_flags & TDF_DETAILS))
+ 	    {
+ 	      fprintf (dump_file, "  for ");
+ 	      print_generic_expr (dump_file, idx, 0);
+ 	      fprintf (dump_file, " we have ");
+ 	      if (TREE_CODE (idx) == SSA_NAME)
+ 	        print_generic_expr (dump_file, idx_map[SSA_NAME_VERSION (idx)], 0);
+ 	      else
+ 	        print_generic_expr (dump_file, idx, 0);
+ 	      fprintf (dump_file, "\n");
+ 	    }
+ 	  if (TREE_CODE (idx) == SSA_NAME)
+ 	    VEC_quick_push (tree, ivs2, idx_map[SSA_NAME_VERSION (idx)]);
+ 	  else
+ 	    VEC_quick_push (tree, ivs2, idx);
+ 	}
+       lower_def (gsi, TREE_OPERAND (rhs, 1), target, ivs2, names, idx_map, indices);
+       VEC_free (tree, heap, ivs2);
+       return;
+     }
+   else if (TREE_CODE (rhs) == VLA_RIDX_EXPR)
+     {
+       tree *saved;
+       int i;
+       saved = XALLOCAVEC (tree, tree_operand_length (rhs) - 2);
+       for (i = 2; i < tree_operand_length (rhs); ++i)
+ 	{
+ 	  tree idx = TREE_OPERAND (rhs, i);
+ 	  tree ivname = VEC_index (tree, ivs, i - 2);
+ 	  saved[i - 2] = idx_map[SSA_NAME_VERSION (idx)];
+ 	  if (!useless_type_conversion_p (TREE_TYPE (idx), TREE_TYPE (ivname)))
+ 	    {
+ 	      stmt = gimple_build_assign_with_ops (NOP_EXPR, SSA_NAME_VAR (idx),
+ 						   ivname, NULL_TREE);
+ 	      gimple_assign_set_lhs (stmt,
+ 				     make_ssa_name (SSA_NAME_VAR (idx), stmt));
+ 	      gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
+ 	      idx_map[SSA_NAME_VERSION (idx)] = gimple_assign_lhs (stmt);
+ 	    }
+ 	  else
+ 	    idx_map[SSA_NAME_VERSION (idx)] = ivname;
+ 	}
+       lower_def (gsi, TREE_OPERAND (rhs, 1), target, ivs, names, idx_map, indices);
+       for (i = tree_operand_length (rhs) - 1; i >= 2; --i)
+ 	{
+ 	  tree idx = TREE_OPERAND (rhs, i);
+ 	  idx_map[SSA_NAME_VERSION (idx)] = saved[i - 2];
+ 	}
+       return;
+     }
+   else if (TREE_CODE (rhs) == VLA_DELTA_EXPR)
+     {
+       tree extent = TREE_OPERAND (rhs, 2);
+       tree ivname, incrname;
+       gimple stmt, cond, phi, incr;
+       basic_block bb;
+       gimple_stmt_iterator gsi2;
+       edge e;
+       tree iv, var, sum, *saved, tmp2 = NULL_TREE;
+       int i;
+ 
+       extent = force_gimple_operand_gsi (gsi, extent, true, NULL_TREE,
+ 					 true, GSI_SAME_STMT);
+ 
+       var = create_tmp_reg (TREE_TYPE (rhs), "redtmp");
+ 
+       /* Create the loop.  */
+       stmt = gsi_stmt (*gsi);
+       if (stmt != gsi_stmt (gsi_after_labels (gimple_bb (stmt))))
+ 	{
+ 	  gsi2 = gsi_for_stmt (stmt);
+ 	  gsi_prev (&gsi2);
+ 	  split_block (gimple_bb (stmt), gsi_stmt (gsi2));
+ 	  *gsi = gsi_for_stmt (stmt);
+ 	}
+       bb = gimple_bb (gsi_stmt (*gsi));
+       iv = create_tmp_reg (TREE_TYPE (extent), "ivtmp");
+       add_referenced_var (iv);
+       phi = create_phi_node (iv, bb);
+       ivname = PHI_RESULT (phi);
+       add_phi_arg (phi, build_int_cst (TREE_TYPE (iv), 0),
+ 		   single_pred_edge (bb), UNKNOWN_LOCATION);
+       phi = create_phi_node (var, bb);
+       sum = PHI_RESULT (phi);
+       add_phi_arg (phi, build_zero_cst (TREE_TYPE (sum)),
+ 		   single_pred_edge (bb), UNKNOWN_LOCATION);
+       gsi2 = gsi_for_stmt (gsi_stmt (*gsi));
+       incr = gimple_build_assign_with_ops (PLUS_EXPR, iv, ivname,
+ 					   build_int_cst (TREE_TYPE (iv), 1));
+       incrname = make_ssa_name (iv, incr);
+       gimple_assign_set_lhs (incr, incrname);
+       gsi_insert_before (&gsi2, incr, GSI_NEW_STMT);
+       cond = gimple_build_cond (LT_EXPR, incrname, extent,
+ 				NULL_TREE, NULL_TREE);
+       gsi_insert_before (gsi, cond, GSI_SAME_STMT);
+       split_block (bb, cond);
+       e = single_succ_edge (bb);
+       e->flags &= ~EDGE_FALLTHRU;
+       e->flags |= EDGE_FALSE_VALUE;
+       e = make_edge (bb, bb, EDGE_TRUE_VALUE);
+       e->flags |= EDGE_DFS_BACK;
+       add_phi_arg (SSA_NAME_DEF_STMT (ivname), incrname, e, UNKNOWN_LOCATION);
+ 
+       /* Lower the reduction body.  */
+       saved = XALLOCAVEC (tree, tree_operand_length (rhs) - 3);
+       for (i = 3; i < tree_operand_length (rhs); ++i)
+ 	{
+ 	  tree idx = TREE_OPERAND (rhs, i);
+ 	  saved[i - 3] = idx_map[SSA_NAME_VERSION (idx)];
+ 	  if (!useless_type_conversion_p (TREE_TYPE (idx), TREE_TYPE (ivname)))
+ 	    {
+ 	      stmt = gimple_build_assign_with_ops (NOP_EXPR, SSA_NAME_VAR (idx),
+ 						   ivname, NULL_TREE);
+ 	      gimple_assign_set_lhs (stmt,
+ 				     make_ssa_name (SSA_NAME_VAR (idx), stmt));
+ 	      gsi_insert_before (&gsi2, stmt, GSI_SAME_STMT);
+ 	      idx_map[SSA_NAME_VERSION (idx)] = gimple_assign_lhs (stmt);
+ 	    }
+ 	  else
+ 	    idx_map[SSA_NAME_VERSION (idx)] = ivname;
+ 	}
+       tmp2 = NULL_TREE;
+       lower_def (&gsi2, TREE_OPERAND (rhs, 1), &tmp2, ivs, names, idx_map, indices);
+       stmt = gimple_build_assign (var, tmp2);
+       gimple_assign_set_lhs (stmt, make_ssa_name (var, stmt));
+       gsi_insert_before (&gsi2, stmt, GSI_SAME_STMT);
+       mark_virtual_ops_for_renaming (stmt);
+       for (i = tree_operand_length (rhs) - 1; i >= 3; --i)
+ 	{
+ 	  tree idx = TREE_OPERAND (rhs, i);
+ 	  idx_map[SSA_NAME_VERSION (idx)] = saved[i - 3];
+ 	}
+       incr = gimple_build_assign_with_ops (PLUS_EXPR, var,
+ 					   sum, gimple_assign_lhs (stmt));
+       incrname = make_ssa_name (var, incr);
+       gimple_assign_set_lhs (incr, incrname);
+       gsi_insert_before (&gsi2, incr, GSI_SAME_STMT);
+       add_phi_arg (SSA_NAME_DEF_STMT (sum), incrname, e, UNKNOWN_LOCATION);
+       *target = incrname;
+ 
+       *gsi = gsi_for_stmt (gsi_stmt (*gsi));
+       return;
+     }
+ 
+   gcc_assert (is_gimple_assign (def_stmt) || is_gimple_call (def_stmt));
+ 
+   stmt = gimple_copy (def_stmt);
+   gimple_set_lhs (stmt, make_ssa_name (SSA_NAME_VAR (name), stmt));
+ 
+   /* Lower other stmts by copying them and replacing to be lowered
+      operands by their lowered values.  */
+   for (i = is_gimple_assign (stmt) ? 1 : 3;
+        i < gimple_num_ops (stmt);
+        ++i)
+     {
+       tree *op_p = gimple_op_ptr (stmt, i);
+       tree op = *op_p;
+ 
+       if (!op || TREE_CODE (op) != SSA_NAME)
+ 	continue;
+ 
+       if (idx_map[SSA_NAME_VERSION (op)])
+ 	*op_p = idx_map[SSA_NAME_VERSION (op)];
+       else if (bitmap_bit_p (names, SSA_NAME_VERSION (op)))
+ 	{
+ 	  gimple tmp;
+ 	  tree var, newop, tmp2 = NULL_TREE;
+ 	  var = SSA_NAME_VAR (op);
+ 	  tmp2 = NULL_TREE;
+ 	  lower_def (gsi, op, &tmp2, ivs, names, idx_map, indices);
+ 	  tmp = gimple_build_assign (var, tmp2);
+ 	  newop = make_ssa_name (var, tmp);
+ 	  gimple_assign_set_lhs (tmp, newop);
+ 	  gsi_insert_before (gsi, tmp, GSI_SAME_STMT);
+ 	  mark_virtual_ops_for_renaming (tmp);
+ 	  *op_p = newop;
+ 
+ 	  /* Record the newly created name for re-use.  */
+ 	  idx_map[SSA_NAME_VERSION (op)] = newop;
+ 	}
+     }
+ 
+   gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
+   update_stmt (stmt);
+   *target = gimple_get_lhs (stmt);
+ }
+ 
+ /* Lower the array expression that manifests itself by the store of
+    the result at BSI.  */
+ 
+ static void
+ lower_array_expression (gimple stmt, tree *idx_map)
+ {
+   gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
+   tree lhs, rhs;
+   int i, n;
+   VEC (tree, heap) *ivs = NULL, *extent = NULL;
+   basic_block bb, out;
+   edge e;
+   tree elt_type, elt, eltname, ref, atype, tmp2;
+   bitmap names, indices;
+   gimple tmp;
+ 
+   /* All information we need for determining the dimensionality and the
+      extent of the index walk is contained in the VLA_VIEW_EXPR of the lhs of
+      the store of the result of the array expression.  */
+   lhs = gimple_assign_lhs (stmt);
+   rhs = gimple_assign_rhs1 (stmt);
+ 
+   /* Scan for stmts that need lowering.  */
+   names = BITMAP_ALLOC (NULL);
+   indices = BITMAP_ALLOC (NULL);
+   need_lowering (rhs, names, indices);
+ 
+   n = (VL_EXP_OPERAND_LENGTH (lhs) - 2) / 2;
+   VEC_safe_grow_cleared (tree, heap, ivs, n);
+   VEC_safe_grow_cleared (tree, heap, extent, n);
+   elt_type = TREE_TYPE (lhs);
+   while (TREE_CODE (elt_type) == ARRAY_TYPE)
+     elt_type = TREE_TYPE (elt_type);
+   for (i = 0; i < n; ++i)
+     VEC_replace (tree, extent, i, TREE_OPERAND (lhs, 2 + 2 * i));
+ 
+   /* We will now have to build a loop nest of depth n with iteration
+      space [0..extentN-1] operating on the element type type.  */
+ 
+   /* Create a new temporary from which we create SSA names for
+      induction variables.  */
+ 
+   /* We enter lowering with the array store at bsi at the end of its
+      basic-block with a single fallthrough edge.  */
+   out = single_succ (gimple_bb (gsi_stmt (gsi)));
+ 
+   /* Now build loop header copies.  Loops run from zero to extent - 1.  */
+   for (i = 0; i < n; ++i)
+     {
+       tree cond, x;
+       gimple cond_stmt;
+       edge e;
+       gimple_stmt_iterator gsi2;
+ 
+       /* If 0 < extent is true then we always enter the loop.  If it
+ 	 is false all of the loop nest is dead and we can stop lowering.  */
+       x = VEC_index (tree, extent, i);
+       cond = fold_build2 (LT_EXPR, boolean_type_node,
+ 			  build_int_cst (TREE_TYPE (x), 0), x);
+       if (integer_onep (cond))
+ 	continue;
+       else if (integer_zerop (cond))
+ 	return;
+ 
+       /* Otherwise split the block after the inserted condition and
+ 	 set up edges to after the loop nest and the next header copy.  */
+       cond_stmt = gimple_build_cond_from_tree (cond, NULL_TREE, NULL_TREE);
+       gsi2 = gsi_for_stmt (gsi_stmt (gsi));
+       gsi_insert_before (&gsi2, cond_stmt, GSI_NEW_STMT);
+       bb = gimple_bb (gsi_stmt (gsi2));
+       split_block (bb, gsi_stmt (gsi2));
+       e = single_succ_edge (bb);
+       e->flags &= ~EDGE_FALLTHRU;
+       e->flags |= EDGE_TRUE_VALUE;
+       e = make_edge (bb, out, EDGE_FALSE_VALUE);
+     }
+ 
+   /* Make sure stmt is in its own basic block, even if all of the
+      loop header copy tests were not emitted.  */
+   if (stmt != gsi_stmt (gsi_after_labels (gimple_bb (stmt))))
+     {
+       gimple_stmt_iterator gsi2 = gsi_for_stmt (stmt);
+       gsi_prev (&gsi2);
+       split_block (gimple_bb (stmt), gsi_stmt (gsi2));
+     }
+ 
+   /* Now build the loop CFG nest.
+ 
+      loopN:
+        # j_2 = PHI <0(entry), j_3(loopN)>
+ 
+      loopN-1:
+        # i_2 = PHI <0(entry), i_3(loopN-1)>
+        ...
+        i_3 = i_2 + 1;
+        if (i_3 < iextent) goto loopN-1;
+ 
+      <tailN>
+        j_3 = j_2 + 1;
+        if (j_3 < jextent) goto loopN;
+ 
+      out:
+     */
+   bb = gimple_bb (gsi_stmt (gsi));
+   for (i = n - 1; i >= 0; --i)
+     {
+       tree incrname, iv;
+       gimple phi, incr, cond;
+       gimple_stmt_iterator gsi2;
+       basic_block bb2, bb3;
+       tree this_extent = VEC_index (tree, extent, i);
+       e = single_pred_edge (bb);
+       bb3 = split_edge (e);
+       iv = create_tmp_reg (TREE_TYPE (this_extent), "ivtmp");
+       add_referenced_var (iv);
+       phi = create_phi_node (iv, bb3);
+       VEC_replace (tree, ivs, i, PHI_RESULT (phi));
+       add_phi_arg (phi, build_int_cst (TREE_TYPE (iv), 0),
+ 		   single_pred_edge (bb3), UNKNOWN_LOCATION);
+       incr = gimple_build_assign_with_ops (PLUS_EXPR, iv,
+ 					   VEC_index (tree, ivs, i),
+ 					   build_int_cst (TREE_TYPE (iv), 1));
+       incrname = make_ssa_name (iv, incr);
+       gimple_assign_set_lhs (incr, incrname);
+       gsi2 = gsi_for_stmt (gsi_stmt (gsi));
+       gsi_insert_after (&gsi2, incr, GSI_NEW_STMT);
+       cond = gimple_build_cond (LT_EXPR, incrname, this_extent,
+ 				NULL_TREE, NULL_TREE);
+       gsi_insert_after (&gsi2, cond, GSI_NEW_STMT);
+       bb2 = gimple_bb (gsi_stmt (gsi2));
+       if (gsi_stmt (gsi2) != last_stmt (bb2))
+ 	split_block (bb2, gsi_stmt (gsi2));
+       e = single_succ_edge (bb);
+       e->flags &= ~EDGE_FALLTHRU;
+       e->flags |= EDGE_FALSE_VALUE;
+       e = make_edge (bb2, bb3, EDGE_TRUE_VALUE);
+       e->flags |= EDGE_DFS_BACK;
+       add_phi_arg (SSA_NAME_DEF_STMT (VEC_index (tree, ivs, i)), incrname, e,
+ 		   UNKNOWN_LOCATION);
+     }
+ 
+   /* FIXME.  Now bb is where the real code(TM) is supposed to live.  */
+   elt = create_tmp_reg (elt_type, "elttmp");
+   add_referenced_var (elt);
+ 
+   gsi = gsi_for_stmt (stmt);
+ 
+   /* Now walk the defs of the store stmt uses and build up the
+      lowered expression.  */
+   tmp2 = NULL_TREE;
+   lower_def (&gsi, rhs, &tmp2, ivs, names, idx_map, indices);
+   tmp = gimple_build_assign (elt, tmp2);
+   eltname = make_ssa_name (elt, tmp);
+   gimple_assign_set_lhs (tmp, eltname);
+   gsi_insert_before (&gsi, tmp, GSI_SAME_STMT);
+ 
+   /* Build the final store instruction.  */
+   ref = TREE_OPERAND (lhs, 1);
+   atype = TREE_TYPE (lhs);
+   for (i = n - 1; i >= 0; --i)
+     {
+       ref = build4 (ARRAY_REF, TREE_TYPE (atype),
+ 		    ref, VEC_index (tree, ivs, i), NULL_TREE,
+ 		    TREE_OPERAND (lhs, 2 + 2 * i + 1));
+       atype = TREE_TYPE (atype);
+     }
+   tmp = gimple_build_assign (ref, eltname);
+   gsi_insert_before (&gsi, tmp, GSI_SAME_STMT);
+   update_stmt (tmp);
+   mark_virtual_ops_for_renaming (tmp);
+ 
+   BITMAP_FREE (names);
+   BITMAP_FREE (indices);
+   VEC_free (tree, heap, ivs);
+ }
+ 
+ /* Poor man's lowering of expressions on arrays to loop form.
+    Scans every basic block for (a) stores whose LHS is a VLA_VIEW_EXPR,
+    which are expanded into explicit loop nests by lower_array_expression,
+    and (b) VLA_DELTA_EXPR definitions not consumed by a VLA_RIDX_EXPR,
+    whose RHS is rebuilt via lower_def.  Returns 0 (no extra TODO flags).  */
+ 
+ static unsigned int
+ tree_lower_array_expressions (void)
+ {
+   basic_block bb;
+   gimple_stmt_iterator gsi;
+   /* Snapshot the SSA name count: idx_map is indexed by SSA version and
+      cleared before each lowering so placeholder indices cannot leak
+      from one lowered expression into the next.  */
+   int orig_num_ssa_names = num_ssa_names;
+   tree *idx_map = XCNEWVEC (tree, num_ssa_names);
+ 
+   FOR_EACH_BB (bb)
+     {
+       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi);)
+ 	{
+ 	  gimple stmt = gsi_stmt (gsi);
+ 
+ 	  /* The final array stores are where we build up our loops from.
+ 	     We split the block with the store after the store to make
+ 	     CFG manipulations easier for the lowering process.  */
+ 	  if (gimple_assign_single_p (stmt)
+ 	      && TREE_CODE (gimple_assign_lhs (stmt)) == VLA_VIEW_EXPR)
+ 	    {
+ 	      if (stmt != last_stmt (bb))
+ 	        split_block (bb, stmt);
+ 	      memset (idx_map, 0, sizeof (tree) * orig_num_ssa_names);
+ 	      lower_array_expression (stmt, idx_map);
+ 	      /* ???  At -O0 we also have to remove the dead stmts
+ 		 and vars left around.  Otherwise we'll ICE trying to
+ 		 expand the array temporary variables and stmts.  */
+ 	      gsi = gsi_for_stmt (stmt);
+ 	      gsi_remove (&gsi, true);
+ 	      /* NOTE(review): breaking out means at most one store is
+ 		 lowered per original block here; presumably the remainder
+ 		 is revisited via the block split above — confirm.  */
+ 	      break;
+ 	    }
+ 	  else if (gimple_assign_single_p (stmt)
+ 	           && gimple_assign_rhs_code (stmt) == VLA_DELTA_EXPR
+ 		   && !used_in_ridx_p (gimple_assign_lhs (stmt)))
+ 	    {
+ 	      tree lhs, rhs = NULL_TREE;
+ 	      bitmap names, indices;
+ 	      lhs = gimple_assign_lhs (stmt);
+ 	      names = BITMAP_ALLOC (NULL);
+ 	      indices = BITMAP_ALLOC (NULL);
+ 	      /* Collect the SSA names and placeholder indices that the
+ 		 contraction's scalar expression depends on.  */
+ 	      need_lowering (lhs, names, indices);
+ 	      memset (idx_map, 0, sizeof (tree) * orig_num_ssa_names);
+ 	      lower_def (&gsi, lhs, &rhs, NULL, names, idx_map, indices);
+ 	      gimple_assign_set_rhs1 (stmt, rhs);
+ 	      update_stmt (stmt);
+ 	      gsi = gsi_for_stmt (stmt);
+ 	      BITMAP_FREE (names);
+ 	      BITMAP_FREE (indices);
+ 	      /* NOTE(review): this gsi_next has no observable effect since
+ 		 the following break leaves the loop — confirm intent.  */
+ 	      gsi_next (&gsi);
+ 	      break;
+ 	    }
+ 	  else
+ 	    gsi_next (&gsi);
+ 	}
+     }
+ 
+   /* The CFG was restructured by the lowering; dominance info is stale.  */
+   free_dominance_info (CDI_DOMINATORS);
+   free_dominance_info (CDI_POST_DOMINATORS);
+ 
+   return 0;
+ }
+ 
+ /* Gate function: run the array-lowering pass only if this function's
+    arrays have not already been lowered (PROP_gimple_larr not yet set).  */
+ 
+ static bool
+ gate_arrlower (void)
+ {
+   return !(cfun->curr_properties & PROP_gimple_larr);
+ }
+ 
+ /* Pass descriptor for array-expression lowering in the optimizing
+    pipeline.  Requires CFG and SSA, provides PROP_gimple_larr.
+    NOTE(review): the dump name "arrlower0" on this instance (vs plain
+    "arrlower" on pass_arrlower_O0 below) looks inverted — confirm
+    against where each pass is scheduled in passes.c.  */
+ struct gimple_opt_pass pass_arrlower =
+ {
+  {
+   GIMPLE_PASS,
+   "arrlower0",			/* name */
+   gate_arrlower,		/* gate */
+   tree_lower_array_expressions,	/* execute */
+   NULL,				/* sub */
+   NULL,				/* next */
+   0,				/* static_pass_number */
+   TV_TREE_ARRLOWER,		/* tv_id */
+   PROP_cfg | PROP_ssa,		/* properties_required */
+   PROP_gimple_larr,		/* properties_provided */
+   0,				/* properties_destroyed */
+   0,				/* todo_flags_start */
+   TODO_dump_func
+   | TODO_ggc_collect
+   | TODO_update_ssa
+   | TODO_verify_ssa		/* todo_flags_finish */
+  }
+ };
+ 
+ /* Fallback pass descriptor, presumably scheduled for the -O0 pipeline
+    (TODO confirm placement in passes.c).  Identical to pass_arrlower
+    except for the dump-file name; both share the same gate, so the
+    second instance to run is skipped once PROP_gimple_larr is set.  */
+ struct gimple_opt_pass pass_arrlower_O0 =
+ {
+  {
+   GIMPLE_PASS,
+   "arrlower",			/* name */
+   gate_arrlower,		/* gate */
+   tree_lower_array_expressions,	/* execute */
+   NULL,				/* sub */
+   NULL,				/* next */
+   0,				/* static_pass_number */
+   TV_TREE_ARRLOWER,		/* tv_id */
+   PROP_cfg | PROP_ssa,		/* properties_required */
+   PROP_gimple_larr,		/* properties_provided */
+   0,				/* properties_destroyed */
+   0,				/* todo_flags_start */
+   TODO_dump_func
+   | TODO_ggc_collect
+   | TODO_update_ssa
+   | TODO_verify_ssa		/* todo_flags_finish */
+  }
+ };
+ 
Index: trunk/gcc/tree-pass.h
===================================================================
*** trunk.orig/gcc/tree-pass.h	2011-06-21 14:15:33.000000000 +0200
--- trunk/gcc/tree-pass.h	2011-06-21 17:31:06.000000000 +0200
*************** struct dump_file_info
*** 238,243 ****
--- 238,244 ----
  #define PROP_gimple_lomp	(1 << 8)	/* lowered OpenMP directives */
  #define PROP_cfglayout	 	(1 << 9)	/* cfglayout mode on RTL */
  #define PROP_gimple_lcx		(1 << 10)       /* lowered complex */
+ #define PROP_gimple_larr	(1 << 11)       /* lowered arrays */
  
  #define PROP_trees \
    (PROP_gimple_any | PROP_gimple_lcf | PROP_gimple_leh | PROP_gimple_lomp)
*************** extern struct gimple_opt_pass pass_trace
*** 446,451 ****
--- 447,454 ----
  extern struct gimple_opt_pass pass_warn_unused_result;
  extern struct gimple_opt_pass pass_split_functions;
  extern struct gimple_opt_pass pass_feedback_split_functions;
+ extern struct gimple_opt_pass pass_arrlower;
+ extern struct gimple_opt_pass pass_arrlower_O0;
  
  /* IPA Passes */
  extern struct simple_ipa_opt_pass pass_ipa_lower_emutls;
Index: trunk/gcc/cfgexpand.c
===================================================================
*** trunk.orig/gcc/cfgexpand.c	2011-06-21 14:15:33.000000000 +0200
--- trunk/gcc/cfgexpand.c	2011-06-21 14:15:49.000000000 +0200
*************** estimated_stack_frame_size (struct cgrap
*** 1333,1339 ****
  
    gcc_checking_assert (gimple_referenced_vars (fn));
    FOR_EACH_REFERENCED_VAR (fn, var, rvi)
!     size += expand_one_var (var, true, false);
  
    if (stack_vars_num > 0)
      {
--- 1333,1340 ----
  
    gcc_checking_assert (gimple_referenced_vars (fn));
    FOR_EACH_REFERENCED_VAR (fn, var, rvi)
!     if (!DECL_GIMPLE_REG_P (var))
!       size += expand_one_var (var, true, false);
  
    if (stack_vars_num > 0)
      {
*************** expand_debug_expr (tree exp)
*** 3286,3291 ****
--- 3287,3298 ----
      case FMA_EXPR:
        return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);
  
+     case VLA_VIEW_EXPR:
+     case VLA_RIDX_EXPR:
+     case VLA_IDX_EXPR:
+     case VLA_DELTA_EXPR:
+       return NULL;
+ 
      default:
      flag_unsupported:
  #ifdef ENABLE_CHECKING
Index: trunk/gcc/tree-cfg.c
===================================================================
*** trunk.orig/gcc/tree-cfg.c	2011-06-21 14:15:33.000000000 +0200
--- trunk/gcc/tree-cfg.c	2011-06-21 17:27:48.000000000 +0200
*************** verify_gimple_assign_single (gimple stmt
*** 3906,3911 ****
--- 3906,3918 ----
        /* FIXME.  */
        return res;
  
+     case VLA_VIEW_EXPR:
+     case VLA_IDX_EXPR:
+     case VLA_RIDX_EXPR:
+     case VLA_DELTA_EXPR:
+       /* FIXME.  */
+       return false;
+ 
      default:;
      }
  
Index: trunk/gcc/tree-pretty-print.c
===================================================================
*** trunk.orig/gcc/tree-pretty-print.c	2011-06-21 14:15:33.000000000 +0200
--- trunk/gcc/tree-pretty-print.c	2011-06-21 14:15:49.000000000 +0200
*************** dump_generic_node (pretty_printer *buffe
*** 1217,1222 ****
--- 1217,1252 ----
        pp_string (buffer, ">");
        break;
  
+     case VLA_VIEW_EXPR:
+     case VLA_IDX_EXPR:
+     case VLA_RIDX_EXPR:
+     case VLA_DELTA_EXPR:
+       {
+ 	int n;
+ 	if (TREE_CODE (node) == VLA_VIEW_EXPR)
+ 	  pp_string (buffer, "VLA <");
+ 	else if (TREE_CODE (node) == VLA_IDX_EXPR)
+ 	  pp_string (buffer, "VLA_IDX <");
+ 	else if (TREE_CODE (node) == VLA_RIDX_EXPR)
+ 	  pp_string (buffer, "VLA_RIDX <");
+ 	else if (TREE_CODE (node) == VLA_DELTA_EXPR)
+ 	  pp_string (buffer, "VLA_DELTA <");
+ 	for (n = 2; n < VL_EXP_OPERAND_LENGTH (node) - 1; ++n)
+ 	  {
+ 	    dump_generic_node (buffer, TREE_OPERAND (node, n),
+ 			       spc, flags, false);
+ 	    pp_string (buffer, ", ");
+ 	  }
+ 	dump_generic_node (buffer,
+ 			   TREE_OPERAND (node,
+ 					 VL_EXP_OPERAND_LENGTH (node) - 1),
+ 			   spc, flags, false);
+ 	pp_string (buffer, "> (");
+ 	dump_generic_node (buffer, TREE_OPERAND (node, 1), spc, flags, false);
+ 	pp_string (buffer, ")");
+       }
+       break;
+ 
      case ARRAY_REF:
      case ARRAY_RANGE_REF:
        op0 = TREE_OPERAND (node, 0);
Index: trunk/gcc/tree.def
===================================================================
*** trunk.orig/gcc/tree.def	2011-06-21 14:15:33.000000000 +0200
--- trunk/gcc/tree.def	2011-06-21 14:15:49.000000000 +0200
*************** DEFTREECODE (ARRAY_REF, "array_ref", tcc
*** 417,422 ****
--- 417,458 ----
     of the range is taken from the type of the expression.  */
  DEFTREECODE (ARRAY_RANGE_REF, "array_range_ref", tcc_reference, 4)
  
+ /* Variable-length array view operation on a piece of memory.
+    This returns a vla object starting at the base of operand zero.
+    The remaining arguments specify length and stride of each dimension.
+      VLA_VIEW_EXPR <object, extent0, stride0, ..., extentN, strideN>
+    The dimensionality is the vl operand length minus one.
+    The result type is the respective vla type.  */
+ DEFTREECODE (VLA_VIEW_EXPR, "vla_view_expr", tcc_vl_exp, 1)
+ 
+ /* Variable-length array indexing operation which creates a placeholder
+    for any element in the indexed array subject to variable indices.
+    The first operand is the array to be indexed and the following
+    operands are placeholders for indices or constants.  As many
+    extra operands as the array has dimensions have to be provided.
+      VLA_IDX_EXPR <array, index0, ..., indexN>
+    The result is of type of the array elements.  */
+ DEFTREECODE (VLA_IDX_EXPR, "vla_idx_expr", tcc_vl_exp, 1)
+ 
+ /* Variable-length array construction operation which creates an array
+    from a scalar that may be computed from a placeholder expression.
+    The first operand is the scalar expression to be used to fill the
+    array elements indexed by the following operands that are
+    placeholders for indices.  As many extra operands as the array has
+    dimensions have to be provided.
+      VLA_RIDX_EXPR <scalar, index0, ..., indexN>
+    The result is of array type.  */
+ DEFTREECODE (VLA_RIDX_EXPR, "vla_ridx_expr", tcc_vl_exp, 1)
+ 
+ /* Variable-length array contraction operation.  This represents
+    a reduction loop that contracts the indices specified by the third
+    and following operands in the first operand, which is a scalar
+    computed by placeholder expressions.  The second operand is
+    the common extent of the dimensions the specified indices are used in.
+      VLA_DELTA_EXPR <scalar, extent, index0, ..., indexN>
+    The result is of scalar type.  */
+ DEFTREECODE (VLA_DELTA_EXPR, "vla_delta_expr", tcc_vl_exp, 1)
+ 
  /* C unary `*' or Pascal `^'.  One operand, an expression for a pointer.  */
  DEFTREECODE (INDIRECT_REF, "indirect_ref", tcc_reference, 1)
  
Index: trunk/gcc/tree-ssa-operands.c
===================================================================
*** trunk.orig/gcc/tree-ssa-operands.c	2011-06-21 14:15:33.000000000 +0200
--- trunk/gcc/tree-ssa-operands.c	2011-06-21 14:15:49.000000000 +0200
*************** get_expr_operands (gimple stmt, tree *ex
*** 948,953 ****
--- 948,973 ----
        get_expr_operands (stmt, &TREE_OPERAND (expr, 2), uflags);
        return;
  
+     case VLA_VIEW_EXPR:
+       {
+ 	int i;
+         get_expr_operands (stmt, &TREE_OPERAND (expr, 1), flags);
+ 	for (i = 2; i < VL_EXP_OPERAND_LENGTH (expr); ++i)
+ 	  get_expr_operands (stmt, &TREE_OPERAND (expr, i), opf_use);
+ 
+ 	return;
+       }
+     case VLA_IDX_EXPR:
+     case VLA_RIDX_EXPR:
+     case VLA_DELTA_EXPR:
+       {
+ 	int i;
+ 	for (i = 1; i < VL_EXP_OPERAND_LENGTH (expr); ++i)
+ 	  get_expr_operands (stmt, &TREE_OPERAND (expr, i), opf_use);
+ 
+ 	return;
+       }
+ 
      case CONSTRUCTOR:
        {
  	/* General aggregate CONSTRUCTORs have been decomposed, but they
Index: trunk/gcc/tree-ssa-sink.c
===================================================================
*** trunk.orig/gcc/tree-ssa-sink.c	2011-06-21 14:15:33.000000000 +0200
--- trunk/gcc/tree-ssa-sink.c	2011-06-21 14:15:49.000000000 +0200
*************** is_hidden_global_store (gimple stmt)
*** 194,199 ****
--- 194,202 ----
  	       || TREE_CODE (lhs) == MEM_REF
  	       || TREE_CODE (lhs) == TARGET_MEM_REF)
  	return ptr_deref_may_alias_global_p (TREE_OPERAND (lhs, 0));
+       else if (TREE_CODE (lhs) == VLA_VIEW_EXPR)
+ 	/* FIXME.  */
+ 	return true;
        else if (CONSTANT_CLASS_P (lhs))
  	return true;
        else
Index: trunk/gcc/gimple.c
===================================================================
*** trunk.orig/gcc/gimple.c	2011-06-21 14:15:33.000000000 +0200
--- trunk/gcc/gimple.c	2011-06-21 14:15:49.000000000 +0200
*************** get_gimple_rhs_num_ops (enum tree_code c
*** 2598,2603 ****
--- 2598,2607 ----
        || (SYM) == ASSERT_EXPR						    \
        || (SYM) == ADDR_EXPR						    \
        || (SYM) == WITH_SIZE_EXPR					    \
+       || (SYM) == VLA_IDX_EXPR						    \
+       || (SYM) == VLA_RIDX_EXPR						    \
+       || (SYM) == VLA_DELTA_EXPR					    \
+       || (SYM) == VLA_VIEW_EXPR						    \
        || (SYM) == SSA_NAME						    \
        || (SYM) == VEC_COND_EXPR) ? GIMPLE_SINGLE_RHS			    \
     : GIMPLE_INVALID_RHS),
*************** is_gimple_lvalue (tree t)
*** 2644,2649 ****
--- 2648,2654 ----
  {
    return (is_gimple_addressable (t)
  	  || TREE_CODE (t) == WITH_SIZE_EXPR
+ 	  || TREE_CODE (t) == VLA_VIEW_EXPR
  	  /* These are complex lvalues, but don't have addresses, so they
  	     go here.  */
  	  || TREE_CODE (t) == BIT_FIELD_REF);
*************** is_gimple_reg (tree t)
*** 2926,2934 ****
    if (!is_gimple_variable (t))
      return false;
  
-   if (!is_gimple_reg_type (TREE_TYPE (t)))
-     return false;
- 
    /* A volatile decl is not acceptable because we can't reuse it as
       needed.  We need to copy it into a temp first.  */
    if (TREE_THIS_VOLATILE (t))
--- 2931,2936 ----
*************** is_gimple_reg (tree t)
*** 2955,2964 ****
    /* Complex and vector values must have been put into SSA-like form.
       That is, no assignments to the individual components.  */
    if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
!       || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE)
      return DECL_GIMPLE_REG_P (t);
  
!   return true;
  }
  
  
--- 2957,2967 ----
    /* Complex and vector values must have been put into SSA-like form.
       That is, no assignments to the individual components.  */
    if (TREE_CODE (TREE_TYPE (t)) == COMPLEX_TYPE
!       || TREE_CODE (TREE_TYPE (t)) == VECTOR_TYPE
!       || TREE_CODE (TREE_TYPE (t)) == ARRAY_TYPE)
      return DECL_GIMPLE_REG_P (t);
  
!   return is_gimple_reg_type (TREE_TYPE (t));
  }
  
  
*************** get_base_address (tree t)
*** 3059,3064 ****
--- 3062,3070 ----
    while (handled_component_p (t))
      t = TREE_OPERAND (t, 0);
  
+   if (TREE_CODE (t) == VLA_VIEW_EXPR)
+     t = TREE_OPERAND (t, 1);
+ 
    if ((TREE_CODE (t) == MEM_REF
         || TREE_CODE (t) == TARGET_MEM_REF)
        && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
*************** get_base_loadstore (tree op)
*** 5187,5192 ****
--- 5193,5200 ----
  {
    while (handled_component_p (op))
      op = TREE_OPERAND (op, 0);
+   if (TREE_CODE (op) == VLA_VIEW_EXPR)
+     op = TREE_OPERAND (op, 1);
    if (DECL_P (op)
        || INDIRECT_REF_P (op)
        || TREE_CODE (op) == MEM_REF
Index: trunk/gcc/tree-ssa-alias.c
===================================================================
*** trunk.orig/gcc/tree-ssa-alias.c	2011-06-21 14:15:33.000000000 +0200
--- trunk/gcc/tree-ssa-alias.c	2011-06-21 14:15:49.000000000 +0200
*************** refs_may_alias_p_1 (ao_ref *ref1, ao_ref
*** 997,1010 ****
  			|| TREE_CODE (ref1->ref) == STRING_CST
  			|| handled_component_p (ref1->ref)
  			|| TREE_CODE (ref1->ref) == MEM_REF
! 			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF)
  		       && (!ref2->ref
  			   || TREE_CODE (ref2->ref) == SSA_NAME
  			   || DECL_P (ref2->ref)
  			   || TREE_CODE (ref2->ref) == STRING_CST
  			   || handled_component_p (ref2->ref)
  			   || TREE_CODE (ref2->ref) == MEM_REF
! 			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
  
    /* Decompose the references into their base objects and the access.  */
    base1 = ao_ref_base (ref1);
--- 997,1012 ----
  			|| TREE_CODE (ref1->ref) == STRING_CST
  			|| handled_component_p (ref1->ref)
  			|| TREE_CODE (ref1->ref) == MEM_REF
! 			|| TREE_CODE (ref1->ref) == TARGET_MEM_REF
! 			|| TREE_CODE (ref1->ref) == VLA_VIEW_EXPR)
  		       && (!ref2->ref
  			   || TREE_CODE (ref2->ref) == SSA_NAME
  			   || DECL_P (ref2->ref)
  			   || TREE_CODE (ref2->ref) == STRING_CST
  			   || handled_component_p (ref2->ref)
  			   || TREE_CODE (ref2->ref) == MEM_REF
! 			   || TREE_CODE (ref2->ref) == TARGET_MEM_REF
! 			   || TREE_CODE (ref2->ref) == VLA_VIEW_EXPR));
  
    /* Decompose the references into their base objects and the access.  */
    base1 = ao_ref_base (ref1);
Index: trunk/gcc/tree-ssa-sccvn.c
===================================================================
*** trunk.orig/gcc/tree-ssa-sccvn.c	2011-06-21 14:15:33.000000000 +0200
--- trunk/gcc/tree-ssa-sccvn.c	2011-06-21 14:15:49.000000000 +0200
*************** copy_reference_ops_from_ref (tree ref, V
*** 617,622 ****
--- 617,629 ----
  
        switch (temp.opcode)
  	{
+ 	case VLA_VIEW_EXPR:
+ 	case VLA_IDX_EXPR:
+ 	case VLA_RIDX_EXPR:
+ 	case VLA_DELTA_EXPR:
+ 	  /* FIXME.  */
+ 	  temp.op0 = ref;
+ 	  break;
  	case MEM_REF:
  	  /* The base address gets its own vn_reference_op_s structure.  */
  	  temp.op0 = TREE_OPERAND (ref, 1);


Index Nav: [Date Index] [Subject Index] [Author Index] [Thread Index]
Message Nav: [Date Prev] [Date Next] [Thread Prev] [Thread Next]