This is the mail archive of the gcc-patches@gcc.gnu.org mailing list for the GCC project.



[PATCH] re-write implementation of Java synchronized


Mainly, this is a re-write of the implementation of "synchronized"
to use the CLEANUP_POINT_EXPR and WITH_CLEANUP_EXPR tree codes.
It also has some extra code in the class file writer to handle
break/continue/return from within a synchronized block.
While doing this, I ran into and fixed a couple of verifier-related problems.
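
As an illustration (not part of the patch), a method along these lines
exercises the exits the class file writer now has to handle: a plain
return, and a labeled break or continue that leaves the synchronized
block.  On each of those paths the pending monitor-exit cleanup has to
be called before the branch or return instruction is emitted.

    class SyncExample
    {
      private int total;

      int scan (int[] data)
      {
        scan_loop:
        for (int i = 0; i < data.length; i++)
          {
            synchronized (this)
              {
                if (data[i] < 0)
                  break scan_loop;      // EXIT_BLOCK_EXPR: leaves the block
                if (data[i] == 0)
                  continue scan_loop;   // likewise
                if (total > 1000)
                  return total;         // RETURN_EXPR with a value to preserve
                total += data[i];
              }
          }
        return total;
      }
    }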

My intention is to also re-do the try/finally implementation to
use the same primitives.  That should fix some problems,
and hopefully simplify the code a bit.  But I haven't done
that part yet.
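
For reference, the tree now built for a synchronized statement
corresponds, roughly, to the hand-expanded form below.  The
monitorEnter/monitorExit helpers are only hypothetical stand-ins for
the runtime's _Jv_MonitorExit and its enter counterpart (they are not
callable from Java source); the point is that the body is guarded by a
cleanup exactly the way a finally clause guards it, which is why the
same primitives should carry over to try/finally.

    class SyncExpansion
    {
      // Hypothetical stand-ins for the runtime monitor entry points.
      static void monitorEnter (Object o) { }
      static void monitorExit (Object o) { }

      static int counter;

      // synchronized (obj) { counter++; }  becomes, roughly:
      static void bump (Object obj)
      {
        Object tmp = obj;       // MODIFY_EXPR: save the monitor object
        monitorEnter (tmp);     // evaluated as the WITH_CLEANUP_EXPR operand
        try
          {
            counter++;          // the original block
          }
        finally
          {
            monitorExit (tmp);  // the attached cleanup; runs on every exit
          }
      }
    }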

The patch has been checked into egcs.

	--Per Bothner
Cygnus Solutions     bothner@cygnus.com     http://www.cygnus.com/~bothner

Tue Dec 22 11:25:19 1998  Per Bothner  <bothner@cygnus.com>

	* expr.c (process_jvm_instruction):  Do load_type_state after JSR.
	* verify.c (verify_jvm_instructions):  Fix off-by-one error.

	* jcf-write.c (CHECK_PUT):  Add (void) cast to avoid -Wall warnings.
	(localvar_alloc):  Change return type to void.
	(emit_unop):  Remove unused variable size.
	
	* jcf-write.c (struct jcf_block):  Add new union.
	(PENDING_CLEANUP_PC, PENDING_EXIT_PC, UNDEFINED_PC):  New macros.
	(call_cleanups):  New function.
	(struct jcf_partial):  New fields num_finalizers and return_value_decl.
	(generate_bytecode_insns):  Support CLEANUP_POINT_EXPR and
	WITH_CLEANUP_EXPR.  Handle cleanups in RETURN_EXPR and EXIT_BLOCK_EXPR.
	* lang.c (lang_init):  Call using_eh_for_cleanups.
	* parse.y (java_complete_lhs):  For SYNCHRONIZED_EXPR, defer
	completing operands to patch_synchronized_statement.
	Support CLEANUP_POINT_EXPR, WITH_CLEANUP_EXPR.
	(patch_synchronized_statement): Re-write using CLEANUP_POINT_EXPR and
	WITH_CLEANUP_EXPR instead of TRY_EXPR.	

Index: expr.c
===================================================================
RCS file: /cvs/cvsfiles/devo/gcc/java/expr.c,v
retrieving revision 1.82.2.10
diff -u -p -r1.82.2.10 expr.c
--- expr.c	1998/12/13 05:55:21	1.82.2.10
+++ expr.c	1998/12/23 00:44:18
@@ -2191,6 +2191,7 @@ process_jvm_instruction (PC, byte_ops, l
     tree where = lookup_label (oldpc+OPERAND_VALUE);	\
     tree ret   = lookup_label (PC);			\
     build_java_jsr (where, ret);			\
+    load_type_state (ret);				\
   }
 
 /* Push a constant onto the stack. */
Index: jcf-write.c
===================================================================
RCS file: /cvs/cvsfiles/devo/gcc/java/jcf-write.c,v
retrieving revision 1.13.2.12
diff -u -p -r1.13.2.12 jcf-write.c
--- jcf-write.c	1998/12/13 05:55:21	1.13.2.12
+++ jcf-write.c	1998/12/23 00:44:18
@@ -96,19 +96,35 @@ struct chunk
   int size;
 };
 
+#define PENDING_CLEANUP_PC (-3)
+#define PENDING_EXIT_PC (-2)
+#define UNDEFINED_PC (-1)
+
 /* Each "block" represents a label plus the bytecode instructions following.
    There may be branches out of the block, but no incoming jumps, except
-   to the beginning of the block. */
+   to the beginning of the block.
+
+   If (pc < 0), the jcf_block is not an actual block (i.e. it has no
+   assocated code yet), but it is an undefined label.
+*/
 
 struct jcf_block
 {
   /* For blocks that that are defined, the next block (in pc order).
-     For blocks that are the not-yet-defined end label of a LABELED_BLOCK_EXPR,
-     this is the next (outer) such end label, in a stack heaed by
+     For blocks that are the not-yet-defined end label of a LABELED_BLOCK_EXPR
+     or a cleanup expression (from a WITH_CLEANUP_EXPR),
+     this is the next (outer) such end label, in a stack headed by
      labeled_blocks in jcf_partial. */
   struct jcf_block *next;
+
+  /* In the not-yet-defined end label for an unfinished EXIT_BLOCK_EXPR.
+     pc is PENDING_EXIT_PC.
+     In the not-yet-defined end label for pending cleanup subroutine,
+     pc is PENDING_CLEANUP_PC.
+     For other not-yet-defined labels, pc is UNDEFINED_PC.
 
-  /* Until perform_relocations is finished, this is the maximum possible
+     If the label has been defined:
+     Until perform_relocations is finished, this is the maximum possible
      value of the bytecode offset at the begnning of this block.
      After perform_relocations, it is the actual offset (pc). */
   int pc;
@@ -117,14 +133,21 @@ struct jcf_block
 
   /* After finish_jcf_block is called, The actual instructions contained in this block.
      Before than NULL, and the instructions are in state->bytecode. */
-  struct chunk *chunk;
+  union {
+    struct chunk *chunk;
 
+    /* If pc==PENDING_CLEANUP_PC, start_label is the start of the region
+       coveed by the cleanup. */
+    struct jcf_block *start_label;
+  } v;
+
   union {
     /* Set of relocations (in reverse offset order) for this block. */
     struct jcf_relocation *relocations;
 
     /* If this block is that of the not-yet-defined end label of
-       a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR. */
+       a LABELED_BLOCK_EXPR, where LABELED_BLOCK is that LABELED_BLOCK_EXPR.
+       If pc==PENDING_CLEANUP_PC, the cleanup that needs to be run. */
     tree labeled_block;
   } u;
 };
@@ -240,6 +263,12 @@ struct jcf_partial
   /* Number of exception handlers for the current method. */
   int num_handlers;
 
+  /* Number of finalizers we are currently nested within. */
+  int num_finalizers;
+
+  /* If non-NULL, use this for the return value. */
+  tree return_value_decl;
+
   /* Information about the current switch statemenet. */
   struct jcf_switch_state *sw_state;
 };
@@ -263,7 +292,7 @@ CHECK_PUT(ptr, state, i)
   return 0;
 }
 #else
-#define CHECK_PUT(PTR, STATE, I) 0
+#define CHECK_PUT(PTR, STATE, I) ((void)0)
 #endif
 
 #define PUT1(X)  (CHECK_PUT(ptr, state, 1), *ptr++ = (X))
@@ -308,7 +337,7 @@ CHECK_OP(struct jcf_partial *state)
   return 0;
 }
 #else
-#define CHECK_OP(STATE) 0
+#define CHECK_OP(STATE) ((void)0)
 #endif
 
 unsigned char *
@@ -341,7 +370,7 @@ gen_jcf_label (state)
     obstack_alloc (state->chunk_obstack, sizeof (struct jcf_block));
   block->next =	NULL;
   block->linenumber = -1;
-  block->pc = -1;
+  block->pc = UNDEFINED_PC;
   return block;
 }
 
@@ -355,10 +384,10 @@ finish_jcf_block (state)
   int pc = state->code_length;
   append_chunk_copy (state->bytecode.data, code_length, state);
   BUFFER_RESET (&state->bytecode);
-  block->chunk = state->chunk;
+  block->v.chunk = state->chunk;
 
   /* Calculate code_length to the maximum value it can have. */
-  pc += block->chunk->size;
+  pc += block->v.chunk->size;
   for (reloc = block->u.relocations;  reloc != NULL;  reloc = reloc->next)
     {
       int kind = reloc->kind;
@@ -465,7 +494,7 @@ struct localvar_info
 #define localvar_max \
   ((struct localvar_info**) state->localvars.ptr - localvar_buffer)
 
-int
+void
 localvar_alloc (decl, state)
      tree decl;
      struct jcf_partial *state;
@@ -915,7 +944,6 @@ emit_unop (opcode, type, state)
      tree type;
      struct jcf_partial *state;
 {
-  int size = TYPE_IS_WIDE (type) ? 2 : 1;
   RESERVE(1);
   OP1 (opcode);
 }
@@ -932,7 +960,7 @@ emit_binop (opcode, type, state)
   NOTE_POP (size);
 }
 
-static struct jcf_relocation *
+static void
 emit_reloc (value, kind, target, state)
      HOST_WIDE_INT value;
      int kind;
@@ -1225,6 +1253,23 @@ generate_bytecode_conditional (exp, true
     fatal ("internal error - SP mismatch");
 }
 
+/* Call pending cleanups i.e. those for surrounding CLEANUP_POINT_EXPRs
+   but only as far out as LIMIT (since we are about to jump to the
+   emit label that is LIMIT). */
+
+static void
+call_cleanups (limit, state)
+     struct jcf_block *limit;
+     struct jcf_partial *state;
+{
+  struct jcf_block *block = state->labeled_blocks;
+  for (;  block != limit;  block = block->next)
+    {
+      if (block->pc == PENDING_CLEANUP_PC)
+	emit_jsr (block, state);
+    }
+}
+
 /* Generate bytecode for sub-expression EXP of METHOD.
    TARGET is one of STACK_TARGET or IGNORE_TARGET. */
 
@@ -1593,12 +1638,12 @@ generate_bytecode_insns (exp, target, st
 	switch_length = state->code_length - switch_instruction->pc;
 	switch_instruction->pc = body_block->pc;
 	instruction_last->next = body_block;
-	instruction_last->chunk->next = body_block->chunk;
+	instruction_last->v.chunk->next = body_block->v.chunk;
 	expression_last->next = switch_instruction;
-	expression_last->chunk->next = switch_instruction->chunk;
+	expression_last->v.chunk->next = switch_instruction->v.chunk;
 	body_last->next = sw_state.default_label;
-	body_last->chunk->next = NULL;
-	state->chunk = body_last->chunk;
+	body_last->v.chunk->next = NULL;
+	state->chunk = body_last->v.chunk;
 	for (;  body_block != sw_state.default_label;  body_block = body_block->next)
 	  body_block->pc += switch_length;
 
@@ -1608,7 +1653,10 @@ generate_bytecode_insns (exp, target, st
 
     case RETURN_EXPR:
       if (!TREE_OPERAND (exp, 0))
-	op = OPCODE_return;
+	{
+	  op = OPCODE_return;
+	  call_cleanups (NULL_TREE, state);
+	}
       else
 	{
 	  exp = TREE_OPERAND (exp, 0);
@@ -1617,6 +1665,23 @@ generate_bytecode_insns (exp, target, st
 	  exp = TREE_OPERAND (exp, 1);
 	  op = OPCODE_ireturn + adjust_typed_op (TREE_TYPE (exp), 4);
 	  generate_bytecode_insns (exp, STACK_TARGET, state);
+	  if (state->num_finalizers > 0)
+	    {
+	      if (state->return_value_decl == NULL_TREE)
+		{
+		  state->return_value_decl
+		    = build_decl (VAR_DECL, NULL_TREE, TREE_TYPE (exp));
+		  localvar_alloc (state->return_value_decl, state);
+		}
+	      emit_store (state->return_value_decl, state);
+	      call_cleanups (NULL_TREE, state);
+	      emit_load (state->return_value_decl, state);
+	      /* If we call localvar_free (state->return_value_decl, state),
+		 then we risk the save decl erroneously re-used in the
+		 finalizer.  Instead, we keep the state->return_value_decl
+		 allocated through the rest of the method.  This is not
+		 the greatest solution, but it is at least simple and safe. */
+	    }
 	}
       RESERVE (1);
       OP1 (op);
@@ -1626,6 +1691,7 @@ generate_bytecode_insns (exp, target, st
 	struct jcf_block *end_label = gen_jcf_label (state);
 	end_label->next = state->labeled_blocks;
 	state->labeled_blocks = end_label;
+	end_label->pc = PENDING_EXIT_PC;
 	end_label->u.labeled_block = exp;
 	if (LABELED_BLOCK_BODY (exp))
 	  generate_bytecode_insns (LABELED_BLOCK_BODY (exp), target, state);
@@ -1681,6 +1747,7 @@ generate_bytecode_insns (exp, target, st
 	if (TREE_OPERAND (exp, 1) != NULL) goto notimpl;
 	while (label->u.labeled_block != TREE_OPERAND (exp, 0))
 	  label = label->next;
+	call_cleanups (label, state);
 	emit_goto (label, state);
       }
       break;
@@ -1990,6 +2057,79 @@ generate_bytecode_insns (exp, target, st
 	  }
       }
       break;
+
+    case CLEANUP_POINT_EXPR:
+      {
+	struct jcf_block *save_labeled_blocks = state->labeled_blocks;
+	int can_complete = CAN_COMPLETE_NORMALLY (TREE_OPERAND (exp, 0));
+	generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
+	if (target != IGNORE_TARGET)
+	  abort ();
+	while (state->labeled_blocks != save_labeled_blocks)
+	  {
+	    struct jcf_block *finished_label = NULL;
+	    tree return_link;
+	    tree exception_type = build_pointer_type (throwable_type_node);
+	    tree exception_decl = build_decl (VAR_DECL, NULL_TREE,
+					      exception_type);
+	    struct jcf_block *end_label = get_jcf_label_here (state);
+	    struct jcf_block *label = state->labeled_blocks;
+	    struct jcf_handler *handler;
+	    tree cleanup = label->u.labeled_block;
+	    state->labeled_blocks = label->next;
+	    state->num_finalizers--;
+	    if (can_complete)
+	      {
+		finished_label = gen_jcf_label (state);
+		emit_jsr (label, state);
+		emit_goto (finished_label, state);
+		if (! CAN_COMPLETE_NORMALLY (cleanup))
+		  can_complete = 0;
+	      }
+	    handler = alloc_handler (label->v.start_label, end_label, state);
+	    handler->type = NULL_TREE;
+	    localvar_alloc (exception_decl, state);
+	    NOTE_PUSH (1);
+            emit_store (exception_decl, state);
+	    emit_jsr (label, state);
+	    emit_load (exception_decl, state);
+	    RESERVE (1);
+	    OP1 (OPCODE_athrow);
+	    NOTE_POP (1);
+
+	    /* The finally block. */
+	    return_link = build_decl (VAR_DECL, NULL_TREE,
+				      return_address_type_node);
+	    define_jcf_label (label, state);
+	    NOTE_PUSH (1);
+	    localvar_alloc (return_link, state);
+	    emit_store (return_link, state);
+	    generate_bytecode_insns (cleanup, IGNORE_TARGET, state);
+	    maybe_wide (OPCODE_ret, DECL_LOCAL_INDEX (return_link), state);
+	    localvar_free (return_link, state);
+	    localvar_free (exception_decl, state);
+	    if (finished_label != NULL)
+	      define_jcf_label (finished_label, state);
+	  }
+      }
+      break;
+
+    case WITH_CLEANUP_EXPR:
+      {
+	struct jcf_block *label;
+	generate_bytecode_insns (TREE_OPERAND (exp, 0), IGNORE_TARGET, state);
+	label = gen_jcf_label (state);
+	label->pc = PENDING_CLEANUP_PC;
+	label->next = state->labeled_blocks;
+	state->labeled_blocks = label;
+	state->num_finalizers++;
+	label->u.labeled_block = TREE_OPERAND (exp, 2);
+	label->v.start_label = get_jcf_label_here (state);
+	if (target != IGNORE_TARGET)
+	  abort ();
+      }
+      break;
+
     case TRY_EXPR:
       {
 	tree try_clause = TREE_OPERAND (exp, 0);
@@ -2259,7 +2399,7 @@ perform_relocations (state)
   shrink = 0;
   for (block = state->blocks;  block != NULL;  block = block->next)
     {
-      int block_size = block->chunk->size;
+      int block_size = block->v.chunk->size;
 
       block->pc = pc;
 
@@ -2273,7 +2413,7 @@ perform_relocations (state)
 	{
 	  reloc = reloc->next;
 	  block->u.relocations = reloc;
-	  block->chunk->size -= 3;
+	  block->v.chunk->size -= 3;
 	  block_size -= 3;
 	  shrink += 3;
 	}
@@ -2309,15 +2449,13 @@ perform_relocations (state)
 
   for (block = state->blocks;  block != NULL;  block = block->next)
     {
-      struct chunk *chunk = block->chunk;
+      struct chunk *chunk = block->v.chunk;
       int old_size = chunk->size;
       int next_pc = block->next == NULL ? pc : block->next->pc;
       int new_size = next_pc - block->pc;
-      int offset = 0;
       unsigned char *new_ptr;
       unsigned char *old_buffer = chunk->data;
       unsigned char *old_ptr = old_buffer + old_size;
-      int new_end = new_size;
       if (new_size != old_size)
 	{
 	  chunk->data = (unsigned char *)
@@ -2440,6 +2578,8 @@ init_jcf_method (state, method)
   state->handlers = NULL;
   state->last_handler = NULL;
   state->num_handlers = 0;
+  state->num_finalizers = 0;
+  state->return_value_decl = NULL_TREE;
 }
 
 void
@@ -2585,6 +2725,8 @@ generate_classfile (clas, state)
 	    }
 	  for (t = DECL_ARGUMENTS (part);  t != NULL_TREE;  t = TREE_CHAIN (t))
 	    localvar_free (t, state);
+	  if (state->return_value_decl != NULL_TREE)
+	    localvar_free (state->return_value_decl, state);
 	  finish_jcf_block (state);
 	  perform_relocations (state);
 
Index: lang.c
===================================================================
RCS file: /cvs/cvsfiles/devo/gcc/java/lang.c,v
retrieving revision 1.32.2.7
diff -u -p -r1.32.2.7 lang.c
--- lang.c	1998/11/26 01:13:32	1.32.2.7
+++ lang.c	1998/12/23 00:44:18
@@ -494,6 +494,8 @@ lang_init ()
 	 (char *)(tree_code_name + (int) LAST_AND_UNUSED_TREE_CODE),
 	 (LAST_JAVA_TREE_CODE - 
 	  (int)LAST_AND_UNUSED_TREE_CODE) * sizeof (char *));
+
+  using_eh_for_cleanups ();
 }
 
 /* This doesn't do anything on purpose. It's used to satisfy the
Index: parse.y
===================================================================
RCS file: /cvs/cvsfiles/devo/gcc/java/parse.y,v
retrieving revision 1.43.2.29
diff -u -p -r1.43.2.29 parse.y
--- parse.y	1998/12/18 18:07:11	1.43.2.29
+++ parse.y	1998/12/23 00:44:18
@@ -7491,13 +7522,24 @@ java_complete_lhs (node)
 
     case SYNCHRONIZED_EXPR:
       wfl_op1 = TREE_OPERAND (node, 0);
-      COMPLETE_CHECK_OP_0 (node);
-      COMPLETE_CHECK_OP_1 (node);
       return patch_synchronized_statement (node, wfl_op1);
 
     case TRY_EXPR:
       return patch_try_statement (node);
 
+    case CLEANUP_POINT_EXPR:
+      COMPLETE_CHECK_OP_0 (node);
+      TREE_TYPE (node) = void_type_node;
+      CAN_COMPLETE_NORMALLY (node) = CAN_COMPLETE_NORMALLY (TREE_OPERAND (node, 0));
+      return node;
+
+    case WITH_CLEANUP_EXPR:
+      COMPLETE_CHECK_OP_0 (node);
+      COMPLETE_CHECK_OP_2 (node);
+      CAN_COMPLETE_NORMALLY (node) = CAN_COMPLETE_NORMALLY (TREE_OPERAND (node, 0));
+      TREE_TYPE (node) = void_type_node;
+      return node;
+
     case LABELED_BLOCK_EXPR:
       PUSH_LABELED_BLOCK (node);
       if (LABELED_BLOCK_BODY (node))
@@ -10740,12 +10782,19 @@ static tree
 patch_synchronized_statement (node, wfl_op1)
     tree node, wfl_op1;
 {
-  tree expr = TREE_OPERAND (node, 0);
+  tree expr = java_complete_tree (TREE_OPERAND (node, 0));
   tree block = TREE_OPERAND (node, 1);
-  tree try_block, catch_all, stmt, compound, decl;
+
+  tree enter, exit, finally, expr_decl;
+
+  if (expr == error_mark_node)
+    {
+      block = java_complete_tree (block);
+      return expr;
+    }
 
   /* The TYPE of expr must be a reference type */
-  if (!JREFERENCE_TYPE_P (TREE_TYPE (TREE_OPERAND (node, 0))))
+  if (!JREFERENCE_TYPE_P (TREE_TYPE (expr)))
     {
       SET_WFL_OPERATOR (wfl_operator, node, wfl_op1);
       parse_error_context (wfl_operator, "Incompatible type for `synchronized'"
@@ -10770,35 +10819,24 @@ patch_synchronized_statement (node, wfl_
 	 _Jv_MonitorExit (expression)
 	 Throw (e);
        } */
+
+  expr_decl = build_decl (VAR_DECL, generate_name (), TREE_TYPE (expr));
+  BUILD_MONITOR_ENTER (enter, expr_decl);
+  BUILD_MONITOR_EXIT (exit, expr_decl);
+  CAN_COMPLETE_NORMALLY (enter) = 1;
+  CAN_COMPLETE_NORMALLY (exit) = 1;
+  node = build1 (CLEANUP_POINT_EXPR, NULL_TREE,
+		 build (COMPOUND_EXPR, NULL_TREE,
+			build (WITH_CLEANUP_EXPR, NULL_TREE,
+			       build (COMPOUND_EXPR, NULL_TREE,
+				      build (MODIFY_EXPR, NULL_TREE,
+					     expr_decl, expr),
+				      enter),
+			       NULL_TREE, exit),
+			block));
+  node = build_expr_block (node, expr_decl);
 
-  /* TRY block */
-  BUILD_MONITOR_ENTER (stmt, expr);
-  compound = add_stmt_to_compound (NULL_TREE, int_type_node, stmt);
-  compound = add_stmt_to_compound (compound, void_type_node, block);
-  if (CAN_COMPLETE_NORMALLY (block))
-    {
-      BUILD_MONITOR_EXIT (stmt, expr);
-      compound = add_stmt_to_compound (compound, int_type_node, stmt);
-    }
-  try_block = build_expr_block (compound, NULL_TREE);
-  CAN_COMPLETE_NORMALLY (try_block) = CAN_COMPLETE_NORMALLY (block);
-
-  /* CATCH_ALL block */
-  decl = build_decl (VAR_DECL, generate_name (), ptr_type_node);
-  BUILD_ASSIGN_EXCEPTION_INFO (stmt, decl);
-  compound = add_stmt_to_compound (NULL_TREE, void_type_node, stmt);
-  BUILD_MONITOR_EXIT (stmt, expr);
-  compound = add_stmt_to_compound (compound, int_type_node, stmt);
-  BUILD_THROW (stmt, decl);
-  compound = add_stmt_to_compound (compound, void_type_node, stmt);
-  catch_all = build_expr_block (compound, decl);
-  catch_all = build_expr_block (catch_all, NULL_TREE);
-  catch_all = build1 (CATCH_EXPR, void_type_node, catch_all);
-
-  /* TRY-CATCH statement */
-  compound = build (TRY_EXPR, void_type_node, try_block, catch_all, NULL_TREE);
-  CAN_COMPLETE_NORMALLY (compound) = CAN_COMPLETE_NORMALLY (try_block);
-  return compound;
+  return java_complete_tree (node);
 }
 
 /* 14.16 The throw Statement */
@@ -11126,7 +11164,7 @@ fold_constant_for_init (node, context)
       if (val == NULL_TREE || ! TREE_CONSTANT (val))
 	return NULL_TREE;
       TREE_OPERAND (node, 0) = val;
-      node = patch_unaryop (node, op0);
+      return patch_unaryop (node, op0);
       break;
 
     case COND_EXPR:
Index: verify.c
===================================================================
RCS file: /cvs/cvsfiles/devo/gcc/java/verify.c,v
retrieving revision 1.32.2.1
diff -u -p -r1.32.2.1 verify.c
--- verify.c	1998/11/15 01:25:24	1.32.2.1
+++ verify.c	1998/12/23 00:44:18
@@ -1035,10 +1035,10 @@ verify_jvm_instructions (jcf, byte_ops, 
 		int nlocals = DECL_MAX_LOCALS (current_function_decl);
 		index = nlocals + DECL_MAX_STACK (current_function_decl);
 		return_type_map = make_tree_vec (index);
-		while (--index >= nlocals)
-		  TREE_VEC_ELT (return_type_map, index) = TYPE_UNKNOWN;
-		while (--index >= 0)
-		  TREE_VEC_ELT (return_type_map, index) = TYPE_UNUSED;
+		while (index > nlocals)
+		  TREE_VEC_ELT (return_type_map, --index) = TYPE_UNKNOWN;
+		while (index > 0)
+		  TREE_VEC_ELT (return_type_map, --index) = TYPE_UNUSED;
 		LABEL_RETURN_LABEL (target)
 		  = build_decl (LABEL_DECL, NULL_TREE, TREE_TYPE (target));
 		LABEL_PC (LABEL_RETURN_LABEL (target)) = -1;

