* bitmap.h (BITMAP_XMALLOC): New macro.
* flow.c (CLEAN_ALLOCA): Remove.
(delete_unreachable_blocks): Use xmalloc/xcalloc instead of alloca.
(life_analysis): Likewise.
(update_life_info): Don't use CLEAN_ALLOCA.
(life_analysis_1): Use xmalloc/xcalloc instead of alloca.
(calculate_global_regs_live): Likewise.
(print_rtl_with_bb): Likewise.
(verify_flow_info): Likewise.
* global.c (global_alloc): Likewise.
(global_conflicts): Likewise.
* integrate.c (save_for_inline_nocopy): Likewise.
(expand_inline_function): Likewise.
* jump.c (jump_optimize_1): Likewise.
(duplicate_loop_exit_test): Likewise.
(thread_jumps): Likewise.
* loop.c (loop_optimize): Likewise.
(combine_givs): Likewise.
(recombine_givs): Likewise.
* reorg.c (dbr_schedule): Likewise.
* unroll.c (unroll_loop): Likewise.
From-SVN: r30333
+Mon Nov  1 15:41:01 1999  Mark P. Mitchell  <mark@codesourcery.com>
+
+ * bitmap.h (BITMAP_XMALLOC): New macro.
+ * flow.c (CLEAN_ALLOCA): Remove.
+ (delete_unreachable_blocks): Use xmalloc/xcalloc instead of alloca.
+ (life_analysis): Likewise.
+ (update_life_info): Don't use CLEAN_ALLOCA.
+ (life_analysis_1): Use xmalloc/xcalloc instead of alloca.
+ (calculate_global_regs_live): Likewise.
+ (print_rtl_with_bb): Likewise.
+ (verify_flow_info): Likewise.
+ * global.c (global_alloc): Likewise.
+ (global_conflicts): Likewise.
+ * integrate.c (save_for_inline_nocopy): Likewise.
+ (expand_inline_function): Likewise.
+ * jump.c (jump_optimize_1): Likewise.
+ (duplicate_loop_exit_test): Likewise.
+ (thread_jumps): Likewise.
+ * loop.c (loop_optimize): Likewise.
+ (combine_givs): Likewise.
+ (recombine_givs): Likewise.
+ * reorg.c (dbr_schedule): Likewise.
+ * unroll.c (unroll_loop): Likewise.
+
Mon Nov  1 13:22:30 1999  Richard Henderson  <rth@cygnus.com>

* combine.c (combine_instructions): Use xmalloc instead of alloca.
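
Taken together, the entries above describe one mechanical transformation, applied function by function: replace stack allocation via alloca (and any trailing bzero/memset) with the libiberty heap allocators xmalloc/xcalloc, then free the storage explicitly on every exit path. A minimal sketch of the pattern; the function and its sizes are hypothetical, and xcalloc is assumed to be the libiberty allocator, which aborts rather than return NULL:

    #include <stdlib.h>

    extern void *xcalloc (size_t, size_t);  /* libiberty; never returns NULL */

    static int
    count_zero_entries (const int *src, size_t n)
    {
      /* Before:  scratch = (int *) alloca (n * sizeof (int));
                  bzero ((char *) scratch, n * sizeof (int));
         After: heap allocation plus an explicit free.  */
      int *scratch = (int *) xcalloc (n, sizeof (int));
      size_t i;
      int zeros = 0;

      for (i = 0; i < n; i++)
        if (src[i] == 0)
          scratch[zeros++] = (int) i;

      free (scratch);  /* alloca storage vanished on return; this must not */
      return zeros;
    }

The motivation is the one already recorded in the integrate.c comment below: for large functions, alloca'd tables could exceed the stack limit, so long-lived or potentially large tables move to the heap.
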
#define BITMAP_ALLOCA() \
bitmap_initialize ((bitmap) alloca (sizeof (bitmap_head)))
+/* Allocate a bitmap with xmalloc. */
+#define BITMAP_XMALLOC() \
+ bitmap_initialize ((bitmap) xmalloc (sizeof (bitmap_head)))
+
/* Do any cleanup needed on a bitmap when it is no longer used. */
#define BITMAP_FREE(BITMAP) \
do { \
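
A usage sketch for the new macro, assuming the bitmap.h API of this era (bitmap_set_bit) and that BITMAP_FREE, whose full definition is truncated above, releases the bitmap's element lists while leaving the pointer variable usable. Unlike BITMAP_ALLOCA, the head object itself now lives on the heap and must be freed separately:

    bitmap uids = BITMAP_XMALLOC ();  /* head on the heap, not the stack */

    bitmap_set_bit (uids, 42);
    /* ... the bitmap now survives the creating stack frame ... */

    BITMAP_FREE (uids);  /* release the bitmap's elements */
    free (uids);         /* release the xmalloc'd head itself */

This pairing (BITMAP_FREE followed by free) is exactly what the flow.c hunk below does for uid_volatile.
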
#define HAVE_prologue 0
#endif
-#ifdef USE_C_ALLOCA
-#define CLEAN_ALLOCA alloca (0)
-#else
-#define CLEAN_ALLOCA
-#endif
-
-
/* The contents of the current function definition are allocated
in this obstack, and all are freed at the end of the function.
For top-level functions, this is temporary_obstack.
int i, n;
n = n_basic_blocks;
- tos = worklist = (basic_block *) alloca (sizeof (basic_block) * n);
+ tos = worklist = (basic_block *) xmalloc (sizeof (basic_block) * n);
/* Use basic_block->aux as a marker. Clear them all. */
blocks to remove as well. */
if (deleted_handler)
delete_eh_regions ();
+
+ free (worklist);
}
/* Find EH regions for which there is no longer a handler, and delete them. */
#endif
/* Allocate a bitmap to be filled in by record_volatile_insns. */
- uid_volatile = BITMAP_ALLOCA ();
+ uid_volatile = BITMAP_XMALLOC ();
/* We want alias analysis information for local dead store elimination. */
init_alias_analysis ();
dump_flow_info (file);
BITMAP_FREE (uid_volatile);
+ free (uid_volatile);
free_basic_block_vars (1);
}
if (extent == UPDATE_LIFE_LOCAL)
verify_local_live_at_start (tmp, bb);
-
- CLEAN_ALLOCA;
});
FREE_REG_SET (tmp);
allocate_reg_life_data ();
allocate_bb_life_data ();
- reg_next_use = (rtx *) alloca (nregs * sizeof (rtx));
- memset (reg_next_use, 0, nregs * sizeof (rtx));
+ reg_next_use = (rtx *) xcalloc (nregs, sizeof (rtx));
/* Assume that the stack pointer is unchanging if alloca hasn't been used.
This will be cleared by record_volatile_insns if it encounters an insn
COPY_REG_SET (tmp, bb->global_live_at_end);
propagate_block (tmp, bb->head, bb->end, (regset) NULL, i, flags);
-
- CLEAN_ALLOCA;
}
FREE_REG_SET (tmp);
if (reload_completed)
memcpy (regs_ever_live, save_regs_ever_live, sizeof (regs_ever_live));
+ /* Clean up. */
+ free (reg_next_use);
reg_next_use = NULL;
}
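
Each removed alloca-plus-memset pair collapses into a single xcalloc, which zero-fills its result. A sketch of the equivalence, assuming GCC's rtx pointer typedef and its usual assumption that all-bits-zero is a null pointer:

    #include <stdlib.h>
    #include <string.h>

    typedef struct rtx_def *rtx;            /* stand-in for GCC's typedef */
    extern void *xmalloc (size_t);          /* libiberty */
    extern void *xcalloc (size_t, size_t);

    static void
    equivalent_allocations (size_t nregs)
    {
      rtx *a, *b;

      /* Old two-step form: allocate, then clear by hand.  */
      a = (rtx *) xmalloc (nregs * sizeof (rtx));
      memset (a, 0, nregs * sizeof (rtx));

      /* New one-step form: xcalloc zero-fills the same storage.  */
      b = (rtx *) xcalloc (nregs, sizeof (rtx));

      free (a);
      free (b);
    }
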
/* Create a worklist. Allocate an extra slot for ENTRY_BLOCK, and one
because the `head == tail' style test for an empty queue doesn't
work with a full queue. */
- queue = (basic_block *) alloca ((n_basic_blocks + 2) * sizeof (*queue));
+ queue = (basic_block *) xmalloc ((n_basic_blocks + 2) * sizeof (*queue));
qtail = queue;
qhead = qend = queue + n_basic_blocks + 2;
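
The comment above is worth unpacking: with a head == tail emptiness test, a completely full ring buffer is indistinguishable from an empty one, so the queue allocates spare slots that are never filled. The invariant in miniature (standalone sketch, names hypothetical):

    #define QSIZE 8            /* usable capacity is QSIZE - 1 */

    static int ring[QSIZE];
    static int qhead, qtail;   /* qhead == qtail  <=>  empty */

    static int
    ring_push (int v)
    {
      int next = (qtail + 1) % QSIZE;
      if (next == qhead)
        return 0;              /* "full": one slot deliberately unused */
      ring[qtail] = v;
      qtail = next;
      return 1;
    }

    static int
    ring_pop (int *v)
    {
      if (qhead == qtail)
        return 0;              /* empty */
      *v = ring[qhead];
      qhead = (qhead + 1) % QSIZE;
      return 1;
    }
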
basic_block bb = BASIC_BLOCK (i);
FREE_REG_SET (bb->local_set);
});
+
+ free (queue);
}
\f
/* Subroutines of life analysis. */
enum bb_state { NOT_IN_BB, IN_ONE_BB, IN_MULTIPLE_BB };
int max_uid = get_max_uid ();
basic_block *start = (basic_block *)
- alloca (max_uid * sizeof (basic_block));
+ xcalloc (max_uid, sizeof (basic_block));
basic_block *end = (basic_block *)
- alloca (max_uid * sizeof (basic_block));
+ xcalloc (max_uid, sizeof (basic_block));
enum bb_state *in_bb_p = (enum bb_state *)
- alloca (max_uid * sizeof (enum bb_state));
-
- memset (start, 0, max_uid * sizeof (basic_block));
- memset (end, 0, max_uid * sizeof (basic_block));
- memset (in_bb_p, 0, max_uid * sizeof (enum bb_state));
+ xcalloc (max_uid, sizeof (enum bb_state));
for (i = n_basic_blocks - 1; i >= 0; i--)
{
if (did_output)
putc ('\n', outf);
}
+
+ free (start);
+ free (end);
+ free (in_bb_p);
}
if (current_function_epilogue_delay_list != 0)
rtx x;
int i, err = 0;
- bb_info = (basic_block *) alloca (max_uid * sizeof (basic_block));
- memset (bb_info, 0, max_uid * sizeof (basic_block));
+ bb_info = (basic_block *) xcalloc (max_uid, sizeof (basic_block));
/* First pass check head/end pointers and set bb_info array used by
later passes. */
if (err)
abort ();
+
+ /* Clean up. */
+ free (bb_info);
}
\f
/* Functions to access an edge list with a vector representation.
/* Establish mappings from register number to allocation number
and vice versa. In the process, count the allocnos. */
- reg_allocno = (int *) alloca (max_regno * sizeof (int));
+ reg_allocno = (int *) xmalloc (max_regno * sizeof (int));
for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
reg_allocno[i] = -1;
/* Initialize the shared-hard-reg mapping
from the list of pairs that may share. */
- reg_may_share = (int *) alloca (max_regno * sizeof (int));
- bzero ((char *) reg_may_share, max_regno * sizeof (int));
+ reg_may_share = (int *) xcalloc (max_regno, sizeof (int));
for (x = regs_may_share; x; x = XEXP (XEXP (x, 1), 1))
{
int r1 = REGNO (XEXP (x, 0));
else
reg_allocno[i] = -1;
- allocno_reg = (int *) alloca (max_allocno * sizeof (int));
- allocno_size = (int *) alloca (max_allocno * sizeof (int));
- allocno_calls_crossed = (int *) alloca (max_allocno * sizeof (int));
- allocno_n_refs = (int *) alloca (max_allocno * sizeof (int));
- allocno_live_length = (int *) alloca (max_allocno * sizeof (int));
- bzero ((char *) allocno_size, max_allocno * sizeof (int));
- bzero ((char *) allocno_calls_crossed, max_allocno * sizeof (int));
- bzero ((char *) allocno_n_refs, max_allocno * sizeof (int));
- bzero ((char *) allocno_live_length, max_allocno * sizeof (int));
+ allocno_reg = (int *) xmalloc (max_allocno * sizeof (int));
+ allocno_size = (int *) xcalloc (max_allocno, sizeof (int));
+ allocno_calls_crossed = (int *) xcalloc (max_allocno, sizeof (int));
+ allocno_n_refs = (int *) xcalloc (max_allocno, sizeof (int));
+ allocno_live_length = (int *) xcalloc (max_allocno, sizeof (int));
for (i = FIRST_PSEUDO_REGISTER; i < (size_t) max_regno; i++)
if (reg_allocno[i] >= 0)
initialize them. */
hard_reg_conflicts
- = (HARD_REG_SET *) alloca (max_allocno * sizeof (HARD_REG_SET));
- bzero ((char *) hard_reg_conflicts, max_allocno * sizeof (HARD_REG_SET));
-
+ = (HARD_REG_SET *) xcalloc (max_allocno, sizeof (HARD_REG_SET));
hard_reg_preferences
- = (HARD_REG_SET *) alloca (max_allocno * sizeof (HARD_REG_SET));
- bzero ((char *) hard_reg_preferences, max_allocno * sizeof (HARD_REG_SET));
-
+ = (HARD_REG_SET *) xcalloc (max_allocno, sizeof (HARD_REG_SET));
hard_reg_copy_preferences
- = (HARD_REG_SET *) alloca (max_allocno * sizeof (HARD_REG_SET));
- bzero ((char *) hard_reg_copy_preferences,
- max_allocno * sizeof (HARD_REG_SET));
-
+ = (HARD_REG_SET *) xcalloc (max_allocno, sizeof (HARD_REG_SET));
hard_reg_full_preferences
- = (HARD_REG_SET *) alloca (max_allocno * sizeof (HARD_REG_SET));
- bzero ((char *) hard_reg_full_preferences,
- max_allocno * sizeof (HARD_REG_SET));
-
+ = (HARD_REG_SET *) xcalloc (max_allocno, sizeof (HARD_REG_SET));
regs_someone_prefers
- = (HARD_REG_SET *) alloca (max_allocno * sizeof (HARD_REG_SET));
- bzero ((char *) regs_someone_prefers, max_allocno * sizeof (HARD_REG_SET));
+ = (HARD_REG_SET *) xcalloc (max_allocno, sizeof (HARD_REG_SET));
allocno_row_words = (max_allocno + INT_BITS - 1) / INT_BITS;
conflicts = (INT_TYPE *) xcalloc (max_allocno * allocno_row_words,
sizeof (INT_TYPE));
- allocnos_live = (INT_TYPE *) alloca (allocno_row_words * sizeof (INT_TYPE));
+ allocnos_live = (INT_TYPE *) xmalloc (allocno_row_words * sizeof (INT_TYPE));
/* If there is work to be done (at least one reg to allocate),
perform global conflict analysis and allocate the regs. */
/* Determine the order to allocate the remaining pseudo registers. */
- allocno_order = (int *) alloca (max_allocno * sizeof (int));
+ allocno_order = (int *) xmalloc (max_allocno * sizeof (int));
for (i = 0; i < (size_t) max_allocno; i++)
allocno_order[i] = i;
if (reg_alternate_class (allocno_reg[allocno_order[i]]) != NO_REGS)
find_reg (allocno_order[i], 0, 1, 0, 0);
}
+
+ free (allocno_order);
}
/* Do the reloads now while the allocno data still exist, so that we can
retval = reload (get_insns (), 1, file);
}
+ /* Clean up. */
+ free (reg_allocno);
+ free (reg_may_share);
+ free (allocno_reg);
+ free (allocno_size);
+ free (allocno_calls_crossed);
+ free (allocno_n_refs);
+ free (allocno_live_length);
+ free (hard_reg_conflicts);
+ free (hard_reg_preferences);
+ free (hard_reg_copy_preferences);
+ free (hard_reg_full_preferences);
+ free (regs_someone_prefers);
free (conflicts);
+ free (allocnos_live);
+
return retval;
}
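
None of the new call sites test for a null return, and none of the frees are conditional on allocation having succeeded. That relies on the libiberty allocators' contract: failure is fatal. A hedged sketch of that contract (the real xmalloc lives in libiberty and differs in detail):

    #include <stdio.h>
    #include <stdlib.h>

    /* Sketch only, not libiberty's actual code.  */
    void *
    xmalloc_sketch (size_t size)
    {
      void *p = malloc (size ? size : 1);
      if (p == NULL)
        {
          fprintf (stderr, "virtual memory exhausted\n");
          exit (EXIT_FAILURE);
        }
      return p;
    }

Since every call either returns usable memory or terminates the compiler, the long run of unconditional free calls above is safe.
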
int *block_start_allocnos;
/* Make a vector that mark_reg_{store,clobber} will store in. */
- regs_set = (rtx *) alloca (max_parallel * sizeof (rtx) * 2);
+ regs_set = (rtx *) xmalloc (max_parallel * sizeof (rtx) * 2);
- block_start_allocnos = (int *) alloca (max_allocno * sizeof (int));
+ block_start_allocnos = (int *) xmalloc (max_allocno * sizeof (int));
for (b = 0; b < n_basic_blocks; b++)
{
insn = NEXT_INSN (insn);
}
}
+
+ /* Clean up. */
+ free (block_start_allocnos);
+ free (regs_set);
}
/* Expand the preference information by looking for cases where one allocno
dies in an insn that sets an allocno. If those two allocnos don't conflict,
for the parms, prior to elimination of virtual registers.
These values are needed for substituting parms properly. */
- parmdecl_map = (tree *) alloca (max_parm_reg * sizeof (tree));
+ parmdecl_map = (tree *) xmalloc (max_parm_reg * sizeof (tree));
/* Make and emit a return-label if we have not already done so. */
current_function->original_arg_vector = argvec;
current_function->original_decl_initial = DECL_INITIAL (fndecl);
DECL_SAVED_INSNS (fndecl) = current_function;
+
+ /* Clean up. */
+ free (parmdecl_map);
}
\f
/* Note whether a parameter is modified or not. */
/* Expand the function arguments. Do this first so that any
new registers get created before we allocate the maps. */
- arg_vals = (rtx *) alloca (nargs * sizeof (rtx));
- arg_trees = (tree *) alloca (nargs * sizeof (tree));
+ arg_vals = (rtx *) xmalloc (nargs * sizeof (rtx));
+ arg_trees = (tree *) xmalloc (nargs * sizeof (tree));
for (formal = DECL_ARGUMENTS (fndecl), actual = parms, i = 0;
formal;
/* Allocate the structures we use to remap things. */
- map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
+ map = (struct inline_remap *) xmalloc (sizeof (struct inline_remap));
map->fndecl = fndecl;
- map->reg_map = (rtx *) alloca (max_regno * sizeof (rtx));
- bzero ((char *) map->reg_map, max_regno * sizeof (rtx));
+ map->reg_map = (rtx *) xcalloc (max_regno, sizeof (rtx));
/* We used to use alloca here, but the size of what it would try to
allocate would occasionally cause it to exceed the stack limit and
map->label_map = real_label_map;
inl_max_uid = (inl_f->emit->x_cur_insn_uid + 1);
- map->insn_map = (rtx *) alloca (inl_max_uid * sizeof (rtx));
- bzero ((char *) map->insn_map, inl_max_uid * sizeof (rtx));
+ map->insn_map = (rtx *) xcalloc (inl_max_uid, sizeof (rtx));
map->min_insnno = 0;
map->max_insnno = inl_max_uid;
/* Make sure we free the things we explicitly allocated with xmalloc. */
if (real_label_map)
free (real_label_map);
- if (map)
- VARRAY_FREE (map->const_equiv_varray);
+ VARRAY_FREE (map->const_equiv_varray);
+ free (map->reg_map);
+ free (map->insn_map);
+ free (map);
+ free (arg_vals);
+ free (arg_trees);
+
inlining = inlining_previous;
return target;
/* Leave some extra room for labels and duplicate exit test insns
we make. */
max_jump_chain = max_uid * 14 / 10;
- jump_chain = (rtx *) alloca (max_jump_chain * sizeof (rtx));
- bzero ((char *) jump_chain, max_jump_chain * sizeof (rtx));
+ jump_chain = (rtx *) xcalloc (max_jump_chain, sizeof (rtx));
mark_all_labels (f, cross_jump);
/* Quit now if we just wanted to rebuild the JUMP_LABEL and REG_LABEL
notes and recompute LABEL_NUSES. */
if (mark_labels_only)
- return;
+ goto end;
exception_optimize ();
/* Zero the "deleted" flag of all the "deleted" insns. */
for (insn = f; insn; insn = NEXT_INSN (insn))
INSN_DELETED_P (insn) = 0;
-
- /* Show that the jump chain is not valid. */
- jump_chain = 0;
- return;
+
+ goto end;
}
#ifdef HAVE_return
if (calculate_can_reach_end (last_insn, 0, 1))
can_reach_end = 1;
- /* Show JUMP_CHAIN no longer valid. */
+end:
+ /* Clean up. */
+ free (jump_chain);
jump_chain = 0;
}
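
jump_optimize_1 is the one function where control flow had to change shape: its early returns would now leak jump_chain, so they become goto end, and every exit funnels through a single cleanup point. The idiom in miniature (hypothetical function):

    #include <stdlib.h>

    extern void *xcalloc (size_t, size_t);  /* libiberty */

    static void
    optimize_sketch (int mark_labels_only, size_t max_uid)
    {
      int *chain = (int *) xcalloc (max_uid, sizeof (int));

      if (mark_labels_only)
        goto end;              /* was "return;" -- would now leak chain */

      /* ... the main optimization passes ... */

    end:
      free (chain);            /* single exit: cleanup runs on every path */
    }
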
\f
/* We can do the replacement. Allocate reg_map if this is the
first replacement we found. */
if (reg_map == 0)
- {
- reg_map = (rtx *) alloca (max_reg * sizeof (rtx));
- bzero ((char *) reg_map, max_reg * sizeof (rtx));
- }
+ reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
REG_LOOP_TEST_P (reg) = 1;
emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
delete_insn (next_nonnote_insn (loop_start));
+
+ /* Clean up. */
+ if (reg_map)
+ free (reg_map);
return 1;
}
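
duplicate_loop_exit_test keeps its lazy allocation: reg_map is created only once a replacement is actually found, so the new cleanup frees it only if it exists. (free on a null pointer is defined in ANSI C, but the explicit guard matches the surrounding style.) Reduced sketch:

    #include <stdlib.h>

    typedef struct rtx_def *rtx;            /* stand-in */
    extern void *xcalloc (size_t, size_t);  /* libiberty */

    static void
    lazy_map_sketch (size_t max_reg, int found_replacement)
    {
      rtx *reg_map = 0;

      if (found_replacement)   /* allocate on first use only */
        reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));

      /* ... record replacements in reg_map where it exists ... */

      if (reg_map)             /* free only what was allocated */
        free (reg_map);
    }
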
int *all_reset;
/* Allocate register tables and quick-reset table. */
- modified_regs = (char *) alloca (max_reg * sizeof (char));
- same_regs = (int *) alloca (max_reg * sizeof (int));
- all_reset = (int *) alloca (max_reg * sizeof (int));
+ modified_regs = (char *) xmalloc (max_reg * sizeof (char));
+ same_regs = (int *) xmalloc (max_reg * sizeof (int));
+ all_reset = (int *) xmalloc (max_reg * sizeof (int));
for (i = 0; i < max_reg; i++)
all_reset[i] = -1;
}
}
}
+
+ /* Clean up. */
+ free (modified_regs);
+ free (same_regs);
+ free (all_reset);
}
\f
/* This is like RTX_EQUAL_P except that it knows about our handling of
max_reg_before_loop = max_reg_num ();
- moved_once = (char *) alloca (max_reg_before_loop);
- bzero (moved_once, max_reg_before_loop);
-
regs_may_share = 0;
/* Count the number of loops. */
if (max_loop_num == 0)
return;
+ moved_once = (char *) xcalloc (max_reg_before_loop, sizeof (char));
+
/* Get size to use for tables indexed by uids.
Leave some space for labels allocated by find_and_verify_loops. */
max_uid_for_loop = get_max_uid () + 1 + max_loop_num * 32;
- uid_luid = (int *) alloca (max_uid_for_loop * sizeof (int));
- uid_loop_num = (int *) alloca (max_uid_for_loop * sizeof (int));
-
- bzero ((char *) uid_luid, max_uid_for_loop * sizeof (int));
- bzero ((char *) uid_loop_num, max_uid_for_loop * sizeof (int));
+ uid_luid = (int *) xcalloc (max_uid_for_loop, sizeof (int));
+ uid_loop_num = (int *) xcalloc (max_uid_for_loop, sizeof (int));
/* Allocate tables for recording each loop. We set each entry, so they need
not be zeroed. */
- loop_number_loop_starts = (rtx *) alloca (max_loop_num * sizeof (rtx));
- loop_number_loop_ends = (rtx *) alloca (max_loop_num * sizeof (rtx));
- loop_number_loop_cont = (rtx *) alloca (max_loop_num * sizeof (rtx));
- loop_number_cont_dominator = (rtx *) alloca (max_loop_num * sizeof (rtx));
- loop_outer_loop = (int *) alloca (max_loop_num * sizeof (int));
- loop_invalid = (char *) alloca (max_loop_num * sizeof (char));
- loop_number_exit_labels = (rtx *) alloca (max_loop_num * sizeof (rtx));
- loop_number_exit_count = (int *) alloca (max_loop_num * sizeof (int));
+ loop_number_loop_starts = (rtx *) xmalloc (max_loop_num * sizeof (rtx));
+ loop_number_loop_ends = (rtx *) xmalloc (max_loop_num * sizeof (rtx));
+ loop_number_loop_cont = (rtx *) xmalloc (max_loop_num * sizeof (rtx));
+ loop_number_cont_dominator = (rtx *) xmalloc (max_loop_num * sizeof (rtx));
+ loop_outer_loop = (int *) xmalloc (max_loop_num * sizeof (int));
+ loop_invalid = (char *) xmalloc (max_loop_num * sizeof (char));
+ loop_number_exit_labels = (rtx *) xmalloc (max_loop_num * sizeof (rtx));
+ loop_number_exit_count = (int *) xmalloc (max_loop_num * sizeof (int));
#ifdef HAVE_decrement_and_branch_on_count
/* Allocate for BCT optimization */
- loop_used_count_register = (int *) alloca (max_loop_num * sizeof (int));
- bzero ((char *) loop_used_count_register, max_loop_num * sizeof (int));
+ loop_used_count_register = (int *) xcalloc (max_loop_num, sizeof (int));
#endif /* HAVE_decrement_and_branch_on_count */
/* Find and process each loop.
unroll_block_trees ();
end_alias_analysis ();
+
+ /* Clean up. */
+ free (moved_once);
+ free (uid_luid);
+ free (uid_loop_num);
+ free (loop_number_loop_starts);
+ free (loop_number_loop_ends);
+ free (loop_number_loop_cont);
+ free (loop_number_cont_dominator);
+ free (loop_outer_loop);
+ free (loop_invalid);
+ free (loop_number_exit_labels);
+ free (loop_number_exit_count);
+#ifdef HAVE_decrement_and_branch_on_count
+ free (loop_used_count_register);
+#endif /* HAVE_decrement_and_branch_on_count */
+
}
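
Note the reordering in loop_optimize: moved_once used to be allocated before the early return for max_loop_num == 0; with heap storage that order would leak, so the xcalloc now sits after the guard. In miniature:

    #include <stdlib.h>

    extern void *xcalloc (size_t, size_t);  /* libiberty */

    static void
    guard_then_allocate (int max_loop_num, size_t max_reg)
    {
      char *moved_once;

      if (max_loop_num == 0)
        return;                /* nothing allocated yet, nothing leaks */

      moved_once = (char *) xcalloc (max_reg, sizeof (char));

      /* ... per-loop processing ... */

      free (moved_once);
    }
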
\f
/* Returns the next insn, in execution order, after INSN. START and
if (!g1->ignore)
giv_array[i++] = g1;
- stats = (struct combine_givs_stats *) alloca (giv_count * sizeof (*stats));
- bzero ((char *) stats, giv_count * sizeof (*stats));
-
- can_combine = (rtx *) alloca (giv_count * giv_count * sizeof(rtx));
- bzero ((char *) can_combine, giv_count * giv_count * sizeof(rtx));
+ stats = (struct combine_givs_stats *) xcalloc (giv_count, sizeof (*stats));
+ can_combine = (rtx *) xcalloc (giv_count, giv_count * sizeof(rtx));
for (i = 0; i < giv_count; i++)
{
goto restart;
}
}
+
+ /* Clean up. */
+ free (stats);
+ free (can_combine);
}
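
can_combine is a giv_count x giv_count matrix stored as one flat block, addressed in the usual row-major way; allocating it with a single xcalloc keeps the later cleanup to one free. Standalone sketch of the layout:

    #include <stdlib.h>

    extern void *xcalloc (size_t, size_t);  /* libiberty */

    static void
    flat_matrix_sketch (size_t n)
    {
      void **cell = (void **) xcalloc (n, n * sizeof (void *));
      size_t i, j;

      for (i = 0; i < n; i++)
        for (j = 0; j < n; j++)
          cell[i * n + j] = 0;  /* row-major addressing of (i, j) */

      free (cell);              /* one block, one free */
    }
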
\f
struct recombine_givs_stats
giv_count++;
}
giv_array
- = (struct induction **) alloca (giv_count * sizeof (struct induction *));
- stats = (struct recombine_givs_stats *) alloca (giv_count * sizeof *stats);
+ = (struct induction **) xmalloc (giv_count * sizeof (struct induction *));
+ stats = (struct recombine_givs_stats *) xmalloc (giv_count * sizeof *stats);
/* Initialize stats and set up the ix field for each giv in stats to name
the corresponding index into stats. */
rescan = i;
}
}
+
+ /* Clean up. */
+ free (giv_array);
+ free (stats);
}
\f
/* EMIT code before INSERT_BEFORE to set REG = B * M + A. */
epilogue_insn = insn;
}
- uid_to_ruid = (int *) alloca ((max_uid + 1) * sizeof (int));
+ uid_to_ruid = (int *) xmalloc ((max_uid + 1) * sizeof (int));
for (i = 0, insn = first; insn; i++, insn = NEXT_INSN (insn))
uid_to_ruid[INSN_UID (insn)] = i;
REG_NOTES (insn));
}
free_resource_info ();
+ free (uid_to_ruid);
}
#endif /* DELAY_SLOTS */
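
For context on uid_to_ruid: dbr_schedule numbers insns densely in one linear pass, so later passes can compare positions of insns whose UIDs are sparse. A reduced sketch over a hypothetical linked list:

    #include <stdlib.h>

    extern void *xmalloc (size_t);  /* libiberty */

    struct node { int uid; struct node *next; };

    /* Map each node's sparse uid (all <= max_uid) to its dense position
       in list order; caller frees the returned table.  */
    static int *
    build_uid_index (struct node *first, int max_uid)
    {
      int *uid_to_pos = (int *) xmalloc ((max_uid + 1) * sizeof (int));
      int i = 0;
      struct node *n;

      for (n = first; n; n = n->next, i++)
        uid_to_pos[n->uid] = i;

      return uid_to_pos;
    }
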
max_labelno = max_label_num ();
max_insnno = get_max_uid ();
- map = (struct inline_remap *) alloca (sizeof (struct inline_remap));
+ map = (struct inline_remap *) xmalloc (sizeof (struct inline_remap));
map->integrating = 0;
map->const_equiv_varray = 0;
if (max_labelno > 0)
{
- map->label_map = (rtx *) alloca (max_labelno * sizeof (rtx));
+ map->label_map = (rtx *) xmalloc (max_labelno * sizeof (rtx));
- local_label = (char *) alloca (max_labelno);
- bzero (local_label, max_labelno);
+ local_label = (char *) xcalloc (max_labelno, sizeof (char));
}
else
map->label_map = 0;
/* Allocate space for the insn map. */
- map->insn_map = (rtx *) alloca (max_insnno * sizeof (rtx));
+ map->insn_map = (rtx *) xmalloc (max_insnno * sizeof (rtx));
/* Set this to zero, to indicate that we are doing loop unrolling,
not function inlining. */
preconditioning code and find_splittable_regs will never be used
to access the splittable_regs[] and addr_combined_regs[] arrays. */
- splittable_regs = (rtx *) alloca (maxregnum * sizeof (rtx));
- bzero ((char *) splittable_regs, maxregnum * sizeof (rtx));
- derived_regs = (char *) alloca (maxregnum);
- bzero (derived_regs, maxregnum);
- splittable_regs_updates = (int *) alloca (maxregnum * sizeof (int));
- bzero ((char *) splittable_regs_updates, maxregnum * sizeof (int));
+ splittable_regs = (rtx *) xcalloc (maxregnum, sizeof (rtx));
+ derived_regs = (char *) xcalloc (maxregnum, sizeof (char));
+ splittable_regs_updates = (int *) xcalloc (maxregnum, sizeof (int));
addr_combined_regs
- = (struct induction **) alloca (maxregnum * sizeof (struct induction *));
- bzero ((char *) addr_combined_regs, maxregnum * sizeof (struct induction *));
- local_regno = (char *) alloca (maxregnum);
- bzero (local_regno, maxregnum);
+ = (struct induction **) xcalloc (maxregnum, sizeof (struct induction *));
+ local_regno = (char *) xcalloc (maxregnum, sizeof (char));
/* Mark all local registers, i.e. the ones which are referenced only
inside the loop. */
rtx *labels;
int abs_inc, neg_inc;
- map->reg_map = (rtx *) alloca (maxregnum * sizeof (rtx));
+ map->reg_map = (rtx *) xmalloc (maxregnum * sizeof (rtx));
VARRAY_CONST_EQUIV_INIT (map->const_equiv_varray, maxregnum,
"unroll_loop");
/* Now emit a sequence of branches to jump to the proper precond
loop entry point. */
- labels = (rtx *) alloca (sizeof (rtx) * unroll_number);
+ labels = (rtx *) xmalloc (sizeof (rtx) * unroll_number);
for (i = 0; i < unroll_number; i++)
labels[i] = gen_label_rtx ();
/* Set unroll type to MODULO now. */
unroll_type = UNROLL_MODULO;
loop_preconditioned = 1;
+
+ /* Clean up. */
+ free (labels);
}
}
the constant maps also. */
maxregnum = max_reg_num ();
- map->reg_map = (rtx *) alloca (maxregnum * sizeof (rtx));
+ map->reg_map = (rtx *) xmalloc (maxregnum * sizeof (rtx));
init_reg_map (map, maxregnum);
emit_label_after (exit_label, loop_end);
egress:
- if (map && map->const_equiv_varray)
+ if (map->const_equiv_varray)
VARRAY_FREE (map->const_equiv_varray);
+ if (map->label_map)
+ {
+ free (map->label_map);
+ free (local_label);
+ }
+ free (map->insn_map);
+ free (splittable_regs);
+ free (derived_regs);
+ free (splittable_regs_updates);
+ free (addr_combined_regs);
+ free (local_regno);
+ if (map->reg_map)
+ free (map->reg_map);
+ free (map);
}
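
unroll_loop can reach the egress label before every piece of map has been created, so the cleanup frees the conditionally built parts only when they exist: label_map and local_label only when max_labelno > 0, reg_map only on paths that allocated it. A reduced sketch of tearing down a partially initialized object (struct and fields hypothetical):

    #include <stdlib.h>

    extern void *xmalloc (size_t);          /* libiberty */
    extern void *xcalloc (size_t, size_t);

    struct remap_sketch                     /* stand-in for inline_remap */
    {
      void *label_map;
      void *insn_map;
      void *reg_map;
    };

    static void
    teardown_sketch (size_t max_labelno, size_t max_insnno)
    {
      struct remap_sketch *map
        = (struct remap_sketch *) xmalloc (sizeof *map);

      map->label_map = max_labelno > 0 ? xcalloc (max_labelno, 1) : 0;
      map->insn_map = xmalloc (max_insnno);
      map->reg_map = 0;                     /* built later, on some paths */

      /* ... work that may bail out before reg_map is created ... */

      if (map->label_map)                   /* conditional pieces */
        free (map->label_map);
      free (map->insn_map);                 /* unconditional piece */
      if (map->reg_map)
        free (map->reg_map);
      free (map);
    }
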
\f
/* Return true if the loop can be safely, and profitably, preconditioned