expand_computed_goto (exp)
tree exp;
{
- rtx x = expand_expr (exp, 0, VOIDmode, 0);
+ rtx x = expand_expr (exp, NULL_RTX, VOIDmode, 0);
emit_queue ();
emit_indirect_jump (x);
}
addr = replace_rtx (copy_rtx (addr),
virtual_stack_vars_rtx, frame_pointer_rtx);
- emit_stack_restore (SAVE_NONLOCAL, addr, 0);
+ emit_stack_restore (SAVE_NONLOCAL, addr, NULL_RTX);
/* Put in the static chain register the nonlocal label address. */
emit_move_insn (static_chain_rtx,
}
}
else
- expand_goto_internal (label, label_rtx (label), 0);
+ expand_goto_internal (label, label_rtx (label), NULL_RTX);
}
/* Generate RTL code for a `goto' statement with target label BODY.
/* Execute the cleanups for blocks we are exiting. */
if (block->data.block.cleanups != 0)
{
- expand_cleanups (block->data.block.cleanups, 0);
+ expand_cleanups (block->data.block.cleanups, NULL_TREE);
do_pending_stack_adjust ();
}
}
the stack pointer. This one should be deleted as dead by flow. */
clear_pending_stack_adjust ();
do_pending_stack_adjust ();
- emit_stack_restore (SAVE_BLOCK, stack_level, 0);
+ emit_stack_restore (SAVE_BLOCK, stack_level, NULL_RTX);
}
if (body != 0 && DECL_TOO_LATE (body))
#endif
)
|| block->data.block.cleanups)
- ? tree_cons (0, block->data.block.cleanups,
+ ? tree_cons (NULL_TREE, block->data.block.cleanups,
block->data.block.outer_cleanups)
: 0);
fixup->next = goto_fixup_chain;
&& (after_label == 0
|| INSN_UID (first_insn) < INSN_UID (after_label))
&& INSN_UID (first_insn) > INSN_UID (f->before_jump)
- && ! TREE_REGDECL (f->target))
+ && ! DECL_REGISTER (f->target))
{
error_with_decl (f->target,
"label `%s' used before containing binding contour");
/* Prevent multiple errors for one label. */
- TREE_REGDECL (f->target) = 1;
+ DECL_REGISTER (f->target) = 1;
}
/* Execute cleanups for blocks this jump exits. */
&& TREE_CODE (val) != INDIRECT_REF)
TREE_VALUE (tail) = save_expr (TREE_VALUE (tail));
- output_rtx[i] = expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
+ output_rtx[i] = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
}
if (ninputs + noutputs > MAX_RECOG_OPERANDS)
}
XVECEXP (body, 3, i) /* argvec */
- = expand_expr (TREE_VALUE (tail), 0, VOIDmode, 0);
+ = expand_expr (TREE_VALUE (tail), NULL_RTX, VOIDmode, 0);
XVECEXP (body, 4, i) /* constraints */
= gen_rtx (ASM_INPUT, TYPE_MODE (TREE_TYPE (TREE_VALUE (tail))),
TREE_STRING_POINTER (TREE_PURPOSE (tail)));
}
last_expr_type = TREE_TYPE (exp);
if (! flag_syntax_only)
- last_expr_value = expand_expr (exp, expr_stmts_for_value ? 0 : const0_rtx,
+ last_expr_value = expand_expr (exp,
+ (expr_stmts_for_value
+ ? NULL_RTX : const0_rtx),
VOIDmode, 0);
/* If all we do is reference a volatile value in memory,
/* Compare the value with itself to reference it. */
emit_cmp_insn (last_expr_value, last_expr_value, EQ,
expand_expr (TYPE_SIZE (last_expr_type),
- 0, VOIDmode, 0),
+ NULL_RTX, VOIDmode, 0),
BLKmode, 0,
TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
}
}
+*/
/* Return nonzero iff in a try block at level LEVEL. */
except_stack = thishandler;
nesting_stack = thishandler;
- do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL);
+ do_jump (try_clause, thishandler->data.except_stmt.except_label, NULL_RTX);
}
/* End of a TRY block. Nothing to do for now. */
expand_end_try ()
{
except_stack->data.except_stmt.after_label = gen_label_rtx ();
- expand_goto_internal (NULL, except_stack->data.except_stmt.after_label, 0);
+ expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
+ NULL_RTX);
}
/* Start an `except' nesting contour.
for (n = except_stack; n; n = n->next)
if (n->data.except_stmt.escape_label != 0)
{
- expand_goto_internal (0, n->data.except_stmt.escape_label, 0);
+ expand_goto_internal (NULL_TREE,
+ n->data.except_stmt.escape_label, NULL_RTX);
return 1;
}
{
if (except_stack == 0 || except_stack->data.except_stmt.after_label == 0)
return 0;
- expand_goto_internal (0, except_stack->data.except_stmt.after_label, 0);
+ expand_goto_internal (NULL_TREE, except_stack->data.except_stmt.after_label,
+ NULL_RTX);
return 1;
}
\f
cond_stack = thiscond;
nesting_stack = thiscond;
- do_jump (cond, thiscond->data.cond.next_label, NULL);
+ do_jump (cond, thiscond->data.cond.next_label, NULL_RTX);
}
/* Generate RTL between then-clause and the elseif-clause
emit_jump (cond_stack->data.cond.endif_label);
emit_label (cond_stack->data.cond.next_label);
cond_stack->data.cond.next_label = gen_label_rtx ();
- do_jump (cond, cond_stack->data.cond.next_label, NULL);
+ do_jump (cond, cond_stack->data.cond.next_label, NULL_RTX);
}
/* Generate RTL between the then-clause and the else-clause
do_pending_stack_adjust ();
emit_queue ();
- emit_note (0, NOTE_INSN_LOOP_BEG);
+ emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
emit_label (thisloop->data.loop.start_label);
return thisloop;
expand_loop_continue_here ()
{
do_pending_stack_adjust ();
- emit_note (0, NOTE_INSN_LOOP_CONT);
+ emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
emit_label (loop_stack->data.loop.continue_label);
}
}
emit_jump (start_label);
- emit_note (0, NOTE_INSN_LOOP_END);
+ emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
emit_label (loop_stack->data.loop.end_label);
POPSTACK (loop_stack);
whichloop = loop_stack;
if (whichloop == 0)
return 0;
- expand_goto_internal (0, whichloop->data.loop.continue_label, 0);
+ expand_goto_internal (NULL_TREE, whichloop->data.loop.continue_label,
+ NULL_RTX);
return 1;
}
whichloop = loop_stack;
if (whichloop == 0)
return 0;
- expand_goto_internal (0, whichloop->data.loop.end_label, 0);
+ expand_goto_internal (NULL_TREE, whichloop->data.loop.end_label, NULL_RTX);
return 1;
}
whichloop = loop_stack;
if (whichloop == 0)
return 0;
- do_jump (cond, whichloop->data.loop.end_label, NULL);
+ do_jump (cond, whichloop->data.loop.end_label, NULL_RTX);
return 1;
}
for (n = nesting_stack; n; n = n->all)
if (n->exit_label != 0)
{
- expand_goto_internal (0, n->exit_label, 0);
+ expand_goto_internal (NULL_TREE, n->exit_label, NULL_RTX);
return 1;
}
{
if (end_label == 0)
end_label = return_label = gen_label_rtx ();
- expand_goto_internal (0, end_label, last_insn);
+ expand_goto_internal (NULL_TREE, end_label, last_insn);
return;
}
#endif
/* Otherwise jump to the epilogue. */
- expand_goto_internal (0, end_label, last_insn);
+ expand_goto_internal (NULL_TREE, end_label, last_insn);
}
\f
/* Generate RTL to evaluate the expression RETVAL and return it
/* If function wants no value, give it none. */
if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
{
- expand_expr (retval, 0, VOIDmode, 0);
+ expand_expr (retval, NULL_RTX, VOIDmode, 0);
expand_null_return ();
return;
}
|| TREE_CODE (TREE_OPERAND (retval_rhs, 2)) == CALL_EXPR))
{
rtx label = gen_label_rtx ();
- do_jump (TREE_OPERAND (retval_rhs, 0), label, 0);
+ do_jump (TREE_OPERAND (retval_rhs, 0), label, NULL_RTX);
expand_return (build (MODIFY_EXPR, TREE_TYPE (current_function_decl),
DECL_RESULT (current_function_decl),
TREE_OPERAND (retval_rhs, 1)));
tail_recursion_reentry);
}
emit_queue ();
- expand_goto_internal (0, tail_recursion_label, last_insn);
+ expand_goto_internal (NULL_TREE, tail_recursion_label, last_insn);
emit_barrier ();
return;
}
&& GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) == REG)
{
/* Calculate the return value into a pseudo reg. */
- val = expand_expr (retval_rhs, 0, VOIDmode, 0);
+ val = expand_expr (retval_rhs, NULL_RTX, VOIDmode, 0);
emit_queue ();
/* All temporaries have now been used. */
free_temp_slots ();
{
/* No cleanups or no hard reg used;
calculate value into hard return reg. */
- expand_expr (retval, 0, VOIDmode, 0);
+ expand_expr (retval, NULL_RTX, VOIDmode, 0);
emit_queue ();
free_temp_slots ();
expand_value_return (DECL_RTL (DECL_RESULT (current_function_decl)));
argvec = (rtx *) alloca (i * sizeof (rtx));
for (a = actuals, i = 0; a; a = TREE_CHAIN (a), i++)
- argvec[i] = expand_expr (TREE_VALUE (a), 0, VOIDmode, 0);
+ argvec[i] = expand_expr (TREE_VALUE (a), NULL_RTX, VOIDmode, 0);
/* Find which actual values refer to current values of previous formals.
Copy each of them now, before any formal is changed. */
{
struct nesting *thisblock = ALLOC_NESTING ();
- rtx note = emit_note (0, NOTE_INSN_BLOCK_BEG);
+ rtx note = emit_note (NULL_PTR, NOTE_INSN_BLOCK_BEG);
/* Make an entry on block_stack for the block we are entering. */
/* Mark the beginning and end of the scope if requested. */
if (mark_ends)
- emit_note (0, NOTE_INSN_BLOCK_END);
+ emit_note (NULL_PTR, NOTE_INSN_BLOCK_END);
else
/* Get rid of the beginning-mark if we don't make an end-mark. */
NOTE_LINE_NUMBER (thisblock->data.block.first_insn) = NOTE_INSN_DELETED;
expr_stmts_for_value = 0;
/* Do the cleanups. */
- expand_cleanups (thisblock->data.block.cleanups, 0);
+ expand_cleanups (thisblock->data.block.cleanups, NULL_TREE);
do_pending_stack_adjust ();
expr_stmts_for_value = old_expr_stmts_for_value;
if (thisblock->data.block.stack_level != 0)
{
emit_stack_restore (thisblock->next ? SAVE_BLOCK : SAVE_FUNCTION,
- thisblock->data.block.stack_level, 0);
+ thisblock->data.block.stack_level, NULL_RTX);
if (nonlocal_goto_handler_slot != 0)
- emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, 0);
+ emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level,
+ NULL_RTX);
}
/* Any gotos out of this block must also do these things.
if (TREE_CODE (decl) != VAR_DECL)
return;
- if (TREE_STATIC (decl) || TREE_EXTERNAL (decl))
+ if (TREE_STATIC (decl) || DECL_EXTERNAL (decl))
return;
/* Create the RTL representation for the variable. */
&& TREE_CODE (type) == REAL_TYPE)
&& ! TREE_THIS_VOLATILE (decl)
&& ! TREE_ADDRESSABLE (decl)
- && (TREE_REGDECL (decl) || ! obey_regdecls))
+ && (DECL_REGISTER (decl) || ! obey_regdecls))
{
/* Automatic variable that can go in a register. */
DECL_RTL (decl) = gen_reg_rtx (DECL_MODE (decl));
size = expand_expr (size_binop (CEIL_DIV_EXPR,
DECL_SIZE (decl),
size_int (BITS_PER_UNIT)),
- 0, VOIDmode, 0);
+ NULL_RTX, VOIDmode, 0);
free_temp_slots ();
/* This is equivalent to calling alloca. */
current_function_calls_alloca = 1;
/* Allocate space on the stack for the variable. */
- address = allocate_dynamic_stack_space (size, 0, DECL_ALIGN (decl));
+ address = allocate_dynamic_stack_space (size, NULL_RTX,
+ DECL_ALIGN (decl));
if (nonlocal_goto_handler_slot != 0)
- emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, 0);
+ emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
/* Reference the variable indirect through that rtx. */
DECL_RTL (decl) = gen_rtx (MEM, DECL_MODE (decl), address);
rtx beyond_jump = get_last_insn ();
rtx new_before_jump;
- expand_cleanups (list, 0);
+ expand_cleanups (list, NULL_TREE);
/* Pop any pushes done in the cleanups,
in case function is about to return. */
do_pending_stack_adjust ();
/* Make sure case_stmt.start points to something that won't
need any transformation before expand_end_case. */
if (GET_CODE (get_last_insn ()) != NOTE)
- emit_note (0, NOTE_INSN_DELETED);
+ emit_note (NULL_PTR, NOTE_INSN_DELETED);
thiscase->data.case_stmt.start = get_last_insn ();
}
if (index_type == error_mark_node)
return 0;
- /* There may be NOP_EXPR around the value if we got it from an enum. */
- STRIP_NOPS (value);
-
/* Convert VALUE to the type in which the comparisons are nominally done. */
if (value != 0)
value = convert (nominal_type, value);
}
case_stack->data.case_stmt.seenlabel = 1;
- /* There may be NOP_EXPR around the value if we got it from an enum. */
- STRIP_NOPS (value1);
- STRIP_NOPS (value2);
-
/* Convert VALUEs to type in which the comparisons are nominally done. */
if (value1 == 0) /* Negative infinity. */
value1 = TYPE_MIN_VALUE(index_type);
else if (TREE_INT_CST_HIGH (range) != 0
|| count < CASE_VALUES_THRESHOLD
- || (unsigned) (TREE_INT_CST_LOW (range)) > 10 * count
+ || ((unsigned HOST_WIDE_INT) (TREE_INT_CST_LOW (range))
+ > 10 * count)
|| TREE_CODE (index_expr) == INTEGER_CST
/* These will reduce to a constant. */
|| (TREE_CODE (index_expr) == CALL_EXPR
|| (TREE_CODE (index_expr) == COMPOUND_EXPR
&& TREE_CODE (TREE_OPERAND (index_expr, 1)) == INTEGER_CST))
{
- index = expand_expr (index_expr, 0, VOIDmode, 0);
+ index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
/* If the index is a short or char that we do not have
an insn to handle comparisons directly, convert it to
use_cost_table
= (TREE_CODE (TREE_TYPE (orig_index)) != ENUMERAL_TYPE
&& estimate_case_costs (thiscase->data.case_stmt.case_list));
- balance_case_nodes (&thiscase->data.case_stmt.case_list, 0);
+ balance_case_nodes (&thiscase->data.case_stmt.case_list,
+ NULL_PTR);
emit_case_nodes (index, thiscase->data.case_stmt.case_list,
default_label, TREE_TYPE (index_expr));
emit_jump_if_reachable (default_label);
> GET_MODE_BITSIZE (index_mode))
{
enum machine_mode omode = TYPE_MODE (TREE_TYPE (index_expr));
- rtx rangertx = expand_expr (range, 0, VOIDmode, 0);
+ rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
/* We must handle the endpoints in the original mode. */
index_expr = build (MINUS_EXPR, TREE_TYPE (index_expr),
index_expr, minval);
minval = integer_zero_node;
- index = expand_expr (index_expr, 0, VOIDmode, 0);
- emit_cmp_insn (rangertx, index, LTU, 0, omode, 0, 0);
+ index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
+ emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 0, 0);
emit_jump_insn (gen_bltu (default_label));
/* Now we can safely truncate. */
index = convert_to_mode (index_mode, index, 0);
if (TYPE_MODE (TREE_TYPE (index_expr)) != index_mode)
index_expr = convert (type_for_size (index_bits, 0),
index_expr);
- index = expand_expr (index_expr, 0, VOIDmode, 0);
+ index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
}
emit_queue ();
index = protect_from_queue (index, 0);
do_pending_stack_adjust ();
- emit_jump_insn (gen_casesi (index, expand_expr (minval, 0, VOIDmode, 0),
- expand_expr (range, 0, VOIDmode, 0),
+ emit_jump_insn (gen_casesi (index, expand_expr (minval, NULL_RTX,
+ VOIDmode, 0),
+ expand_expr (range, NULL_RTX,
+ VOIDmode, 0),
table_label, default_label));
win = 1;
}
fold (build (MINUS_EXPR,
TREE_TYPE (index_expr),
index_expr, minval)));
- index = expand_expr (index_expr, 0, VOIDmode, 0);
+ index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
emit_queue ();
index = protect_from_queue (index, 0);
do_pending_stack_adjust ();
do_tablejump (index, TYPE_MODE (TREE_TYPE (index_expr)),
- expand_expr (range, 0, VOIDmode, 0),
+ expand_expr (range, NULL_RTX, VOIDmode, 0),
table_label, default_label);
win = 1;
}
for (n = thiscase->data.case_stmt.case_list; n; n = n->right)
{
- register int i
+ register HOST_WIDE_INT i
= TREE_INT_CST_LOW (n->low) - TREE_INT_CST_LOW (minval);
while (1)
emit_label (table_label);
/* This would be a lot nicer if CASE_VECTOR_PC_RELATIVE
- were an expression, instead of a an #ifdef/#ifndef. */
+ were an expression, instead of an #ifdef/#ifndef. */
if (
#ifdef CASE_VECTOR_PC_RELATIVE
1 ||
enum machine_mode mode = GET_MODE (op1);
if (mode == VOIDmode)
mode = GET_MODE (op2);
- emit_cmp_insn (op1, op2, EQ, 0, mode, unsignedp, 0);
+ emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
emit_jump_insn (gen_beq (label));
}
}
/* Node is single valued. First see if the index expression matches
this node and then check our children, if any. */
- do_jump_if_equal (index, expand_expr (node->low, 0, VOIDmode, 0),
+ do_jump_if_equal (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
label_rtx (node->code_label), unsignedp);
if (node->right != 0 && node->left != 0)
if (node_is_bounded (node->right, index_type))
{
- emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
- GT, 0, mode, unsignedp, 0);
+ emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ GT, NULL_RTX, mode, unsignedp, 0);
emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
emit_case_nodes (index, node->left, default_label, index_type);
else if (node_is_bounded (node->left, index_type))
{
- emit_cmp_insn (index, expand_expr (node->high, 0,
+ emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
VOIDmode, 0),
- LT, 0, mode, unsignedp, 0);
+ LT, NULL_RTX, mode, unsignedp, 0);
emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
emit_case_nodes (index, node->right, default_label, index_type);
}
= build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
/* See if the value is on the right. */
- emit_cmp_insn (index, expand_expr (node->high, 0,
+ emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
VOIDmode, 0),
- GT, 0, mode, unsignedp, 0);
+ GT, NULL_RTX, mode, unsignedp, 0);
emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
/* Value must be on the left.
{
if (!node_has_low_bound (node, index_type))
{
- emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
- LT, 0, mode, unsignedp, 0);
+ emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ LT, NULL_RTX, mode, unsignedp, 0);
emit_jump_insn ((*gen_blt_pat) (default_label));
}
since we haven't ruled out the numbers less than
this node's value. So handle node->right explicitly. */
do_jump_if_equal (index,
- expand_expr (node->right->low, 0, VOIDmode, 0),
+ expand_expr (node->right->low, NULL_RTX,
+ VOIDmode, 0),
label_rtx (node->right->code_label), unsignedp);
}
{
if (!node_has_high_bound (node, index_type))
{
- emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
- GT, 0, mode, unsignedp, 0);
+ emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ GT, NULL_RTX, mode, unsignedp, 0);
emit_jump_insn ((*gen_bgt_pat) (default_label));
}
since we haven't ruled out the numbers less than
this node's value. So handle node->left explicitly. */
do_jump_if_equal (index,
- expand_expr (node->left->low, 0, VOIDmode, 0),
+ expand_expr (node->left->low, NULL_RTX,
+ VOIDmode, 0),
label_rtx (node->left->code_label), unsignedp);
}
}
then handle the two subtrees. */
tree test_label = 0;
- emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
- GT, 0, mode, unsignedp, 0);
+ emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ GT, NULL_RTX, mode, unsignedp, 0);
if (node_is_bounded (node->right, index_type))
/* Right hand node is fully bounded so we can eliminate any
/* Value belongs to this node or to the left-hand subtree. */
- emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
- GE, 0, mode, unsignedp, 0);
+ emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
+ GE, NULL_RTX, mode, unsignedp, 0);
emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
/* Handle the left-hand subtree. */
if they are possible. */
if (!node_has_low_bound (node, index_type))
{
- emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
- LT, 0, mode, unsignedp, 0);
+ emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
+ VOIDmode, 0),
+ LT, NULL_RTX, mode, unsignedp, 0);
emit_jump_insn ((*gen_blt_pat) (default_label));
}
/* Value belongs to this node or to the right-hand subtree. */
- emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
- LE, 0, mode, unsignedp, 0);
+ emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ LE, NULL_RTX, mode, unsignedp, 0);
emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
emit_case_nodes (index, node->right, default_label, index_type);
if they are possible. */
if (!node_has_high_bound (node, index_type))
{
- emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
- GT, 0, mode, unsignedp, 0);
+ emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ GT, NULL_RTX, mode, unsignedp, 0);
emit_jump_insn ((*gen_bgt_pat) (default_label));
}
/* Value belongs to this node or to the left-hand subtree. */
- emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
- GE, 0, mode, unsignedp, 0);
+ emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
+ GE, NULL_RTX, mode, unsignedp, 0);
emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
emit_case_nodes (index, node->left, default_label, index_type);
if (!node_has_high_bound (node, index_type))
{
- emit_cmp_insn (index, expand_expr (node->high, 0, VOIDmode, 0),
- GT, 0, mode, unsignedp, 0);
+ emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ GT, NULL_RTX, mode, unsignedp, 0);
emit_jump_insn ((*gen_bgt_pat) (default_label));
}
if (!node_has_low_bound (node, index_type))
{
- emit_cmp_insn (index, expand_expr (node->low, 0, VOIDmode, 0),
- LT, 0, mode, unsignedp, 0);
+ emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
+ VOIDmode, 0),
+ LT, NULL_RTX, mode, unsignedp, 0);
emit_jump_insn ((*gen_blt_pat) (default_label));
}
/* These routines are used by the loop unrolling code. They copy BLOCK trees
so that the debugging info will be correct for the unrolled loop. */
-/* Indexed by loop number, contains pointer to the first block in the loop,
- or zero if none. Only valid if doing loop unrolling and outputting debugger
- info. */
-
-tree *loop_number_first_block;
-
-/* Indexed by loop number, contains pointer to the last block in the loop,
- only valid if loop_number_first_block is nonzero. */
-
-tree *loop_number_last_block;
-
-/* Indexed by loop number, contains nesting level of first block in the
- loop, if any. Only valid if doing loop unrolling and outputting debugger
- info. */
-
-int *loop_number_block_level;
+/* Indexed by block number, contains a pointer to the N'th block node. */
-/* Scan the function looking for loops, and walk the BLOCK tree at the
- same time. Record the first and last BLOCK tree corresponding to each
- loop. This function is similar to find_and_verify_loops in loop.c. */
+static tree *block_vector;
void
-find_loop_tree_blocks (f)
- rtx f;
+find_loop_tree_blocks ()
{
- rtx insn;
- int current_loop = -1;
- int next_loop = -1;
- int loop;
- int block_level, tree_level;
- tree tree_block, parent_tree_block;
-
- tree_block = DECL_INITIAL (current_function_decl);
- parent_tree_block = 0;
- block_level = 0;
- tree_level = -1;
-
- /* Find boundaries of loops, and save the first and last BLOCK tree
- corresponding to each loop. */
-
- for (insn = f; insn; insn = NEXT_INSN (insn))
- {
- if (GET_CODE (insn) == NOTE)
- switch (NOTE_LINE_NUMBER (insn))
- {
- case NOTE_INSN_LOOP_BEG:
- loop_number_block_level[++next_loop] = block_level;
- loop_number_first_block[next_loop] = 0;
- current_loop = next_loop;
- break;
-
- case NOTE_INSN_LOOP_END:
- if (current_loop == -1)
- abort ();
+ tree block = DECL_INITIAL (current_function_decl);
- current_loop = loop_outer_loop[current_loop];
- break;
+ /* The first block is for the function body, and does not have
+ corresponding block notes. Don't include it in the block vector. */
+ block = BLOCK_SUBBLOCKS (block);
- case NOTE_INSN_BLOCK_BEG:
- if (tree_level < block_level)
- {
- /* We have seen two NOTE_INSN_BLOCK_BEG notes in a row, so
- we must now visit the subtree of the current block. */
- parent_tree_block = tree_block;
- tree_block = BLOCK_SUBBLOCKS (tree_block);
- tree_level++;
- }
- else if (tree_level > block_level)
- abort ();
-
- /* Save this block tree here for all nested loops for which
- this is the topmost block. */
- for (loop = current_loop;
- loop != -1 && block_level == loop_number_block_level[loop];
- loop = loop_outer_loop[loop])
- {
- if (loop_number_first_block[loop] == 0)
- loop_number_first_block[loop] = tree_block;
- loop_number_last_block[loop] = tree_block;
- }
-
- block_level++;
- break;
-
- case NOTE_INSN_BLOCK_END:
- block_level--;
- if (tree_level > block_level)
- {
- /* We have seen two NOTE_INSN_BLOCK_END notes in a row, so
- we must now visit the parent of the current tree. */
- if (tree_block != 0 || parent_tree_block == 0)
- abort ();
- tree_block = parent_tree_block;
- parent_tree_block = BLOCK_SUPERCONTEXT (parent_tree_block);
- tree_level--;
- }
- tree_block = BLOCK_CHAIN (tree_block);
- break;
- }
- }
+ block_vector = identify_blocks (block, get_insns ());
}
-/* This routine will make COPIES-1 copies of all BLOCK trees that correspond
- to BLOCK_BEG notes inside the loop LOOP_NUMBER.
-
- Note that we only copy the topmost level of tree nodes; they will share
- pointers to the same subblocks. */
-
void
-unroll_block_trees (loop_number, copies)
- int loop_number;
- int copies;
+unroll_block_trees ()
{
- int i;
+ tree block = DECL_INITIAL (current_function_decl);
- /* First check whether there are any blocks that need to be copied. */
- if (loop_number_first_block[loop_number])
- {
- tree first_block = loop_number_first_block[loop_number];
- tree last_block = loop_number_last_block[loop_number];
- tree last_block_created = 0;
-
- for (i = 0; i < copies - 1; i++)
- {
- tree block = first_block;
- tree insert_after = last_block;
- tree copied_block;
-
- /* Copy every block between first_block and last_block inclusive,
- inserting the new blocks after last_block. */
- do
- {
- tree new_block = make_node (BLOCK);
- BLOCK_VARS (new_block) = BLOCK_VARS (block);
- BLOCK_TYPE_TAGS (new_block) = BLOCK_TYPE_TAGS (block);
- BLOCK_SUBBLOCKS (new_block) = BLOCK_SUBBLOCKS (block);
- BLOCK_SUPERCONTEXT (new_block) = BLOCK_SUPERCONTEXT (block);
- TREE_USED (new_block) = TREE_USED (block);
-
- /* Insert the new block after the insertion point, and move
- the insertion point to the new block. This ensures that
- the copies are inserted in the right order. */
- BLOCK_CHAIN (new_block) = BLOCK_CHAIN (insert_after);
- BLOCK_CHAIN (insert_after) = new_block;
- insert_after = new_block;
-
- copied_block = block;
- block = BLOCK_CHAIN (block);
- }
- while (copied_block != last_block);
-
- /* Remember the last block created, so that we can update the
- info in the tables. */
- if (last_block_created == 0)
- last_block_created = insert_after;
- }
-
- /* For all nested loops for which LAST_BLOCK was originally the last
- block, update the tables to indicate that LAST_BLOCK_CREATED is
- now the last block in the loop. */
- for (i = loop_number; last_block == loop_number_last_block[i];
- i = loop_outer_loop[i])
- loop_number_last_block[i] = last_block_created;
- }
+ reorder_blocks (block_vector, block, get_insns ());
}
+