copy_size_rtx, NULL_RTX, 0,
OPTAB_LIB_WIDEN);
- emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
- GET_MODE (size), 0, 0);
label = gen_label_rtx ();
- emit_jump_insn (gen_blt (label));
+ emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
+ GET_MODE (size), 0, 0, label);
}
if (size != const0_rtx)
if (! (GET_CODE (index_val) == CONST_INT
&& GET_CODE (lo_r) == CONST_INT))
{
- emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
- GET_MODE (index_val), iunsignedp, 0);
- emit_jump_insn (gen_blt (op1));
+ emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
+ GET_MODE (index_val), iunsignedp, 0, op1);
}
if (! (GET_CODE (index_val) == CONST_INT
&& GET_CODE (hi_r) == CONST_INT))
{
- emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
- GET_MODE (index_val), iunsignedp, 0);
- emit_jump_insn (gen_bgt (op1));
+ emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
+ GET_MODE (index_val), iunsignedp, 0, op1);
}
/* Calculate the element number of bit zero in the first word
temp = copy_to_reg (temp);
op1 = gen_label_rtx ();
- emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
- GET_MODE (temp), unsignedp, 0);
- emit_jump_insn (gen_beq (op1));
+ emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
+ GET_MODE (temp), unsignedp, 0, op1);
emit_move_insn (temp, const1_rtx);
emit_label (op1);
return temp;
/* Test the result; if it is NaN, set errno=EDOM because
the argument was not in the domain. */
- emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
- emit_jump_insn (gen_beq (lab1));
+ emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
+ 0, 0, lab1);
#ifdef TARGET_EDOM
{
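
The self-comparison above relies on the IEEE 754 rule that a NaN compares unequal to every value, itself included, so the EQ branch to lab1 is taken for every ordinary result and presumably only a NaN falls through to the errno=EDOM code. A minimal standalone sketch of that idiom (plain C, not GCC internals; the helper name is invented for illustration):

  #include <assert.h>
  #include <math.h>     /* for NAN */

  /* True only when R is a NaN: under IEEE 754, a NaN is the one value
     that compares unequal to itself.  */
  static int
  result_is_nan (double r)
  {
    return ! (r == r);
  }

  int
  main (void)
  {
    assert (! result_is_nan (1.0));
    assert (result_is_nan (NAN));
    return 0;
  }
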
or equal to the minimum value of the range and less than or equal to
the maximum value of the range. */
- emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
- emit_jump_insn (gen_bgtu (default_label));
+ emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
+ 0, default_label);
/* If index is in range, it must fit in Pmode.
Convert to Pmode so we can index with it. */
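
The GTU test above is the usual single-comparison bounds check: once the lower bound has been subtracted from the index (as the surrounding comment implies), one unsigned compare against the range rejects both indexes below the minimum, which wrap around to huge unsigned values, and indexes above the maximum. A standalone sketch in plain C rather than RTL (the function name is invented for illustration):

  #include <assert.h>

  /* Nonzero when INDEX lies outside [MINVAL, MAXVAL], using one unsigned
     comparison: values below MINVAL wrap to large unsigned numbers, so
     they fail the same test as values above MAXVAL.  */
  static int
  case_index_out_of_range (long index, long minval, long maxval)
  {
    unsigned long biased = (unsigned long) index - (unsigned long) minval;
    unsigned long range  = (unsigned long) maxval - (unsigned long) minval;
    return biased > range;       /* corresponds to the GTU branch to default_label */
  }

  int
  main (void)
  {
    assert (case_index_out_of_range (-1L, 0L, 10L));
    assert (! case_index_out_of_range (10L, 0L, 10L));
    assert (case_index_out_of_range (11L, 0L, 10L));
    return 0;
  }
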
constant and Y is not a constant, then the comparison is swapped to
ensure that the comparison RTL has the canonical form.
- MODE is the mode of the inputs (in case they are const_int).
- UNSIGNEDP nonzero says that X and Y are unsigned;
- this matters if they need to be widened.
+ UNSIGNEDP nonzero says that X and Y are unsigned; this matters if they
+ need to be widened by emit_cmp_insn. UNSIGNEDP is also used to select
+ the proper branch condition code.
- If they have mode BLKmode, then SIZE specifies the size of both X and Y,
- and ALIGN specifies the known shared alignment of X and Y.
+ If X and Y have mode BLKmode, then SIZE specifies the size of both X and Y,
+ and ALIGN specifies the known shared alignment of X and Y.
- COMPARISON is the rtl operator to compare with (EQ, NE, GT, etc.).
- It is ignored for fixed-point and block comparisons;
- it is used only for floating-point comparisons. */
+ MODE is the mode of the inputs (in case they are const_int).
+
+ COMPARISON is the rtl operator to compare with (EQ, NE, GT, etc.). It is
+ passed unchanged to emit_cmp_insn and afterwards, if UNSIGNEDP is nonzero,
+ converted to its unsigned variant to select the proper jump instruction. */
void
emit_cmp_and_jump_insns (x, y, comparison, size, mode, unsignedp, align, label)
op1 = y;
}
emit_cmp_insn (op0, op1, comparison, size, mode, unsignedp, align);
+
+ if (unsignedp)
+ comparison = unsigned_condition (comparison);
emit_jump_insn ((*bcc_gen_fctn[(int) comparison]) (label));
}
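
Taken together with the hunks above, the new helper folds the old three-step pattern into one call. Schematically (operand names are placeholders, not taken from any particular caller):

  /* Before: separate compare and conditional jump, with each caller
     choosing the signed or unsigned branch generator itself.  */
  emit_cmp_insn (op0, op1, GT, NULL_RTX, mode, unsignedp, align);
  emit_jump_insn ((unsignedp ? gen_bgtu : gen_bgt) (label));

  /* After: one call; the helper swaps the operands when X is constant
     and Y is not, emits the compare, and applies unsigned_condition
     before indexing bcc_gen_fctn.  */
  emit_cmp_and_jump_insns (op0, op1, GT, NULL_RTX, mode, unsignedp,
                           align, label);
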
correct its value by 2**bitwidth. */
do_pending_stack_adjust ();
- emit_cmp_insn (from, const0_rtx, GE, NULL_RTX, GET_MODE (from), 0, 0);
- emit_jump_insn (gen_bge (label));
+ emit_cmp_and_jump_insns (from, const0_rtx, GE, NULL_RTX, GET_MODE (from),
+ 0, 0, label);
/* On SCO 3.2.1, ldexp rejects values outside [0.5, 1).
Rather than setting up a dconst_dot_5, let's hope SCO
/* See if we need to do the subtraction. */
do_pending_stack_adjust ();
- emit_cmp_insn (from, limit, GE, NULL_RTX, GET_MODE (from), 0, 0);
- emit_jump_insn (gen_bge (lab1));
+ emit_cmp_and_jump_insns (from, limit, GE, NULL_RTX, GET_MODE (from),
+ 0, 0, lab1);
/* If not, do the signed "fix" and branch around fixup code. */
expand_fix (to, from, 0);
rtx lab = gen_label_rtx ();
/* Compare the value with itself to reference it. */
- emit_cmp_insn (last_expr_value, last_expr_value, EQ,
- expand_expr (TYPE_SIZE (last_expr_type),
- NULL_RTX, VOIDmode, 0),
- BLKmode, 0,
- TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT);
- emit_jump_insn ((*bcc_gen_fctn[(int) EQ]) (lab));
+ emit_cmp_and_jump_insns (last_expr_value, last_expr_value, EQ,
+ expand_expr (TYPE_SIZE (last_expr_type),
+ NULL_RTX, VOIDmode, 0),
+ BLKmode, 0,
+ TYPE_ALIGN (last_expr_type) / BITS_PER_UNIT,
+ lab);
emit_label (lab);
}
}
index_expr, minval);
minval = integer_zero_node;
index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
- emit_cmp_insn (rangertx, index, LTU, NULL_RTX, omode, 1, 0);
- emit_jump_insn (gen_bltu (default_label));
+ emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
+ omode, 1, 0, default_label);
/* Now we can safely truncate. */
index = convert_to_mode (index_mode, index, 0);
}
enum machine_mode mode = GET_MODE (op1);
if (mode == VOIDmode)
mode = GET_MODE (op2);
- emit_cmp_insn (op1, op2, EQ, NULL_RTX, mode, unsignedp, 0);
- emit_jump_insn (gen_beq (label));
+ emit_cmp_and_jump_insns (op1, op2, EQ, NULL_RTX, mode, unsignedp,
+ 0, label);
}
}
\f
/* If INDEX has an unsigned type, we must make unsigned branches. */
int unsignedp = TREE_UNSIGNED (index_type);
typedef rtx rtx_fn ();
- rtx_fn *gen_bgt_pat = unsignedp ? gen_bgtu : gen_bgt;
- rtx_fn *gen_bge_pat = unsignedp ? gen_bgeu : gen_bge;
- rtx_fn *gen_blt_pat = unsignedp ? gen_bltu : gen_blt;
- rtx_fn *gen_ble_pat = unsignedp ? gen_bleu : gen_ble;
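
With the conversion, the per-comparison branch generators selected above become dead: emit_cmp_and_jump_insns receives unsignedp and applies unsigned_condition itself before indexing bcc_gen_fctn (see the expr.c hunk earlier). A small self-contained mock-up of the mapping that conversion is assumed to perform (plain C, not the GCC routine):

  #include <assert.h>

  /* Stand-in for the assumed effect of unsigned_condition: the signed
     ordering codes get their unsigned counterparts, while equality codes
     and already-unsigned codes come back unchanged.  */
  enum cmp_code { EQ, NE, GT, GE, LT, LE, GTU, GEU, LTU, LEU };

  static enum cmp_code
  make_unsigned_code (enum cmp_code code)
  {
    switch (code)
      {
      case GT: return GTU;
      case GE: return GEU;
      case LT: return LTU;
      case LE: return LEU;
      default: return code;
      }
  }

  int
  main (void)
  {
    assert (make_unsigned_code (GT) == GTU);
    assert (make_unsigned_code (EQ) == EQ);
    return 0;
  }
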
enum machine_mode mode = GET_MODE (index);
/* See if our parents have already tested everything for us.
if (node_is_bounded (node->right, index_type))
{
- emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
- GT, NULL_RTX, mode, unsignedp, 0);
-
- emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
+ emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ GT, NULL_RTX, mode, unsignedp, 0,
+ label_rtx (node->right->code_label));
emit_case_nodes (index, node->left, default_label, index_type);
}
else if (node_is_bounded (node->left, index_type))
{
- emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
- LT, NULL_RTX, mode, unsignedp, 0);
- emit_jump_insn ((*gen_blt_pat) (label_rtx (node->left->code_label)));
+ emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ LT, NULL_RTX, mode, unsignedp, 0,
+ label_rtx (node->left->code_label));
emit_case_nodes (index, node->right, default_label, index_type);
}
= build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
/* See if the value is on the right. */
- emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
- GT, NULL_RTX, mode, unsignedp, 0);
- emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
+ emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ GT, NULL_RTX, mode, unsignedp, 0,
+ label_rtx (test_label));
/* Value must be on the left.
Handle the left-hand subtree. */
{
if (!node_has_low_bound (node, index_type))
{
- emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
- LT, NULL_RTX, mode, unsignedp, 0);
- emit_jump_insn ((*gen_blt_pat) (default_label));
+ emit_cmp_and_jump_insns (index, expand_expr (node->high,
+ NULL_RTX,
+ VOIDmode, 0),
+ LT, NULL_RTX, mode, unsignedp, 0,
+ default_label);
}
emit_case_nodes (index, node->right, default_label, index_type);
{
if (!node_has_high_bound (node, index_type))
{
- emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
- GT, NULL_RTX, mode, unsignedp, 0);
- emit_jump_insn ((*gen_bgt_pat) (default_label));
+ emit_cmp_and_jump_insns (index, expand_expr (node->high,
+ NULL_RTX,
+ VOIDmode, 0),
+ GT, NULL_RTX, mode, unsignedp, 0,
+ default_label);
}
emit_case_nodes (index, node->left, default_label, index_type);
then handle the two subtrees. */
tree test_label = 0;
- emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
- GT, NULL_RTX, mode, unsignedp, 0);
if (node_is_bounded (node->right, index_type))
/* Right hand node is fully bounded so we can eliminate any
testing and branch directly to the target code. */
- emit_jump_insn ((*gen_bgt_pat) (label_rtx (node->right->code_label)));
+ emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ GT, NULL_RTX, mode, unsignedp, 0,
+ label_rtx (node->right->code_label));
else
{
/* Right hand node requires testing.
Branch to a label where we will handle it later. */
test_label = build_decl (LABEL_DECL, NULL_TREE, NULL_TREE);
- emit_jump_insn ((*gen_bgt_pat) (label_rtx (test_label)));
+ emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ GT, NULL_RTX, mode, unsignedp, 0,
+ label_rtx (test_label));
}
/* Value belongs to this node or to the left-hand subtree. */
- emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
- GE, NULL_RTX, mode, unsignedp, 0);
- emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
+ emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
+ VOIDmode, 0),
+ GE, NULL_RTX, mode, unsignedp, 0,
+ label_rtx (node->code_label));
/* Handle the left-hand subtree. */
emit_case_nodes (index, node->left, default_label, index_type);
if they are possible. */
if (!node_has_low_bound (node, index_type))
{
- emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
- VOIDmode, 0),
- LT, NULL_RTX, mode, unsignedp, 0);
- emit_jump_insn ((*gen_blt_pat) (default_label));
+ emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
+ VOIDmode, 0),
+ LT, NULL_RTX, mode, unsignedp, 0,
+ default_label);
}
/* Value belongs to this node or to the right-hand subtree. */
- emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
- LE, NULL_RTX, mode, unsignedp, 0);
- emit_jump_insn ((*gen_ble_pat) (label_rtx (node->code_label)));
+ emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ LE, NULL_RTX, mode, unsignedp, 0,
+ label_rtx (node->code_label));
emit_case_nodes (index, node->right, default_label, index_type);
}
if they are possible. */
if (!node_has_high_bound (node, index_type))
{
- emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
- GT, NULL_RTX, mode, unsignedp, 0);
- emit_jump_insn ((*gen_bgt_pat) (default_label));
+ emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ GT, NULL_RTX, mode, unsignedp, 0,
+ default_label);
}
/* Value belongs to this node or to the left-hand subtree. */
- emit_cmp_insn (index, expand_expr (node->low, NULL_RTX, VOIDmode, 0),
- GE, NULL_RTX, mode, unsignedp, 0);
- emit_jump_insn ((*gen_bge_pat) (label_rtx (node->code_label)));
+ emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
+ VOIDmode, 0),
+ GE, NULL_RTX, mode, unsignedp, 0,
+ label_rtx (node->code_label));
emit_case_nodes (index, node->left, default_label, index_type);
}
if (!node_has_high_bound (node, index_type))
{
- emit_cmp_insn (index, expand_expr (node->high, NULL_RTX,
- VOIDmode, 0),
- GT, NULL_RTX, mode, unsignedp, 0);
- emit_jump_insn ((*gen_bgt_pat) (default_label));
+ emit_cmp_and_jump_insns (index, expand_expr (node->high, NULL_RTX,
+ VOIDmode, 0),
+ GT, NULL_RTX, mode, unsignedp, 0,
+ default_label);
}
if (!node_has_low_bound (node, index_type))
{
- emit_cmp_insn (index, expand_expr (node->low, NULL_RTX,
- VOIDmode, 0),
- LT, NULL_RTX, mode, unsignedp, 0);
- emit_jump_insn ((*gen_blt_pat) (default_label));
+ emit_cmp_and_jump_insns (index, expand_expr (node->low, NULL_RTX,
+ VOIDmode, 0),
+ LT, NULL_RTX, mode, unsignedp, 0,
+ default_label);
}
emit_jump (label_rtx (node->code_label));