This is the mail archive of the
gcc-patches@gcc.gnu.org
mailing list for the GCC project.
[Patch ARM] Use existing predicates for RTL objects
- From: Shujing Zhao <pearly dot zhao at oracle dot com>
- To: gcc-patches at gcc dot gnu dot org
- Cc: Paolo Carlini <paolo dot carlini at oracle dot com>
- Date: Mon, 25 May 2009 17:58:26 +0800
- Subject: [Patch ARM] Use existing predicates for RTL objects
According to projects/beginner.html
* Use predicates for RTL objects
This patch uses the existing predicates for RTL objects in the config/arm
directory.
Tested on arm-elf simulator.
Cheers,
Pearly
2009-05-25 Shujing Zhao <pearly.zhao@oracle.com>
* config/arm/arm.c: Use REG_P, MEM_P, CONST_INT_P, LABEL_P, JUMP_P,
CALL_P, NONJUMP_INSN_P, NOTE_P, BARRIER_P and JUMP_TABLE_DATA_P where
applicable.
* config/arm/arm.h: Ditto.
* config/arm/arm.md: Ditto.
* config/arm/cirrus.md: Ditto.
* config/arm/fpa.md: Ditto.
* config/arm/pe.c: Ditto.
* config/arm/predicates.md: Ditto.
* config/arm/thumb2.md: Ditto.
Index: arm.c
===================================================================
--- arm.c (revision 147651)
+++ arm.c (working copy)
@@ -1862,7 +1862,7 @@ use_return_insn (int iscond, rtx sibling
/* ... or for a tail-call argument ... */
if (sibling)
{
- gcc_assert (GET_CODE (sibling) == CALL_INSN);
+ gcc_assert (CALL_P (sibling));
if (find_regno_fusage (sibling, USE, 3))
return 0;
@@ -2042,7 +2042,7 @@ arm_split_constant (enum rtx_code code,
cond = NULL_RTX;
if (subtargets || code == SET
- || (GET_CODE (target) == REG && GET_CODE (source) == REG
+ || (REG_P (target) && REG_P (source)
&& REGNO (target) != REGNO (source)))
{
/* After arm_reorg has been called, we can't fix up expensive
@@ -3686,7 +3686,7 @@ legitimize_pic_address (rtx orig, enum m
&& GET_CODE (XEXP (XEXP (orig, 0), 0)) == UNSPEC
&& XINT (XEXP (XEXP (orig, 0), 0), 1) == UNSPEC_TLS)
{
- gcc_assert (GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT);
+ gcc_assert (CONST_INT_P (XEXP (XEXP (orig, 0), 1)));
return orig;
}
@@ -3702,7 +3702,7 @@ legitimize_pic_address (rtx orig, enum m
offset = legitimize_pic_address (XEXP (XEXP (orig, 0), 1), Pmode,
base == reg ? 0 : reg);
- if (GET_CODE (offset) == CONST_INT)
+ if (CONST_INT_P (offset))
{
/* The base register doesn't really matter, we only want to
test the index for the appropriate mode. */
@@ -3712,7 +3712,7 @@ legitimize_pic_address (rtx orig, enum m
offset = force_reg (Pmode, offset);
}
- if (GET_CODE (offset) == CONST_INT)
+ if (CONST_INT_P (offset))
return plus_constant (base, INTVAL (offset));
}
@@ -3889,7 +3889,7 @@ arm_address_register_rtx_p (rtx x, int s
{
int regno;
- if (GET_CODE (x) != REG)
+ if (!REG_P (x))
return 0;
regno = REGNO (x);
@@ -3947,7 +3947,7 @@ arm_legitimate_address_outer_p (enum mac
to fixup invalid register choices. */
if (use_ldrd
&& GET_CODE (x) == POST_MODIFY
- && GET_CODE (addend) == REG)
+ && REG_P (addend))
return 0;
return ((use_ldrd || GET_MODE_SIZE (mode) <= 4)
@@ -3961,7 +3961,7 @@ arm_legitimate_address_outer_p (enum mac
|| (code == CONST
&& GET_CODE (XEXP (x, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
- && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
+ && CONST_INT_P (XEXP (XEXP (x, 0), 1)))))
return 1;
else if (mode == TImode || (TARGET_NEON && VALID_NEON_STRUCT_MODE (mode)))
@@ -3973,7 +3973,7 @@ arm_legitimate_address_outer_p (enum mac
rtx xop1 = XEXP (x, 1);
return ((arm_address_register_rtx_p (xop0, strict_p)
- && GET_CODE(xop1) == CONST_INT
+ && CONST_INT_P(xop1)
&& arm_legitimate_index_p (mode, xop1, outer, strict_p))
|| (arm_address_register_rtx_p (xop1, strict_p)
&& arm_legitimate_index_p (mode, xop0, outer, strict_p)));
@@ -4030,7 +4030,7 @@ thumb2_legitimate_address_p (enum machin
rtx addend = XEXP (XEXP (x, 1), 1);
HOST_WIDE_INT offset;
- if (GET_CODE (addend) != CONST_INT)
+ if (!CONST_INT_P (addend))
return 0;
offset = INTVAL(addend);
@@ -4048,7 +4048,7 @@ thumb2_legitimate_address_p (enum machin
|| (code == CONST
&& GET_CODE (XEXP (x, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
- && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
+ && CONST_INT_P (XEXP (XEXP (x, 0), 1)))))
return 1;
else if (mode == TImode || (TARGET_NEON && VALID_NEON_STRUCT_MODE (mode)))
@@ -4147,7 +4147,7 @@ arm_legitimate_index_p (enum machine_mod
rtx op = XEXP (index, 1);
return (arm_address_register_rtx_p (XEXP (index, 0), strict_p)
- && GET_CODE (op) == CONST_INT
+ && CONST_INT_P (op)
&& INTVAL (op) > 0
&& INTVAL (op) <= 31);
}
@@ -4177,7 +4177,7 @@ thumb2_index_mul_operand (rtx op)
{
HOST_WIDE_INT val;
- if (GET_CODE(op) != CONST_INT)
+ if (!CONST_INT_P(op))
return false;
val = INTVAL(op);
@@ -4250,7 +4250,7 @@ thumb2_legitimate_index_p (enum machine_
rtx op = XEXP (index, 1);
return (arm_address_register_rtx_p (XEXP (index, 0), strict_p)
- && GET_CODE (op) == CONST_INT
+ && CONST_INT_P (op)
&& INTVAL (op) > 0
&& INTVAL (op) <= 3);
}
@@ -4266,7 +4266,7 @@ thumb1_base_register_rtx_p (rtx x, enum
{
int regno;
- if (GET_CODE (x) != REG)
+ if (!REG_P (x))
return 0;
regno = REGNO (x);
@@ -4339,7 +4339,7 @@ thumb1_legitimate_address_p (enum machin
|| (GET_CODE (x) == CONST
&& GET_CODE (XEXP (x, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (x, 0), 0)) == LABEL_REF
- && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)))
+ && CONST_INT_P (XEXP (XEXP (x, 0), 1)))))
return 1;
/* Post-inc indexing only supported for SImode and larger. */
@@ -4363,7 +4363,7 @@ thumb1_legitimate_address_p (enum machin
/* REG+const has 5-7 bit offset for non-SP registers. */
else if ((thumb1_index_register_rtx_p (XEXP (x, 0), strict_p)
|| XEXP (x, 0) == arg_pointer_rtx)
- && GET_CODE (XEXP (x, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (x, 1))
&& thumb_legitimate_offset_p (mode, INTVAL (XEXP (x, 1))))
return 1;
@@ -4371,22 +4371,22 @@ thumb1_legitimate_address_p (enum machin
larger is supported. */
/* ??? Should probably check for DI/DFmode overflow here
just like GO_IF_LEGITIMATE_OFFSET does. */
- else if (GET_CODE (XEXP (x, 0)) == REG
+ else if (REG_P (XEXP (x, 0))
&& REGNO (XEXP (x, 0)) == STACK_POINTER_REGNUM
&& GET_MODE_SIZE (mode) >= 4
- && GET_CODE (XEXP (x, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (x, 1))
&& INTVAL (XEXP (x, 1)) >= 0
&& INTVAL (XEXP (x, 1)) + GET_MODE_SIZE (mode) <= 1024
&& (INTVAL (XEXP (x, 1)) & 3) == 0)
return 1;
- else if (GET_CODE (XEXP (x, 0)) == REG
+ else if (REG_P (XEXP (x, 0))
&& (REGNO (XEXP (x, 0)) == FRAME_POINTER_REGNUM
|| REGNO (XEXP (x, 0)) == ARG_POINTER_REGNUM
|| (REGNO (XEXP (x, 0)) >= FIRST_VIRTUAL_REGISTER
&& REGNO (XEXP (x, 0)) <= LAST_VIRTUAL_REGISTER))
&& GET_MODE_SIZE (mode) >= 4
- && GET_CODE (XEXP (x, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (x, 1))
&& (INTVAL (XEXP (x, 1)) & 3) == 0)
return 1;
}
@@ -4633,7 +4633,7 @@ arm_legitimize_address (rtx x, rtx orig_
xop1 = force_reg (SImode, xop1);
if (ARM_BASE_REGISTER_RTX_P (xop0)
- && GET_CODE (xop1) == CONST_INT)
+ && CONST_INT_P (xop1))
{
HOST_WIDE_INT n, low_n;
rtx base_reg, val;
@@ -4689,7 +4689,7 @@ arm_legitimize_address (rtx x, rtx orig_
with absolute addresses which often allows for the base register to
be factorized for multiple adjacent memory references, and it might
even allows for the mini pool to be avoided entirely. */
- else if (GET_CODE (x) == CONST_INT && optimize > 0)
+ else if (CONST_INT_P (x) && optimize > 0)
{
unsigned int bits;
HOST_WIDE_INT mask, base, index;
@@ -4736,7 +4736,7 @@ thumb_legitimize_address (rtx x, rtx ori
return legitimize_tls_address (x, NULL_RTX);
if (GET_CODE (x) == PLUS
- && GET_CODE (XEXP (x, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (x, 1))
&& (INTVAL (XEXP (x, 1)) >= 32 * GET_MODE_SIZE (mode)
|| INTVAL (XEXP (x, 1)) < 0))
{
@@ -4809,7 +4809,7 @@ thumb_legitimize_reload_address (rtx *x_
&& GET_MODE_SIZE (mode) < 4
&& REG_P (XEXP (x, 0))
&& XEXP (x, 0) == stack_pointer_rtx
- && GET_CODE (XEXP (x, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (x, 1))
&& !thumb_legitimate_offset_p (mode, INTVAL (XEXP (x, 1))))
{
rtx orig_x = x;
@@ -4902,11 +4902,11 @@ arm_cannot_force_const_mem (rtx x)
}
#define REG_OR_SUBREG_REG(X) \
- (GET_CODE (X) == REG \
- || (GET_CODE (X) == SUBREG && GET_CODE (SUBREG_REG (X)) == REG))
+ (REG_P (X) \
+ || (GET_CODE (X) == SUBREG && REG_P (SUBREG_REG (X))))
#define REG_OR_SUBREG_RTX(X) \
- (GET_CODE (X) == REG ? (X) : SUBREG_REG (X))
+ (REG_P (X) ? (X) : SUBREG_REG (X))
#ifndef COSTS_N_INSNS
#define COSTS_N_INSNS(N) ((N) * 4 - 2)
@@ -4930,7 +4930,7 @@ thumb1_rtx_costs (rtx x, enum rtx_code c
return COSTS_N_INSNS (1);
case MULT:
- if (GET_CODE (XEXP (x, 1)) == CONST_INT)
+ if (CONST_INT_P (XEXP (x, 1)))
{
int cycles = 0;
unsigned HOST_WIDE_INT i = INTVAL (XEXP (x, 1));
@@ -4946,8 +4946,8 @@ thumb1_rtx_costs (rtx x, enum rtx_code c
case SET:
return (COSTS_N_INSNS (1)
- + 4 * ((GET_CODE (SET_SRC (x)) == MEM)
- + GET_CODE (SET_DEST (x)) == MEM));
+ + 4 * ((MEM_P (SET_SRC (x)))
+ + MEM_P (SET_DEST (x))));
case CONST_INT:
if (outer == SET)
@@ -5010,14 +5010,14 @@ thumb1_rtx_costs (rtx x, enum rtx_code c
{
case QImode:
return (1 + (mode == DImode ? 4 : 0)
- + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
+ + (MEM_P (XEXP (x, 0)) ? 10 : 0));
case HImode:
return (4 + (mode == DImode ? 4 : 0)
- + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
+ + (MEM_P (XEXP (x, 0)) ? 10 : 0));
case SImode:
- return (1 + (GET_CODE (XEXP (x, 0)) == MEM ? 10 : 0));
+ return (1 + (MEM_P (XEXP (x, 0)) ? 10 : 0));
default:
return 99;
@@ -5059,9 +5059,9 @@ arm_rtx_costs_1 (rtx x, enum rtx_code ou
return false;
case ROTATE:
- if (GET_CODE (XEXP (x, 1)) == REG)
+ if (REG_P (XEXP (x, 1)))
*total = COSTS_N_INSNS (1); /* Need to subtract from 32 */
- else if (GET_CODE (XEXP (x, 1)) != CONST_INT)
+ else if (!CONST_INT_P (XEXP (x, 1)))
*total = rtx_cost (XEXP (x, 1), code, speed);
/* Fall through */
@@ -5085,7 +5085,7 @@ arm_rtx_costs_1 (rtx x, enum rtx_code ou
/* Increase the cost of complex shifts because they aren't any faster,
and reduce dual issue opportunities. */
if (arm_tune_cortex_a9
- && outer != SET && GET_CODE (XEXP (x, 1)) != CONST_INT)
+ && outer != SET && !CONST_INT_P (XEXP (x, 1)))
++*total;
return true;
@@ -5111,14 +5111,14 @@ arm_rtx_costs_1 (rtx x, enum rtx_code ou
if (mode == DImode)
{
*total = COSTS_N_INSNS (ARM_NUM_REGS (mode));
- if (GET_CODE (XEXP (x, 0)) == CONST_INT
+ if (CONST_INT_P (XEXP (x, 0))
&& const_ok_for_arm (INTVAL (XEXP (x, 0))))
{
*total += rtx_cost (XEXP (x, 1), code, speed);
return true;
}
- if (GET_CODE (XEXP (x, 1)) == CONST_INT
+ if (CONST_INT_P (XEXP (x, 1))
&& const_ok_for_arm (INTVAL (XEXP (x, 1))))
{
*total += rtx_cost (XEXP (x, 0), code, speed);
@@ -5154,7 +5154,7 @@ arm_rtx_costs_1 (rtx x, enum rtx_code ou
}
*total = COSTS_N_INSNS (1);
- if (GET_CODE (XEXP (x, 0)) == CONST_INT
+ if (CONST_INT_P (XEXP (x, 0))
&& const_ok_for_arm (INTVAL (XEXP (x, 0))))
{
*total += rtx_cost (XEXP (x, 1), code, speed);
@@ -5192,7 +5192,7 @@ arm_rtx_costs_1 (rtx x, enum rtx_code ou
|| GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) == RTX_COMM_COMPARE)
{
*total = COSTS_N_INSNS (1) + rtx_cost (XEXP (x, 0), code, speed);
- if (GET_CODE (XEXP (XEXP (x, 1), 0)) == REG
+ if (REG_P (XEXP (XEXP (x, 1), 0))
&& REGNO (XEXP (XEXP (x, 1), 0)) != CC_REGNUM)
*total += COSTS_N_INSNS (1);
@@ -5246,7 +5246,7 @@ arm_rtx_costs_1 (rtx x, enum rtx_code ou
|| GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == RTX_COMM_COMPARE)
{
*total = COSTS_N_INSNS (1) + rtx_cost (XEXP (x, 1), code, speed);
- if (GET_CODE (XEXP (XEXP (x, 0), 0)) == REG
+ if (REG_P (XEXP (XEXP (x, 0), 0))
&& REGNO (XEXP (XEXP (x, 0), 0)) != CC_REGNUM)
*total += COSTS_N_INSNS (1);
return true;
@@ -5264,7 +5264,7 @@ arm_rtx_costs_1 (rtx x, enum rtx_code ou
operand. */
if ((REG_OR_SUBREG_REG (XEXP (x, 0))
&& ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))
- && GET_CODE (XEXP (x, 1)) != CONST_INT)
+ && !CONST_INT_P (XEXP (x, 1)))
|| (REG_OR_SUBREG_REG (XEXP (x, 0))
&& ARM_FRAME_RTX (REG_OR_SUBREG_RTX (XEXP (x, 0)))))
*total = 4;
@@ -5272,7 +5272,7 @@ arm_rtx_costs_1 (rtx x, enum rtx_code ou
if (mode == DImode)
{
*total += COSTS_N_INSNS (2);
- if (GET_CODE (XEXP (x, 1)) == CONST_INT
+ if (CONST_INT_P (XEXP (x, 1))
&& const_ok_for_op (INTVAL (XEXP (x, 1)), code))
{
*total += rtx_cost (XEXP (x, 0), code, speed);
@@ -5283,7 +5283,7 @@ arm_rtx_costs_1 (rtx x, enum rtx_code ou
}
*total += COSTS_N_INSNS (1);
- if (GET_CODE (XEXP (x, 1)) == CONST_INT
+ if (CONST_INT_P (XEXP (x, 1))
&& const_ok_for_op (INTVAL (XEXP (x, 1)), code))
{
*total += rtx_cost (XEXP (x, 0), code, speed);
@@ -5361,7 +5361,7 @@ arm_rtx_costs_1 (rtx x, enum rtx_code ou
{
*total += rtx_cost (XEXP (XEXP (x, 0), 0), subcode, speed);
/* Register shifts cost an extra cycle. */
- if (GET_CODE (XEXP (XEXP (x, 0), 1)) != CONST_INT)
+ if (!CONST_INT_P (XEXP (XEXP (x, 0), 1)))
*total += COSTS_N_INSNS (1) + rtx_cost (XEXP (XEXP (x, 0), 1),
subcode, speed);
return true;
@@ -5381,7 +5381,7 @@ arm_rtx_costs_1 (rtx x, enum rtx_code ou
if (!((GET_RTX_CLASS (GET_CODE (operand)) == RTX_COMPARE
|| GET_RTX_CLASS (GET_CODE (operand)) == RTX_COMM_COMPARE)
- && GET_CODE (XEXP (operand, 0)) == REG
+ && REG_P (XEXP (operand, 0))
&& REGNO (XEXP (operand, 0)) == CC_REGNUM))
*total += COSTS_N_INSNS (1);
*total += (rtx_cost (XEXP (x, 1), code, speed)
@@ -5397,7 +5397,7 @@ arm_rtx_costs_1 (rtx x, enum rtx_code ou
goto scc_insn;
case GE:
- if ((GET_CODE (XEXP (x, 0)) != REG || REGNO (XEXP (x, 0)) != CC_REGNUM)
+ if ((!REG_P (XEXP (x, 0)) || REGNO (XEXP (x, 0)) != CC_REGNUM)
&& mode == SImode && XEXP (x, 1) == const0_rtx)
{
*total = COSTS_N_INSNS (2) + rtx_cost (XEXP (x, 0), code, speed);
@@ -5406,7 +5406,7 @@ arm_rtx_costs_1 (rtx x, enum rtx_code ou
goto scc_insn;
case LT:
- if ((GET_CODE (XEXP (x, 0)) != REG || REGNO (XEXP (x, 0)) != CC_REGNUM)
+ if ((!REG_P (XEXP (x, 0)) || REGNO (XEXP (x, 0)) != CC_REGNUM)
&& mode == SImode && XEXP (x, 1) == const0_rtx)
{
*total = COSTS_N_INSNS (1) + rtx_cost (XEXP (x, 0), code, speed);
@@ -5433,21 +5433,21 @@ arm_rtx_costs_1 (rtx x, enum rtx_code ou
performed, then they cost 2 instructions. Otherwise they need
an additional comparison before them. */
*total = COSTS_N_INSNS (2);
- if (GET_CODE (XEXP (x, 0)) == REG && REGNO (XEXP (x, 0)) == CC_REGNUM)
+ if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) == CC_REGNUM)
{
return true;
}
/* Fall through */
case COMPARE:
- if (GET_CODE (XEXP (x, 0)) == REG && REGNO (XEXP (x, 0)) == CC_REGNUM)
+ if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) == CC_REGNUM)
{
*total = 0;
return true;
}
*total += COSTS_N_INSNS (1);
- if (GET_CODE (XEXP (x, 1)) == CONST_INT
+ if (CONST_INT_P (XEXP (x, 1))
&& const_ok_for_op (INTVAL (XEXP (x, 1)), code))
{
*total += rtx_cost (XEXP (x, 0), code, speed);
@@ -5479,7 +5479,7 @@ arm_rtx_costs_1 (rtx x, enum rtx_code ou
case SMIN:
case SMAX:
*total = COSTS_N_INSNS (2) + rtx_cost (XEXP (x, 0), code, speed);
- if (GET_CODE (XEXP (x, 1)) != CONST_INT
+ if (!CONST_INT_P (XEXP (x, 1))
|| !const_ok_for_arm (INTVAL (XEXP (x, 1))))
*total += rtx_cost (XEXP (x, 1), code, speed);
return true;
@@ -5511,10 +5511,10 @@ arm_rtx_costs_1 (rtx x, enum rtx_code ou
{
if (arm_arch6)
{
- if (GET_CODE (XEXP (x, 0)) != MEM)
+ if (!MEM_P (XEXP (x, 0)))
*total += COSTS_N_INSNS (1);
}
- else if (!arm_arch4 || GET_CODE (XEXP (x, 0)) != MEM)
+ else if (!arm_arch4 || !MEM_P (XEXP (x, 0)))
*total += COSTS_N_INSNS (2);
}
@@ -5533,10 +5533,10 @@ arm_rtx_costs_1 (rtx x, enum rtx_code ou
{
if (arm_arch6)
{
- if (GET_CODE (XEXP (x, 0)) != MEM)
+ if (!MEM_P (XEXP (x, 0)))
*total += COSTS_N_INSNS (1);
}
- else if (!arm_arch4 || GET_CODE (XEXP (x, 0)) != MEM)
+ else if (!arm_arch4 || !MEM_P (XEXP (x, 0)))
*total += COSTS_N_INSNS (GET_MODE (XEXP (x, 0)) == QImode ?
1 : 2);
}
@@ -5636,7 +5636,7 @@ arm_size_rtx_costs (rtx x, enum rtx_code
return false;
case ROTATE:
- if (mode == SImode && GET_CODE (XEXP (x, 1)) == REG)
+ if (mode == SImode && REG_P (XEXP (x, 1)))
{
*total = COSTS_N_INSNS (2) + rtx_cost (XEXP (x, 0), code, false);
return true;
@@ -5646,7 +5646,7 @@ arm_size_rtx_costs (rtx x, enum rtx_code
case ASHIFT:
case LSHIFTRT:
case ASHIFTRT:
- if (mode == DImode && GET_CODE (XEXP (x, 1)) == CONST_INT)
+ if (mode == DImode && CONST_INT_P (XEXP (x, 1)))
{
*total = COSTS_N_INSNS (3) + rtx_cost (XEXP (x, 0), code, false);
return true;
@@ -5655,7 +5655,7 @@ arm_size_rtx_costs (rtx x, enum rtx_code
{
*total = COSTS_N_INSNS (1) + rtx_cost (XEXP (x, 0), code, false);
/* Slightly disparage register shifts, but not by much. */
- if (GET_CODE (XEXP (x, 1)) != CONST_INT)
+ if (!CONST_INT_P (XEXP (x, 1)))
*total += 1 + rtx_cost (XEXP (x, 1), code, false);
return true;
}
@@ -5887,7 +5887,7 @@ arm_slowmul_rtx_costs (rtx x, enum rtx_c
return false;
}
- if (GET_CODE (XEXP (x, 1)) == CONST_INT)
+ if (CONST_INT_P (XEXP (x, 1)))
{
unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
& (unsigned HOST_WIDE_INT) 0xffffffff);
@@ -5953,7 +5953,7 @@ arm_fastmul_rtx_costs (rtx x, enum rtx_c
return false;
}
- if (GET_CODE (XEXP (x, 1)) == CONST_INT)
+ if (CONST_INT_P (XEXP (x, 1)))
{
unsigned HOST_WIDE_INT i = (INTVAL (XEXP (x, 1))
& (unsigned HOST_WIDE_INT) 0xffffffff);
@@ -6042,7 +6042,7 @@ arm_xscale_rtx_costs (rtx x, enum rtx_co
return false;
}
- if (GET_CODE (XEXP (x, 1)) == CONST_INT)
+ if (CONST_INT_P (XEXP (x, 1)))
{
/* If operand 1 is a constant we can more accurately
calculate the cost of the multiply. The multiplier can
@@ -6168,7 +6168,7 @@ arm_arm_address_cost (rtx x)
if (c == PLUS || c == MINUS)
{
- if (GET_CODE (XEXP (x, 0)) == CONST_INT)
+ if (CONST_INT_P (XEXP (x, 0)))
return 2;
if (ARITHMETIC_P (XEXP (x, 0)) || ARITHMETIC_P (XEXP (x, 1)))
@@ -6188,8 +6188,8 @@ arm_thumb_address_cost (rtx x)
if (c == REG)
return 1;
if (c == PLUS
- && GET_CODE (XEXP (x, 0)) == REG
- && GET_CODE (XEXP (x, 1)) == CONST_INT)
+ && REG_P (XEXP (x, 0))
+ && CONST_INT_P (XEXP (x, 1)))
return 1;
return 2;
@@ -6255,13 +6255,13 @@ arm_adjust_cost (rtx insn, rtx link, rtx
/* Call insns don't incur a stall, even if they follow a load. */
if (REG_NOTE_KIND (link) == 0
- && GET_CODE (insn) == CALL_INSN)
+ && CALL_P (insn))
return 1;
if ((i_pat = single_set (insn)) != NULL
- && GET_CODE (SET_SRC (i_pat)) == MEM
+ && MEM_P (SET_SRC (i_pat))
&& (d_pat = single_set (dep)) != NULL
- && GET_CODE (SET_DEST (d_pat)) == MEM)
+ && MEM_P (SET_DEST (d_pat)))
{
rtx src_mem = XEXP (SET_SRC (i_pat), 0);
/* This is a load after a store, there is no conflict if the load reads
@@ -6563,7 +6563,7 @@ neon_valid_immediate (rtx op, enum machi
unsigned HOST_WIDE_INT elpart;
unsigned int part, parts;
- if (GET_CODE (el) == CONST_INT)
+ if (CONST_INT_P (el))
{
elpart = INTVAL (el);
parts = 1;
@@ -6816,7 +6816,7 @@ bounds_check (rtx operand, HOST_WIDE_INT
{
HOST_WIDE_INT lane;
- gcc_assert (GET_CODE (operand) == CONST_INT);
+ gcc_assert (CONST_INT_P (operand));
lane = INTVAL (operand);
@@ -6866,23 +6866,23 @@ cirrus_memory_offset (rtx op)
|| reg_mentioned_p (virtual_stack_vars_rtx, op)))
return 0;
- if (GET_CODE (op) == MEM)
+ if (MEM_P (op))
{
rtx ind;
ind = XEXP (op, 0);
/* Match: (mem (reg)). */
- if (GET_CODE (ind) == REG)
+ if (REG_P (ind))
return 1;
/* Match:
(mem (plus (reg)
(const))). */
if (GET_CODE (ind) == PLUS
- && GET_CODE (XEXP (ind, 0)) == REG
+ && REG_P (XEXP (ind, 0))
&& REG_MODE_OK_FOR_BASE_P (XEXP (ind, 0), VOIDmode)
- && GET_CODE (XEXP (ind, 1)) == CONST_INT)
+ && CONST_INT_P (XEXP (ind, 1)))
return 1;
}
@@ -6910,7 +6910,7 @@ arm_coproc_mem_operand (rtx op, bool wb)
return FALSE;
/* Constants are converted into offsets from labels. */
- if (GET_CODE (op) != MEM)
+ if (!MEM_P (op))
return FALSE;
ind = XEXP (op, 0);
@@ -6920,11 +6920,11 @@ arm_coproc_mem_operand (rtx op, bool wb)
|| (GET_CODE (ind) == CONST
&& GET_CODE (XEXP (ind, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (ind, 0), 0)) == LABEL_REF
- && GET_CODE (XEXP (XEXP (ind, 0), 1)) == CONST_INT)))
+ && CONST_INT_P (XEXP (XEXP (ind, 0), 1)))))
return TRUE;
/* Match: (mem (reg)). */
- if (GET_CODE (ind) == REG)
+ if (REG_P (ind))
return arm_address_register_rtx_p (ind, 0);
/* Autoincremment addressing modes. POST_INC and PRE_DEC are
@@ -6949,9 +6949,9 @@ arm_coproc_mem_operand (rtx op, bool wb)
(plus (reg)
(const)). */
if (GET_CODE (ind) == PLUS
- && GET_CODE (XEXP (ind, 0)) == REG
+ && REG_P (XEXP (ind, 0))
&& REG_MODE_OK_FOR_BASE_P (XEXP (ind, 0), VOIDmode)
- && GET_CODE (XEXP (ind, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (ind, 1))
&& INTVAL (XEXP (ind, 1)) > -1024
&& INTVAL (XEXP (ind, 1)) < 1024
&& (INTVAL (XEXP (ind, 1)) & 3) == 0)
@@ -6982,7 +6982,7 @@ neon_vector_mem_operand (rtx op, int typ
return FALSE;
/* Constants are converted into offsets from labels. */
- if (GET_CODE (op) != MEM)
+ if (!MEM_P (op))
return FALSE;
ind = XEXP (op, 0);
@@ -6992,11 +6992,11 @@ neon_vector_mem_operand (rtx op, int typ
|| (GET_CODE (ind) == CONST
&& GET_CODE (XEXP (ind, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (ind, 0), 0)) == LABEL_REF
- && GET_CODE (XEXP (XEXP (ind, 0), 1)) == CONST_INT)))
+ && CONST_INT_P (XEXP (XEXP (ind, 0), 1)))))
return TRUE;
/* Match: (mem (reg)). */
- if (GET_CODE (ind) == REG)
+ if (REG_P (ind))
return arm_address_register_rtx_p (ind, 0);
/* Allow post-increment with Neon registers. */
@@ -7010,9 +7010,9 @@ neon_vector_mem_operand (rtx op, int typ
(const)). */
if (type == 0
&& GET_CODE (ind) == PLUS
- && GET_CODE (XEXP (ind, 0)) == REG
+ && REG_P (XEXP (ind, 0))
&& REG_MODE_OK_FOR_BASE_P (XEXP (ind, 0), VOIDmode)
- && GET_CODE (XEXP (ind, 1)) == CONST_INT
+ && CONST_INT_P (XEXP (ind, 1))
&& INTVAL (XEXP (ind, 1)) > -1024
&& INTVAL (XEXP (ind, 1)) < 1016
&& (INTVAL (XEXP (ind, 1)) & 3) == 0)
@@ -7039,7 +7039,7 @@ neon_struct_mem_operand (rtx op)
return FALSE;
/* Constants are converted into offsets from labels. */
- if (GET_CODE (op) != MEM)
+ if (!MEM_P (op))
return FALSE;
ind = XEXP (op, 0);
@@ -7049,11 +7049,11 @@ neon_struct_mem_operand (rtx op)
|| (GET_CODE (ind) == CONST
&& GET_CODE (XEXP (ind, 0)) == PLUS
&& GET_CODE (XEXP (XEXP (ind, 0), 0)) == LABEL_REF
- && GET_CODE (XEXP (XEXP (ind, 0), 1)) == CONST_INT)))
+ && CONST_INT_P (XEXP (XEXP (ind, 0), 1)))))
return TRUE;
/* Match: (mem (reg)). */
- if (GET_CODE (ind) == REG)
+ if (REG_P (ind))
return arm_address_register_rtx_p (ind, 0);
return FALSE;
@@ -7107,7 +7107,7 @@ arm_memory_load_p (rtx insn)
{
rtx body, lhs, rhs;;
- if (insn == NULL_RTX || GET_CODE (insn) != INSN)
+ if (insn == NULL_RTX || !NONJUMP_INSN_P (insn))
return false;
body = PATTERN (insn);
@@ -7122,14 +7122,14 @@ arm_memory_load_p (rtx insn)
/* If the destination is not a general purpose
register we do not have to worry. */
- if (GET_CODE (lhs) != REG
+ if (!REG_P (lhs)
|| REGNO_REG_CLASS (REGNO (lhs)) != GENERAL_REGS)
return false;
/* As well as loads from memory we also have to react
to loads of invalid constants which will be turned
into loads from the minipool. */
- return (GET_CODE (rhs) == MEM
+ return (MEM_P (rhs)
|| GET_CODE (rhs) == SYMBOL_REF
|| note_invalid_constants (insn, -1, false));
}
@@ -7142,7 +7142,7 @@ arm_cirrus_insn_p (rtx insn)
/* get_attr cannot accept USE or CLOBBER. */
if (!insn
- || GET_CODE (insn) != INSN
+ || !NONJUMP_INSN_P (insn)
|| GET_CODE (PATTERN (insn)) == USE
|| GET_CODE (PATTERN (insn)) == CLOBBER)
return 0;
@@ -7162,7 +7162,7 @@ cirrus_reorg (rtx first)
int nops;
/* Any branch must be followed by 2 non Cirrus instructions. */
- if (GET_CODE (first) == JUMP_INSN && GET_CODE (body) != RETURN)
+ if (JUMP_P (first) && GET_CODE (body) != RETURN)
{
nops = 0;
t = next_nonnote_insn (first);
@@ -7210,11 +7210,11 @@ cirrus_reorg (rtx first)
cfmvsr mvf0, r0. */
/* Get Arm register number for ldr insn. */
- if (GET_CODE (lhs) == REG)
+ if (REG_P (lhs))
arm_regno = REGNO (lhs);
else
{
- gcc_assert (GET_CODE (rhs) == REG);
+ gcc_assert (REG_P (rhs));
arm_regno = REGNO (rhs);
}
@@ -7234,7 +7234,7 @@ cirrus_reorg (rtx first)
body = XEXP (body, 0);
if (get_attr_cirrus (first) == CIRRUS_MOVE
- && GET_CODE (XEXP (body, 1)) == REG
+ && REG_P (XEXP (body, 1))
&& arm_regno == REGNO (XEXP (body, 1)))
emit_insn_after (gen_nop (), first);
@@ -7244,7 +7244,7 @@ cirrus_reorg (rtx first)
/* get_attr cannot accept USE or CLOBBER. */
if (!first
- || GET_CODE (first) != INSN
+ || !NONJUMP_INSN_P (first)
|| GET_CODE (PATTERN (first)) == USE
|| GET_CODE (PATTERN (first)) == CLOBBER)
return;
@@ -7371,7 +7371,7 @@ arm_cannot_copy_insn_p (rtx insn)
&& XINT (rhs, 1) == UNSPEC_PIC_BASE)
return TRUE;
- if (GET_CODE (rhs) == MEM
+ if (MEM_P (rhs)
&& GET_CODE (XEXP (rhs, 0)) == UNSPEC
&& XINT (XEXP (rhs, 0), 1) == UNSPEC_PIC_BASE)
return TRUE;
@@ -7408,12 +7408,12 @@ adjacent_mem_locations (rtx a, rtx b)
if (volatile_refs_p (a) || volatile_refs_p (b))
return 0;
- if ((GET_CODE (XEXP (a, 0)) == REG
+ if ((REG_P (XEXP (a, 0))
|| (GET_CODE (XEXP (a, 0)) == PLUS
- && GET_CODE (XEXP (XEXP (a, 0), 1)) == CONST_INT))
- && (GET_CODE (XEXP (b, 0)) == REG
+ && CONST_INT_P (XEXP (XEXP (a, 0), 1))))
+ && (REG_P (XEXP (b, 0))
|| (GET_CODE (XEXP (b, 0)) == PLUS
- && GET_CODE (XEXP (XEXP (b, 0), 1)) == CONST_INT)))
+ && CONST_INT_P (XEXP (XEXP (b, 0), 1)))))
{
HOST_WIDE_INT val0 = 0, val1 = 0;
rtx reg0, reg1;
@@ -7494,7 +7494,7 @@ load_multiple_sequence (rtx *operands, i
if (GET_CODE (operands[nops + i]) == SUBREG)
operands[nops + i] = alter_subreg (operands + (nops + i));
- gcc_assert (GET_CODE (operands[nops + i]) == MEM);
+ gcc_assert (MEM_P (operands[nops + i]));
/* Don't reorder volatile memory references; it doesn't seem worth
looking for the case where the order is ok anyway. */
@@ -7503,21 +7503,19 @@ load_multiple_sequence (rtx *operands, i
offset = const0_rtx;
- if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
+ if ((REG_P (reg = XEXP (operands[nops + i], 0))
|| (GET_CODE (reg) == SUBREG
- && GET_CODE (reg = SUBREG_REG (reg)) == REG))
+ && REG_P (reg = SUBREG_REG (reg))))
|| (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
- && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
- == REG)
+ && ((REG_P (reg = XEXP (XEXP (operands[nops + i], 0), 0)))
|| (GET_CODE (reg) == SUBREG
- && GET_CODE (reg = SUBREG_REG (reg)) == REG))
- && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
- == CONST_INT)))
+ && REG_P (reg = SUBREG_REG (reg))))
+ && (CONST_INT_P (offset = XEXP (XEXP (operands[nops + i], 0), 1)))))
{
if (i == 0)
{
base_reg = REGNO (reg);
- unsorted_regs[0] = (GET_CODE (operands[i]) == REG
+ unsorted_regs[0] = (REG_P (operands[i])
? REGNO (operands[i])
: REGNO (SUBREG_REG (operands[i])));
order[0] = 0;
@@ -7528,7 +7526,7 @@ load_multiple_sequence (rtx *operands, i
/* Not addressed from the same base register. */
return 0;
- unsorted_regs[i] = (GET_CODE (operands[i]) == REG
+ unsorted_regs[i] = (REG_P (operands[i])
? REGNO (operands[i])
: REGNO (SUBREG_REG (operands[i])));
if (unsorted_regs[i] < unsorted_regs[order[0]])
@@ -7723,7 +7721,7 @@ store_multiple_sequence (rtx *operands,
if (GET_CODE (operands[nops + i]) == SUBREG)
operands[nops + i] = alter_subreg (operands + (nops + i));
- gcc_assert (GET_CODE (operands[nops + i]) == MEM);
+ gcc_assert (MEM_P (operands[nops + i]));
/* Don't reorder volatile memory references; it doesn't seem worth
looking for the case where the order is ok anyway. */
@@ -7732,21 +7730,19 @@ store_multiple_sequence (rtx *operands,
offset = const0_rtx;
- if ((GET_CODE (reg = XEXP (operands[nops + i], 0)) == REG
+ if ((REG_P (reg = XEXP (operands[nops + i], 0))
|| (GET_CODE (reg) == SUBREG
- && GET_CODE (reg = SUBREG_REG (reg)) == REG))
+ && REG_P (reg = SUBREG_REG (reg))))
|| (GET_CODE (XEXP (operands[nops + i], 0)) == PLUS
- && ((GET_CODE (reg = XEXP (XEXP (operands[nops + i], 0), 0))
- == REG)
+ && ((REG_P (reg = XEXP (XEXP (operands[nops + i], 0), 0)))
|| (GET_CODE (reg) == SUBREG
- && GET_CODE (reg = SUBREG_REG (reg)) == REG))
- && (GET_CODE (offset = XEXP (XEXP (operands[nops + i], 0), 1))
- == CONST_INT)))
+ && REG_P (reg = SUBREG_REG (reg))))
+ && (CONST_INT_P (offset = XEXP (XEXP (operands[nops + i], 0), 1)))))
{
if (i == 0)
{
base_reg = REGNO (reg);
- unsorted_regs[0] = (GET_CODE (operands[i]) == REG
+ unsorted_regs[0] = (REG_P (operands[i])
? REGNO (operands[i])
: REGNO (SUBREG_REG (operands[i])));
order[0] = 0;
@@ -7757,7 +7753,7 @@ store_multiple_sequence (rtx *operands,
/* Not addressed from the same base register. */
return 0;
- unsorted_regs[i] = (GET_CODE (operands[i]) == REG
+ unsorted_regs[i] = (REG_P (operands[i])
? REGNO (operands[i])
: REGNO (SUBREG_REG (operands[i])));
if (unsorted_regs[i] < unsorted_regs[order[0]])
@@ -8036,8 +8032,8 @@ arm_gen_movmemqi (rtx *operands)
rtx part_bytes_reg = NULL;
rtx mem;
- if (GET_CODE (operands[2]) != CONST_INT
- || GET_CODE (operands[3]) != CONST_INT
+ if (!CONST_INT_P (operands[2])
+ || !CONST_INT_P (operands[3])
|| INTVAL (operands[2]) > 64
|| INTVAL (operands[3]) & 3)
return 0;
@@ -8356,7 +8352,7 @@ arm_select_cc_mode (enum rtx_code op, rt
/* A compare with a shifted operand. Because of canonicalization, the
comparison will have to be swapped when we emit the assembler. */
- if (GET_MODE (y) == SImode && GET_CODE (y) == REG
+ if (GET_MODE (y) == SImode && REG_P (y)
&& (GET_CODE (x) == ASHIFT || GET_CODE (x) == ASHIFTRT
|| GET_CODE (x) == LSHIFTRT || GET_CODE (x) == ROTATE
|| GET_CODE (x) == ROTATERT))
@@ -8375,13 +8371,13 @@ arm_select_cc_mode (enum rtx_code op, rt
equalities and unsigned inequalities). */
if (GET_MODE (x) == SImode
&& GET_CODE (x) == ASHIFT
- && GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 24
+ && CONST_INT_P (XEXP (x, 1)) && INTVAL (XEXP (x, 1)) == 24
&& GET_CODE (XEXP (x, 0)) == SUBREG
- && GET_CODE (SUBREG_REG (XEXP (x, 0))) == MEM
+ && MEM_P (SUBREG_REG (XEXP (x, 0)))
&& GET_MODE (SUBREG_REG (XEXP (x, 0))) == QImode
&& (op == EQ || op == NE
|| op == GEU || op == GTU || op == LTU || op == LEU)
- && GET_CODE (y) == CONST_INT)
+ && CONST_INT_P (y))
return CC_Zmode;
/* A construct for a conditional compare, if the false arm contains
@@ -8487,7 +8483,7 @@ arm_reload_in_hi (rtx *operands)
ref = SUBREG_REG (ref);
}
- if (GET_CODE (ref) == REG)
+ if (REG_P (ref))
{
/* We have a pseudo which has been spilt onto the stack; there
are two cases here: the first where there is a simple
@@ -8507,7 +8503,7 @@ arm_reload_in_hi (rtx *operands)
/* Handle the case where the address is too complex to be offset by 1. */
if (GET_CODE (base) == MINUS
- || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
+ || (GET_CODE (base) == PLUS && !CONST_INT_P (XEXP (base, 1))))
{
rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
@@ -8604,7 +8600,7 @@ arm_reload_out_hi (rtx *operands)
ref = SUBREG_REG (ref);
}
- if (GET_CODE (ref) == REG)
+ if (REG_P (ref))
{
/* We have a pseudo which has been spilt onto the stack; there
are two cases here: the first where there is a simple
@@ -8626,7 +8622,7 @@ arm_reload_out_hi (rtx *operands)
/* Handle the case where the address is too complex to be offset by 1. */
if (GET_CODE (base) == MINUS
- || (GET_CODE (base) == PLUS && GET_CODE (XEXP (base, 1)) != CONST_INT))
+ || (GET_CODE (base) == PLUS && !CONST_INT_P (XEXP (base, 1))))
{
rtx base_plus = gen_rtx_REG (SImode, REGNO (operands[2]) + 1);
@@ -8991,14 +8987,12 @@ is_jump_table (rtx insn)
{
rtx table;
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
&& JUMP_LABEL (insn) != NULL
&& ((table = next_real_insn (JUMP_LABEL (insn)))
== next_real_insn (insn))
&& table != NULL
- && GET_CODE (table) == JUMP_INSN
- && (GET_CODE (PATTERN (table)) == ADDR_VEC
- || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
+ && JUMP_TABLE_DATA_P (table))
return table;
return NULL_RTX;
@@ -9129,7 +9123,7 @@ add_minipool_forward_ref (Mfix *fix)
{
if (GET_CODE (fix->value) == GET_CODE (mp->value)
&& fix->mode == mp->mode
- && (GET_CODE (fix->value) != CODE_LABEL
+ && (!LABEL_P (fix->value)
|| (CODE_LABEL_NUMBER (fix->value)
== CODE_LABEL_NUMBER (mp->value)))
&& rtx_equal_p (fix->value, mp->value))
@@ -9306,7 +9300,7 @@ add_minipool_backward_ref (Mfix *fix)
{
if (GET_CODE (fix->value) == GET_CODE (mp->value)
&& fix->mode == mp->mode
- && (GET_CODE (fix->value) != CODE_LABEL
+ && (!LABEL_P (fix->value)
|| (CODE_LABEL_NUMBER (fix->value)
== CODE_LABEL_NUMBER (mp->value)))
&& rtx_equal_p (fix->value, mp->value)
@@ -9538,7 +9532,7 @@ arm_barrier_cost (rtx insn)
int base_cost = 50;
rtx next = next_nonnote_insn (insn);
- if (next != NULL && GET_CODE (next) == CODE_LABEL)
+ if (next != NULL && LABEL_P (next))
base_cost -= 20;
switch (GET_CODE (insn))
@@ -9589,7 +9583,7 @@ create_fix_barrier (Mfix *fix, HOST_WIDE
/* This code shouldn't have been called if there was a natural barrier
within range. */
- gcc_assert (GET_CODE (from) != BARRIER);
+ gcc_assert (!BARRIER_P (from));
/* Count the length of this insn. */
count += get_attr_length (from);
@@ -9742,8 +9736,8 @@ arm_const_double_inline_cost (rtx val)
lowpart = gen_lowpart (SImode, val);
highpart = gen_highpart_mode (SImode, mode, val);
- gcc_assert (GET_CODE (lowpart) == CONST_INT);
- gcc_assert (GET_CODE (highpart) == CONST_INT);
+ gcc_assert (CONST_INT_P (lowpart));
+ gcc_assert (CONST_INT_P (highpart));
return (arm_gen_constant (SET, SImode, NULL_RTX, INTVAL (lowpart),
NULL_RTX, NULL_RTX, 0, 0)
@@ -9769,7 +9763,7 @@ arm_const_double_by_parts (rtx val)
part = gen_highpart_mode (SImode, mode, val);
- gcc_assert (GET_CODE (part) == CONST_INT);
+ gcc_assert (CONST_INT_P (part));
if (const_ok_for_arm (INTVAL (part))
|| const_ok_for_arm (~INTVAL (part)))
@@ -9777,7 +9771,7 @@ arm_const_double_by_parts (rtx val)
part = gen_lowpart (SImode, val);
- gcc_assert (GET_CODE (part) == CONST_INT);
+ gcc_assert (CONST_INT_P (part));
if (const_ok_for_arm (INTVAL (part))
|| const_ok_for_arm (~INTVAL (part)))
@@ -9830,7 +9824,7 @@ note_invalid_constants (rtx insn, HOST_W
recog_data.operand_mode[opno], op);
result = true;
}
- else if (GET_CODE (op) == MEM
+ else if (MEM_P (op)
&& GET_CODE (XEXP (op, 0)) == SYMBOL_REF
&& CONSTANT_POOL_ADDRESS_P (XEXP (op, 0)))
{
@@ -9875,7 +9869,7 @@ arm_reorg (void)
/* The first insn must always be a note, or the code below won't
scan it properly. */
insn = get_insns ();
- gcc_assert (GET_CODE (insn) == NOTE);
+ gcc_assert (NOTE_P (insn));
minipool_pad = 0;
/* Scan all the insns and record the operands that will need fixing. */
@@ -9883,11 +9877,11 @@ arm_reorg (void)
{
if (TARGET_CIRRUS_FIX_INVALID_INSNS
&& (arm_cirrus_insn_p (insn)
- || GET_CODE (insn) == JUMP_INSN
+ || JUMP_P (insn)
|| arm_memory_load_p (insn)))
cirrus_reorg (insn);
- if (GET_CODE (insn) == BARRIER)
+ if (BARRIER_P (insn))
push_minipool_barrier (insn, address);
else if (INSN_P (insn))
{
@@ -9918,7 +9912,7 @@ arm_reorg (void)
Mfix * this_fix;
/* Skip any further barriers before the next fix. */
- while (fix && GET_CODE (fix->insn) == BARRIER)
+ while (fix && BARRIER_P (fix->insn))
fix = fix->next;
/* No more fixes. */
@@ -9929,7 +9923,7 @@ arm_reorg (void)
for (ftmp = fix; ftmp; ftmp = ftmp->next)
{
- if (GET_CODE (ftmp->insn) == BARRIER)
+ if (BARRIER_P (ftmp->insn))
{
if (ftmp->address >= minipool_vector_head->max_address)
break;
@@ -9989,7 +9983,7 @@ arm_reorg (void)
while (ftmp)
{
- if (GET_CODE (ftmp->insn) != BARRIER
+ if (!BARRIER_P (ftmp->insn)
&& ((ftmp->minipool = add_minipool_backward_ref (ftmp))
== NULL))
break;
@@ -10001,7 +9995,7 @@ arm_reorg (void)
up and adding the constants to the pool itself. */
for (this_fix = fix; this_fix && ftmp != this_fix;
this_fix = this_fix->next)
- if (GET_CODE (this_fix->insn) != BARRIER)
+ if (!BARRIER_P (this_fix->insn))
{
rtx addr
= plus_constant (gen_rtx_LABEL_REF (VOIDmode,
@@ -10154,7 +10148,7 @@ vfp_output_fstmd (rtx * operands)
strcpy (pattern, "fstmfdd\t%m0!, {%P1");
p = strlen (pattern);
- gcc_assert (GET_CODE (operands[1]) == REG);
+ gcc_assert (REG_P (operands[1]));
base = (REGNO (operands[1]) - FIRST_VFP_REGNUM) / 2;
for (i = 1; i < XVECLEN (operands[2], 0); i++)
@@ -10531,7 +10525,7 @@ output_move_double (rtx *operands)
FIXME: IWMMXT allows offsets larger than ldrd can
handle, fix these up with a pair of ldr. */
if (TARGET_THUMB2
- || GET_CODE (otherops[2]) != CONST_INT
+ || !CONST_INT_P (otherops[2])
|| (INTVAL (otherops[2]) > -256
&& INTVAL (otherops[2]) < 256))
output_asm_insn ("ldr%(d%)\t%0, [%1, %2]!", otherops);
@@ -10548,7 +10542,7 @@ output_move_double (rtx *operands)
FIXME: IWMMXT allows offsets larger than ldrd can handle,
fix these up with a pair of ldr. */
if (TARGET_THUMB2
- || GET_CODE (otherops[2]) != CONST_INT
+ || !CONST_INT_P (otherops[2])
|| (INTVAL (otherops[2]) > -256
&& INTVAL (otherops[2]) < 256))
output_asm_insn ("ldr%(d%)\t%0, [%1], %2", otherops);
@@ -10587,7 +10581,7 @@ output_move_double (rtx *operands)
if (GET_CODE (XEXP (operands[1], 0)) == PLUS)
{
- if (GET_CODE (otherops[2]) == CONST_INT && !TARGET_LDRD)
+ if (CONST_INT_P (otherops[2]) && !TARGET_LDRD)
{
switch ((int) INTVAL (otherops[2]))
{
@@ -10609,9 +10603,9 @@ output_move_double (rtx *operands)
otherops[0] = gen_rtx_REG(SImode, REGNO(operands[0]) + 1);
operands[1] = otherops[0];
if (TARGET_LDRD
- && (GET_CODE (otherops[2]) == REG
+ && (REG_P (otherops[2])
|| TARGET_THUMB2
- || (GET_CODE (otherops[2]) == CONST_INT
+ || (CONST_INT_P (otherops[2])
&& INTVAL (otherops[2]) > -256
&& INTVAL (otherops[2]) < 256)))
{
@@ -10641,7 +10635,7 @@ output_move_double (rtx *operands)
return "";
}
- if (GET_CODE (otherops[2]) == CONST_INT)
+ if (CONST_INT_P (otherops[2]))
{
if (!(const_ok_for_arm (INTVAL (otherops[2]))))
output_asm_insn ("sub%?\t%0, %1, #%n2", otherops);
@@ -10724,7 +10718,7 @@ output_move_double (rtx *operands)
/* IWMMXT allows offsets larger than ldrd can handle,
fix these up with a pair of ldr. */
if (!TARGET_THUMB2
- && GET_CODE (otherops[2]) == CONST_INT
+ && CONST_INT_P (otherops[2])
&& (INTVAL(otherops[2]) <= -256
|| INTVAL(otherops[2]) >= 256))
{
@@ -10747,7 +10741,7 @@ output_move_double (rtx *operands)
case PLUS:
otherops[2] = XEXP (XEXP (operands[0], 0), 1);
- if (GET_CODE (otherops[2]) == CONST_INT && !TARGET_LDRD)
+ if (CONST_INT_P (otherops[2]) && !TARGET_LDRD)
{
switch ((int) INTVAL (XEXP (XEXP (operands[0], 0), 1)))
{
@@ -10769,9 +10763,9 @@ output_move_double (rtx *operands)
}
}
if (TARGET_LDRD
- && (GET_CODE (otherops[2]) == REG
+ && (REG_P (otherops[2])
|| TARGET_THUMB2
- || (GET_CODE (otherops[2]) == CONST_INT
+ || (CONST_INT_P (otherops[2])
&& INTVAL (otherops[2]) > -256
&& INTVAL (otherops[2]) < 256)))
{
@@ -13470,7 +13464,7 @@ arm_print_operand (FILE *stream, rtx x,
return;
case 'B':
- if (GET_CODE (x) == CONST_INT)
+ if (CONST_INT_P (x))
{
HOST_WIDE_INT val;
val = ARM_SIGN_EXTEND (~INTVAL (x));
@@ -13494,7 +13488,7 @@ arm_print_operand (FILE *stream, rtx x,
/* Truncate Cirrus shift counts. */
case 's':
- if (GET_CODE (x) == CONST_INT)
+ if (CONST_INT_P (x))
{
fprintf (stream, HOST_WIDE_INT_PRINT_DEC, INTVAL (x) & 0x3f);
return;
@@ -13558,7 +13552,7 @@ arm_print_operand (FILE *stream, rtx x,
of the memory location is actually held in one of the registers
being overwritten by the load. */
case 'Q':
- if (GET_CODE (x) != REG || REGNO (x) > LAST_ARM_REGNUM)
+ if (!REG_P (x) || REGNO (x) > LAST_ARM_REGNUM)
{
output_operand_lossage ("invalid operand for code '%c'", code);
return;
@@ -13568,7 +13562,7 @@ arm_print_operand (FILE *stream, rtx x,
return;
case 'R':
- if (GET_CODE (x) != REG || REGNO (x) > LAST_ARM_REGNUM)
+ if (!REG_P (x) || REGNO (x) > LAST_ARM_REGNUM)
{
output_operand_lossage ("invalid operand for code '%c'", code);
return;
@@ -13578,7 +13572,7 @@ arm_print_operand (FILE *stream, rtx x,
return;
case 'H':
- if (GET_CODE (x) != REG || REGNO (x) > LAST_ARM_REGNUM)
+ if (!REG_P (x) || REGNO (x) > LAST_ARM_REGNUM)
{
output_operand_lossage ("invalid operand for code '%c'", code);
return;
@@ -13588,7 +13582,7 @@ arm_print_operand (FILE *stream, rtx x,
return;
case 'J':
- if (GET_CODE (x) != REG || REGNO (x) > LAST_ARM_REGNUM)
+ if (!REG_P (x) || REGNO (x) > LAST_ARM_REGNUM)
{
output_operand_lossage ("invalid operand for code '%c'", code);
return;
@@ -13598,7 +13592,7 @@ arm_print_operand (FILE *stream, rtx x,
return;
case 'K':
- if (GET_CODE (x) != REG || REGNO (x) > LAST_ARM_REGNUM)
+ if (!REG_P (x) || REGNO (x) > LAST_ARM_REGNUM)
{
output_operand_lossage ("invalid operand for code '%c'", code);
return;
@@ -13609,7 +13603,7 @@ arm_print_operand (FILE *stream, rtx x,
case 'm':
asm_fprintf (stream, "%r",
- GET_CODE (XEXP (x, 0)) == REG
+ REG_P (XEXP (x, 0))
? REGNO (XEXP (x, 0)) : REGNO (XEXP (XEXP (x, 0), 0)));
return;
@@ -13675,7 +13669,7 @@ arm_print_operand (FILE *stream, rtx x,
case 'X': /* Cirrus register in D mode. */
case 'Y': /* Cirrus register in FX mode. */
case 'Z': /* Cirrus register in DX mode. */
- gcc_assert (GET_CODE (x) == REG
+ gcc_assert (REG_P (x)
&& REGNO_REG_CLASS (REGNO (x)) == CIRRUS_REGS);
fprintf (stream, "mv%s%s",
@@ -13690,7 +13684,7 @@ arm_print_operand (FILE *stream, rtx x,
{
int mode = GET_MODE (x);
- if (GET_CODE (x) != REG || REGNO_REG_CLASS (REGNO (x)) != CIRRUS_REGS)
+ if (!REG_P (x) || REGNO_REG_CLASS (REGNO (x)) != CIRRUS_REGS)
{
output_operand_lossage ("invalid operand for code '%c'", code);
return;
@@ -13706,7 +13700,7 @@ arm_print_operand (FILE *stream, rtx x,
}
case 'U':
- if (GET_CODE (x) != REG
+ if (!REG_P (x)
|| REGNO (x) < FIRST_IWMMXT_GR_REGNUM
|| REGNO (x) > LAST_IWMMXT_GR_REGNUM)
/* Bad value for wCG register number. */
@@ -13721,7 +13715,7 @@ arm_print_operand (FILE *stream, rtx x,
/* Print an iWMMXt control register name. */
case 'w':
- if (GET_CODE (x) != CONST_INT
+ if (!CONST_INT_P (x)
|| INTVAL (x) < 0
|| INTVAL (x) >= 16)
/* Bad value for wC register number. */
@@ -13758,7 +13752,7 @@ arm_print_operand (FILE *stream, rtx x,
return;
}
- if (GET_CODE (x) != REG
+ if (!REG_P (x)
|| !IS_VFP_REGNUM (REGNO (x)))
{
output_operand_lossage ("invalid operand for code '%c'", code);
@@ -13788,7 +13782,7 @@ arm_print_operand (FILE *stream, rtx x,
int regno;
if ((GET_MODE_SIZE (mode) != 16
- && GET_MODE_SIZE (mode) != 32) || GET_CODE (x) != REG)
+ && GET_MODE_SIZE (mode) != 32) || !REG_P (x))
{
output_operand_lossage ("invalid operand for code '%c'", code);
return;
@@ -13866,7 +13860,7 @@ arm_print_operand (FILE *stream, rtx x,
{
rtx addr;
bool postinc = FALSE;
- gcc_assert (GET_CODE (x) == MEM);
+ gcc_assert (MEM_P (x));
addr = XEXP (x, 0);
if (GET_CODE (addr) == POST_INC)
{
@@ -14225,7 +14219,7 @@ thumb2_final_prescan_insn (rtx insn)
return;
/* Conditional jumps are implemented directly. */
- if (GET_CODE (insn) == JUMP_INSN)
+ if (JUMP_P (insn))
return;
predicate = COND_EXEC_TEST (body);
@@ -14242,7 +14236,7 @@ thumb2_final_prescan_insn (rtx insn)
/* Jumping into the middle of an IT block is illegal, so a label or
barrier terminates the block. */
- if (GET_CODE (insn) != INSN && GET_CODE(insn) != JUMP_INSN)
+ if (!NONJUMP_INSN_P (insn) && !JUMP_P (insn))
break;
body = PATTERN (insn);
@@ -14271,7 +14265,7 @@ thumb2_final_prescan_insn (rtx insn)
arm_condexec_masklen += n;
/* A jump must be the last instruction in a conditional block. */
- if (GET_CODE(insn) == JUMP_INSN)
+ if (JUMP_P (insn))
break;
}
/* Restore recog_data (getting the attributes of other insns can
@@ -14324,12 +14318,12 @@ arm_final_prescan_insn (rtx insn)
if (simplejump_p (insn))
{
start_insn = next_nonnote_insn (start_insn);
- if (GET_CODE (start_insn) == BARRIER)
+ if (BARRIER_P (start_insn))
{
/* XXX Isn't this always a barrier? */
start_insn = next_nonnote_insn (start_insn);
}
- if (GET_CODE (start_insn) == CODE_LABEL
+ if (LABEL_P (start_insn)
&& CODE_LABEL_NUMBER (start_insn) == arm_target_label
&& LABEL_NUSES (start_insn) == 1)
reverse = TRUE;
@@ -14339,9 +14333,9 @@ arm_final_prescan_insn (rtx insn)
else if (GET_CODE (body) == RETURN)
{
start_insn = next_nonnote_insn (start_insn);
- if (GET_CODE (start_insn) == BARRIER)
+ if (BARRIER_P (start_insn))
start_insn = next_nonnote_insn (start_insn);
- if (GET_CODE (start_insn) == CODE_LABEL
+ if (LABEL_P (start_insn)
&& CODE_LABEL_NUMBER (start_insn) == arm_target_label
&& LABEL_NUSES (start_insn) == 1)
{
@@ -14356,7 +14350,7 @@ arm_final_prescan_insn (rtx insn)
}
gcc_assert (!arm_ccfsm_state || reverse);
- if (GET_CODE (insn) != JUMP_INSN)
+ if (!JUMP_P (insn))
return;
/* This jump might be paralleled with a clobber of the condition codes
@@ -14473,7 +14467,7 @@ arm_final_prescan_insn (rtx insn)
if the following two insns are a barrier and the
target label. */
this_insn = next_nonnote_insn (this_insn);
- if (this_insn && GET_CODE (this_insn) == BARRIER)
+ if (this_insn && BARRIER_P (this_insn))
this_insn = next_nonnote_insn (this_insn);
if (this_insn && this_insn == label
@@ -14578,8 +14572,8 @@ arm_final_prescan_insn (rtx insn)
{
this_insn = next_nonnote_insn (this_insn);
gcc_assert (!this_insn
- || (GET_CODE (this_insn) != BARRIER
- && GET_CODE (this_insn) != CODE_LABEL));
+ || (!BARRIER_P (this_insn)
+ && !LABEL_P (this_insn)));
}
if (!this_insn)
{
@@ -14781,7 +14775,7 @@ arm_debugger_arg_offset (int value, rtx
return 0;
/* We can only cope with the case where the address is held in a register. */
- if (GET_CODE (addr) != REG)
+ if (!REG_P (addr))
return 0;
/* If we are using the frame pointer to point at the argument, then
@@ -14828,13 +14822,13 @@ arm_debugger_arg_offset (int value, rtx
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
{
- if ( GET_CODE (insn) == INSN
+ if ( NONJUMP_INSN_P (insn)
&& GET_CODE (PATTERN (insn)) == SET
&& REGNO (XEXP (PATTERN (insn), 0)) == REGNO (addr)
&& GET_CODE (XEXP (PATTERN (insn), 1)) == PLUS
- && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 0)) == REG
+ && REG_P (XEXP (XEXP (PATTERN (insn), 1), 0))
&& REGNO (XEXP (XEXP (PATTERN (insn), 1), 0)) == (unsigned) HARD_FRAME_POINTER_REGNUM
- && GET_CODE (XEXP (XEXP (PATTERN (insn), 1), 1)) == CONST_INT
+ && CONST_INT_P (XEXP (XEXP (PATTERN (insn), 1), 1))
)
{
value = INTVAL (XEXP (XEXP (PATTERN (insn), 1), 1));
@@ -17314,7 +17308,7 @@ thumb_far_jump_used_p (void)
insn with the far jump attribute set. */
for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
{
- if (GET_CODE (insn) == JUMP_INSN
+ if (JUMP_P (insn)
/* Ignore tablejump patterns. */
&& GET_CODE (PATTERN (insn)) != ADDR_VEC
&& GET_CODE (PATTERN (insn)) != ADDR_DIFF_VEC
@@ -17775,7 +17769,7 @@ thumb1_output_function_prologue (FILE *f
{
const char * name;
- gcc_assert (GET_CODE (DECL_RTL (current_function_decl)) == MEM);
+ gcc_assert (MEM_P (DECL_RTL (current_function_decl)));
gcc_assert (GET_CODE (XEXP (DECL_RTL (current_function_decl), 0))
== SYMBOL_REF);
name = XSTR (XEXP (DECL_RTL (current_function_decl), 0), 0);
@@ -18015,8 +18009,8 @@ thumb_load_double_from_address (rtx *ope
rtx arg1;
rtx arg2;
- gcc_assert (GET_CODE (operands[0]) == REG);
- gcc_assert (GET_CODE (operands[1]) == MEM);
+ gcc_assert (REG_P (operands[0]));
+ gcc_assert (MEM_P (operands[1]));
/* Get the memory address. */
addr = XEXP (operands[1], 0);
@@ -18056,10 +18050,10 @@ thumb_load_double_from_address (rtx *ope
else
base = arg1, offset = arg2;
- gcc_assert (GET_CODE (base) == REG);
+ gcc_assert (REG_P (base));
/* Catch the case of <address> = <reg> + <reg> */
- if (GET_CODE (offset) == REG)
+ if (REG_P (offset))
{
int reg_offset = REGNO (offset);
int reg_base = REGNO (base);
@@ -18656,10 +18650,10 @@ arm_output_load_gr (rtx *operands)
rtx wcgr;
rtx sum;
- if (GET_CODE (operands [1]) != MEM
+ if (!MEM_P (operands [1])
|| GET_CODE (sum = XEXP (operands [1], 0)) != PLUS
- || GET_CODE (reg = XEXP (sum, 0)) != REG
- || GET_CODE (offset = XEXP (sum, 1)) != CONST_INT
+ || !REG_P (reg = XEXP (sum, 0))
+ || !CONST_INT_P (offset = XEXP (sum, 1))
|| ((INTVAL (offset) < 1024) && (INTVAL (offset) > -1024)))
return "wldrw%?\t%0, %1";
@@ -18751,7 +18745,7 @@ arm_no_early_alu_shift_dep (rtx producer
/* This is either an actual independent shift, or a shift applied to
the first operand of another operation. We want the whole shift
operation. */
- if (GET_CODE (early_op) == REG)
+ if (REG_P (early_op))
early_op = op;
return !reg_overlap_mentioned_p (value, early_op);
@@ -18784,7 +18778,7 @@ arm_no_early_alu_shift_value_dep (rtx pr
/* This is either an actual independent shift, or a shift applied to
the first operand of another operation. We want the value being
shifted, in either case. */
- if (GET_CODE (early_op) != REG)
+ if (!REG_P (early_op))
early_op = XEXP (early_op, 0);
return !reg_overlap_mentioned_p (value, early_op);
@@ -19151,7 +19145,7 @@ arm_unwind_emit_sequence (FILE * asm_out
/* First insn will adjust the stack pointer. */
if (GET_CODE (e) != SET
- || GET_CODE (XEXP (e, 0)) != REG
+ || !REG_P (XEXP (e, 0))
|| REGNO (XEXP (e, 0)) != SP_REGNUM
|| GET_CODE (XEXP (e, 1)) != PLUS)
abort ();
@@ -19201,8 +19195,8 @@ arm_unwind_emit_sequence (FILE * asm_out
Where <addr> is (reg:SP) or (plus (reg:SP) (const_int)). */
e = XVECEXP (p, 0, i);
if (GET_CODE (e) != SET
- || GET_CODE (XEXP (e, 0)) != MEM
- || GET_CODE (XEXP (e, 1)) != REG)
+ || !MEM_P (XEXP (e, 0))
+ || !REG_P (XEXP (e, 1)))
abort ();
reg = REGNO (XEXP (e, 1));
@@ -19224,14 +19218,14 @@ arm_unwind_emit_sequence (FILE * asm_out
if (GET_CODE (e) == PLUS)
{
offset += reg_size;
- if (GET_CODE (XEXP (e, 0)) != REG
+ if (!REG_P (XEXP (e, 0))
|| REGNO (XEXP (e, 0)) != SP_REGNUM
- || GET_CODE (XEXP (e, 1)) != CONST_INT
+ || !CONST_INT_P (XEXP (e, 1))
|| offset != INTVAL (XEXP (e, 1)))
abort ();
}
else if (i != 1
- || GET_CODE (e) != REG
+ || !REG_P (e)
|| REGNO (e) != SP_REGNUM)
abort ();
#endif
@@ -19255,7 +19249,7 @@ arm_unwind_emit_set (FILE * asm_out_file
case MEM:
/* Pushing a single register. */
if (GET_CODE (XEXP (e0, 0)) != PRE_DEC
- || GET_CODE (XEXP (XEXP (e0, 0), 0)) != REG
+ || !REG_P (XEXP (XEXP (e0, 0), 0))
|| REGNO (XEXP (XEXP (e0, 0), 0)) != SP_REGNUM)
abort ();
@@ -19272,9 +19266,9 @@ arm_unwind_emit_set (FILE * asm_out_file
{
/* A stack increment. */
if (GET_CODE (e1) != PLUS
- || GET_CODE (XEXP (e1, 0)) != REG
+ || !REG_P (XEXP (e1, 0))
|| REGNO (XEXP (e1, 0)) != SP_REGNUM
- || GET_CODE (XEXP (e1, 1)) != CONST_INT)
+ || !CONST_INT_P (XEXP (e1, 1)))
abort ();
asm_fprintf (asm_out_file, "\t.pad #%wd\n",
@@ -19286,8 +19280,8 @@ arm_unwind_emit_set (FILE * asm_out_file
if (GET_CODE (e1) == PLUS)
{
- if (GET_CODE (XEXP (e1, 0)) != REG
- || GET_CODE (XEXP (e1, 1)) != CONST_INT)
+ if (!REG_P (XEXP (e1, 0))
+ || !CONST_INT_P (XEXP (e1, 1)))
abort ();
reg = REGNO (XEXP (e1, 0));
offset = INTVAL (XEXP (e1, 1));
@@ -19295,7 +19289,7 @@ arm_unwind_emit_set (FILE * asm_out_file
HARD_FRAME_POINTER_REGNUM, reg,
INTVAL (XEXP (e1, 1)));
}
- else if (GET_CODE (e1) == REG)
+ else if (REG_P (e1))
{
reg = REGNO (e1);
asm_fprintf (asm_out_file, "\t.setfp %r, %r\n",
@@ -19304,15 +19298,15 @@ arm_unwind_emit_set (FILE * asm_out_file
else
abort ();
}
- else if (GET_CODE (e1) == REG && REGNO (e1) == SP_REGNUM)
+ else if (REG_P (e1) && REGNO (e1) == SP_REGNUM)
{
/* Move from sp to reg. */
asm_fprintf (asm_out_file, "\t.movsp %r\n", REGNO (e0));
}
else if (GET_CODE (e1) == PLUS
- && GET_CODE (XEXP (e1, 0)) == REG
+ && REG_P (XEXP (e1, 0))
&& REGNO (XEXP (e1, 0)) == SP_REGNUM
- && GET_CODE (XEXP (e1, 1)) == CONST_INT)
+ && CONST_INT_P (XEXP (e1, 1)))
{
/* Set reg to offset from sp. */
asm_fprintf (asm_out_file, "\t.movsp %r, #%d\n",
@@ -19350,7 +19344,7 @@ arm_unwind_emit (FILE * asm_out_file, rt
|| crtl->all_throwers_are_sibcalls))
return;
- if (GET_CODE (insn) == NOTE || !RTX_FRAME_RELATED_P (insn))
+ if (NOTE_P (insn) || !RTX_FRAME_RELATED_P (insn))
return;
pat = find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX);
@@ -19386,7 +19380,7 @@ arm_output_ttype (rtx x)
fputs ("\t.word\t", asm_out_file);
output_addr_const (asm_out_file, x);
/* Use special relocations for symbol references. */
- if (GET_CODE (x) != CONST_INT)
+ if (!CONST_INT_P (x))
fputs ("(TARGET2)", asm_out_file);
fputc ('\n', asm_out_file);
Index: arm.h
===================================================================
--- arm.h (revision 147651)
+++ arm.h (working copy)
@@ -1311,8 +1311,8 @@ enum reg_class
&& CONSTANT_P (X)) \
? GENERAL_REGS : \
(((MODE) == HImode && ! arm_arch4 \
- && (GET_CODE (X) == MEM \
- || ((GET_CODE (X) == REG || GET_CODE (X) == SUBREG) \
+ && (MEM_P (X) \
+ || ((REG_P (X) || GET_CODE (X) == SUBREG) \
&& true_regnum (X) == -1))) \
? GENERAL_REGS : NO_REGS) \
: THUMB_SECONDARY_INPUT_RELOAD_CLASS (CLASS, MODE, X)))
@@ -1328,10 +1328,10 @@ enum reg_class
do \
{ \
if (GET_CODE (X) == PLUS \
- && GET_CODE (XEXP (X, 0)) == REG \
+ && REG_P (XEXP (X, 0)) \
&& REGNO (XEXP (X, 0)) < FIRST_PSEUDO_REGISTER \
&& REG_MODE_OK_FOR_BASE_P (XEXP (X, 0), MODE) \
- && GET_CODE (XEXP (X, 1)) == CONST_INT) \
+ && CONST_INT_P (XEXP (X, 1))) \
{ \
HOST_WIDE_INT val = INTVAL (XEXP (X, 1)); \
HOST_WIDE_INT low, high; \
@@ -1995,7 +1995,7 @@ typedef struct
#define ARM_LEGITIMATE_CONSTANT_P(X) (flag_pic || ! label_mentioned_p (X))
#define THUMB_LEGITIMATE_CONSTANT_P(X) \
- ( GET_CODE (X) == CONST_INT \
+ ( CONST_INT_P (X) \
|| GET_CODE (X) == CONST_DOUBLE \
|| CONSTANT_ADDRESS_P (X) \
|| flag_pic)
@@ -2162,10 +2162,10 @@ typedef struct
REG_OK_FOR_INDEX_P (X)
#define ARM_BASE_REGISTER_RTX_P(X) \
- (GET_CODE (X) == REG && ARM_REG_OK_FOR_BASE_P (X))
+ (REG_P (X) && ARM_REG_OK_FOR_BASE_P (X))
#define ARM_INDEX_REGISTER_RTX_P(X) \
- (GET_CODE (X) == REG && ARM_REG_OK_FOR_INDEX_P (X))
+ (REG_P (X) && ARM_REG_OK_FOR_INDEX_P (X))
/* Define this for compatibility reasons. */
#define HANDLE_PRAGMA_PACK_PUSH_POP
@@ -2300,7 +2300,7 @@ extern int making_const_table;
#define CANONICALIZE_COMPARISON(CODE, OP0, OP1) \
do \
{ \
- if (GET_CODE (OP1) == CONST_INT \
+ if (CONST_INT_P (OP1) \
&& ! (const_ok_for_arm (INTVAL (OP1)) \
|| (const_ok_for_arm (- INTVAL (OP1))))) \
{ \
@@ -2460,15 +2460,15 @@ extern int making_const_table;
{ \
int is_minus = GET_CODE (X) == MINUS; \
\
- if (GET_CODE (X) == REG) \
+ if (REG_P (X)) \
asm_fprintf (STREAM, "[%r, #0]", REGNO (X)); \
else if (GET_CODE (X) == PLUS || is_minus) \
{ \
rtx base = XEXP (X, 0); \
rtx index = XEXP (X, 1); \
HOST_WIDE_INT offset = 0; \
- if (GET_CODE (base) != REG \
- || (GET_CODE (index) == REG && REGNO (index) == SP_REGNUM)) \
+ if (!REG_P (base) \
+ || (REG_P (index) && REGNO (index) == SP_REGNUM)) \
{ \
/* Ensure that BASE is a register. */ \
/* (one of them must be). */ \
@@ -2516,7 +2516,7 @@ extern int making_const_table;
{ \
extern enum machine_mode output_memory_reference_mode; \
\
- gcc_assert (GET_CODE (XEXP (X, 0)) == REG); \
+ gcc_assert (REG_P (XEXP (X, 0))); \
\
if (GET_CODE (X) == PRE_DEC || GET_CODE (X) == PRE_INC) \
asm_fprintf (STREAM, "[%r, #%s%d]!", \
@@ -2532,7 +2532,7 @@ extern int making_const_table;
else if (GET_CODE (X) == PRE_MODIFY) \
{ \
asm_fprintf (STREAM, "[%r, ", REGNO (XEXP (X, 0))); \
- if (GET_CODE (XEXP (XEXP (X, 1), 1)) == CONST_INT) \
+ if (CONST_INT_P (XEXP (XEXP (X, 1), 1))) \
asm_fprintf (STREAM, "#%wd]!", \
INTVAL (XEXP (XEXP (X, 1), 1))); \
else \
@@ -2542,7 +2542,7 @@ extern int making_const_table;
else if (GET_CODE (X) == POST_MODIFY) \
{ \
asm_fprintf (STREAM, "[%r], ", REGNO (XEXP (X, 0))); \
- if (GET_CODE (XEXP (XEXP (X, 1), 1)) == CONST_INT) \
+ if (CONST_INT_P (XEXP (XEXP (X, 1), 1))) \
asm_fprintf (STREAM, "#%wd", \
INTVAL (XEXP (XEXP (X, 1), 1))); \
else \
@@ -2554,14 +2554,14 @@ extern int making_const_table;
#define THUMB_PRINT_OPERAND_ADDRESS(STREAM, X) \
{ \
- if (GET_CODE (X) == REG) \
+ if (REG_P (X)) \
asm_fprintf (STREAM, "[%r]", REGNO (X)); \
else if (GET_CODE (X) == POST_INC) \
asm_fprintf (STREAM, "%r!", REGNO (XEXP (X, 0))); \
else if (GET_CODE (X) == PLUS) \
{ \
- gcc_assert (GET_CODE (XEXP (X, 0)) == REG); \
- if (GET_CODE (XEXP (X, 1)) == CONST_INT) \
+ gcc_assert (REG_P (XEXP (X, 0))); \
+ if (CONST_INT_P (XEXP (X, 1))) \
asm_fprintf (STREAM, "[%r, #%wd]", \
REGNO (XEXP (X, 0)), \
INTVAL (XEXP (X, 1))); \
Index: arm.md
===================================================================
--- arm.md (revision 147651)
+++ arm.md (working copy)
@@ -471,9 +471,9 @@ (define_expand "adddi3"
if (TARGET_THUMB1)
{
- if (GET_CODE (operands[1]) != REG)
+ if (!REG_P (operands[1]))
operands[1] = force_reg (DImode, operands[1]);
- if (GET_CODE (operands[2]) != REG)
+ if (!REG_P (operands[2]))
operands[2] = force_reg (DImode, operands[2]);
}
"
@@ -579,7 +579,7 @@ (define_expand "addsi3"
(match_operand:SI 2 "reg_or_int_operand" "")))]
"TARGET_EITHER"
"
- if (TARGET_32BIT && GET_CODE (operands[2]) == CONST_INT)
+ if (TARGET_32BIT && CONST_INT_P (operands[2]))
{
arm_split_constant (PLUS, SImode, NULL_RTX,
INTVAL (operands[2]), operands[0], operands[1],
@@ -621,7 +621,7 @@ (define_insn_and_split "*arm_addsi3"
sub%?\\t%0, %1, #%n2
#"
"TARGET_32BIT &&
- GET_CODE (operands[2]) == CONST_INT
+ CONST_INT_P (operands[2])
&& !(const_ok_for_arm (INTVAL (operands[2]))
|| const_ok_for_arm (-INTVAL (operands[2])))"
[(clobber (const_int 0))]
@@ -656,7 +656,7 @@ (define_insn "*thumb1_addsi3"
\"add\\t%0, %1, %2\"
};
if ((which_alternative == 2 || which_alternative == 6)
- && GET_CODE (operands[2]) == CONST_INT
+ && CONST_INT_P (operands[2])
&& INTVAL (operands[2]) < 0)
return \"sub\\t%0, %1, #%n2\";
return asms[which_alternative];
@@ -964,9 +964,9 @@ (define_expand "subdi3"
if (TARGET_THUMB1)
{
- if (GET_CODE (operands[1]) != REG)
+ if (!REG_P (operands[1]))
operands[1] = force_reg (SImode, operands[1]);
- if (GET_CODE (operands[2]) != REG)
+ if (!REG_P (operands[2]))
operands[2] = force_reg (SImode, operands[2]);
}
"
@@ -1060,7 +1060,7 @@ (define_expand "subsi3"
(match_operand:SI 2 "s_register_operand" "")))]
"TARGET_EITHER"
"
- if (GET_CODE (operands[1]) == CONST_INT)
+ if (CONST_INT_P (operands[1]))
{
if (TARGET_32BIT)
{
@@ -1095,7 +1095,7 @@ (define_insn_and_split "*arm_subsi3_insn
sub%?\\t%0, %1, %2
#"
"TARGET_32BIT
- && GET_CODE (operands[1]) == CONST_INT
+ && CONST_INT_P (operands[1])
&& !const_ok_for_arm (INTVAL (operands[1]))"
[(clobber (const_int 0))]
"
@@ -1885,7 +1885,7 @@ (define_expand "andsi3"
"
if (TARGET_32BIT)
{
- if (GET_CODE (operands[2]) == CONST_INT)
+ if (CONST_INT_P (operands[2]))
{
arm_split_constant (AND, SImode, NULL_RTX,
INTVAL (operands[2]), operands[0],
@@ -1896,7 +1896,7 @@ (define_expand "andsi3"
}
else /* TARGET_THUMB1 */
{
- if (GET_CODE (operands[2]) != CONST_INT)
+ if (!CONST_INT_P (operands[2]))
operands[2] = force_reg (SImode, operands[2]);
else
{
@@ -1950,7 +1950,7 @@ (define_insn_and_split "*arm_andsi3_insn
bic%?\\t%0, %1, #%B2
#"
"TARGET_32BIT
- && GET_CODE (operands[2]) == CONST_INT
+ && CONST_INT_P (operands[2])
&& !(const_ok_for_arm (INTVAL (operands[2]))
|| const_ok_for_arm (~INTVAL (operands[2])))"
[(clobber (const_int 0))]
@@ -2253,7 +2253,7 @@ (define_expand "insv"
{
bool use_bfi = TRUE;
- if (GET_CODE (operands[3]) == CONST_INT)
+ if (CONST_INT_P (operands[3]))
{
HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
@@ -2271,7 +2271,7 @@ (define_expand "insv"
if (use_bfi)
{
- if (GET_CODE (operands[3]) != REG)
+ if (!REG_P (operands[3]))
operands[3] = force_reg (SImode, operands[3]);
emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
@@ -2293,7 +2293,7 @@ (define_expand "insv"
else
subtarget = target;
- if (GET_CODE (operands[3]) == CONST_INT)
+ if (CONST_INT_P (operands[3]))
{
/* Since we are inserting a known constant, we may be able to
reduce the number of bits that we have to clear so that
@@ -2360,7 +2360,7 @@ (define_expand "insv"
/* Mask out any bits in operand[3] that are not needed. */
emit_insn (gen_andsi3 (op1, operands[3], op0));
- if (GET_CODE (op0) == CONST_INT
+ if (CONST_INT_P (op0)
&& (const_ok_for_arm (mask << start_bit)
|| const_ok_for_arm (~(mask << start_bit))))
{
@@ -2369,7 +2369,7 @@ (define_expand "insv"
}
else
{
- if (GET_CODE (op0) == CONST_INT)
+ if (CONST_INT_P (op0))
{
rtx tmp = gen_reg_rtx (SImode);
@@ -2596,7 +2596,7 @@ (define_expand "iorsi3"
(match_operand:SI 2 "reg_or_int_operand" "")))]
"TARGET_EITHER"
"
- if (GET_CODE (operands[2]) == CONST_INT)
+ if (CONST_INT_P (operands[2]))
{
if (TARGET_32BIT)
{
@@ -2620,7 +2620,7 @@ (define_insn_and_split "*arm_iorsi3"
orr%?\\t%0, %1, %2
#"
"TARGET_32BIT
- && GET_CODE (operands[2]) == CONST_INT
+ && CONST_INT_P (operands[2])
&& !const_ok_for_arm (INTVAL (operands[2]))"
[(clobber (const_int 0))]
"
@@ -2717,7 +2717,7 @@ (define_expand "xorsi3"
(match_operand:SI 2 "arm_rhs_operand" "")))]
"TARGET_EITHER"
"if (TARGET_THUMB1)
- if (GET_CODE (operands[2]) == CONST_INT)
+ if (CONST_INT_P (operands[2]))
operands[2] = force_reg (SImode, operands[2]);
"
)
@@ -3115,7 +3115,7 @@ (define_expand "ashldi3"
(match_operand:SI 2 "reg_or_int_operand" "")))]
"TARGET_32BIT"
"
- if (GET_CODE (operands[2]) == CONST_INT)
+ if (CONST_INT_P (operands[2]))
{
if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
{
@@ -3150,7 +3150,7 @@ (define_expand "ashlsi3"
(match_operand:SI 2 "arm_rhs_operand" "")))]
"TARGET_EITHER"
"
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
&& ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
{
emit_insn (gen_movsi (operands[0], const0_rtx));
@@ -3174,7 +3174,7 @@ (define_expand "ashrdi3"
(match_operand:SI 2 "reg_or_int_operand" "")))]
"TARGET_32BIT"
"
- if (GET_CODE (operands[2]) == CONST_INT)
+ if (CONST_INT_P (operands[2]))
{
if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
{
@@ -3209,7 +3209,7 @@ (define_expand "ashrsi3"
(match_operand:SI 2 "arm_rhs_operand" "")))]
"TARGET_EITHER"
"
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
&& ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
operands[2] = GEN_INT (31);
"
@@ -3230,7 +3230,7 @@ (define_expand "lshrdi3"
(match_operand:SI 2 "reg_or_int_operand" "")))]
"TARGET_32BIT"
"
- if (GET_CODE (operands[2]) == CONST_INT)
+ if (CONST_INT_P (operands[2]))
{
if ((HOST_WIDE_INT) INTVAL (operands[2]) == 1)
{
@@ -3265,7 +3265,7 @@ (define_expand "lshrsi3"
(match_operand:SI 2 "arm_rhs_operand" "")))]
"TARGET_EITHER"
"
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
&& ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
{
emit_insn (gen_movsi (operands[0], const0_rtx));
@@ -3289,7 +3289,7 @@ (define_expand "rotlsi3"
(match_operand:SI 2 "reg_or_int_operand" "")))]
"TARGET_32BIT"
"
- if (GET_CODE (operands[2]) == CONST_INT)
+ if (CONST_INT_P (operands[2]))
operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
else
{
@@ -3308,13 +3308,13 @@ (define_expand "rotrsi3"
"
if (TARGET_32BIT)
{
- if (GET_CODE (operands[2]) == CONST_INT
+ if (CONST_INT_P (operands[2])
&& ((unsigned HOST_WIDE_INT) INTVAL (operands[2])) > 31)
operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
}
else /* TARGET_THUMB1 */
{
- if (GET_CODE (operands [2]) == CONST_INT)
+ if (CONST_INT_P (operands [2]))
operands [2] = force_reg (SImode, operands[2]);
}
"
@@ -3490,7 +3490,7 @@ (define_expand "negdi2"
"
if (TARGET_THUMB1)
{
- if (GET_CODE (operands[1]) != REG)
+ if (!REG_P (operands[1]))
operands[1] = force_reg (SImode, operands[1]);
}
"
@@ -3855,14 +3855,14 @@ (define_expand "zero_extendhisi2"
"TARGET_EITHER"
"
{
- if ((TARGET_THUMB1 || arm_arch4) && GET_CODE (operands[1]) == MEM)
+ if ((TARGET_THUMB1 || arm_arch4) && MEM_P (operands[1]))
{
emit_insn (gen_rtx_SET (VOIDmode, operands[0],
gen_rtx_ZERO_EXTEND (SImode, operands[1])));
DONE;
}
- if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
+ if (TARGET_ARM && MEM_P (operands[1]))
{
emit_insn (gen_movhi_bytes (operands[0], operands[1]));
DONE;
@@ -3902,7 +3902,7 @@ (define_insn "*thumb1_zero_extendhisi2"
rtx b = XEXP (mem, 1);
/* This can happen due to bugs in reload. */
- if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
+ if (REG_P (a) && REGNO (a) == SP_REGNUM)
{
rtx ops[2];
ops[0] = operands[0];
@@ -3914,7 +3914,7 @@ (define_insn "*thumb1_zero_extendhisi2"
}
else if ( GET_CODE (a) == LABEL_REF
- && GET_CODE (b) == CONST_INT)
+ && CONST_INT_P (b))
return \"ldr\\t%0, %1\";
}
@@ -3949,7 +3949,7 @@ (define_insn "*thumb1_zero_extendhisi2_v
rtx b = XEXP (mem, 1);
/* This can happen due to bugs in reload. */
- if (GET_CODE (a) == REG && REGNO (a) == SP_REGNUM)
+ if (REG_P (a) && REGNO (a) == SP_REGNUM)
{
rtx ops[2];
ops[0] = operands[0];
@@ -3961,7 +3961,7 @@ (define_insn "*thumb1_zero_extendhisi2_v
}
else if ( GET_CODE (a) == LABEL_REF
- && GET_CODE (b) == CONST_INT)
+ && CONST_INT_P (b))
return \"ldr\\t%0, %1\";
}
@@ -4011,7 +4011,7 @@ (define_expand "zero_extendqisi2"
(zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "")))]
"TARGET_EITHER"
"
- if (!arm_arch6 && GET_CODE (operands[1]) != MEM)
+ if (!arm_arch6 && !MEM_P (operands[1]))
{
if (TARGET_ARM)
{
@@ -4107,7 +4107,7 @@ (define_split
[(set (match_operand:SI 0 "s_register_operand" "")
(zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
(clobber (match_operand:SI 2 "s_register_operand" ""))]
- "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && ! BYTES_BIG_ENDIAN"
+ "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
[(set (match_dup 2) (match_dup 1))
(set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
""
@@ -4117,7 +4117,7 @@ (define_split
[(set (match_operand:SI 0 "s_register_operand" "")
(zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
(clobber (match_operand:SI 2 "s_register_operand" ""))]
- "TARGET_32BIT && (GET_CODE (operands[1]) != MEM) && BYTES_BIG_ENDIAN"
+ "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
[(set (match_dup 2) (match_dup 1))
(set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
""
@@ -4142,7 +4142,7 @@ (define_expand "extendhisi2"
"TARGET_EITHER"
"
{
- if (GET_CODE (operands[1]) == MEM)
+ if (MEM_P (operands[1]))
{
if (TARGET_THUMB1)
{
@@ -4157,7 +4157,7 @@ (define_expand "extendhisi2"
}
}
- if (TARGET_ARM && GET_CODE (operands[1]) == MEM)
+ if (TARGET_ARM && MEM_P (operands[1]))
{
emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
DONE;
@@ -4209,10 +4209,10 @@ (define_insn "thumb1_extendhisi2"
rtx b = XEXP (mem, 1);
if (GET_CODE (a) == LABEL_REF
- && GET_CODE (b) == CONST_INT)
+ && CONST_INT_P (b))
return \"ldr\\t%0, %1\";
- if (GET_CODE (b) == REG)
+ if (REG_P (b))
return \"ldrsh\\t%0, %1\";
ops[1] = a;
@@ -4224,7 +4224,7 @@ (define_insn "thumb1_extendhisi2"
ops[2] = const0_rtx;
}
- gcc_assert (GET_CODE (ops[1]) == REG);
+ gcc_assert (REG_P (ops[1]));
ops[0] = operands[0];
ops[3] = operands[2];
@@ -4275,10 +4275,10 @@ (define_insn "*thumb1_extendhisi2_insn_v
rtx b = XEXP (mem, 1);
if (GET_CODE (a) == LABEL_REF
- && GET_CODE (b) == CONST_INT)
+ && CONST_INT_P (b))
return \"ldr\\t%0, %1\";
- if (GET_CODE (b) == REG)
+ if (REG_P (b))
return \"ldrsh\\t%0, %1\";
ops[1] = a;
@@ -4290,7 +4290,7 @@ (define_insn "*thumb1_extendhisi2_insn_v
ops[2] = const0_rtx;
}
- gcc_assert (GET_CODE (ops[1]) == REG);
+ gcc_assert (REG_P (ops[1]));
ops[0] = operands[0];
if (reg_mentioned_p (operands[2], ops[1]))
@@ -4384,7 +4384,7 @@ (define_expand "extendqihi2"
"TARGET_ARM"
"
{
- if (arm_arch4 && GET_CODE (operands[1]) == MEM)
+ if (arm_arch4 && MEM_P (operands[1]))
{
emit_insn (gen_rtx_SET (VOIDmode,
operands[0],
@@ -4420,7 +4420,7 @@ (define_expand "extendqisi2"
"TARGET_EITHER"
"
{
- if ((TARGET_THUMB || arm_arch4) && GET_CODE (operands[1]) == MEM)
+ if ((TARGET_THUMB || arm_arch4) && MEM_P (operands[1]))
{
emit_insn (gen_rtx_SET (VOIDmode, operands[0],
gen_rtx_SIGN_EXTEND (SImode, operands[1])));
@@ -4510,9 +4510,9 @@ (define_insn "*thumb1_extendqisi2"
ops[1] = a;
ops[2] = b;
- if (GET_CODE (a) == REG)
+ if (REG_P (a))
{
- if (GET_CODE (b) == REG)
+ if (REG_P (b))
output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
else if (REGNO (a) == REGNO (ops[0]))
{
@@ -4525,7 +4525,7 @@ (define_insn "*thumb1_extendqisi2"
}
else
{
- gcc_assert (GET_CODE (b) == REG);
+ gcc_assert (REG_P (b));
if (REGNO (b) == REGNO (ops[0]))
{
output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
@@ -4536,7 +4536,7 @@ (define_insn "*thumb1_extendqisi2"
output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
}
}
- else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
+ else if (REG_P (mem) && REGNO (ops[0]) == REGNO (mem))
{
output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
output_asm_insn (\"lsl\\t%0, %0, #24\", ops);
@@ -4593,9 +4593,9 @@ (define_insn "*thumb1_extendqisi2_v6"
ops[1] = a;
ops[2] = b;
- if (GET_CODE (a) == REG)
+ if (REG_P (a))
{
- if (GET_CODE (b) == REG)
+ if (REG_P (b))
output_asm_insn (\"ldrsb\\t%0, [%1, %2]\", ops);
else if (REGNO (a) == REGNO (ops[0]))
{
@@ -4607,7 +4607,7 @@ (define_insn "*thumb1_extendqisi2_v6"
}
else
{
- gcc_assert (GET_CODE (b) == REG);
+ gcc_assert (REG_P (b));
if (REGNO (b) == REGNO (ops[0]))
{
output_asm_insn (\"ldrb\\t%0, [%2, %1]\", ops);
@@ -4617,7 +4617,7 @@ (define_insn "*thumb1_extendqisi2_v6"
output_asm_insn (\"mov\\t%0, %2\;ldrsb\\t%0, [%1, %0]\", ops);
}
}
- else if (GET_CODE (mem) == REG && REGNO (ops[0]) == REGNO (mem))
+ else if (REG_P (mem) && REGNO (ops[0]) == REGNO (mem))
{
output_asm_insn (\"ldrb\\t%0, [%0, #0]\", ops);
output_asm_insn (\"sxtb\\t%0, %0\", ops);
@@ -4666,11 +4666,11 @@ (define_expand "extendsfdf2"
;;{
;; rtx insn;
;;
-;; if (GET_CODE (operands[0]) == MEM && GET_CODE (operands[1]) == MEM)
+;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
;; operands[1] = copy_to_reg (operands[1]);
-;; if (GET_CODE (operands[0]) == MEM)
+;; if (MEM_P (operands[0]))
;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
-;; else if (GET_CODE (operands[1]) == MEM)
+;; else if (MEM_P (operands[1]))
;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
;; else
;; FAIL;
@@ -4711,7 +4711,7 @@ (define_expand "movdi"
"
if (can_create_pseudo_p ())
{
- if (GET_CODE (operands[0]) != REG)
+ if (!REG_P (operands[0]))
operands[1] = force_reg (DImode, operands[1]);
}
"
@@ -4891,10 +4891,10 @@ (define_expand "movsi"
if (TARGET_32BIT)
{
/* Everything except mem = const or mem = mem can be done easily. */
- if (GET_CODE (operands[0]) == MEM)
+ if (MEM_P (operands[0]))
operands[1] = force_reg (SImode, operands[1]);
if (arm_general_register_operand (operands[0], SImode)
- && GET_CODE (operands[1]) == CONST_INT
+ && CONST_INT_P (operands[1])
&& !(const_ok_for_arm (INTVAL (operands[1]))
|| const_ok_for_arm (~INTVAL (operands[1]))))
{
@@ -4916,7 +4916,7 @@ (define_expand "movsi"
{
if (can_create_pseudo_p ())
{
- if (GET_CODE (operands[0]) != REG)
+ if (!REG_P (operands[0]))
operands[1] = force_reg (SImode, operands[1]);
}
}
@@ -5237,7 +5237,7 @@ (define_expand "storehi"
rtx addr = XEXP (op1, 0);
enum rtx_code code = GET_CODE (addr);
- if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
+ if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
|| code == MINUS)
op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
@@ -5262,7 +5262,7 @@ (define_expand "storehi_bigend"
rtx addr = XEXP (op1, 0);
enum rtx_code code = GET_CODE (addr);
- if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
+ if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
|| code == MINUS)
op1 = replace_equiv_address (op1, force_reg (SImode, addr));
@@ -5288,7 +5288,7 @@ (define_expand "storeinthi"
rtx op0 = operands[0];
enum rtx_code code = GET_CODE (addr);
- if ((code == PLUS && GET_CODE (XEXP (addr, 1)) != CONST_INT)
+ if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
|| code == MINUS)
op0 = replace_equiv_address (op0, force_reg (SImode, addr));
@@ -5342,18 +5342,18 @@ (define_expand "movhi"
{
if (can_create_pseudo_p ())
{
- if (GET_CODE (operands[0]) == MEM)
+ if (MEM_P (operands[0]))
{
if (arm_arch4)
{
emit_insn (gen_storehi_single_op (operands[0], operands[1]));
DONE;
}
- if (GET_CODE (operands[1]) == CONST_INT)
+ if (CONST_INT_P (operands[1]))
emit_insn (gen_storeinthi (operands[0], operands[1]));
else
{
- if (GET_CODE (operands[1]) == MEM)
+ if (MEM_P (operands[1]))
operands[1] = force_reg (HImode, operands[1]);
if (BYTES_BIG_ENDIAN)
emit_insn (gen_storehi_bigend (operands[1], operands[0]));
@@ -5363,7 +5363,7 @@ (define_expand "movhi"
DONE;
}
/* Sign extend a constant, and keep it in an SImode reg. */
- else if (GET_CODE (operands[1]) == CONST_INT)
+ else if (CONST_INT_P (operands[1]))
{
rtx reg = gen_reg_rtx (SImode);
HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
@@ -5385,7 +5385,7 @@ (define_expand "movhi"
operands[1] = gen_lowpart (HImode, reg);
}
else if (arm_arch4 && optimize && can_create_pseudo_p ()
- && GET_CODE (operands[1]) == MEM)
+ && MEM_P (operands[1]))
{
rtx reg = gen_reg_rtx (SImode);
@@ -5394,18 +5394,17 @@ (define_expand "movhi"
}
else if (!arm_arch4)
{
- if (GET_CODE (operands[1]) == MEM)
+ if (MEM_P (operands[1]))
{
rtx base;
rtx offset = const0_rtx;
rtx reg = gen_reg_rtx (SImode);
- if ((GET_CODE (base = XEXP (operands[1], 0)) == REG
+ if ((REG_P (base = XEXP (operands[1], 0))
|| (GET_CODE (base) == PLUS
- && (GET_CODE (offset = XEXP (base, 1))
- == CONST_INT)
+ && (CONST_INT_P (offset = XEXP (base, 1)))
&& ((INTVAL(offset) & 1) != 1)
- && GET_CODE (base = XEXP (base, 0)) == REG))
+ && REG_P (base = XEXP (base, 0))))
&& REGNO_POINTER_ALIGN (REGNO (base)) >= 32)
{
rtx new_rtx;
@@ -5431,13 +5430,13 @@ (define_expand "movhi"
}
}
/* Handle loading a large integer during reload. */
- else if (GET_CODE (operands[1]) == CONST_INT
+ else if (CONST_INT_P (operands[1])
&& !const_ok_for_arm (INTVAL (operands[1]))
&& !const_ok_for_arm (~INTVAL (operands[1])))
{
/* Writing a constant to memory needs a scratch, which should
be handled with SECONDARY_RELOADs. */
- gcc_assert (GET_CODE (operands[0]) == REG);
+ gcc_assert (REG_P (operands[0]));
operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
emit_insn (gen_movsi (operands[0], operands[1]));
@@ -5449,10 +5448,10 @@ (define_expand "movhi"
/* Thumb-2 can do everything except mem=mem and mem=const easily. */
if (can_create_pseudo_p ())
{
- if (GET_CODE (operands[0]) != REG)
+ if (!REG_P (operands[0]))
operands[1] = force_reg (HImode, operands[1]);
/* Zero extend a constant, and keep it in an SImode reg. */
- else if (GET_CODE (operands[1]) == CONST_INT)
+ else if (CONST_INT_P (operands[1]))
{
rtx reg = gen_reg_rtx (SImode);
HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
@@ -5466,7 +5465,7 @@ (define_expand "movhi"
{
if (can_create_pseudo_p ())
{
- if (GET_CODE (operands[1]) == CONST_INT)
+ if (CONST_INT_P (operands[1]))
{
rtx reg = gen_reg_rtx (SImode);
@@ -5482,21 +5481,21 @@ (define_expand "movhi"
fixup_stack_1, by checking for other kinds of invalid addresses,
e.g. a bare reference to a virtual register. This may confuse the
alpha though, which must handle this case differently. */
- if (GET_CODE (operands[0]) == MEM
+ if (MEM_P (operands[0])
&& !memory_address_p (GET_MODE (operands[0]),
XEXP (operands[0], 0)))
operands[0]
= replace_equiv_address (operands[0],
copy_to_reg (XEXP (operands[0], 0)));
- if (GET_CODE (operands[1]) == MEM
+ if (MEM_P (operands[1])
&& !memory_address_p (GET_MODE (operands[1]),
XEXP (operands[1], 0)))
operands[1]
= replace_equiv_address (operands[1],
copy_to_reg (XEXP (operands[1], 0)));
- if (GET_CODE (operands[1]) == MEM && optimize > 0)
+ if (MEM_P (operands[1]) && optimize > 0)
{
rtx reg = gen_reg_rtx (SImode);
@@ -5504,17 +5503,17 @@ (define_expand "movhi"
operands[1] = gen_lowpart (HImode, reg);
}
- if (GET_CODE (operands[0]) == MEM)
+ if (MEM_P (operands[0]))
operands[1] = force_reg (HImode, operands[1]);
}
- else if (GET_CODE (operands[1]) == CONST_INT
+ else if (CONST_INT_P (operands[1])
&& !satisfies_constraint_I (operands[1]))
{
/* Handle loading a large integer during reload. */
/* Writing a constant to memory needs a scratch, which should
be handled with SECONDARY_RELOADs. */
- gcc_assert (GET_CODE (operands[0]) == REG);
+ gcc_assert (REG_P (operands[0]));
operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
emit_insn (gen_movsi (operands[0], operands[1]));
@@ -5543,7 +5542,7 @@ (define_insn "*thumb1_movhi_insn"
/* The stack pointer can end up being taken as an index register.
Catch this case here and deal with it. */
if (GET_CODE (XEXP (operands[1], 0)) == PLUS
- && GET_CODE (XEXP (XEXP (operands[1], 0), 0)) == REG
+ && REG_P (XEXP (XEXP (operands[1], 0), 0))
&& REGNO (XEXP (XEXP (operands[1], 0), 0)) == SP_REGNUM)
{
rtx ops[2];
@@ -5617,7 +5616,7 @@ (define_insn "*movhi_insn_arch4"
(match_operand:HI 1 "general_operand" "rI,K,r,m"))]
"TARGET_ARM
&& arm_arch4
- && (GET_CODE (operands[1]) != CONST_INT
+ && (!CONST_INT_P (operands[1])
|| const_ok_for_arm (INTVAL (operands[1]))
|| const_ok_for_arm (~INTVAL (operands[1])))"
"@
@@ -5696,7 +5695,7 @@ (define_expand "movqi"
if (can_create_pseudo_p ())
{
- if (GET_CODE (operands[1]) == CONST_INT)
+ if (CONST_INT_P (operands[1]))
{
rtx reg = gen_reg_rtx (SImode);
@@ -5714,13 +5713,13 @@ (define_expand "movqi"
fixup_stack_1, by checking for other kinds of invalid addresses,
e.g. a bare reference to a virtual register. This may confuse the
alpha though, which must handle this case differently. */
- if (GET_CODE (operands[0]) == MEM
+ if (MEM_P (operands[0])
&& !memory_address_p (GET_MODE (operands[0]),
XEXP (operands[0], 0)))
operands[0]
= replace_equiv_address (operands[0],
copy_to_reg (XEXP (operands[0], 0)));
- if (GET_CODE (operands[1]) == MEM
+ if (MEM_P (operands[1])
&& !memory_address_p (GET_MODE (operands[1]),
XEXP (operands[1], 0)))
operands[1]
@@ -5728,7 +5727,7 @@ (define_expand "movqi"
copy_to_reg (XEXP (operands[1], 0)));
}
- if (GET_CODE (operands[1]) == MEM && optimize > 0)
+ if (MEM_P (operands[1]) && optimize > 0)
{
rtx reg = gen_reg_rtx (SImode);
@@ -5736,18 +5735,18 @@ (define_expand "movqi"
operands[1] = gen_lowpart (QImode, reg);
}
- if (GET_CODE (operands[0]) == MEM)
+ if (MEM_P (operands[0]))
operands[1] = force_reg (QImode, operands[1]);
}
else if (TARGET_THUMB
- && GET_CODE (operands[1]) == CONST_INT
+ && CONST_INT_P (operands[1])
&& !satisfies_constraint_I (operands[1]))
{
/* Handle loading a large integer during reload. */
/* Writing a constant to memory needs a scratch, which should
be handled with SECONDARY_RELOADs. */
- gcc_assert (GET_CODE (operands[0]) == REG);
+ gcc_assert (REG_P (operands[0]));
operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
emit_insn (gen_movsi (operands[0], operands[1]));
@@ -5797,14 +5796,14 @@ (define_expand "movsf"
"
if (TARGET_32BIT)
{
- if (GET_CODE (operands[0]) == MEM)
+ if (MEM_P (operands[0]))
operands[1] = force_reg (SFmode, operands[1]);
}
else /* TARGET_THUMB1 */
{
if (can_create_pseudo_p ())
{
- if (GET_CODE (operands[0]) != REG)
+ if (!REG_P (operands[0]))
operands[1] = force_reg (SFmode, operands[1]);
}
}
@@ -5833,7 +5832,7 @@ (define_insn "*arm_movsf_soft_insn"
(match_operand:SF 1 "general_operand" "r,mE,r"))]
"TARGET_ARM
&& TARGET_SOFT_FLOAT
- && (GET_CODE (operands[0]) != MEM
+ && (!MEM_P (operands[0])
|| register_operand (operands[1], SFmode))"
"@
mov%?\\t%0, %1
@@ -5873,14 +5872,14 @@ (define_expand "movdf"
"
if (TARGET_32BIT)
{
- if (GET_CODE (operands[0]) == MEM)
+ if (MEM_P (operands[0]))
operands[1] = force_reg (DFmode, operands[1]);
}
else /* TARGET_THUMB */
{
if (can_create_pseudo_p ())
{
- if (GET_CODE (operands[0]) != REG)
+ if (!REG_P (operands[0]))
operands[1] = force_reg (DFmode, operands[1]);
}
}
@@ -5999,7 +5998,7 @@ (define_expand "movxf"
(match_operand:XF 1 "general_operand" ""))]
"TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_FPA"
"
- if (GET_CODE (operands[0]) == MEM)
+ if (MEM_P (operands[0]))
operands[1] = force_reg (XFmode, operands[1]);
"
)
@@ -6019,11 +6018,11 @@ (define_expand "load_multiple"
HOST_WIDE_INT offset = 0;
/* Support only fixed point registers. */
- if (GET_CODE (operands[2]) != CONST_INT
+ if (!CONST_INT_P (operands[2])
|| INTVAL (operands[2]) > 14
|| INTVAL (operands[2]) < 2
- || GET_CODE (operands[1]) != MEM
- || GET_CODE (operands[0]) != REG
+ || !MEM_P (operands[1])
+ || !REG_P (operands[0])
|| REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
|| REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
FAIL;
@@ -6158,11 +6157,11 @@ (define_expand "store_multiple"
HOST_WIDE_INT offset = 0;
/* Support only fixed point registers. */
- if (GET_CODE (operands[2]) != CONST_INT
+ if (!CONST_INT_P (operands[2])
|| INTVAL (operands[2]) > 14
|| INTVAL (operands[2]) < 2
- || GET_CODE (operands[1]) != REG
- || GET_CODE (operands[0]) != MEM
+ || !REG_P (operands[1])
+ || !MEM_P (operands[0])
|| REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
|| REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
FAIL;
@@ -7251,7 +7250,7 @@ (define_insn "*addsi3_cbranch"
cond[1] = operands[2];
cond[2] = operands[3];
- if (GET_CODE (cond[2]) == CONST_INT && INTVAL (cond[2]) < 0)
+ if (CONST_INT_P (cond[2]) && INTVAL (cond[2]) < 0)
output_asm_insn (\"sub\\t%0, %1, #%n2\", cond);
else
output_asm_insn (\"add\\t%0, %1, %2\", cond);
@@ -9449,13 +9448,13 @@ (define_insn "movcond"
if (GET_CODE (operands[5]) == LT
&& (operands[4] == const0_rtx))
{
- if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
+ if (which_alternative != 1 && REG_P (operands[1]))
{
if (operands[2] == const0_rtx)
return \"and\\t%0, %1, %3, asr #31\";
return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
}
- else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
+ else if (which_alternative != 0 && REG_P (operands[2]))
{
if (operands[1] == const0_rtx)
return \"bic\\t%0, %2, %3, asr #31\";
@@ -9468,13 +9467,13 @@ (define_insn "movcond"
if (GET_CODE (operands[5]) == GE
&& (operands[4] == const0_rtx))
{
- if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
+ if (which_alternative != 1 && REG_P (operands[1]))
{
if (operands[2] == const0_rtx)
return \"bic\\t%0, %1, %3, asr #31\";
return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
}
- else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
+ else if (which_alternative != 0 && REG_P (operands[2]))
{
if (operands[1] == const0_rtx)
return \"and\\t%0, %2, %3, asr #31\";
@@ -9483,7 +9482,7 @@ (define_insn "movcond"
/* The only case that falls through to here is when both ops 1 & 2
are constants. */
}
- if (GET_CODE (operands[4]) == CONST_INT
+ if (CONST_INT_P (operands[4])
&& !const_ok_for_arm (INTVAL (operands[4])))
output_asm_insn (\"cmn\\t%3, #%n4\", operands);
else
@@ -9623,8 +9622,8 @@ (define_insn "*ifcompare_arith_move"
everything is in registers then we can do this in two instructions. */
if (operands[3] == const0_rtx
&& GET_CODE (operands[7]) != AND
- && GET_CODE (operands[5]) == REG
- && GET_CODE (operands[1]) == REG
+ && REG_P (operands[5])
+ && REG_P (operands[1])
&& REGNO (operands[1]) == REGNO (operands[4])
&& REGNO (operands[4]) != REGNO (operands[0]))
{
@@ -9633,7 +9632,7 @@ (define_insn "*ifcompare_arith_move"
else if (GET_CODE (operands[6]) == GE)
return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
}
- if (GET_CODE (operands[3]) == CONST_INT
+ if (CONST_INT_P (operands[3])
&& !const_ok_for_arm (INTVAL (operands[3])))
output_asm_insn (\"cmn\\t%2, #%n3\", operands);
else
@@ -9681,8 +9680,8 @@ (define_insn "*ifcompare_move_arith"
everything is in registers then we can do this in two instructions */
if (operands[5] == const0_rtx
&& GET_CODE (operands[7]) != AND
- && GET_CODE (operands[3]) == REG
- && GET_CODE (operands[1]) == REG
+ && REG_P (operands[3])
+ && REG_P (operands[1])
&& REGNO (operands[1]) == REGNO (operands[2])
&& REGNO (operands[2]) != REGNO (operands[0]))
{
@@ -9692,7 +9691,7 @@ (define_insn "*ifcompare_move_arith"
return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
}
- if (GET_CODE (operands[5]) == CONST_INT
+ if (CONST_INT_P (operands[5])
&& !const_ok_for_arm (INTVAL (operands[5])))
output_asm_insn (\"cmn\\t%4, #%n5\", operands);
else
@@ -10350,7 +10349,7 @@ (define_expand "eh_epilogue"
"
{
cfun->machine->eh_epilogue_sp_ofs = operands[1];
- if (GET_CODE (operands[2]) != REG || REGNO (operands[2]) != 2)
+ if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
{
rtx ra = gen_rtx_REG (Pmode, 2);
Index: cirrus.md
===================================================================
--- cirrus.md (revision 147651)
+++ cirrus.md (working copy)
@@ -407,7 +407,7 @@ (define_insn "*cirrus_movsf_hard_insn"
[(set (match_operand:SF 0 "nonimmediate_operand" "=v,v,v,r,m,r,r,m")
(match_operand:SF 1 "general_operand" "v,mE,r,v,v,r,mE,r"))]
"TARGET_ARM && TARGET_HARD_FLOAT && TARGET_MAVERICK
- && (GET_CODE (operands[0]) != MEM
+ && (!MEM_P (operands[0])
|| register_operand (operands[1], SFmode))"
"@
cfcpys%?\\t%V0, %V1
@@ -430,7 +430,7 @@ (define_insn "*cirrus_movdf_hard_insn"
(match_operand:DF 1 "general_operand" "Q,r,r,r,mF,v,mF,r,v,v"))]
"TARGET_ARM
&& TARGET_HARD_FLOAT && TARGET_MAVERICK
- && (GET_CODE (operands[0]) != MEM
+ && (!MEM_P (operands[0])
|| register_operand (operands[1], DFmode))"
"*
{
@@ -491,7 +491,7 @@ (define_insn "*thumb2_cirrus_movsf_hard_
[(set (match_operand:SF 0 "nonimmediate_operand" "=v,v,v,r,m,r,r,m")
(match_operand:SF 1 "general_operand" "v,mE,r,v,v,r,mE,r"))]
"TARGET_THUMB2 && TARGET_HARD_FLOAT && TARGET_MAVERICK
- && (GET_CODE (operands[0]) != MEM
+ && (!MEM_P (operands[0])
|| register_operand (operands[1], SFmode))"
"@
cfcpys%?\\t%V0, %V1
@@ -514,7 +514,7 @@ (define_insn "*thumb2_cirrus_movdf_hard_
(match_operand:DF 1 "general_operand" "Q,r,r,r,mF,v,mF,r,v,v"))]
"TARGET_THUMB2
&& TARGET_HARD_FLOAT && TARGET_MAVERICK
- && (GET_CODE (operands[0]) != MEM
+ && (!MEM_P (operands[0])
|| register_operand (operands[1], DFmode))"
"*
{
Index: fpa.md
===================================================================
--- fpa.md (revision 147651)
+++ fpa.md (working copy)
@@ -530,7 +530,7 @@ (define_insn "*movsf_fpa"
(match_operand:SF 1 "general_operand" "fG,H,mE,f,r,f,r,mE,r"))]
"TARGET_ARM
&& TARGET_HARD_FLOAT && TARGET_FPA
- && (GET_CODE (operands[0]) != MEM
+ && (!MEM_P (operands[0])
|| register_operand (operands[1], SFmode))"
"@
mvf%?s\\t%0, %1
@@ -557,7 +557,7 @@ (define_insn "*movdf_fpa"
"Q, r,r,r,mF,fG,H,mF,f,r, f"))]
"TARGET_ARM
&& TARGET_HARD_FLOAT && TARGET_FPA
- && (GET_CODE (operands[0]) != MEM
+ && (!MEM_P (operands[0])
|| register_operand (operands[1], DFmode))"
"*
{
@@ -619,7 +619,7 @@ (define_insn "*thumb2_movsf_fpa"
(match_operand:SF 1 "general_operand" "fG,H,mE,f,r,f,r,mE,r"))]
"TARGET_THUMB2
&& TARGET_HARD_FLOAT && TARGET_FPA
- && (GET_CODE (operands[0]) != MEM
+ && (!MEM_P (operands[0])
|| register_operand (operands[1], SFmode))"
"@
mvf%?s\\t%0, %1
@@ -648,7 +648,7 @@ (define_insn "*thumb2_movdf_fpa"
"Q, r,r,r,mF,fG,H,mF,f,r, f"))]
"TARGET_THUMB2
&& TARGET_HARD_FLOAT && TARGET_FPA
- && (GET_CODE (operands[0]) != MEM
+ && (!MEM_P (operands[0])
|| register_operand (operands[1], DFmode))"
"*
{
Index: pe.c
===================================================================
--- pe.c (revision 147651)
+++ pe.c (working copy)
@@ -102,7 +102,7 @@ arm_mark_dllexport (tree decl)
tree idp;
rtlname = XEXP (DECL_RTL (decl), 0);
- if (GET_CODE (rtlname) == MEM)
+ if (MEM_P (rtlname))
rtlname = XEXP (rtlname, 0);
gcc_assert (GET_CODE (rtlname) == SYMBOL_REF);
oldname = XSTR (rtlname, 0);
@@ -138,7 +138,7 @@ arm_mark_dllimport (tree decl)
rtlname = XEXP (DECL_RTL (decl), 0);
- if (GET_CODE (rtlname) == MEM)
+ if (MEM_P (rtlname))
rtlname = XEXP (rtlname, 0);
gcc_assert (GET_CODE (rtlname) == SYMBOL_REF);
oldname = XSTR (rtlname, 0);
@@ -213,8 +213,8 @@ arm_pe_encode_section_info (tree decl, r
else if ((TREE_CODE (decl) == FUNCTION_DECL
|| TREE_CODE (decl) == VAR_DECL)
&& DECL_RTL (decl) != NULL_RTX
- && GET_CODE (DECL_RTL (decl)) == MEM
- && GET_CODE (XEXP (DECL_RTL (decl), 0)) == MEM
+ && MEM_P (DECL_RTL (decl))
+ && MEM_P (XEXP (DECL_RTL (decl), 0))
&& GET_CODE (XEXP (XEXP (DECL_RTL (decl), 0), 0)) == SYMBOL_REF
&& arm_dllimport_name_p (XSTR (XEXP (XEXP (DECL_RTL (decl), 0), 0), 0)))
{
Index: predicates.md
===================================================================
--- predicates.md (revision 147651)
+++ predicates.md (working copy)
@@ -26,7 +26,7 @@ (define_predicate "s_register_operand"
/* We don't consider registers whose class is NO_REGS
to be a register operand. */
/* XXX might have to check for lo regs only for thumb ??? */
- return (GET_CODE (op) == REG
+ return (REG_P (op)
&& (REGNO (op) >= FIRST_PSEUDO_REGISTER
|| REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
})
@@ -55,7 +55,7 @@ (define_predicate "arm_general_register_
if (GET_CODE (op) == SUBREG)
op = SUBREG_REG (op);
- return (GET_CODE (op) == REG
+ return (REG_P (op)
&& (REGNO (op) <= LAST_ARM_REGNUM
|| REGNO (op) >= FIRST_PSEUDO_REGISTER));
})
@@ -68,7 +68,7 @@ (define_predicate "f_register_operand"
/* We don't consider registers whose class is NO_REGS
to be a register operand. */
- return (GET_CODE (op) == REG
+ return (REG_P (op)
&& (REGNO (op) >= FIRST_PSEUDO_REGISTER
|| REGNO_REG_CLASS (REGNO (op)) == FPA_REGS));
})
@@ -131,7 +131,7 @@ (define_predicate "arm_reload_memory_ope
(and (match_code "mem,reg,subreg")
(match_test "(!CONSTANT_P (op)
&& (true_regnum(op) == -1
- || (GET_CODE (op) == REG
+ || (REG_P (op)
&& REGNO (op) >= FIRST_PSEUDO_REGISTER)))")))
;; True for valid operands for the rhs of an floating point insns.
@@ -160,7 +160,7 @@ (define_predicate "arm_float_compare_ope
(define_predicate "index_operand"
(ior (match_operand 0 "s_register_operand")
(and (match_operand 0 "immediate_operand")
- (match_test "(GET_CODE (op) != CONST_INT
+ (match_test "(!CONST_INT_P (op)
|| (INTVAL (op) < 4096 && INTVAL (op) > -4096))"))))
;; True for operators that can be combined with a shift in ARM state.
@@ -178,7 +178,7 @@ (define_special_predicate "shift_operato
(and (ior (ior (and (match_code "mult")
(match_test "power_of_two_operand (XEXP (op, 1), mode)"))
(and (match_code "rotate")
- (match_test "GET_CODE (XEXP (op, 1)) == CONST_INT
+ (match_test "CONST_INT_P (XEXP (op, 1))
&& ((unsigned HOST_WIDE_INT) INTVAL (XEXP (op, 1))) < 32")))
(match_code "ashift,ashiftrt,lshiftrt,rotatert"))
(match_test "mode == GET_MODE (op)")))
@@ -259,7 +259,7 @@ (define_predicate "nonimmediate_di_opera
if (GET_CODE (op) == SUBREG)
op = SUBREG_REG (op);
- return GET_CODE (op) == MEM && memory_address_p (DImode, XEXP (op, 0));
+ return MEM_P (op) && memory_address_p (DImode, XEXP (op, 0));
})
(define_predicate "di_operand"
@@ -276,7 +276,7 @@ (define_predicate "nonimmediate_soft_df_
if (GET_CODE (op) == SUBREG)
op = SUBREG_REG (op);
- return GET_CODE (op) == MEM && memory_address_p (DFmode, XEXP (op, 0));
+ return MEM_P (op) && memory_address_p (DFmode, XEXP (op, 0));
})
(define_predicate "soft_df_operand"
@@ -310,9 +310,9 @@ (define_special_predicate "load_multiple
base = 1;
/* Now check it more carefully. */
- if (GET_CODE (SET_DEST (elt)) != REG
- || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
- || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
+ if (!REG_P (SET_DEST (elt))
+ || !REG_P (XEXP (SET_SRC (elt), 0))
+ || !CONST_INT_P (XEXP (SET_SRC (elt), 1))
|| INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
return false;
}
@@ -320,8 +320,8 @@ (define_special_predicate "load_multiple
/* Perform a quick check so we don't blow up below. */
if (count <= i
|| GET_CODE (XVECEXP (op, 0, i - 1)) != SET
- || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != REG
- || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != MEM)
+ || !REG_P (SET_DEST (XVECEXP (op, 0, i - 1)))
+ || !MEM_P (SET_SRC (XVECEXP (op, 0, i - 1))))
return false;
dest_regno = REGNO (SET_DEST (XVECEXP (op, 0, i - 1)));
@@ -332,14 +332,14 @@ (define_special_predicate "load_multiple
elt = XVECEXP (op, 0, i);
if (GET_CODE (elt) != SET
- || GET_CODE (SET_DEST (elt)) != REG
+ || !REG_P (SET_DEST (elt))
|| GET_MODE (SET_DEST (elt)) != SImode
|| REGNO (SET_DEST (elt)) != (unsigned int)(dest_regno + i - base)
- || GET_CODE (SET_SRC (elt)) != MEM
+ || !MEM_P (SET_SRC (elt))
|| GET_MODE (SET_SRC (elt)) != SImode
|| GET_CODE (XEXP (SET_SRC (elt), 0)) != PLUS
|| !rtx_equal_p (XEXP (XEXP (SET_SRC (elt), 0), 0), src_addr)
- || GET_CODE (XEXP (XEXP (SET_SRC (elt), 0), 1)) != CONST_INT
+ || !CONST_INT_P (XEXP (XEXP (SET_SRC (elt), 0), 1))
|| INTVAL (XEXP (XEXP (SET_SRC (elt), 0), 1)) != (i - base) * 4)
return false;
}
@@ -367,9 +367,9 @@ (define_special_predicate "store_multipl
base = 1;
/* Now check it more carefully. */
- if (GET_CODE (SET_DEST (elt)) != REG
- || GET_CODE (XEXP (SET_SRC (elt), 0)) != REG
- || GET_CODE (XEXP (SET_SRC (elt), 1)) != CONST_INT
+ if (!REG_P (SET_DEST (elt))
+ || !REG_P (XEXP (SET_SRC (elt), 0))
+ || !CONST_INT_P (XEXP (SET_SRC (elt), 1))
|| INTVAL (XEXP (SET_SRC (elt), 1)) != (count - 1) * 4)
return false;
}
@@ -377,8 +377,8 @@ (define_special_predicate "store_multipl
/* Perform a quick check so we don't blow up below. */
if (count <= i
|| GET_CODE (XVECEXP (op, 0, i - 1)) != SET
- || GET_CODE (SET_DEST (XVECEXP (op, 0, i - 1))) != MEM
- || GET_CODE (SET_SRC (XVECEXP (op, 0, i - 1))) != REG)
+ || !MEM_P (SET_DEST (XVECEXP (op, 0, i - 1)))
+ || !REG_P (SET_SRC (XVECEXP (op, 0, i - 1))))
return false;
src_regno = REGNO (SET_SRC (XVECEXP (op, 0, i - 1)));
@@ -389,14 +389,14 @@ (define_special_predicate "store_multipl
elt = XVECEXP (op, 0, i);
if (GET_CODE (elt) != SET
- || GET_CODE (SET_SRC (elt)) != REG
+ || !REG_P (SET_SRC (elt))
|| GET_MODE (SET_SRC (elt)) != SImode
|| REGNO (SET_SRC (elt)) != (unsigned int)(src_regno + i - base)
- || GET_CODE (SET_DEST (elt)) != MEM
+ || !MEM_P (SET_DEST (elt))
|| GET_MODE (SET_DEST (elt)) != SImode
|| GET_CODE (XEXP (SET_DEST (elt), 0)) != PLUS
|| !rtx_equal_p (XEXP (XEXP (SET_DEST (elt), 0), 0), dest_addr)
- || GET_CODE (XEXP (XEXP (SET_DEST (elt), 0), 1)) != CONST_INT
+ || !CONST_INT_P (XEXP (XEXP (SET_DEST (elt), 0), 1))
|| INTVAL (XEXP (XEXP (SET_DEST (elt), 0), 1)) != (i - base) * 4)
return false;
}
@@ -456,7 +456,7 @@ (define_predicate "cirrus_register_opera
if (GET_CODE (op) == SUBREG)
op = SUBREG_REG (op);
- return (GET_CODE (op) == REG
+ return (REG_P (op)
&& (REGNO_REG_CLASS (REGNO (op)) == CIRRUS_REGS
|| REGNO_REG_CLASS (REGNO (op)) == GENERAL_REGS));
})
@@ -467,7 +467,7 @@ (define_predicate "cirrus_fp_register"
if (GET_CODE (op) == SUBREG)
op = SUBREG_REG (op);
- return (GET_CODE (op) == REG
+ return (REG_P (op)
&& (REGNO (op) >= FIRST_PSEUDO_REGISTER
|| REGNO_REG_CLASS (REGNO (op)) == CIRRUS_REGS));
})
Index: thumb2.md
===================================================================
--- thumb2.md (revision 147651)
+++ thumb2.md (working copy)
@@ -298,7 +298,7 @@ (define_insn "*thumb2_movsf_soft_insn"
(match_operand:SF 1 "general_operand" "r,mE,r"))]
"TARGET_THUMB2
&& TARGET_SOFT_FLOAT
- && (GET_CODE (operands[0]) != MEM
+ && (!MEM_P (operands[0])
|| register_operand (operands[1], SFmode))"
"@
mov%?\\t%0, %1
@@ -784,13 +784,13 @@ (define_insn "*thumb2_movcond"
if (GET_CODE (operands[5]) == LT
&& (operands[4] == const0_rtx))
{
- if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
+ if (which_alternative != 1 && REG_P (operands[1]))
{
if (operands[2] == const0_rtx)
return \"and\\t%0, %1, %3, asr #31\";
return \"ands\\t%0, %1, %3, asr #32\;it\\tcc\;movcc\\t%0, %2\";
}
- else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
+ else if (which_alternative != 0 && REG_P (operands[2]))
{
if (operands[1] == const0_rtx)
return \"bic\\t%0, %2, %3, asr #31\";
@@ -803,13 +803,13 @@ (define_insn "*thumb2_movcond"
if (GET_CODE (operands[5]) == GE
&& (operands[4] == const0_rtx))
{
- if (which_alternative != 1 && GET_CODE (operands[1]) == REG)
+ if (which_alternative != 1 && REG_P (operands[1]))
{
if (operands[2] == const0_rtx)
return \"bic\\t%0, %1, %3, asr #31\";
return \"bics\\t%0, %1, %3, asr #32\;it\\tcs\;movcs\\t%0, %2\";
}
- else if (which_alternative != 0 && GET_CODE (operands[2]) == REG)
+ else if (which_alternative != 0 && REG_P (operands[2]))
{
if (operands[1] == const0_rtx)
return \"and\\t%0, %2, %3, asr #31\";
@@ -818,7 +818,7 @@ (define_insn "*thumb2_movcond"
/* The only case that falls through to here is when both ops 1 & 2
are constants. */
}
- if (GET_CODE (operands[4]) == CONST_INT
+ if (CONST_INT_P (operands[4])
&& !const_ok_for_arm (INTVAL (operands[4])))
output_asm_insn (\"cmn\\t%3, #%n4\", operands);
else
@@ -1116,7 +1116,7 @@ (define_insn "*thumb2_addsi_short"
"*
HOST_WIDE_INT val;
- if (GET_CODE (operands[2]) == CONST_INT)
+ if (CONST_INT_P (operands[2]))
val = INTVAL(operands[2]);
else
val = 0;