1 ;;- Machine description for ARM for GNU compiler
2 ;; Copyright (C) 1991-2019 Free Software Foundation, Inc.
3 ;; Contributed by Pieter `Tiggr' Schoenmakers (rcpieter@win.tue.nl)
4 ;; and Martin Simmons (@harleqn.co.uk).
5 ;; More major hacks by Richard Earnshaw (rearnsha@arm.com).
7 ;; This file is part of GCC.
9 ;; GCC is free software; you can redistribute it and/or modify it
10 ;; under the terms of the GNU General Public License as published
11 ;; by the Free Software Foundation; either version 3, or (at your
12 ;; option) any later version.
14 ;; GCC is distributed in the hope that it will be useful, but WITHOUT
15 ;; ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
16 ;; or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
17 ;; License for more details.
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GCC; see the file COPYING3. If not see
21 ;; <http://www.gnu.org/licenses/>.
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et. al.
26 ;;---------------------------------------------------------------------------
29 ;; Register numbers -- All machine registers should be defined here
31 [(R0_REGNUM 0) ; First CORE register
32 (R1_REGNUM 1) ; Second CORE register
33 (R4_REGNUM 4) ; Fifth CORE register
34 (FDPIC_REGNUM 9) ; FDPIC register
35 (IP_REGNUM 12) ; Scratch register
36 (SP_REGNUM 13) ; Stack pointer
37 (LR_REGNUM 14) ; Return address register
38 (PC_REGNUM 15) ; Program counter
39 (LAST_ARM_REGNUM 15) ;
40 (CC_REGNUM 100) ; Condition code pseudo register
41 (VFPCC_REGNUM 101) ; VFP Condition code pseudo register
44 ;; 3rd operand to select_dominance_cc_mode
51 ;; conditional compare combination
62 ;;---------------------------------------------------------------------------
65 ;; Processor type. This is created automatically from arm-cores.def.
66 (include "arm-tune.md")
68 ;; Instruction classification types
71 ; IS_THUMB is set to 'yes' when we are generating Thumb code, and 'no' when
72 ; generating ARM code. This is used to control the length of some insn
73 ; patterns that share the same RTL in both ARM and Thumb code.
; The (const ...) wrapper means the value depends only on the global
; TARGET_THUMB flag, not on the operands of an individual insn.
74 (define_attr "is_thumb" "yes,no"
75 (const (if_then_else (symbol_ref "TARGET_THUMB")
76 (const_string "yes") (const_string "no"))))
78 ; IS_ARCH6 is set to 'yes' when we are generating code for ARMv6.
79 (define_attr "is_arch6" "no,yes" (const (symbol_ref "arm_arch6")))
81 ; IS_THUMB1 is set to 'yes' if and only if we are generating Thumb-1 code.
82 (define_attr "is_thumb1" "yes,no"
83 (const (if_then_else (symbol_ref "TARGET_THUMB1")
84 (const_string "yes") (const_string "no"))))
86 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
87 ; The arm_restrict_it flag enables the "short IT" feature which
88 ; restricts IT blocks to a single 16-bit instruction.
89 ; This attribute should only be used on 16-bit Thumb-2 instructions
90 ; which may be predicated (the "predicable" attribute must be set).
; Defaults to "no".
91 (define_attr "predicable_short_it" "no,yes" (const_string "no"))
93 ; Mark an instruction as suitable for "short IT" blocks in Thumb-2.
94 ; This attribute should only be used on instructions which may emit
95 ; an IT block in their expansion which is not a short IT.
; Defaults to "yes".
96 (define_attr "enabled_for_short_it" "no,yes" (const_string "yes"))
98 ;; Operand number of an input operand that is shifted. Zero if the
99 ;; given instruction does not shift one of its input operands.
;; This is a numeric attribute (empty value list); default is 0.
100 (define_attr "shift" "" (const_int 0))
102 ;; [For compatibility with AArch64 in pipeline models]
103 ;; Attribute that specifies whether or not the instruction touches fp
;; Defaults to "no".
105 (define_attr "fp" "no,yes" (const_string "no"))
107 ; Floating Point Unit. If we only have floating point emulation, then there
108 ; is no point in scheduling the floating point insns. (Well, for best
109 ; performance we should try and group them together).
; The value is taken from the global arm_fpu_attr variable.
110 (define_attr "fpu" "none,vfp"
111 (const (symbol_ref "arm_fpu_attr")))
113 ; Predicated means that the insn form is conditionally executed based on a
114 ; predicate. We default to 'no' because no Thumb patterns match this rule
115 ; and not all ARM insns do.
; See also the "predicable" attribute below.
116 (define_attr "predicated" "yes,no" (const_string "no"))
118 ; LENGTH of an instruction (in bytes)
119 (define_attr "length" ""
122 ; The architecture which supports the instruction (or alternative).
123 ; This can be "a" for ARM, "t" for either of the Thumbs, "32" for
124 ; TARGET_32BIT, "t1" or "t2" to specify a specific Thumb mode. "v6"
125 ; for ARM or Thumb-2 with arm_arch6, and nov6 for ARM without
126 ; arm_arch6. "v6t2" for Thumb-2 with arm_arch6 and "v8mb" for ARMv8-M
127 ; Baseline. This attribute is used to compute attribute "enabled",
128 ; use type "any" to enable an alternative in all cases.
; Further values (see "arch_enabled"): "iwmmxt"/"iwmmxt2" for iWMMXt
; targets, "armv6_or_vfpv3" for arm_arch6 || TARGET_VFP3, and "neon"
; for TARGET_NEON.
129 (define_attr "arch" "any,a,t,32,t1,t2,v6,nov6,v6t2,v8mb,iwmmxt,iwmmxt2,armv6_or_vfpv3,neon"
130 (const_string "any"))
132 (define_attr "arch_enabled" "no,yes"
133 (cond [(eq_attr "arch" "any")
136 (and (eq_attr "arch" "a")
137 (match_test "TARGET_ARM"))
140 (and (eq_attr "arch" "t")
141 (match_test "TARGET_THUMB"))
144 (and (eq_attr "arch" "t1")
145 (match_test "TARGET_THUMB1"))
148 (and (eq_attr "arch" "t2")
149 (match_test "TARGET_THUMB2"))
152 (and (eq_attr "arch" "32")
153 (match_test "TARGET_32BIT"))
156 (and (eq_attr "arch" "v6")
157 (match_test "TARGET_32BIT && arm_arch6"))
160 (and (eq_attr "arch" "nov6")
161 (match_test "TARGET_32BIT && !arm_arch6"))
164 (and (eq_attr "arch" "v6t2")
165 (match_test "TARGET_32BIT && arm_arch6 && arm_arch_thumb2"))
168 (and (eq_attr "arch" "v8mb")
169 (match_test "TARGET_THUMB1 && arm_arch8"))
172 (and (eq_attr "arch" "iwmmxt2")
173 (match_test "TARGET_REALLY_IWMMXT2"))
176 (and (eq_attr "arch" "armv6_or_vfpv3")
177 (match_test "arm_arch6 || TARGET_VFP3"))
180 (and (eq_attr "arch" "neon")
181 (match_test "TARGET_NEON"))
185 (const_string "no")))
; OPT restricts an alternative by optimization goal: "speed"/"size" limit
; it to functions optimized for speed/size respectively (this is tested
; by "opt_enabled" below); "any" places no restriction.
187 (define_attr "opt" "any,speed,size"
188 (const_string "any"))
190 (define_attr "opt_enabled" "no,yes"
191 (cond [(eq_attr "opt" "any")
194 (and (eq_attr "opt" "speed")
195 (match_test "optimize_function_for_speed_p (cfun)"))
198 (and (eq_attr "opt" "size")
199 (match_test "optimize_function_for_size_p (cfun)"))
200 (const_string "yes")]
201 (const_string "no")))
; USE_LITERAL_POOL is "yes" for FP load patterns (types f_loads/f_loadd)
; whose source operand is a constant, i.e. loads whose value will be
; placed in the literal pool.
203 (define_attr "use_literal_pool" "no,yes"
204 (cond [(and (eq_attr "type" "f_loads,f_loadd")
205 (match_test "CONSTANT_P (operands[1])"))
206 (const_string "yes")]
207 (const_string "no")))
209 ; Enable all alternatives that are both arch_enabled and insn_enabled.
210 ; FIXME:: opt_enabled has been temporarily removed till the time we have
211 ; an attribute that allows the use of such alternatives.
212 ; This depends on caching of speed_p, size_p on a per
213 ; alternative basis. The problem is that the enabled attribute
214 ; cannot depend on any state that is not cached or is not constant
215 ; for a compilation unit. We probably need a generic "hot/cold"
216 ; alternative which if implemented can help with this. We disable this
217 ; until such a time as this is implemented and / or the improvements or
218 ; regressions with removing this attribute are double checked.
219 ; See ashldi3_neon and <shift>di3_neon in neon.md.
221 (define_attr "enabled" "no,yes"
222 (cond [(and (eq_attr "predicable_short_it" "no")
223 (and (eq_attr "predicated" "yes")
224 (match_test "arm_restrict_it")))
227 (and (eq_attr "enabled_for_short_it" "no")
228 (match_test "arm_restrict_it"))
231 (eq_attr "arch_enabled" "no")
233 (const_string "yes")))
235 ; POOL_RANGE is how far away from a constant pool entry that this insn
236 ; can be placed. If the distance is zero, then this insn will never
237 ; reference the pool.
238 ; Note that for Thumb constant pools the PC value is rounded down to the
239 ; nearest multiple of four. Therefore, THUMB2_POOL_RANGE (and POOL_RANGE for
240 ; Thumb insns) should be set to <max_range> - 2.
241 ; NEG_POOL_RANGE is nonzero for insns that can reference a constant pool entry
242 ; before its address. It is set to <max_range> - (8 + <data_size>).
; These four attributes hold the per-pattern ranges; the generic
; "pool_range"/"neg_pool_range" attributes below select between the ARM
; and Thumb-2 variants based on "is_thumb".  All default to 0.
243 (define_attr "arm_pool_range" "" (const_int 0))
244 (define_attr "thumb2_pool_range" "" (const_int 0))
245 (define_attr "arm_neg_pool_range" "" (const_int 0))
246 (define_attr "thumb2_neg_pool_range" "" (const_int 0))
; Select the Thumb-2 pool range when generating Thumb code, else the ARM one.
248 (define_attr "pool_range" ""
249 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_pool_range")]
250 (attr "arm_pool_range")))
; Select the Thumb-2 negative pool range when generating Thumb code,
; else the ARM one.
251 (define_attr "neg_pool_range" ""
252 (cond [(eq_attr "is_thumb" "yes") (attr "thumb2_neg_pool_range")]
253 (attr "arm_neg_pool_range")))
255 ; An assembler sequence may clobber the condition codes without us knowing.
256 ; If such an insn references the pool, then we have no way of knowing how,
257 ; so use the most conservative value for pool_range.
; Default attribute values applied to inline assembler statements.
258 (define_asm_attributes
259 [(set_attr "conds" "clob")
260 (set_attr "length" "4")
261 (set_attr "pool_range" "250")])
263 ; Load scheduling: set from the arm_ld_sched variable,
264 ; initialized by arm_option_override ().
265 (define_attr "ldsched" "no,yes" (const (symbol_ref "arm_ld_sched")))
267 ; condition codes: this one is used by final_prescan_insn to speed up
268 ; conditionalizing instructions. It saves having to scan the rtl to see if
269 ; it uses or alters the condition codes.
271 ; USE means that the condition codes are used by the insn in the process of
272 ; outputting code, this means (at present) that we can't use the insn in
275 ; SET means that the purpose of the insn is to set the condition codes in a
276 ; well defined manner.
278 ; CLOB means that the condition codes are altered in an undefined manner, if
279 ; they are altered at all
281 ; UNCONDITIONAL means the instruction cannot be conditionally executed and
282 ; that the instruction does not use or alter the condition codes.
284 ; NOCOND means that the instruction does not use or alter the condition
285 ; codes but can be converted into a conditionally executed instruction.
287 (define_attr "conds" "use,set,clob,unconditional,nocond"
289 (ior (eq_attr "is_thumb1" "yes")
290 (eq_attr "type" "call"))
291 (const_string "clob")
292 (if_then_else (eq_attr "is_neon_type" "no")
293 (const_string "nocond")
294 (const_string "unconditional"))))
296 ; Predicable means that the insn can be conditionally executed based on
297 ; an automatically added predicate (additional patterns are generated by
298 ; gen...). We default to 'no' because no Thumb patterns match this rule
299 ; and not all ARM patterns do.
; See also "predicable_short_it" above.
300 (define_attr "predicable" "no,yes" (const_string "no"))
302 ; Only model the write buffer for ARM6 and ARM7. Earlier processors don't
303 ; have one. Later ones, such as StrongARM, have write-back caches, so don't
304 ; suffer blockages enough to warrant modelling this (and it can adversely
305 ; affect the schedule).
; The value is taken from the arm_tune_wbuf tuning variable.
306 (define_attr "model_wbuf" "no,yes" (const (symbol_ref "arm_tune_wbuf")))
308 ; WRITE_CONFLICT implies that a read following an unrelated write is likely
309 ; to stall the processor. Used with model_wbuf above.
310 (define_attr "write_conflict" "no,yes"
311 (if_then_else (eq_attr "type"
314 (const_string "no")))
316 ; Classify the insns into those that take one cycle and those that take more
317 ; than one on the main cpu execution unit.
; Any type not in the list below falls through to "multi".
318 (define_attr "core_cycles" "single,multi"
319 (if_then_else (eq_attr "type"
320 "adc_imm, adc_reg, adcs_imm, adcs_reg, adr, alu_ext, alu_imm, alu_sreg,\
321 alu_shift_imm, alu_shift_reg, alu_dsp_reg, alus_ext, alus_imm, alus_sreg,\
322 alus_shift_imm, alus_shift_reg, bfm, csel, rev, logic_imm, logic_reg,\
323 logic_shift_imm, logic_shift_reg, logics_imm, logics_reg,\
324 logics_shift_imm, logics_shift_reg, extend, shift_imm, float, fcsel,\
325 wmmx_wor, wmmx_wxor, wmmx_wand, wmmx_wandn, wmmx_wmov, wmmx_tmcrr,\
326 wmmx_tmrrc, wmmx_wldr, wmmx_wstr, wmmx_tmcr, wmmx_tmrc, wmmx_wadd,\
327 wmmx_wsub, wmmx_wmul, wmmx_wmac, wmmx_wavg2, wmmx_tinsr, wmmx_textrm,\
328 wmmx_wshufh, wmmx_wcmpeq, wmmx_wcmpgt, wmmx_wmax, wmmx_wmin, wmmx_wpack,\
329 wmmx_wunpckih, wmmx_wunpckil, wmmx_wunpckeh, wmmx_wunpckel, wmmx_wror,\
330 wmmx_wsra, wmmx_wsrl, wmmx_wsll, wmmx_wmadd, wmmx_tmia, wmmx_tmiaph,\
331 wmmx_tmiaxy, wmmx_tbcst, wmmx_tmovmsk, wmmx_wacc, wmmx_waligni,\
332 wmmx_walignr, wmmx_tandc, wmmx_textrc, wmmx_torc, wmmx_torvsc, wmmx_wsad,\
333 wmmx_wabs, wmmx_wabsdiff, wmmx_waddsubhx, wmmx_wsubaddhx, wmmx_wavg4,\
334 wmmx_wmulw, wmmx_wqmulm, wmmx_wqmulwm, wmmx_waddbhus, wmmx_wqmiaxy,\
335 wmmx_wmiaxy, wmmx_wmiawxy, wmmx_wmerge")
336 (const_string "single")
337 (const_string "multi")))
339 ;; FAR_JUMP is "yes" if a BL instruction is used to generate a branch to a
340 ;; distant label. Only applicable to Thumb code.
;; Defaults to "no".
341 (define_attr "far_jump" "yes,no" (const_string "no"))
344 ;; The number of machine instructions this pattern expands to.
345 ;; Used for Thumb-2 conditional execution.
;; Numeric attribute; defaults to 1.
346 (define_attr "ce_count" "" (const_int 1))
348 ;;---------------------------------------------------------------------------
351 (include "unspecs.md")
353 ;;---------------------------------------------------------------------------
356 (include "iterators.md")
358 ;;---------------------------------------------------------------------------
361 (include "predicates.md")
362 (include "constraints.md")
364 ;;---------------------------------------------------------------------------
365 ;; Pipeline descriptions
367 (define_attr "tune_cortexr4" "yes,no"
369 (eq_attr "tune" "cortexr4,cortexr4f,cortexr5")
371 (const_string "no"))))
373 ;; True if the generic scheduling description should be used.
375 (define_attr "generic_sched" "yes,no"
377 (ior (eq_attr "tune" "fa526,fa626,fa606te,fa626te,fmp626,fa726te,\
378 arm926ejs,arm10e,arm1026ejs,arm1136js,\
379 arm1136jfs,cortexa5,cortexa7,cortexa8,\
380 cortexa9,cortexa12,cortexa15,cortexa17,\
381 cortexa53,cortexa57,cortexm4,cortexm7,\
382 exynosm1,marvell_pj4,xgene1")
383 (eq_attr "tune_cortexr4" "yes"))
385 (const_string "yes"))))
387 (define_attr "generic_vfp" "yes,no"
389 (and (eq_attr "fpu" "vfp")
390 (eq_attr "tune" "!arm10e,cortexa5,cortexa7,\
391 cortexa8,cortexa9,cortexa53,cortexm4,\
392 cortexm7,marvell_pj4,xgene1")
393 (eq_attr "tune_cortexr4" "no"))
395 (const_string "no"))))
397 (include "marvell-f-iwmmxt.md")
398 (include "arm-generic.md")
399 (include "arm926ejs.md")
400 (include "arm1020e.md")
401 (include "arm1026ejs.md")
402 (include "arm1136jfs.md")
404 (include "fa606te.md")
405 (include "fa626te.md")
406 (include "fmp626.md")
407 (include "fa726te.md")
408 (include "cortex-a5.md")
409 (include "cortex-a7.md")
410 (include "cortex-a8.md")
411 (include "cortex-a9.md")
412 (include "cortex-a15.md")
413 (include "cortex-a17.md")
414 (include "cortex-a53.md")
415 (include "cortex-a57.md")
416 (include "cortex-r4.md")
417 (include "cortex-r4f.md")
418 (include "cortex-m7.md")
419 (include "cortex-m4.md")
420 (include "cortex-m4-fpu.md")
421 (include "exynos-m1.md")
423 (include "marvell-pj4.md")
424 (include "xgene1.md")
427 ;;---------------------------------------------------------------------------
432 ;; Note: For DImode insns, there is normally no reason why operands should
433 ;; not be in the same register, what we don't want is for something being
434 ;; written to partially overlap something that is an input.
436 (define_expand "adddi3"
438 [(set (match_operand:DI 0 "s_register_operand")
439 (plus:DI (match_operand:DI 1 "s_register_operand")
440 (match_operand:DI 2 "reg_or_int_operand")))
441 (clobber (reg:CC CC_REGNUM))])]
446 if (!REG_P (operands[2]))
447 operands[2] = force_reg (DImode, operands[2]);
451 rtx lo_result, hi_result, lo_dest, hi_dest;
452 rtx lo_op1, hi_op1, lo_op2, hi_op2;
453 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
455 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
456 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
458 if (lo_op2 == const0_rtx)
461 if (!arm_add_operand (hi_op2, SImode))
462 hi_op2 = force_reg (SImode, hi_op2);
463 /* Assume hi_op2 won't also be zero. */
464 emit_insn (gen_addsi3 (hi_dest, hi_op1, hi_op2));
468 if (!arm_add_operand (lo_op2, SImode))
469 lo_op2 = force_reg (SImode, lo_op2);
470 if (!arm_not_operand (hi_op2, SImode))
471 hi_op2 = force_reg (SImode, hi_op2);
473 emit_insn (gen_addsi3_compare_op1 (lo_dest, lo_op1, lo_op2));
474 rtx carry = gen_rtx_LTU (SImode, gen_rtx_REG (CC_Cmode, CC_REGNUM),
476 if (hi_op2 == const0_rtx)
477 emit_insn (gen_add0si3_carryin (hi_dest, hi_op1, carry));
479 emit_insn (gen_addsi3_carryin (hi_dest, hi_op1, hi_op2, carry));
482 if (lo_result != lo_dest)
483 emit_move_insn (lo_result, lo_dest);
484 if (hi_result != hi_dest)
485 emit_move_insn (gen_highpart (SImode, operands[0]), hi_dest);
491 (define_expand "addvsi4"
492 [(match_operand:SI 0 "s_register_operand")
493 (match_operand:SI 1 "s_register_operand")
494 (match_operand:SI 2 "arm_add_operand")
495 (match_operand 3 "")]
498 if (CONST_INT_P (operands[2]))
499 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1], operands[2]));
501 emit_insn (gen_addsi3_compareV_reg (operands[0], operands[1], operands[2]));
502 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
507 (define_expand "addvdi4"
508 [(match_operand:DI 0 "s_register_operand")
509 (match_operand:DI 1 "s_register_operand")
510 (match_operand:DI 2 "reg_or_int_operand")
511 (match_operand 3 "")]
514 rtx lo_result, hi_result;
515 rtx lo_op1, hi_op1, lo_op2, hi_op2;
516 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
518 lo_result = gen_lowpart (SImode, operands[0]);
519 hi_result = gen_highpart (SImode, operands[0]);
521 if (lo_op2 == const0_rtx)
523 emit_move_insn (lo_result, lo_op1);
524 if (!arm_add_operand (hi_op2, SImode))
525 hi_op2 = force_reg (SImode, hi_op2);
527 emit_insn (gen_addvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
531 if (!arm_add_operand (lo_op2, SImode))
532 lo_op2 = force_reg (SImode, lo_op2);
533 if (!arm_not_operand (hi_op2, SImode))
534 hi_op2 = force_reg (SImode, hi_op2);
536 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
538 if (hi_op2 == const0_rtx)
539 emit_insn (gen_addsi3_cin_vout_0 (hi_result, hi_op1));
540 else if (CONST_INT_P (hi_op2))
541 emit_insn (gen_addsi3_cin_vout_imm (hi_result, hi_op1, hi_op2));
543 emit_insn (gen_addsi3_cin_vout_reg (hi_result, hi_op1, hi_op2));
545 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
551 (define_expand "addsi3_cin_vout_reg"
556 (plus:DI (match_dup 4)
557 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
558 (sign_extend:DI (match_operand:SI 2 "s_register_operand")))
559 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
561 (set (match_operand:SI 0 "s_register_operand")
562 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
566 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
567 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
568 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
569 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
573 (define_insn "*addsi3_cin_vout_reg_insn"
574 [(set (reg:CC_V CC_REGNUM)
578 (match_operand:DI 3 "arm_carry_operation" "")
579 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
580 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
582 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
585 (set (match_operand:SI 0 "s_register_operand" "=l,r")
586 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
592 [(set_attr "type" "alus_sreg")
593 (set_attr "arch" "t2,*")
594 (set_attr "length" "2,4")]
597 (define_expand "addsi3_cin_vout_imm"
602 (plus:DI (match_dup 4)
603 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
605 (sign_extend:DI (plus:SI (plus:SI (match_dup 5) (match_dup 1))
607 (set (match_operand:SI 0 "s_register_operand")
608 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
609 (match_operand 2 "arm_adcimm_operand")))])]
612 operands[3] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
613 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
614 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
615 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
619 (define_insn "*addsi3_cin_vout_imm_insn"
620 [(set (reg:CC_V CC_REGNUM)
624 (match_operand:DI 3 "arm_carry_operation" "")
625 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
626 (match_operand 2 "arm_adcimm_operand" "I,K"))
628 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
631 (set (match_operand:SI 0 "s_register_operand" "=r,r")
632 (plus:SI (plus:SI (match_dup 4) (match_dup 1))
637 sbcs%?\\t%0, %1, #%B2"
638 [(set_attr "type" "alus_imm")]
641 (define_expand "addsi3_cin_vout_0"
645 (plus:DI (match_dup 3)
646 (sign_extend:DI (match_operand:SI 1 "s_register_operand")))
647 (sign_extend:DI (plus:SI (match_dup 4) (match_dup 1)))))
648 (set (match_operand:SI 0 "s_register_operand")
649 (plus:SI (match_dup 4) (match_dup 1)))])]
652 operands[2] = gen_rtx_REG (CC_Vmode, CC_REGNUM);
653 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
654 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
655 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
659 (define_insn "*addsi3_cin_vout_0_insn"
660 [(set (reg:CC_V CC_REGNUM)
663 (match_operand:DI 2 "arm_carry_operation" "")
664 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
665 (sign_extend:DI (plus:SI
666 (match_operand:SI 3 "arm_carry_operation" "")
668 (set (match_operand:SI 0 "s_register_operand" "=r")
669 (plus:SI (match_dup 3) (match_dup 1)))]
671 "adcs%?\\t%0, %1, #0"
672 [(set_attr "type" "alus_imm")]
675 (define_expand "uaddvsi4"
676 [(match_operand:SI 0 "s_register_operand")
677 (match_operand:SI 1 "s_register_operand")
678 (match_operand:SI 2 "arm_add_operand")
679 (match_operand 3 "")]
682 emit_insn (gen_addsi3_compare_op1 (operands[0], operands[1], operands[2]));
683 arm_gen_unlikely_cbranch (LTU, CC_Cmode, operands[3]);
688 (define_expand "uaddvdi4"
689 [(match_operand:DI 0 "s_register_operand")
690 (match_operand:DI 1 "s_register_operand")
691 (match_operand:DI 2 "reg_or_int_operand")
692 (match_operand 3 "")]
695 rtx lo_result, hi_result;
696 rtx lo_op1, hi_op1, lo_op2, hi_op2;
697 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
699 lo_result = gen_lowpart (SImode, operands[0]);
700 hi_result = gen_highpart (SImode, operands[0]);
702 if (lo_op2 == const0_rtx)
704 emit_move_insn (lo_result, lo_op1);
705 if (!arm_add_operand (hi_op2, SImode))
706 hi_op2 = force_reg (SImode, hi_op2);
708 gen_uaddvsi4 (hi_result, hi_op1, hi_op2, operands[3]);
712 if (!arm_add_operand (lo_op2, SImode))
713 lo_op2 = force_reg (SImode, lo_op2);
714 if (!arm_not_operand (hi_op2, SImode))
715 hi_op2 = force_reg (SImode, hi_op2);
717 emit_insn (gen_addsi3_compare_op1 (lo_result, lo_op1, lo_op2));
719 if (hi_op2 == const0_rtx)
720 emit_insn (gen_addsi3_cin_cout_0 (hi_result, hi_op1));
721 else if (CONST_INT_P (hi_op2))
722 emit_insn (gen_addsi3_cin_cout_imm (hi_result, hi_op1, hi_op2));
724 emit_insn (gen_addsi3_cin_cout_reg (hi_result, hi_op1, hi_op2));
726 arm_gen_unlikely_cbranch (GEU, CC_ADCmode, operands[3]);
732 (define_expand "addsi3_cin_cout_reg"
737 (plus:DI (match_dup 4)
738 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
739 (zero_extend:DI (match_operand:SI 2 "s_register_operand")))
740 (const_int 4294967296)))
741 (set (match_operand:SI 0 "s_register_operand")
742 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
746 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
747 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
748 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
749 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
753 (define_insn "*addsi3_cin_cout_reg_insn"
754 [(set (reg:CC_ADC CC_REGNUM)
758 (match_operand:DI 3 "arm_carry_operation" "")
759 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "%0,r")))
760 (zero_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
761 (const_int 4294967296)))
762 (set (match_operand:SI 0 "s_register_operand" "=l,r")
763 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
770 [(set_attr "type" "alus_sreg")
771 (set_attr "arch" "t2,*")
772 (set_attr "length" "2,4")]
775 (define_expand "addsi3_cin_cout_imm"
780 (plus:DI (match_dup 4)
781 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
783 (const_int 4294967296)))
784 (set (match_operand:SI 0 "s_register_operand")
785 (plus:SI (plus:SI (match_dup 5) (match_dup 1))
786 (match_operand:SI 2 "arm_adcimm_operand")))])]
789 operands[3] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
790 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
791 operands[4] = gen_rtx_LTU (DImode, ccin, const0_rtx);
792 operands[5] = gen_rtx_LTU (SImode, ccin, const0_rtx);
793 operands[6] = GEN_INT (UINTVAL (operands[2]) & 0xffffffff);
797 (define_insn "*addsi3_cin_cout_imm_insn"
798 [(set (reg:CC_ADC CC_REGNUM)
802 (match_operand:DI 3 "arm_carry_operation" "")
803 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r")))
804 (match_operand:DI 5 "const_int_operand" "n,n"))
805 (const_int 4294967296)))
806 (set (match_operand:SI 0 "s_register_operand" "=r,r")
807 (plus:SI (plus:SI (match_operand:SI 4 "arm_carry_operation" "")
809 (match_operand:SI 2 "arm_adcimm_operand" "I,K")))]
811 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[5])"
814 sbcs%?\\t%0, %1, #%B2"
815 [(set_attr "type" "alus_imm")]
818 (define_expand "addsi3_cin_cout_0"
822 (plus:DI (match_dup 3)
823 (zero_extend:DI (match_operand:SI 1 "s_register_operand")))
824 (const_int 4294967296)))
825 (set (match_operand:SI 0 "s_register_operand")
826 (plus:SI (match_dup 4) (match_dup 1)))])]
829 operands[2] = gen_rtx_REG (CC_ADCmode, CC_REGNUM);
830 rtx ccin = gen_rtx_REG (CC_Cmode, CC_REGNUM);
831 operands[3] = gen_rtx_LTU (DImode, ccin, const0_rtx);
832 operands[4] = gen_rtx_LTU (SImode, ccin, const0_rtx);
836 (define_insn "*addsi3_cin_cout_0_insn"
837 [(set (reg:CC_ADC CC_REGNUM)
840 (match_operand:DI 2 "arm_carry_operation" "")
841 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r")))
842 (const_int 4294967296)))
843 (set (match_operand:SI 0 "s_register_operand" "=r")
844 (plus:SI (match_operand:SI 3 "arm_carry_operation" "") (match_dup 1)))]
846 "adcs%?\\t%0, %1, #0"
847 [(set_attr "type" "alus_imm")]
850 (define_expand "addsi3"
851 [(set (match_operand:SI 0 "s_register_operand")
852 (plus:SI (match_operand:SI 1 "s_register_operand")
853 (match_operand:SI 2 "reg_or_int_operand")))]
856 if (TARGET_32BIT && CONST_INT_P (operands[2]))
858 arm_split_constant (PLUS, SImode, NULL_RTX,
859 INTVAL (operands[2]), operands[0], operands[1],
860 optimize && can_create_pseudo_p ());
866 ; If there is a scratch available, this will be faster than synthesizing the
869 [(match_scratch:SI 3 "r")
870 (set (match_operand:SI 0 "arm_general_register_operand" "")
871 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
872 (match_operand:SI 2 "const_int_operand" "")))]
874 !(const_ok_for_arm (INTVAL (operands[2]))
875 || const_ok_for_arm (-INTVAL (operands[2])))
876 && const_ok_for_arm (~INTVAL (operands[2]))"
877 [(set (match_dup 3) (match_dup 2))
878 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))]
882 ;; The r/r/k alternative is required when reloading the address
883 ;; (plus (reg rN) (reg sp)) into (reg rN). In this case reload will
884 ;; put the duplicated register first, and not try the commutative version.
885 (define_insn_and_split "*arm_addsi3"
886 [(set (match_operand:SI 0 "s_register_operand" "=rk,l,l ,l ,r ,k ,r,k ,r ,k ,r ,k,k,r ,k ,r")
887 (plus:SI (match_operand:SI 1 "s_register_operand" "%0 ,l,0 ,l ,rk,k ,r,r ,rk,k ,rk,k,r,rk,k ,rk")
888 (match_operand:SI 2 "reg_or_int_operand" "rk ,l,Py,Pd,rI,rI,k,rI,Pj,Pj,L ,L,L,PJ,PJ,?n")))]
904 subw%?\\t%0, %1, #%n2
905 subw%?\\t%0, %1, #%n2
908 && CONST_INT_P (operands[2])
909 && !const_ok_for_op (INTVAL (operands[2]), PLUS)
910 && (reload_completed || !arm_eliminable_register (operands[1]))"
911 [(clobber (const_int 0))]
913 arm_split_constant (PLUS, SImode, curr_insn,
914 INTVAL (operands[2]), operands[0],
918 [(set_attr "length" "2,4,4,4,4,4,4,4,4,4,4,4,4,4,4,16")
919 (set_attr "predicable" "yes")
920 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no,no,no,no,no,no,no,no")
921 (set_attr "arch" "t2,t2,t2,t2,*,*,*,a,t2,t2,*,*,a,t2,t2,*")
922 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
923 (const_string "alu_imm")
924 (const_string "alu_sreg")))
928 (define_insn "addsi3_compareV_reg"
929 [(set (reg:CC_V CC_REGNUM)
932 (sign_extend:DI (match_operand:SI 1 "register_operand" "%l,0,r"))
933 (sign_extend:DI (match_operand:SI 2 "register_operand" "l,r,r")))
934 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
935 (set (match_operand:SI 0 "register_operand" "=l,r,r")
936 (plus:SI (match_dup 1) (match_dup 2)))]
938 "adds%?\\t%0, %1, %2"
939 [(set_attr "conds" "set")
940 (set_attr "arch" "t2,t2,*")
941 (set_attr "length" "2,2,4")
942 (set_attr "type" "alus_sreg")]
945 (define_insn "*addsi3_compareV_reg_nosum"
946 [(set (reg:CC_V CC_REGNUM)
949 (sign_extend:DI (match_operand:SI 0 "register_operand" "%l,r"))
950 (sign_extend:DI (match_operand:SI 1 "register_operand" "l,r")))
951 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
954 [(set_attr "conds" "set")
955 (set_attr "arch" "t2,*")
956 (set_attr "length" "2,4")
957 (set_attr "type" "alus_sreg")]
960 (define_insn "subvsi3_intmin"
961 [(set (reg:CC_V CC_REGNUM)
965 (match_operand:SI 1 "register_operand" "r"))
966 (const_int 2147483648))
967 (sign_extend:DI (plus:SI (match_dup 1) (const_int -2147483648)))))
968 (set (match_operand:SI 0 "register_operand" "=r")
969 (plus:SI (match_dup 1) (const_int -2147483648)))]
971 "subs%?\\t%0, %1, #-2147483648"
972 [(set_attr "conds" "set")
973 (set_attr "type" "alus_imm")]
976 (define_insn "addsi3_compareV_imm"
977 [(set (reg:CC_V CC_REGNUM)
981 (match_operand:SI 1 "register_operand" "l,0,l,0,r,r"))
982 (match_operand 2 "arm_addimm_operand" "Pd,Py,Px,Pw,I,L"))
983 (sign_extend:DI (plus:SI (match_dup 1) (match_dup 2)))))
984 (set (match_operand:SI 0 "register_operand" "=l,l,l,l,r,r")
985 (plus:SI (match_dup 1) (match_dup 2)))]
987 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
991 subs%?\\t%0, %1, #%n2
992 subs%?\\t%0, %0, #%n2
994 subs%?\\t%0, %1, #%n2"
995 [(set_attr "conds" "set")
996 (set_attr "arch" "t2,t2,t2,t2,*,*")
997 (set_attr "length" "2,2,2,2,4,4")
998 (set_attr "type" "alus_imm")]
1001 (define_insn "addsi3_compareV_imm_nosum"
1002 [(set (reg:CC_V CC_REGNUM)
1006 (match_operand:SI 0 "register_operand" "l,r,r"))
1007 (match_operand 1 "arm_addimm_operand" "Pw,I,L"))
1008 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1010 && INTVAL (operands[1]) == ARM_SIGN_EXTEND (INTVAL (operands[1]))"
1015 [(set_attr "conds" "set")
1016 (set_attr "arch" "t2,*,*")
1017 (set_attr "length" "2,4,4")
1018 (set_attr "type" "alus_imm")]
1021 ;; We can handle more constants efficently if we can clobber either a scratch
1022 ;; or the other source operand. We deliberately leave this late as in
1023 ;; high register pressure situations it's not worth forcing any reloads.
1025 [(match_scratch:SI 2 "l")
1026 (set (reg:CC_V CC_REGNUM)
1030 (match_operand:SI 0 "low_register_operand"))
1031 (match_operand 1 "const_int_operand"))
1032 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1034 && satisfies_constraint_Pd (operands[1])"
1036 (set (reg:CC_V CC_REGNUM)
1038 (plus:DI (sign_extend:DI (match_dup 0))
1039 (sign_extend:DI (match_dup 1)))
1040 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1041 (set (match_dup 2) (plus:SI (match_dup 0) (match_dup 1)))])]
1045 [(set (reg:CC_V CC_REGNUM)
1049 (match_operand:SI 0 "low_register_operand"))
1050 (match_operand 1 "const_int_operand"))
1051 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))]
1053 && dead_or_set_p (peep2_next_insn (0), operands[0])
1054 && satisfies_constraint_Py (operands[1])"
1056 (set (reg:CC_V CC_REGNUM)
1058 (plus:DI (sign_extend:DI (match_dup 0))
1059 (sign_extend:DI (match_dup 1)))
1060 (sign_extend:DI (plus:SI (match_dup 0) (match_dup 1)))))
1061 (set (match_dup 0) (plus:SI (match_dup 0) (match_dup 1)))])]
;; Add two operands, store the sum, and set the condition codes from the
;; result compared with zero (CC_NOOV); negative immediates use SUBS (#%n2).
1064 (define_insn "addsi3_compare0"
1065 [(set (reg:CC_NOOV CC_REGNUM)
1067 (plus:SI (match_operand:SI 1 "s_register_operand" "r, r,r")
1068 (match_operand:SI 2 "arm_add_operand" "I,L,r"))
1070 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1071 (plus:SI (match_dup 1) (match_dup 2)))]
1075 subs%?\\t%0, %1, #%n2
1076 adds%?\\t%0, %1, %2"
1077 [(set_attr "conds" "set")
1078 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
;; As addsi3_compare0, but the sum itself is discarded; only the condition
;; codes are produced.
1081 (define_insn "*addsi3_compare0_scratch"
1082 [(set (reg:CC_NOOV CC_REGNUM)
1084 (plus:SI (match_operand:SI 0 "s_register_operand" "r, r, r")
1085 (match_operand:SI 1 "arm_add_operand" "I,L, r"))
1092 [(set_attr "conds" "set")
1093 (set_attr "predicable" "yes")
1094 (set_attr "type" "alus_imm,alus_imm,alus_sreg")]
;; Compare the negation of operand 0 against operand 1; only the Z flag
;; result is meaningful (CC_Z mode).
1097 (define_insn "*compare_negsi_si"
1098 [(set (reg:CC_Z CC_REGNUM)
1100 (neg:SI (match_operand:SI 0 "s_register_operand" "l,r"))
1101 (match_operand:SI 1 "s_register_operand" "l,r")))]
1104 [(set_attr "conds" "set")
1105 (set_attr "predicable" "yes")
1106 (set_attr "arch" "t2,*")
1107 (set_attr "length" "2,4")
1108 (set_attr "predicable_short_it" "yes,no")
1109 (set_attr "type" "alus_sreg")]
1112 ;; This is the canonicalization of subsi3_compare when the
1113 ;; addend is a constant.
;; Operand 2 (the value compared) and operand 3 (the value added) must be
;; exact negations of each other, as enforced by the insn condition below.
1114 (define_insn "cmpsi2_addneg"
1115 [(set (reg:CC CC_REGNUM)
1117 (match_operand:SI 1 "s_register_operand" "r,r")
1118 (match_operand:SI 2 "arm_addimm_operand" "I,L")))
1119 (set (match_operand:SI 0 "s_register_operand" "=r,r")
1120 (plus:SI (match_dup 1)
1121 (match_operand:SI 3 "arm_addimm_operand" "L,I")))]
1123 && (INTVAL (operands[2])
1124 == trunc_int_for_mode (-INTVAL (operands[3]), SImode))"
1126 /* For 0 and INT_MIN it is essential that we use subs, as adds will result
1127 in different condition codes (like cmn rather than like cmp), so that
1128 alternative comes first. Both alternatives can match for any 0x??000000
1129 where except for 0 and INT_MIN it doesn't matter what we choose, and also
1130 for -1 and 1 with TARGET_THUMB2, in that case prefer instruction with #1
1131 as it is shorter. */
1132 if (which_alternative == 0 && operands[3] != const1_rtx)
1133 return "subs%?\\t%0, %1, #%n3";
1135 return "adds%?\\t%0, %1, %3";
1137 [(set_attr "conds" "set")
1138 (set_attr "type" "alus_sreg")]
1141 ;; Convert the sequence
1143 ;; cmn rd, #1 (equivalent to cmp rd, #-1)
1147 ;; bcs dest ((unsigned)rn >= 1)
1148 ;; similarly for the beq variant using bcc.
1149 ;; This is a common looping idiom (while (n--))
;; The CC register must be dead after the branch (peep2_reg_dead_p) so the
;; compare can be folded into the decrement.
1151 [(set (match_operand:SI 0 "arm_general_register_operand" "")
1152 (plus:SI (match_operand:SI 1 "arm_general_register_operand" "")
1154 (set (match_operand 2 "cc_register" "")
1155 (compare (match_dup 0) (const_int -1)))
1157 (if_then_else (match_operator 3 "equality_operator"
1158 [(match_dup 2) (const_int 0)])
1159 (match_operand 4 "" "")
1160 (match_operand 5 "" "")))]
1161 "TARGET_32BIT && peep2_reg_dead_p (3, operands[2])"
1165 (match_dup 1) (const_int 1)))
1166 (set (match_dup 0) (plus:SI (match_dup 1) (const_int -1)))])
1168 (if_then_else (match_op_dup 3 [(match_dup 2) (const_int 0)])
1171 "operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
1172 operands[3] = gen_rtx_fmt_ee ((GET_CODE (operands[3]) == NE
1175 operands[2], const0_rtx);"
1178 ;; The next four insns work because they compare the result with one of
1179 ;; the operands, and we know that the use of the condition code is
1180 ;; either GEU or LTU, so we can use the carry flag from the addition
1181 ;; instead of doing the compare a second time.
;; CC_C mode: only the carry output of the flags is meaningful here.
1182 (define_insn "addsi3_compare_op1"
1183 [(set (reg:CC_C CC_REGNUM)
1185 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,rk,rk")
1186 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rkI,L"))
1188 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,rk,rk")
1189 (plus:SI (match_dup 1) (match_dup 2)))]
1194 subs%?\\t%0, %1, #%n2
1195 subs%?\\t%0, %0, #%n2
1197 subs%?\\t%0, %1, #%n2"
1198 [(set_attr "conds" "set")
1199 (set_attr "arch" "t2,t2,t2,t2,*,*")
1200 (set_attr "length" "2,2,2,2,4,4")
1202 (if_then_else (match_operand 2 "const_int_operand")
1203 (const_string "alu_imm")
1204 (const_string "alu_sreg")))]
;; Variant of addsi3_compare_op1 (see the block comment above that
;; pattern); the type attribute picks the immediate form when operand 2
;; is a constant.
1207 (define_insn "*addsi3_compare_op2"
1208 [(set (reg:CC_C CC_REGNUM)
1210 (plus:SI (match_operand:SI 1 "s_register_operand" "l,0,l,0,r,r")
1211 (match_operand:SI 2 "arm_add_operand" "lPd,Py,lPx,Pw,rI,L"))
1213 (set (match_operand:SI 0 "s_register_operand" "=l,l,l,l,r,r")
1214 (plus:SI (match_dup 1) (match_dup 2)))]
1219 subs%?\\t%0, %1, #%n2
1220 subs%?\\t%0, %0, #%n2
1222 subs%?\\t%0, %1, #%n2"
1223 [(set_attr "conds" "set")
1224 (set_attr "arch" "t2,t2,t2,t2,*,*")
1225 (set_attr "length" "2,2,2,2,4,4")
1227 (if_then_else (match_operand 2 "const_int_operand")
1228 (const_string "alu_imm")
1229 (const_string "alu_sreg")))]
;; Compare-only form: sets CC_C from the addition of operands 0 and 1;
;; the sum is not stored (presumably discarded into a scratch — the lines
;; that would show this are missing from this extract).
1232 (define_insn "*compare_addsi2_op0"
1233 [(set (reg:CC_C CC_REGNUM)
1235 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1236 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1244 [(set_attr "conds" "set")
1245 (set_attr "predicable" "yes")
1246 (set_attr "arch" "t2,t2,*,*")
1247 (set_attr "predicable_short_it" "yes,yes,no,no")
1248 (set_attr "length" "2,2,4,4")
1250 (if_then_else (match_operand 1 "const_int_operand")
1251 (const_string "alu_imm")
1252 (const_string "alu_sreg")))]
;; As *compare_addsi2_op0 but comparing against the other addend.
1255 (define_insn "*compare_addsi2_op1"
1256 [(set (reg:CC_C CC_REGNUM)
1258 (plus:SI (match_operand:SI 0 "s_register_operand" "l,l,r,r")
1259 (match_operand:SI 1 "arm_add_operand" "l,Pw,rI,L"))
1267 [(set_attr "conds" "set")
1268 (set_attr "predicable" "yes")
1269 (set_attr "arch" "t2,t2,*,*")
1270 (set_attr "predicable_short_it" "yes,yes,no,no")
1271 (set_attr "length" "2,2,4,4")
1273 (if_then_else (match_operand 1 "const_int_operand")
1274 (const_string "alu_imm")
1275 (const_string "alu_sreg")))]
;; Add with carry-in: operand 3 is the incoming carry
;; (arm_carry_operation).  The K (not-immediate) alternative is emitted as
;; SBC with the bitwise complement of the constant (#%B2).
1278 (define_insn "addsi3_carryin"
1279 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1280 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%l,r,r")
1281 (match_operand:SI 2 "arm_not_operand" "0,rI,K"))
1282 (match_operand:SI 3 "arm_carry_operation" "")))]
1287 sbc%?\\t%0, %1, #%B2"
1288 [(set_attr "conds" "use")
1289 (set_attr "predicable" "yes")
1290 (set_attr "arch" "t2,*,*")
1291 (set_attr "length" "4")
1292 (set_attr "predicable_short_it" "yes,no,no")
1293 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
1296 ;; Canonicalization of the above when the immediate is zero.
;; Emitted as ADC with a zero immediate.
1297 (define_insn "add0si3_carryin"
1298 [(set (match_operand:SI 0 "s_register_operand" "=r")
1299 (plus:SI (match_operand:SI 2 "arm_carry_operation" "")
1300 (match_operand:SI 1 "arm_not_operand" "r")))]
1302 "adc%?\\t%0, %1, #0"
1303 [(set_attr "conds" "use")
1304 (set_attr "predicable" "yes")
1305 (set_attr "length" "4")
1306 (set_attr "type" "adc_imm")]
;; As addsi3_carryin, but with the carry operand written first inside the
;; nested plus (alternative RTL canonicalization of the same operation).
1309 (define_insn "*addsi3_carryin_alt2"
1310 [(set (match_operand:SI 0 "s_register_operand" "=l,r,r")
1311 (plus:SI (plus:SI (match_operand:SI 3 "arm_carry_operation" "")
1312 (match_operand:SI 1 "s_register_operand" "%l,r,r"))
1313 (match_operand:SI 2 "arm_not_operand" "l,rI,K")))]
1318 sbc%?\\t%0, %1, #%B2"
1319 [(set_attr "conds" "use")
1320 (set_attr "predicable" "yes")
1321 (set_attr "arch" "t2,*,*")
1322 (set_attr "length" "4")
1323 (set_attr "predicable_short_it" "yes,no,no")
1324 (set_attr "type" "adc_reg,adc_reg,adc_imm")]
;; Add-with-carry where one addend is a shifted register: ADC with a shift
;; applied to operand 3 (%S2 prints the shift from operator 2).
1327 (define_insn "*addsi3_carryin_shift"
1328 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
1330 (match_operator:SI 2 "shift_operator"
1331 [(match_operand:SI 3 "s_register_operand" "r,r")
1332 (match_operand:SI 4 "shift_amount_operand" "M,r")])
1333 (match_operand:SI 5 "arm_carry_operation" ""))
1334 (match_operand:SI 1 "s_register_operand" "r,r")))]
1336 "adc%?\\t%0, %1, %3%S2"
1337 [(set_attr "conds" "use")
1338 (set_attr "arch" "32,a")
1339 (set_attr "shift" "3")
1340 (set_attr "predicable" "yes")
1341 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1342 (const_string "alu_shift_imm")
1343 (const_string "alu_shift_reg")))]
;; Add-with-carry that also sets the condition codes (ADCS); the CC
;; register is clobbered rather than given a defined compare result.
1346 (define_insn "*addsi3_carryin_clobercc"
1347 [(set (match_operand:SI 0 "s_register_operand" "=r")
1348 (plus:SI (plus:SI (match_operand:SI 1 "s_register_operand" "%r")
1349 (match_operand:SI 2 "arm_rhs_operand" "rI"))
1350 (match_operand:SI 3 "arm_carry_operation" "")))
1351 (clobber (reg:CC CC_REGNUM))]
1353 "adcs%?\\t%0, %1, %2"
1354 [(set_attr "conds" "set")
1355 (set_attr "type" "adcs_reg")]
;; Expand SImode subtract with signed-overflow check; operand 3 is the
;; label branched to on overflow.  Constant/constant operands are folded
;; at expand time; a constant subtrahend is negated and handled as an
;; overflow-checking add, with INT_MIN needing its own pattern since it
;; cannot be negated.
1358 (define_expand "subvsi4"
1359 [(match_operand:SI 0 "s_register_operand")
1360 (match_operand:SI 1 "arm_rhs_operand")
1361 (match_operand:SI 2 "arm_add_operand")
1362 (match_operand 3 "")]
1365 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1367 /* If both operands are constants we can decide the result statically. */
1368 wi::overflow_type overflow;
1369 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1370 rtx_mode_t (operands[2], SImode),
1372 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1373 if (overflow != wi::OVF_NONE)
1374 emit_jump_insn (gen_jump (operands[3]));
1377 else if (CONST_INT_P (operands[2]))
1379 operands[2] = GEN_INT (-INTVAL (operands[2]));
1380 /* Special case for INT_MIN. */
1381 if (INTVAL (operands[2]) == 0x80000000)
1382 emit_insn (gen_subvsi3_intmin (operands[0], operands[1]));
1384 emit_insn (gen_addsi3_compareV_imm (operands[0], operands[1],
1387 else if (CONST_INT_P (operands[1]))
1388 emit_insn (gen_subvsi3_imm1 (operands[0], operands[1], operands[2]));
1390 emit_insn (gen_subvsi3 (operands[0], operands[1], operands[2]));
1392 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
;; Expand DImode subtract with signed-overflow check, decomposed into a
;; low-part subtract that produces the borrow and a high-part
;; subtract-with-borrow that produces CC_V; operand 3 is the label taken
;; on overflow.
1396 (define_expand "subvdi4"
1397 [(match_operand:DI 0 "s_register_operand")
1398 (match_operand:DI 1 "reg_or_int_operand")
1399 (match_operand:DI 2 "reg_or_int_operand")
1400 (match_operand 3 "")]
1403 rtx lo_result, hi_result;
1404 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1405 lo_result = gen_lowpart (SImode, operands[0]);
1406 hi_result = gen_highpart (SImode, operands[0]);
1407 machine_mode mode = CCmode;
1409 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1411 /* If both operands are constants we can decide the result statically. */
1412 wi::overflow_type overflow;
1413 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1414 rtx_mode_t (operands[2], DImode),
1416 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1417 if (overflow != wi::OVF_NONE)
1418 emit_jump_insn (gen_jump (operands[3]));
1421 else if (CONST_INT_P (operands[1]))
1423 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1425 if (const_ok_for_arm (INTVAL (lo_op1)))
1427 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1428 GEN_INT (~UINTVAL (lo_op1))));
1429 /* We could potentially use RSC here in Arm state, but not
1430 in Thumb, so it's probably not worth the effort of handling
1432 hi_op1 = force_reg (SImode, hi_op1);
1436 operands[1] = force_reg (DImode, operands[1]);
1439 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1441 if (lo_op2 == const0_rtx)
1443 emit_move_insn (lo_result, lo_op1);
1444 if (!arm_add_operand (hi_op2, SImode))
1445 hi_op2 = force_reg (SImode, hi_op2);
1446 emit_insn (gen_subvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1450 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1451 lo_op2 = force_reg (SImode, lo_op2);
1452 if (CONST_INT_P (lo_op2))
1453 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1454 GEN_INT (-INTVAL (lo_op2))));
1456 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1459 if (!arm_not_operand (hi_op2, SImode))
1460 hi_op2 = force_reg (SImode, hi_op2);
1461 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1462 if (CONST_INT_P (hi_op2))
1463 emit_insn (gen_subvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1464 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1465 gen_rtx_LTU (DImode, ccreg,
1468 emit_insn (gen_subvsi3_borrow (hi_result, hi_op1, hi_op2,
1469 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1470 gen_rtx_LTU (DImode, ccreg, const0_rtx)))
;
1471 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[3]);
;; Expand SImode subtract with unsigned-overflow (borrow) check; the final
;; branch to operand 3 tests LTU on the flags from the subtraction.
1476 (define_expand "usubvsi4"
1477 [(match_operand:SI 0 "s_register_operand")
1478 (match_operand:SI 1 "arm_rhs_operand")
1479 (match_operand:SI 2 "arm_add_operand")
1480 (match_operand 3 "")]
1483 machine_mode mode = CCmode;
1484 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1486 /* If both operands are constants we can decide the result statically. */
1487 wi::overflow_type overflow;
1488 wide_int val = wi::sub (rtx_mode_t (operands[1], SImode),
1489 rtx_mode_t (operands[2], SImode),
1490 UNSIGNED, &overflow);
1491 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1492 if (overflow != wi::OVF_NONE)
1493 emit_jump_insn (gen_jump (operands[3]));
1496 else if (CONST_INT_P (operands[2]))
1497 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
1498 GEN_INT (-INTVAL (operands[2]))));
1499 else if (CONST_INT_P (operands[1]))
1502 emit_insn (gen_rsb_imm_compare (operands[0], operands[1], operands[2],
1503 GEN_INT (~UINTVAL (operands[1]))));
1506 emit_insn (gen_subsi3_compare1 (operands[0], operands[1], operands[2]));
1507 arm_gen_unlikely_cbranch (LTU, mode, operands[3]);
;; Expand DImode subtract with unsigned-overflow check: low-part subtract
;; producing the borrow, then a high-part subtract-with-borrow setting
;; CC_B; branches to operand 3 on LTU.
1512 (define_expand "usubvdi4"
1513 [(match_operand:DI 0 "s_register_operand")
1514 (match_operand:DI 1 "reg_or_int_operand")
1515 (match_operand:DI 2 "reg_or_int_operand")
1516 (match_operand 3 "")]
1519 rtx lo_result, hi_result;
1520 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1521 lo_result = gen_lowpart (SImode, operands[0]);
1522 hi_result = gen_highpart (SImode, operands[0]);
1523 machine_mode mode = CCmode;
1525 if (CONST_INT_P (operands[1]) && CONST_INT_P (operands[2]))
1527 /* If both operands are constants we can decide the result statically. */
1528 wi::overflow_type overflow;
1529 wide_int val = wi::sub (rtx_mode_t (operands[1], DImode),
1530 rtx_mode_t (operands[2], DImode),
1531 UNSIGNED, &overflow);
1532 emit_move_insn (operands[0], GEN_INT (val.to_shwi ()));
1533 if (overflow != wi::OVF_NONE)
1534 emit_jump_insn (gen_jump (operands[3]));
1537 else if (CONST_INT_P (operands[1]))
1539 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1541 if (const_ok_for_arm (INTVAL (lo_op1)))
1543 emit_insn (gen_rsb_imm_compare (lo_result, lo_op1, lo_op2,
1544 GEN_INT (~UINTVAL (lo_op1))));
1545 /* We could potentially use RSC here in Arm state, but not
1546 in Thumb, so it's probably not worth the effort of handling
1548 hi_op1 = force_reg (SImode, hi_op1);
1552 operands[1] = force_reg (DImode, operands[1]);
1555 arm_decompose_di_binop (operands[1], operands[2], &lo_op1, &hi_op1,
1557 if (lo_op2 == const0_rtx)
1559 emit_move_insn (lo_result, lo_op1);
1560 if (!arm_add_operand (hi_op2, SImode))
1561 hi_op2 = force_reg (SImode, hi_op2);
1562 emit_insn (gen_usubvsi4 (hi_result, hi_op1, hi_op2, operands[3]));
1566 if (CONST_INT_P (lo_op2) && !arm_addimm_operand (lo_op2, SImode))
1567 lo_op2 = force_reg (SImode, lo_op2);
1568 if (CONST_INT_P (lo_op2))
1569 emit_insn (gen_cmpsi2_addneg (lo_result, lo_op1, lo_op2,
1570 GEN_INT (-INTVAL (lo_op2))));
1572 emit_insn (gen_subsi3_compare1 (lo_result, lo_op1, lo_op2));
1575 if (!arm_not_operand (hi_op2, SImode))
1576 hi_op2 = force_reg (SImode, hi_op2);
1577 rtx ccreg = gen_rtx_REG (mode, CC_REGNUM);
1578 if (CONST_INT_P (hi_op2))
1579 emit_insn (gen_usubvsi3_borrow_imm (hi_result, hi_op1, hi_op2,
1580 GEN_INT (UINTVAL (hi_op2) & 0xffffffff),
1581 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1582 gen_rtx_LTU (DImode, ccreg,
1585 emit_insn (gen_usubvsi3_borrow (hi_result, hi_op1, hi_op2,
1586 gen_rtx_LTU (SImode, ccreg, const0_rtx),
1587 gen_rtx_LTU (DImode, ccreg, const0_rtx)));
1588 arm_gen_unlikely_cbranch (LTU, CC_Bmode, operands[3]);
;; Register-register subtract that also sets the full condition codes
;; (SUBS), keeping the difference.
1593 (define_insn "subsi3_compare1"
1594 [(set (reg:CC CC_REGNUM)
1596 (match_operand:SI 1 "register_operand" "r")
1597 (match_operand:SI 2 "register_operand" "r")))
1598 (set (match_operand:SI 0 "register_operand" "=r")
1599 (minus:SI (match_dup 1) (match_dup 2)))]
1601 "subs%?\\t%0, %1, %2"
1602 [(set_attr "conds" "set")
1603 (set_attr "type" "alus_sreg")]
;; Subtract, setting CC_V to the signed-overflow condition: the DImode
;; sign-extended difference is compared against the sign-extension of the
;; SImode difference.
1606 (define_insn "subvsi3"
1607 [(set (reg:CC_V CC_REGNUM)
1610 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "l,r"))
1611 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
1612 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1613 (set (match_operand:SI 0 "s_register_operand" "=l,r")
1614 (minus:SI (match_dup 1) (match_dup 2)))]
1616 "subs%?\\t%0, %1, %2"
1617 [(set_attr "conds" "set")
1618 (set_attr "arch" "t2,*")
1619 (set_attr "length" "2,4")
1620 (set_attr "type" "alus_sreg")]
;; As subvsi3 but with an immediate minuend; emitted as a reverse
;; subtract (RSBS).
1623 (define_insn "subvsi3_imm1"
1624 [(set (reg:CC_V CC_REGNUM)
1627 (match_operand 1 "arm_immediate_operand" "I")
1628 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "r")))
1629 (sign_extend:DI (minus:SI (match_dup 1) (match_dup 2)))))
1630 (set (match_operand:SI 0 "s_register_operand" "=r")
1631 (minus:SI (match_dup 1) (match_dup 2)))]
1633 "rsbs%?\\t%0, %2, %1"
1634 [(set_attr "conds" "set")
1635 (set_attr "type" "alus_imm")]
;; Subtract with borrow-in (operand 3 is an arm_borrow_operation).  The
;; third alternative synthesizes the operation with a shifted SBC
;; (sbc %0, %2, %2, lsl #1).
1638 (define_insn "subsi3_carryin"
1639 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1640 (minus:SI (minus:SI (match_operand:SI 1 "reg_or_int_operand" "r,I,Pz")
1641 (match_operand:SI 2 "s_register_operand" "r,r,r"))
1642 (match_operand:SI 3 "arm_borrow_operation" "")))]
1647 sbc%?\\t%0, %2, %2, lsl #1"
1648 [(set_attr "conds" "use")
1649 (set_attr "arch" "*,a,t2")
1650 (set_attr "predicable" "yes")
1651 (set_attr "type" "adc_reg,adc_imm,alu_shift_imm")]
;; Compare operand 1 with operand 2 plus an incoming borrow, widening both
;; sides to DImode (the SE iterator selects signed or unsigned extension,
;; matched by <CC_EXTEND>); the arithmetic result is discarded into a
;; scratch register.
1654 (define_insn "cmpsi3_carryin_<CC_EXTEND>out"
1655 [(set (reg:<CC_EXTEND> CC_REGNUM)
1656 (compare:<CC_EXTEND>
1657 (SE:DI (match_operand:SI 1 "s_register_operand" "0,r"))
1658 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1659 (SE:DI (match_operand:SI 2 "s_register_operand" "l,r")))))
1660 (clobber (match_scratch:SI 0 "=l,r"))]
1663 [(set_attr "conds" "set")
1664 (set_attr "arch" "t2,*")
1665 (set_attr "length" "2,4")
1666 (set_attr "type" "adc_reg")]
1669 ;; Similar to the above, but handling a constant which has a different
1670 ;; canonicalization.
;; The K alternative emits ADCS with the bitwise complement of the
;; immediate (#%B2).
1671 (define_insn "cmpsi3_imm_carryin_<CC_EXTEND>out"
1672 [(set (reg:<CC_EXTEND> CC_REGNUM)
1673 (compare:<CC_EXTEND>
1674 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1675 (plus:DI (match_operand:DI 3 "arm_borrow_operation" "")
1676 (match_operand:DI 2 "arm_adcimm_operand" "I,K"))))
1677 (clobber (match_scratch:SI 0 "=l,r"))]
1681 adcs\\t%0, %1, #%B2"
1682 [(set_attr "conds" "set")
1683 (set_attr "type" "adc_imm")]
1686 ;; Further canonicalization when the constant is zero.
;; Compares operand 1 against just the incoming borrow; result discarded
;; into a scratch.
1687 (define_insn "cmpsi3_0_carryin_<CC_EXTEND>out"
1688 [(set (reg:<CC_EXTEND> CC_REGNUM)
1689 (compare:<CC_EXTEND>
1690 (SE:DI (match_operand:SI 1 "s_register_operand" "r,r"))
1691 (match_operand:DI 2 "arm_borrow_operation" "")))
1692 (clobber (match_scratch:SI 0 "=l,r"))]
1695 [(set_attr "conds" "set")
1696 (set_attr "type" "adc_imm")]
;; Subtract-with-borrow of a negatable immediate, emitted as SBC with the
;; negated constant (#%n2).
1699 (define_insn "*subsi3_carryin_const"
1700 [(set (match_operand:SI 0 "s_register_operand" "=r")
1702 (match_operand:SI 1 "s_register_operand" "r")
1703 (match_operand:SI 2 "arm_neg_immediate_operand" "L"))
1704 (match_operand:SI 3 "arm_borrow_operation" "")))]
1706 "sbc\\t%0, %1, #%n2"
1707 [(set_attr "conds" "use")
1708 (set_attr "type" "adc_imm")]
;; Degenerate form: subtract only the incoming borrow from a register.
1711 (define_insn "*subsi3_carryin_const0"
1712 [(set (match_operand:SI 0 "s_register_operand" "=r")
1713 (minus:SI (match_operand:SI 1 "s_register_operand" "r")
1714 (match_operand:SI 2 "arm_borrow_operation" "")))]
1717 [(set_attr "conds" "use")
1718 (set_attr "type" "adc_imm")]
;; Subtract-with-borrow where the subtrahend is a shifted register
;; (SBC with shift, %S2 printing the shift operator).
1721 (define_insn "*subsi3_carryin_shift"
1722 [(set (match_operand:SI 0 "s_register_operand" "=r")
1724 (match_operand:SI 1 "s_register_operand" "r")
1725 (match_operator:SI 2 "shift_operator"
1726 [(match_operand:SI 3 "s_register_operand" "r")
1727 (match_operand:SI 4 "reg_or_int_operand" "rM")]))
1728 (match_operand:SI 5 "arm_borrow_operation" "")))]
1730 "sbc%?\\t%0, %1, %3%S2"
1731 [(set_attr "conds" "use")
1732 (set_attr "predicable" "yes")
1733 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1734 (const_string "alu_shift_imm")
1735 (const_string "alu_shift_reg")))]
;; As *subsi3_carryin_shift, with the borrow associated with the minuend
;; instead of the shifted subtrahend (alternative RTL canonicalization).
1738 (define_insn "*subsi3_carryin_shift_alt"
1739 [(set (match_operand:SI 0 "s_register_operand" "=r")
1741 (match_operand:SI 1 "s_register_operand" "r")
1742 (match_operand:SI 5 "arm_borrow_operation" ""))
1743 (match_operator:SI 2 "shift_operator"
1744 [(match_operand:SI 3 "s_register_operand" "r")
1745 (match_operand:SI 4 "reg_or_int_operand" "rM")])))]
1747 "sbc%?\\t%0, %1, %3%S2"
1748 [(set_attr "conds" "use")
1749 (set_attr "predicable" "yes")
1750 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1751 (const_string "alu_shift_imm")
1752 (const_string "alu_shift_reg")))]
;; Reverse subtract with borrow (RSC): shifted register minus register,
;; minus the incoming borrow.
1755 (define_insn "*rsbsi3_carryin_shift"
1756 [(set (match_operand:SI 0 "s_register_operand" "=r")
1758 (match_operator:SI 2 "shift_operator"
1759 [(match_operand:SI 3 "s_register_operand" "r")
1760 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1761 (match_operand:SI 1 "s_register_operand" "r"))
1762 (match_operand:SI 5 "arm_borrow_operation" "")))]
1764 "rsc%?\\t%0, %1, %3%S2"
1765 [(set_attr "conds" "use")
1766 (set_attr "predicable" "yes")
1767 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1768 (const_string "alu_shift_imm")
1769 (const_string "alu_shift_reg")))]
;; As *rsbsi3_carryin_shift, with the borrow associated with the shifted
;; operand (alternative RTL canonicalization of the same RSC).
1772 (define_insn "*rsbsi3_carryin_shift_alt"
1773 [(set (match_operand:SI 0 "s_register_operand" "=r")
1775 (match_operator:SI 2 "shift_operator"
1776 [(match_operand:SI 3 "s_register_operand" "r")
1777 (match_operand:SI 4 "reg_or_int_operand" "rM")])
1778 (match_operand:SI 5 "arm_borrow_operation" ""))
1779 (match_operand:SI 1 "s_register_operand" "r")))]
1781 "rsc%?\\t%0, %1, %3%S2"
1782 [(set_attr "conds" "use")
1783 (set_attr "predicable" "yes")
1784 (set (attr "type") (if_then_else (match_operand 4 "const_int_operand" "")
1785 (const_string "alu_shift_imm")
1786 (const_string "alu_shift_reg")))]
1789 ; transform ((x << y) - 1) to ~(~(x-1) << y) Where X is a constant.
; The scratch (operand 3) is loaded with the transformed constant
; ~(x - 1) before the shift-and-invert.
1791 [(set (match_operand:SI 0 "s_register_operand" "")
1792 (plus:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
1793 (match_operand:SI 2 "s_register_operand" ""))
1795 (clobber (match_operand:SI 3 "s_register_operand" ""))]
1797 [(set (match_dup 3) (match_dup 1))
1798 (set (match_dup 0) (not:SI (ashift:SI (match_dup 3) (match_dup 2))))]
1800 operands[1] = GEN_INT (~(INTVAL (operands[1]) - 1));
;; Single-precision floating-point add; requires hard-float.
1803 (define_expand "addsf3"
1804 [(set (match_operand:SF 0 "s_register_operand")
1805 (plus:SF (match_operand:SF 1 "s_register_operand")
1806 (match_operand:SF 2 "s_register_operand")))]
1807 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; Double-precision floating-point add; unavailable when the VFP only
;; supports single precision.
1811 (define_expand "adddf3"
1812 [(set (match_operand:DF 0 "s_register_operand")
1813 (plus:DF (match_operand:DF 1 "s_register_operand")
1814 (match_operand:DF 2 "s_register_operand")))]
1815 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
;; DImode subtract expander: decomposes into a low-part subtract that
;; sets the borrow (via CC or CC_RSB depending on the form chosen) and a
;; high-part subtract-with-borrow.
1819 (define_expand "subdi3"
1821 [(set (match_operand:DI 0 "s_register_operand")
1822 (minus:DI (match_operand:DI 1 "reg_or_int_operand")
1823 (match_operand:DI 2 "s_register_operand")))
1824 (clobber (reg:CC CC_REGNUM))])]
1829 if (!REG_P (operands[1]))
1830 operands[1] = force_reg (DImode, operands[1]);
1834 rtx lo_result, hi_result, lo_dest, hi_dest;
1835 rtx lo_op1, hi_op1, lo_op2, hi_op2;
1838 /* Since operands[1] may be an integer, pass it second, so that
1839 any necessary simplifications will be done on the decomposed
1841 arm_decompose_di_binop (operands[2], operands[1], &lo_op2, &hi_op2,
1843 lo_result = lo_dest = gen_lowpart (SImode, operands[0]);
1844 hi_result = hi_dest = gen_highpart (SImode, operands[0]);
1846 if (!arm_rhs_operand (lo_op1, SImode))
1847 lo_op1 = force_reg (SImode, lo_op1);
1849 if ((TARGET_THUMB2 && ! s_register_operand (hi_op1, SImode))
1850 || !arm_rhs_operand (hi_op1, SImode))
1851 hi_op1 = force_reg (SImode, hi_op1);
1854 if (lo_op1 == const0_rtx)
1856 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1857 emit_insn (gen_negsi2_0compare (lo_dest, lo_op2));
1859 else if (CONST_INT_P (lo_op1))
1861 cc_reg = gen_rtx_REG (CC_RSBmode, CC_REGNUM);
1862 emit_insn (gen_rsb_imm_compare (lo_dest, lo_op1, lo_op2,
1863 GEN_INT (~UINTVAL (lo_op1))));
1867 cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
1868 emit_insn (gen_subsi3_compare (lo_dest, lo_op1, lo_op2));
1871 condition = gen_rtx_LTU (SImode, cc_reg, const0_rtx);
1873 if (hi_op1 == const0_rtx)
1874 emit_insn (gen_negsi2_carryin (hi_dest, hi_op2, condition));
1876 emit_insn (gen_subsi3_carryin (hi_dest, hi_op1, hi_op2, condition));
1878 if (lo_result != lo_dest)
1879 emit_move_insn (lo_result, lo_dest);
1881 if (hi_result != hi_dest)
1882 emit_move_insn (hi_result, hi_dest);
;; SImode subtract expander.  A constant minuend is either forced into a
;; register (when early splitting is not wanted, or on Thumb-1) or split
;; immediately via arm_split_constant.
1889 (define_expand "subsi3"
1890 [(set (match_operand:SI 0 "s_register_operand")
1891 (minus:SI (match_operand:SI 1 "reg_or_int_operand")
1892 (match_operand:SI 2 "s_register_operand")))]
1895 if (CONST_INT_P (operands[1]))
1899 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), MINUS))
1900 operands[1] = force_reg (SImode, operands[1]);
1903 arm_split_constant (MINUS, SImode, NULL_RTX,
1904 INTVAL (operands[1]), operands[0],
1906 optimize && can_create_pseudo_p ());
1910 else /* TARGET_THUMB1 */
1911 operands[1] = force_reg (SImode, operands[1]);
1916 ; ??? Check Thumb-2 split length
; The final (?n) alternative accepts an arbitrary constant minuend and is
; split after reload via arm_split_constant (hence its length of 16).
1917 (define_insn_and_split "*arm_subsi3_insn"
1918 [(set (match_operand:SI 0 "s_register_operand" "=l,l ,l ,l ,r,r,r,rk,r")
1919 (minus:SI (match_operand:SI 1 "reg_or_int_operand" "l ,0 ,l ,Pz,I,r,r,k ,?n")
1920 (match_operand:SI 2 "reg_or_int_operand" "l ,Py,Pd,l ,r,I,r,r ,r")))]
1932 "&& (CONST_INT_P (operands[1])
1933 && !const_ok_for_arm (INTVAL (operands[1])))"
1934 [(clobber (const_int 0))]
1936 arm_split_constant (MINUS, SImode, curr_insn,
1937 INTVAL (operands[1]), operands[0], operands[2], 0);
1940 [(set_attr "length" "4,4,4,4,4,4,4,4,16")
1941 (set_attr "arch" "t2,t2,t2,t2,*,*,*,*,*")
1942 (set_attr "predicable" "yes")
1943 (set_attr "predicable_short_it" "yes,yes,yes,yes,no,no,no,no,no")
1944 (set_attr "type" "alu_sreg,alu_sreg,alu_sreg,alu_sreg,alu_imm,alu_imm,alu_sreg,alu_sreg,multiple")]
;; Peephole body: when a constant minuend is not a valid ARM immediate but
;; its bitwise complement is, materialize the constant into a scratch
;; register first and subtract from that.
1948 [(match_scratch:SI 3 "r")
1949 (set (match_operand:SI 0 "arm_general_register_operand" "")
1950 (minus:SI (match_operand:SI 1 "const_int_operand" "")
1951 (match_operand:SI 2 "arm_general_register_operand" "")))]
1953 && !const_ok_for_arm (INTVAL (operands[1]))
1954 && const_ok_for_arm (~INTVAL (operands[1]))"
1955 [(set (match_dup 3) (match_dup 1))
1956 (set (match_dup 0) (minus:SI (match_dup 3) (match_dup 2)))]
;; Subtract and set the condition codes from the result compared with
;; zero (CC_NOOV); the immediate-minuend alternative uses RSBS.
1960 (define_insn "subsi3_compare0"
1961 [(set (reg:CC_NOOV CC_REGNUM)
1963 (minus:SI (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1964 (match_operand:SI 2 "arm_rhs_operand" "I,r,r"))
1966 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1967 (minus:SI (match_dup 1) (match_dup 2)))]
1972 rsbs%?\\t%0, %2, %1"
1973 [(set_attr "conds" "set")
1974 (set_attr "type" "alus_imm,alus_sreg,alus_sreg")]
;; As subsi3_compare0 but producing a full compare of the two inputs
;; (CC mode) rather than a compare of the result with zero.
1977 (define_insn "subsi3_compare"
1978 [(set (reg:CC CC_REGNUM)
1979 (compare:CC (match_operand:SI 1 "arm_rhs_operand" "r,r,I")
1980 (match_operand:SI 2 "arm_rhs_operand" "I,r,r")))
1981 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
1982 (minus:SI (match_dup 1) (match_dup 2)))]
1987 rsbs%?\\t%0, %2, %1"
1988 [(set_attr "conds" "set")
1989 (set_attr "type" "alus_imm,alus_sreg,alus_imm")]
1992 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
1993 ;; rather than (0 cmp reg). This gives the same results for unsigned
1994 ;; and equality compares which is what we mostly need here.
;; The insn condition requires operand 3 to be the bitwise complement of
;; operand 1.
1995 (define_insn "rsb_imm_compare"
1996 [(set (reg:CC_RSB CC_REGNUM)
1997 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
1998 (match_operand 3 "const_int_operand" "")))
1999 (set (match_operand:SI 0 "s_register_operand" "=r")
2000 (minus:SI (match_operand 1 "arm_immediate_operand" "I")
2002 "TARGET_32BIT && ~UINTVAL (operands[1]) == UINTVAL (operands[3])"
2004 [(set_attr "conds" "set")
2005 (set_attr "type" "alus_imm")]
2008 ;; Similarly, but the result is unused.
;; #%B1 prints the bitwise complement of the K-constrained immediate.
2009 (define_insn "rsb_imm_compare_scratch"
2010 [(set (reg:CC_RSB CC_REGNUM)
2011 (compare:CC_RSB (not:SI (match_operand:SI 2 "s_register_operand" "r"))
2012 (match_operand 1 "arm_not_immediate_operand" "K")))
2013 (clobber (match_scratch:SI 0 "=r"))]
2015 "rsbs\\t%0, %2, #%B1"
2016 [(set_attr "conds" "set")
2017 (set_attr "type" "alus_imm")]
2020 ;; Compare the sum of a value plus a carry against a constant. Uses
2021 ;; RSC, so the result is swapped. Only available on Arm
;; The arithmetic result goes only into the scratch; only the flags
;; (CC_SWP) are meaningful.
2022 (define_insn "rscsi3_<CC_EXTEND>out_scratch"
2023 [(set (reg:CC_SWP CC_REGNUM)
2025 (plus:DI (SE:DI (match_operand:SI 2 "s_register_operand" "r"))
2026 (match_operand:DI 3 "arm_borrow_operation" ""))
2027 (match_operand 1 "arm_immediate_operand" "I")))
2028 (clobber (match_scratch:SI 0 "=r"))]
2031 [(set_attr "conds" "set")
2032 (set_attr "type" "alus_imm")]
;; Subtract with borrow-in, setting CC_B (unsigned borrow-out); emitted
;; as SBCS.
2035 (define_insn "usubvsi3_borrow"
2036 [(set (reg:CC_B CC_REGNUM)
2038 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2039 (plus:DI (match_operand:DI 4 "arm_borrow_operation" "")
2041 (match_operand:SI 2 "s_register_operand" "l,r")))))
2042 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2043 (minus:SI (match_dup 1)
2044 (plus:SI (match_operand:SI 3 "arm_borrow_operation" "")
2047 "sbcs%?\\t%0, %1, %2"
2048 [(set_attr "conds" "set")
2049 (set_attr "arch" "t2,*")
2050 (set_attr "length" "2,4")]
;; As usubvsi3_borrow with an immediate subtrahend; the insn condition
;; requires operands 2 and 3 to be the same constant viewed as 32 bits.
;; The K alternative emits ADCS with the complemented immediate (#%B2).
2053 (define_insn "usubvsi3_borrow_imm"
2054 [(set (reg:CC_B CC_REGNUM)
2056 (zero_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2057 (plus:DI (match_operand:DI 5 "arm_borrow_operation" "")
2058 (match_operand:DI 3 "const_int_operand" "n,n"))))
2059 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2060 (minus:SI (match_dup 1)
2061 (plus:SI (match_operand:SI 4 "arm_borrow_operation" "")
2062 (match_operand:SI 2 "arm_adcimm_operand" "I,K"))))]
2064 && (UINTVAL (operands[2]) & 0xffffffff) == UINTVAL (operands[3])"
2067 adcs%?\\t%0, %1, #%B2"
2068 [(set_attr "conds" "set")
2069 (set_attr "type" "alus_imm")]
;; Subtract with borrow-in, setting CC_V for signed overflow (DImode
;; sign-extended difference compared against the SImode one); SBCS.
2072 (define_insn "subvsi3_borrow"
2073 [(set (reg:CC_V CC_REGNUM)
2077 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "0,r"))
2078 (sign_extend:DI (match_operand:SI 2 "s_register_operand" "l,r")))
2079 (match_operand:DI 4 "arm_borrow_operation" ""))
2081 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2082 (match_operand:SI 3 "arm_borrow_operation" "")))))
2083 (set (match_operand:SI 0 "s_register_operand" "=l,r")
2084 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2087 "sbcs%?\\t%0, %1, %2"
2088 [(set_attr "conds" "set")
2089 (set_attr "arch" "t2,*")
2090 (set_attr "length" "2,4")]
;; As subvsi3_borrow with an immediate subtrahend; the insn condition
;; requires the immediate to survive sign-extension from SImode unchanged.
;; The K alternative emits ADCS with the complemented immediate (#%B2).
2093 (define_insn "subvsi3_borrow_imm"
2094 [(set (reg:CC_V CC_REGNUM)
2098 (sign_extend:DI (match_operand:SI 1 "s_register_operand" "r,r"))
2099 (match_operand 2 "arm_adcimm_operand" "I,K"))
2100 (match_operand:DI 4 "arm_borrow_operation" ""))
2102 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2103 (match_operand:SI 3 "arm_borrow_operation" "")))))
2104 (set (match_operand:SI 0 "s_register_operand" "=r,r")
2105 (minus:SI (minus:SI (match_dup 1) (match_dup 2))
2108 && INTVAL (operands[2]) == ARM_SIGN_EXTEND (INTVAL (operands[2]))"
2111 adcs%?\\t%0, %1, #%B2"
2112 [(set_attr "conds" "set")
2113 (set_attr "type" "alus_imm")]
;; Single-precision floating-point subtract; requires hard-float.
2116 (define_expand "subsf3"
2117 [(set (match_operand:SF 0 "s_register_operand")
2118 (minus:SF (match_operand:SF 1 "s_register_operand")
2119 (match_operand:SF 2 "s_register_operand")))]
2120 "TARGET_32BIT && TARGET_HARD_FLOAT"
;; Double-precision floating-point subtract; unavailable when the VFP
;; only supports single precision.
2124 (define_expand "subdf3"
2125 [(set (match_operand:DF 0 "s_register_operand")
2126 (minus:DF (match_operand:DF 1 "s_register_operand")
2127 (match_operand:DF 2 "s_register_operand")))]
2128 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2133 ;; Multiplication insns
;; HImode multiply implemented with mulhisi3 into an SImode temporary and
;; taking the low half; requires the DSP multiply instructions.
2135 (define_expand "mulhi3"
2136 [(set (match_operand:HI 0 "s_register_operand")
2137 (mult:HI (match_operand:HI 1 "s_register_operand")
2138 (match_operand:HI 2 "s_register_operand")))]
2139 "TARGET_DSP_MULTIPLY"
2142 rtx result = gen_reg_rtx (SImode);
2143 emit_insn (gen_mulhisi3 (result, operands[1], operands[2]));
2144 emit_move_insn (operands[0], gen_lowpart (HImode, result));
2149 (define_expand "mulsi3"
2150 [(set (match_operand:SI 0 "s_register_operand")
2151 (mult:SI (match_operand:SI 2 "s_register_operand")
2152 (match_operand:SI 1 "s_register_operand")))]
2157 ;; Use `&' and then `0' to prevent operands 0 and 2 being the same
;; MUL instruction (32x32->32).  Constraint alternatives cover Thumb-2
;; short form (t2), v6, and pre-v6; the pre-v6 alternatives use "&r"/"0"
;; so operand 0 and operand 2 cannot share a register (a pre-v6 MUL
;; restriction).
;; NOTE(review): the (define_insn ...) header line is missing from this
;; copy — verify against upstream arm.md.
2159 [(set (match_operand:SI 0 "s_register_operand" "=l,r,&r,&r")
2160 (mult:SI (match_operand:SI 2 "s_register_operand" "l,r,r,r")
2161 (match_operand:SI 1 "s_register_operand" "%0,r,0,r")))]
2163 "mul%?\\t%0, %2, %1"
2164 [(set_attr "type" "mul")
2165 (set_attr "predicable" "yes")
2166 (set_attr "arch" "t2,v6,nov6,nov6")
2167 (set_attr "length" "4")
2168 (set_attr "predicable_short_it" "yes,no,*,*")]
2171 ;; MLA and MLS instruction. Use operand 1 for the accumulator to prefer
2172 ;; reusing the same register.
;; MLA: multiply-accumulate, %0 = %3 * %2 + %1.  Pre-v6 alternatives use
;; earlyclobber/"0" constraints to keep the destination distinct from the
;; multiplicand registers.
;; NOTE(review): the (define_insn ...) header line is missing here.
2175 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r,&r")
2177 (mult:SI (match_operand:SI 3 "s_register_operand" "r,r,r,r")
2178 (match_operand:SI 2 "s_register_operand" "%r,r,0,r"))
2179 (match_operand:SI 1 "s_register_operand" "r,0,r,r")))]
2181 "mla%?\\t%0, %3, %2, %1"
2182 [(set_attr "type" "mla")
2183 (set_attr "predicable" "yes")
2184 (set_attr "arch" "v6,nov6,nov6,nov6")]
;; MLS: multiply-subtract, %0 = %1 - %3 * %2; Thumb-2-era instruction
;; (condition requires arm_arch_thumb2).  Header line also missing here.
2188 [(set (match_operand:SI 0 "s_register_operand" "=r")
2190 (match_operand:SI 1 "s_register_operand" "r")
2191 (mult:SI (match_operand:SI 3 "s_register_operand" "r")
2192 (match_operand:SI 2 "s_register_operand" "r"))))]
2193 "TARGET_32BIT && arm_arch_thumb2"
2194 "mls%?\\t%0, %3, %2, %1"
2195 [(set_attr "type" "mla")
2196 (set_attr "predicable" "yes")]
;; Flag-setting multiplies (MULS).  Four variants: pre-v6 vs. v6 (the v6
;; forms drop the earlyclobber/"0" register restrictions but are only used
;; when optimizing for size, since MULS is not predicable/narrow there),
;; each with a result-producing form and a scratch (compare-only) form.
2199 (define_insn "*mulsi3_compare0"
2200 [(set (reg:CC_NOOV CC_REGNUM)
2201 (compare:CC_NOOV (mult:SI
2202 (match_operand:SI 2 "s_register_operand" "r,r")
2203 (match_operand:SI 1 "s_register_operand" "%0,r"))
2205 (set (match_operand:SI 0 "s_register_operand" "=&r,&r")
2206 (mult:SI (match_dup 2) (match_dup 1)))]
2207 "TARGET_ARM && !arm_arch6"
2208 "muls%?\\t%0, %2, %1"
2209 [(set_attr "conds" "set")
2210 (set_attr "type" "muls")]
;; v6 variant: no overlap restriction on the operands.
2213 (define_insn "*mulsi3_compare0_v6"
2214 [(set (reg:CC_NOOV CC_REGNUM)
2215 (compare:CC_NOOV (mult:SI
2216 (match_operand:SI 2 "s_register_operand" "r")
2217 (match_operand:SI 1 "s_register_operand" "r"))
2219 (set (match_operand:SI 0 "s_register_operand" "=r")
2220 (mult:SI (match_dup 2) (match_dup 1)))]
2221 "TARGET_ARM && arm_arch6 && optimize_size"
2222 "muls%?\\t%0, %2, %1"
2223 [(set_attr "conds" "set")
2224 (set_attr "type" "muls")]
;; Compare-only form: the product itself is dead, only the flags are used.
2227 (define_insn "*mulsi_compare0_scratch"
2228 [(set (reg:CC_NOOV CC_REGNUM)
2229 (compare:CC_NOOV (mult:SI
2230 (match_operand:SI 2 "s_register_operand" "r,r")
2231 (match_operand:SI 1 "s_register_operand" "%0,r"))
2233 (clobber (match_scratch:SI 0 "=&r,&r"))]
2234 "TARGET_ARM && !arm_arch6"
2235 "muls%?\\t%0, %2, %1"
2236 [(set_attr "conds" "set")
2237 (set_attr "type" "muls")]
;; v6 compare-only form.
2240 (define_insn "*mulsi_compare0_scratch_v6"
2241 [(set (reg:CC_NOOV CC_REGNUM)
2242 (compare:CC_NOOV (mult:SI
2243 (match_operand:SI 2 "s_register_operand" "r")
2244 (match_operand:SI 1 "s_register_operand" "r"))
2246 (clobber (match_scratch:SI 0 "=r"))]
2247 "TARGET_ARM && arm_arch6 && optimize_size"
2248 "muls%?\\t%0, %2, %1"
2249 [(set_attr "conds" "set")
2250 (set_attr "type" "muls")]
;; Flag-setting multiply-accumulate (MLAS), mirroring the MULS variants
;; above: pre-v6/v6 x result-producing/compare-only.  Note the pre-v6
;; scratch variant below carries the TARGET_ARM && !arm_arch6 condition
;; while this first pattern is guarded by arm_arch6 — the constraint sets
;; differ accordingly.
2253 (define_insn "*mulsi3addsi_compare0"
2254 [(set (reg:CC_NOOV CC_REGNUM)
2257 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2258 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2259 (match_operand:SI 3 "s_register_operand" "r,r,0,0"))
2261 (set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r,&r")
2262 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2264 "TARGET_ARM && arm_arch6"
2265 "mlas%?\\t%0, %2, %1, %3"
2266 [(set_attr "conds" "set")
2267 (set_attr "type" "mlas")]
;; v6 size-optimized variant without operand-overlap restrictions.
2270 (define_insn "*mulsi3addsi_compare0_v6"
2271 [(set (reg:CC_NOOV CC_REGNUM)
2274 (match_operand:SI 2 "s_register_operand" "r")
2275 (match_operand:SI 1 "s_register_operand" "r"))
2276 (match_operand:SI 3 "s_register_operand" "r"))
2278 (set (match_operand:SI 0 "s_register_operand" "=r")
2279 (plus:SI (mult:SI (match_dup 2) (match_dup 1))
2281 "TARGET_ARM && arm_arch6 && optimize_size"
2282 "mlas%?\\t%0, %2, %1, %3"
2283 [(set_attr "conds" "set")
2284 (set_attr "type" "mlas")]
;; Compare-only MLAS: only the flags are live; the sum goes to a scratch.
2287 (define_insn "*mulsi3addsi_compare0_scratch"
2288 [(set (reg:CC_NOOV CC_REGNUM)
2291 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
2292 (match_operand:SI 1 "s_register_operand" "%0,r,0,r"))
2293 (match_operand:SI 3 "s_register_operand" "?r,r,0,0"))
2295 (clobber (match_scratch:SI 0 "=&r,&r,&r,&r"))]
2296 "TARGET_ARM && !arm_arch6"
2297 "mlas%?\\t%0, %2, %1, %3"
2298 [(set_attr "conds" "set")
2299 (set_attr "type" "mlas")]
;; v6 compare-only MLAS.
2302 (define_insn "*mulsi3addsi_compare0_scratch_v6"
2303 [(set (reg:CC_NOOV CC_REGNUM)
2306 (match_operand:SI 2 "s_register_operand" "r")
2307 (match_operand:SI 1 "s_register_operand" "r"))
2308 (match_operand:SI 3 "s_register_operand" "r"))
2310 (clobber (match_scratch:SI 0 "=r"))]
2311 "TARGET_ARM && arm_arch6 && optimize_size"
2312 "mlas%?\\t%0, %2, %1, %3"
2313 [(set_attr "conds" "set")
2314 (set_attr "type" "mlas")]
2317 ;; 32x32->64 widening multiply.
2318 ;; The only difference between the v3-5 and v6+ versions is the requirement
2319 ;; that the output does not overlap with either input.
;; Expander for [su]mulsidi3 (SE iterates over sign/zero extension): the
;; DImode result is produced by <US>mull writing its low and high SImode
;; halves separately.
2321 (define_expand "<Us>mulsidi3"
2322 [(set (match_operand:DI 0 "s_register_operand")
2324 (SE:DI (match_operand:SI 1 "s_register_operand"))
2325 (SE:DI (match_operand:SI 2 "s_register_operand"))))]
2328 emit_insn (gen_<US>mull (gen_lowpart (SImode, operands[0]),
2329 gen_highpart (SImode, operands[0]),
2330 operands[1], operands[2]));
;; UMULL/SMULL: operand 0 receives the low word, operand 1 the high word.
;; The second (pre-v6) alternative uses "&r" to forbid output/input overlap.
2335 (define_insn "<US>mull"
2336 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2338 (match_operand:SI 2 "s_register_operand" "%r,r")
2339 (match_operand:SI 3 "s_register_operand" "r,r")))
2340 (set (match_operand:SI 1 "s_register_operand" "=r,&r")
2343 (mult:DI (SE:DI (match_dup 2)) (SE:DI (match_dup 3)))
2346 "<US>mull%?\\t%0, %1, %2, %3"
2347 [(set_attr "type" "umull")
2348 (set_attr "predicable" "yes")
2349 (set_attr "arch" "v6,nov6")]
;; 64-bit multiply-accumulate expander: DI result = SE(op1) * SE(op2) + op3,
;; lowered to <US>mlal operating on the low/high SImode halves.
2352 (define_expand "<Us>maddsidi4"
2353 [(set (match_operand:DI 0 "s_register_operand")
2356 (SE:DI (match_operand:SI 1 "s_register_operand"))
2357 (SE:DI (match_operand:SI 2 "s_register_operand")))
2358 (match_operand:DI 3 "s_register_operand")))]
2361 emit_insn (gen_<US>mlal (gen_lowpart (SImode, operands[0]),
2362 gen_lowpart (SImode, operands[3]),
2363 gen_highpart (SImode, operands[0]),
2364 gen_highpart (SImode, operands[3]),
2365 operands[1], operands[2]));
;; UMLAL/SMLAL: accumulator halves are tied to the outputs ("0"/"2"
;; constraints); the pre-v6 alternative adds earlyclobbers.
2370 (define_insn "<US>mlal"
2371 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
2374 (match_operand:SI 4 "s_register_operand" "%r,r")
2375 (match_operand:SI 5 "s_register_operand" "r,r"))
2376 (match_operand:SI 1 "s_register_operand" "0,0")))
2377 (set (match_operand:SI 2 "s_register_operand" "=r,&r")
2382 (mult:DI (SE:DI (match_dup 4)) (SE:DI (match_dup 5)))
2383 (zero_extend:DI (match_dup 1)))
2385 (match_operand:SI 3 "s_register_operand" "2,2")))]
2387 "<US>mlal%?\\t%0, %2, %4, %5"
2388 [(set_attr "type" "umlal")
2389 (set_attr "predicable" "yes")
2390 (set_attr "arch" "v6,nov6")]
;; High-part of a 32x32->64 multiply: UMULL/SMULL with the low word
;; discarded into a scratch register.
2393 (define_expand "<US>mulsi3_highpart"
2395 [(set (match_operand:SI 0 "s_register_operand")
2399 (SE:DI (match_operand:SI 1 "s_register_operand"))
2400 (SE:DI (match_operand:SI 2 "s_register_operand")))
2402 (clobber (match_scratch:SI 3 ""))])]
;; Matching insn: %3 receives the dead low word; pre-v6 alternatives use
;; earlyclobbers/"0" to avoid output/input overlap.
2407 (define_insn "*<US>mull_high"
2408 [(set (match_operand:SI 0 "s_register_operand" "=r,&r,&r")
2412 (SE:DI (match_operand:SI 1 "s_register_operand" "%r,0,r"))
2413 (SE:DI (match_operand:SI 2 "s_register_operand" "r,r,r")))
2415 (clobber (match_scratch:SI 3 "=r,&r,&r"))]
2417 "<US>mull%?\\t%3, %0, %2, %1"
2418 [(set_attr "type" "umull")
2419 (set_attr "predicable" "yes")
2420 (set_attr "arch" "v6,nov6,nov6")]
;; Signed 16x16->32 multiplies (DSP extension).  The bb/tb/bt/tt suffixes
;; select the bottom or top halfword of each source: a "top" operand is
;; matched as an SImode arithmetic shift right (presumably by 16 — the
;; shift-amount lines are missing from this copy), a "bottom" operand as a
;; sign_extend of HImode.
2423 (define_insn "mulhisi3"
2424 [(set (match_operand:SI 0 "s_register_operand" "=r")
2425 (mult:SI (sign_extend:SI
2426 (match_operand:HI 1 "s_register_operand" "%r"))
2428 (match_operand:HI 2 "s_register_operand" "r"))))]
2429 "TARGET_DSP_MULTIPLY"
2430 "smulbb%?\\t%0, %1, %2"
2431 [(set_attr "type" "smulxy")
2432 (set_attr "predicable" "yes")]
;; top x bottom
2435 (define_insn "*mulhisi3tb"
2436 [(set (match_operand:SI 0 "s_register_operand" "=r")
2437 (mult:SI (ashiftrt:SI
2438 (match_operand:SI 1 "s_register_operand" "r")
2441 (match_operand:HI 2 "s_register_operand" "r"))))]
2442 "TARGET_DSP_MULTIPLY"
2443 "smultb%?\\t%0, %1, %2"
2444 [(set_attr "type" "smulxy")
2445 (set_attr "predicable" "yes")]
;; bottom x top
2448 (define_insn "*mulhisi3bt"
2449 [(set (match_operand:SI 0 "s_register_operand" "=r")
2450 (mult:SI (sign_extend:SI
2451 (match_operand:HI 1 "s_register_operand" "r"))
2453 (match_operand:SI 2 "s_register_operand" "r")
2455 "TARGET_DSP_MULTIPLY"
2456 "smulbt%?\\t%0, %1, %2"
2457 [(set_attr "type" "smulxy")
2458 (set_attr "predicable" "yes")]
;; top x top
2461 (define_insn "*mulhisi3tt"
2462 [(set (match_operand:SI 0 "s_register_operand" "=r")
2463 (mult:SI (ashiftrt:SI
2464 (match_operand:SI 1 "s_register_operand" "r")
2467 (match_operand:SI 2 "s_register_operand" "r")
2469 "TARGET_DSP_MULTIPLY"
2470 "smultt%?\\t%0, %1, %2"
2471 [(set_attr "type" "smulxy")
2472 (set_attr "predicable" "yes")]
;; 16x16+32 multiply-accumulate (SMLAxy family): %0 = %1 * %2 + %3, with
;; halfword selection as in the mulhisi3 patterns above.
2475 (define_insn "maddhisi4"
2476 [(set (match_operand:SI 0 "s_register_operand" "=r")
2477 (plus:SI (mult:SI (sign_extend:SI
2478 (match_operand:HI 1 "s_register_operand" "r"))
2480 (match_operand:HI 2 "s_register_operand" "r")))
2481 (match_operand:SI 3 "s_register_operand" "r")))]
2482 "TARGET_DSP_MULTIPLY"
2483 "smlabb%?\\t%0, %1, %2, %3"
2484 [(set_attr "type" "smlaxy")
2485 (set_attr "predicable" "yes")]
2488 ;; Note: there is no maddhisi4ibt because this one is canonical form
2489 (define_insn "*maddhisi4tb"
2490 [(set (match_operand:SI 0 "s_register_operand" "=r")
2491 (plus:SI (mult:SI (ashiftrt:SI
2492 (match_operand:SI 1 "s_register_operand" "r")
2495 (match_operand:HI 2 "s_register_operand" "r")))
2496 (match_operand:SI 3 "s_register_operand" "r")))]
2497 "TARGET_DSP_MULTIPLY"
2498 "smlatb%?\\t%0, %1, %2, %3"
2499 [(set_attr "type" "smlaxy")
2500 (set_attr "predicable" "yes")]
;; top x top accumulate
2503 (define_insn "*maddhisi4tt"
2504 [(set (match_operand:SI 0 "s_register_operand" "=r")
2505 (plus:SI (mult:SI (ashiftrt:SI
2506 (match_operand:SI 1 "s_register_operand" "r")
2509 (match_operand:SI 2 "s_register_operand" "r")
2511 (match_operand:SI 3 "s_register_operand" "r")))]
2512 "TARGET_DSP_MULTIPLY"
2513 "smlatt%?\\t%0, %1, %2, %3"
2514 [(set_attr "type" "smlaxy")
2515 (set_attr "predicable" "yes")]
;; 16x16+64 long multiply-accumulate (SMLALxy family): the DImode
;; accumulator (operand 3) is tied to the output via the "0" constraint;
;; %Q0/%R0 print its low/high words.
2518 (define_insn "maddhidi4"
2519 [(set (match_operand:DI 0 "s_register_operand" "=r")
2521 (mult:DI (sign_extend:DI
2522 (match_operand:HI 1 "s_register_operand" "r"))
2524 (match_operand:HI 2 "s_register_operand" "r")))
2525 (match_operand:DI 3 "s_register_operand" "0")))]
2526 "TARGET_DSP_MULTIPLY"
2527 "smlalbb%?\\t%Q0, %R0, %1, %2"
2528 [(set_attr "type" "smlalxy")
2529 (set_attr "predicable" "yes")])
2531 ;; Note: there is no maddhidi4ibt because this one is canonical form
2532 (define_insn "*maddhidi4tb"
2533 [(set (match_operand:DI 0 "s_register_operand" "=r")
2535 (mult:DI (sign_extend:DI
2537 (match_operand:SI 1 "s_register_operand" "r")
2540 (match_operand:HI 2 "s_register_operand" "r")))
2541 (match_operand:DI 3 "s_register_operand" "0")))]
2542 "TARGET_DSP_MULTIPLY"
2543 "smlaltb%?\\t%Q0, %R0, %1, %2"
2544 [(set_attr "type" "smlalxy")
2545 (set_attr "predicable" "yes")])
;; top x top long accumulate
2547 (define_insn "*maddhidi4tt"
2548 [(set (match_operand:DI 0 "s_register_operand" "=r")
2550 (mult:DI (sign_extend:DI
2552 (match_operand:SI 1 "s_register_operand" "r")
2556 (match_operand:SI 2 "s_register_operand" "r")
2558 (match_operand:DI 3 "s_register_operand" "0")))]
2559 "TARGET_DSP_MULTIPLY"
2560 "smlaltt%?\\t%Q0, %R0, %1, %2"
2561 [(set_attr "type" "smlalxy")
2562 (set_attr "predicable" "yes")])
;; FP multiply and divide expanders.  SF forms need hard float; DF forms
;; additionally need double-precision support (!TARGET_VFP_SINGLE resp.
;; TARGET_VFP_DOUBLE).  The concrete insns live in vfp.md.
2564 (define_expand "mulsf3"
2565 [(set (match_operand:SF 0 "s_register_operand")
2566 (mult:SF (match_operand:SF 1 "s_register_operand")
2567 (match_operand:SF 2 "s_register_operand")))]
2568 "TARGET_32BIT && TARGET_HARD_FLOAT"
2572 (define_expand "muldf3"
2573 [(set (match_operand:DF 0 "s_register_operand")
2574 (mult:DF (match_operand:DF 1 "s_register_operand")
2575 (match_operand:DF 2 "s_register_operand")))]
2576 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
2582 (define_expand "divsf3"
2583 [(set (match_operand:SF 0 "s_register_operand")
2584 (div:SF (match_operand:SF 1 "s_register_operand")
2585 (match_operand:SF 2 "s_register_operand")))]
2586 "TARGET_32BIT && TARGET_HARD_FLOAT"
2589 (define_expand "divdf3"
2590 [(set (match_operand:DF 0 "s_register_operand")
2591 (div:DF (match_operand:DF 1 "s_register_operand")
2592 (match_operand:DF 2 "s_register_operand")))]
2593 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
2597 ; Expand logical operations. The mid-end expander does not split off memory
2598 ; operands or complex immediates, which leads to fewer LDRD/STRD instructions.
2599 ; So an explicit expander is needed to generate better code.
;; DImode and/ior/xor (LOGICAL iterator): build the SImode operation
;; separately on the low and high halves with simplify_gen_binary, then
;; emit two SImode sets into the halves of operand 0.
2601 (define_expand "<LOGICAL:optab>di3"
2602 [(set (match_operand:DI 0 "s_register_operand")
2603 (LOGICAL:DI (match_operand:DI 1 "s_register_operand")
2604 (match_operand:DI 2 "arm_<optab>di_operand")))]
2607 rtx low = simplify_gen_binary (<CODE>, SImode,
2608 gen_lowpart (SImode, operands[1]),
2609 gen_lowpart (SImode, operands[2]));
2610 rtx high = simplify_gen_binary (<CODE>, SImode,
2611 gen_highpart (SImode, operands[1]),
2612 gen_highpart_mode (SImode, DImode,
2615 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2616 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
;; DImode one's complement, lowered the same half-at-a-time way.
2621 (define_expand "one_cmpldi2"
2622 [(set (match_operand:DI 0 "s_register_operand")
2623 (not:DI (match_operand:DI 1 "s_register_operand")))]
2626 rtx low = simplify_gen_unary (NOT, SImode,
2627 gen_lowpart (SImode, operands[1]),
2629 rtx high = simplify_gen_unary (NOT, SImode,
2630 gen_highpart_mode (SImode, DImode,
2634 emit_insn (gen_rtx_SET (gen_lowpart (SImode, operands[0]), low));
2635 emit_insn (gen_rtx_SET (gen_highpart (SImode, operands[0]), high));
2640 ;; Split DImode and, ior, xor operations. Simply perform the logical
2641 ;; operation on the upper and lower halves of the registers.
2642 ;; This is needed for atomic operations in arm_split_atomic_op.
2643 ;; Avoid splitting IWMMXT instructions.
;; After reload: rewrite op0/op1/op2 to their low halves and create
;; operands 3-5 as the corresponding high halves, yielding two SImode ops.
;; NOTE(review): the (define_split) header line is missing in this copy.
2645 [(set (match_operand:DI 0 "s_register_operand" "")
2646 (match_operator:DI 6 "logical_binary_operator"
2647 [(match_operand:DI 1 "s_register_operand" "")
2648 (match_operand:DI 2 "s_register_operand" "")]))]
2649 "TARGET_32BIT && reload_completed
2650 && ! IS_IWMMXT_REGNUM (REGNO (operands[0]))"
2651 [(set (match_dup 0) (match_op_dup:SI 6 [(match_dup 1) (match_dup 2)]))
2652 (set (match_dup 3) (match_op_dup:SI 6 [(match_dup 4) (match_dup 5)]))]
2655 operands[3] = gen_highpart (SImode, operands[0]);
2656 operands[0] = gen_lowpart (SImode, operands[0]);
2657 operands[4] = gen_highpart (SImode, operands[1]);
2658 operands[1] = gen_lowpart (SImode, operands[1]);
2659 operands[5] = gen_highpart (SImode, operands[2]);
2660 operands[2] = gen_lowpart (SImode, operands[2]);
2664 ;; Split DImode not (needed for atomic operations in arm_split_atomic_op).
2665 ;; Unconditionally split since there is no SIMD DImode NOT pattern.
;; Same half-splitting scheme for NOT; header line missing here too.
2667 [(set (match_operand:DI 0 "s_register_operand")
2668 (not:DI (match_operand:DI 1 "s_register_operand")))]
2670 [(set (match_dup 0) (not:SI (match_dup 1)))
2671 (set (match_dup 2) (not:SI (match_dup 3)))]
2674 operands[2] = gen_highpart (SImode, operands[0]);
2675 operands[0] = gen_lowpart (SImode, operands[0]);
2676 operands[3] = gen_highpart (SImode, operands[1]);
2677 operands[1] = gen_lowpart (SImode, operands[1]);
;; SImode AND expander.  For 32-bit targets with a constant operand it
;; special-cases AND with 255 on v6+ (a UXTB-style zero-extend), forces
;; hard constants to a register, or calls arm_split_constant; for Thumb-1
;; it tries BIC with an inverted small constant and shift-pair tricks for
;; low-bit masks before falling back to forcing a register.
;; NOTE(review): many interior lines (braces, else arms, DONE statements)
;; are missing from this copy of the expander — do not edit the C body
;; without consulting upstream arm.md.
2681 (define_expand "andsi3"
2682 [(set (match_operand:SI 0 "s_register_operand")
2683 (and:SI (match_operand:SI 1 "s_register_operand")
2684 (match_operand:SI 2 "reg_or_int_operand")))]
2689 if (CONST_INT_P (operands[2]))
2691 if (INTVAL (operands[2]) == 255 && arm_arch6)
2693 operands[1] = convert_to_mode (QImode, operands[1], 1);
2694 emit_insn (gen_thumb2_zero_extendqisi2_v6 (operands[0],
2698 else if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), AND))
2699 operands[2] = force_reg (SImode, operands[2]);
2702 arm_split_constant (AND, SImode, NULL_RTX,
2703 INTVAL (operands[2]), operands[0],
2705 optimize && can_create_pseudo_p ());
2711 else /* TARGET_THUMB1 */
2713 if (!CONST_INT_P (operands[2]))
2715 rtx tmp = force_reg (SImode, operands[2]);
2716 if (rtx_equal_p (operands[0], operands[1]))
2720 operands[2] = operands[1];
2728 if (((unsigned HOST_WIDE_INT) ~INTVAL (operands[2])) < 256)
2730 operands[2] = force_reg (SImode,
2731 GEN_INT (~INTVAL (operands[2])));
2733 emit_insn (gen_thumb1_bicsi3 (operands[0], operands[2], operands[1]));
2738 for (i = 9; i <= 31; i++)
2740 if ((HOST_WIDE_INT_1 << i) - 1 == INTVAL (operands[2]))
2742 emit_insn (gen_extzv (operands[0], operands[1], GEN_INT (i),
2746 else if ((HOST_WIDE_INT_1 << i) - 1
2747 == ~INTVAL (operands[2]))
2749 rtx shift = GEN_INT (i);
2750 rtx reg = gen_reg_rtx (SImode);
2752 emit_insn (gen_lshrsi3 (reg, operands[1], shift));
2753 emit_insn (gen_ashlsi3 (operands[0], reg, shift));
2759 operands[2] = force_reg (SImode, operands[2]);
2765 ; ??? Check split length for Thumb-2
;; AND insn-and-split: emits AND/BIC (inverted immediate, #%B2) directly
;; where a single instruction suffices; otherwise, for a constant that is
;; valid neither as-is nor inverted, splits via arm_split_constant (hence
;; the worst-case length of 16).
2766 (define_insn_and_split "*arm_andsi3_insn"
2767 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
2768 (and:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
2769 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
2774 bic%?\\t%0, %1, #%B2
2778 && CONST_INT_P (operands[2])
2779 && !(const_ok_for_arm (INTVAL (operands[2]))
2780 || const_ok_for_arm (~INTVAL (operands[2])))"
2781 [(clobber (const_int 0))]
2783 arm_split_constant (AND, SImode, curr_insn,
2784 INTVAL (operands[2]), operands[0], operands[1], 0);
2787 [(set_attr "length" "4,4,4,4,16")
2788 (set_attr "predicable" "yes")
2789 (set_attr "predicable_short_it" "no,yes,no,no,no")
2790 (set_attr "type" "logic_imm,logic_imm,logic_reg,logic_reg,logic_imm")]
;; Flag-setting AND (ANDS/BICS) that also produces the result.
2793 (define_insn "*andsi3_compare0"
2794 [(set (reg:CC_NOOV CC_REGNUM)
2796 (and:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
2797 (match_operand:SI 2 "arm_not_operand" "I,K,r"))
2799 (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
2800 (and:SI (match_dup 1) (match_dup 2)))]
2804 bics%?\\t%0, %1, #%B2
2805 ands%?\\t%0, %1, %2"
2806 [(set_attr "conds" "set")
2807 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
;; Flag-setting AND where the result itself is dead (TST-style use).
2810 (define_insn "*andsi3_compare0_scratch"
2811 [(set (reg:CC_NOOV CC_REGNUM)
2813 (and:SI (match_operand:SI 0 "s_register_operand" "r,r,r")
2814 (match_operand:SI 1 "arm_not_operand" "I,K,r"))
2816 (clobber (match_scratch:SI 2 "=X,r,X"))]
2820 bics%?\\t%2, %0, #%B1
2822 [(set_attr "conds" "set")
2823 (set_attr "type" "logics_imm,logics_imm,logics_reg")]
;; Compare a zero_extract (bitfield) against zero by rewriting operand 1
;; into the equivalent contiguous bit mask and emitting TST.  The insn
;; condition bounds the field so the mask is a valid ARM immediate.
2826 (define_insn "*zeroextractsi_compare0_scratch"
2827 [(set (reg:CC_NOOV CC_REGNUM)
2828 (compare:CC_NOOV (zero_extract:SI
2829 (match_operand:SI 0 "s_register_operand" "r")
2830 (match_operand 1 "const_int_operand" "n")
2831 (match_operand 2 "const_int_operand" "n"))
2834 && (INTVAL (operands[2]) >= 0 && INTVAL (operands[2]) < 32
2835 && INTVAL (operands[1]) > 0
2836 && INTVAL (operands[1]) + (INTVAL (operands[2]) & 1) <= 8
2837 && INTVAL (operands[1]) + INTVAL (operands[2]) <= 32)"
2839 operands[1] = GEN_INT (((1 << INTVAL (operands[1])) - 1)
2840 << INTVAL (operands[2]));
2841 output_asm_insn (\"tst%?\\t%0, %1\", operands);
2844 [(set_attr "conds" "set")
2845 (set_attr "predicable" "yes")
2846 (set_attr "type" "logics_imm")]
;; (bitfield != 0) as an SImode value.  Splits into an ANDS against the
;; field mask followed by a conditional move of 1, clobbering CC.  Same
;; field-bound condition as the TST pattern above, duplicated for the
;; split condition.
2849 (define_insn_and_split "*ne_zeroextractsi"
2850 [(set (match_operand:SI 0 "s_register_operand" "=r")
2851 (ne:SI (zero_extract:SI
2852 (match_operand:SI 1 "s_register_operand" "r")
2853 (match_operand:SI 2 "const_int_operand" "n")
2854 (match_operand:SI 3 "const_int_operand" "n"))
2856 (clobber (reg:CC CC_REGNUM))]
2858 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2859 && INTVAL (operands[2]) > 0
2860 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2861 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2864 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2865 && INTVAL (operands[2]) > 0
2866 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2867 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)"
2868 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2869 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2871 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2873 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2874 (match_dup 0) (const_int 1)))]
2876 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2877 << INTVAL (operands[3]));
2879 [(set_attr "conds" "clob")
2880 (set (attr "length")
2881 (if_then_else (eq_attr "is_thumb" "yes")
2884 (set_attr "type" "multiple")]
;; Variant for a field that ends at bit 31: an ASHIFT (left shift by
;; 32 - width) sets the flags instead of an AND with a mask.
2887 (define_insn_and_split "*ne_zeroextractsi_shifted"
2888 [(set (match_operand:SI 0 "s_register_operand" "=r")
2889 (ne:SI (zero_extract:SI
2890 (match_operand:SI 1 "s_register_operand" "r")
2891 (match_operand:SI 2 "const_int_operand" "n")
2894 (clobber (reg:CC CC_REGNUM))]
2898 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2899 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2901 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2903 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2904 (match_dup 0) (const_int 1)))]
2906 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2908 [(set_attr "conds" "clob")
2909 (set_attr "length" "8")
2910 (set_attr "type" "multiple")]
;; if_then_else on (bitfield != 0): splits into ANDS on the field mask
;; plus a conditional move of operand 4.  Operand 0 must not overlap
;; operand 4 because the ANDS writes operand 0 before the conditional move
;; reads operand 4.
2913 (define_insn_and_split "*ite_ne_zeroextractsi"
2914 [(set (match_operand:SI 0 "s_register_operand" "=r")
2915 (if_then_else:SI (ne (zero_extract:SI
2916 (match_operand:SI 1 "s_register_operand" "r")
2917 (match_operand:SI 2 "const_int_operand" "n")
2918 (match_operand:SI 3 "const_int_operand" "n"))
2920 (match_operand:SI 4 "arm_not_operand" "rIK")
2922 (clobber (reg:CC CC_REGNUM))]
2924 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2925 && INTVAL (operands[2]) > 0
2926 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2927 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2928 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2931 && (INTVAL (operands[3]) >= 0 && INTVAL (operands[3]) < 32
2932 && INTVAL (operands[2]) > 0
2933 && INTVAL (operands[2]) + (INTVAL (operands[3]) & 1) <= 8
2934 && INTVAL (operands[2]) + INTVAL (operands[3]) <= 32)
2935 && !reg_overlap_mentioned_p (operands[0], operands[4])"
2936 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2937 (compare:CC_NOOV (and:SI (match_dup 1) (match_dup 2))
2939 (set (match_dup 0) (and:SI (match_dup 1) (match_dup 2)))])
2941 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2942 (match_dup 0) (match_dup 4)))]
2944 operands[2] = GEN_INT (((1 << INTVAL (operands[2])) - 1)
2945 << INTVAL (operands[3]));
2947 [(set_attr "conds" "clob")
2948 (set_attr "length" "8")
2949 (set_attr "type" "multiple")]
;; Shifted variant (field ends at bit 31): flags come from an ASHIFT by
;; 32 - width instead of an AND with a mask.
2952 (define_insn_and_split "*ite_ne_zeroextractsi_shifted"
2953 [(set (match_operand:SI 0 "s_register_operand" "=r")
2954 (if_then_else:SI (ne (zero_extract:SI
2955 (match_operand:SI 1 "s_register_operand" "r")
2956 (match_operand:SI 2 "const_int_operand" "n")
2959 (match_operand:SI 3 "arm_not_operand" "rIK")
2961 (clobber (reg:CC CC_REGNUM))]
2962 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2964 "TARGET_ARM && !reg_overlap_mentioned_p (operands[0], operands[3])"
2965 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
2966 (compare:CC_NOOV (ashift:SI (match_dup 1) (match_dup 2))
2968 (set (match_dup 0) (ashift:SI (match_dup 1) (match_dup 2)))])
2970 (if_then_else:SI (eq (reg:CC_NOOV CC_REGNUM) (const_int 0))
2971 (match_dup 0) (match_dup 3)))]
2973 operands[2] = GEN_INT (32 - INTVAL (operands[2]));
2975 [(set_attr "conds" "clob")
2976 (set_attr "length" "8")
2977 (set_attr "type" "multiple")]
2980 ;; ??? Use Thumb-2 has bitfield insert/extract instructions.
;; Split a shiftable operation whose first input is a zero_extract:
;; materialize the field with a left shift into the scratch (operand 6)
;; followed by a logical right shift, recomputing the shift amounts from
;; the original width/position.
;; NOTE(review): the (define_split) header lines are missing in this copy.
2982 [(set (match_operand:SI 0 "s_register_operand" "")
2983 (match_operator:SI 1 "shiftable_operator"
2984 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
2985 (match_operand:SI 3 "const_int_operand" "")
2986 (match_operand:SI 4 "const_int_operand" ""))
2987 (match_operand:SI 5 "s_register_operand" "")]))
2988 (clobber (match_operand:SI 6 "s_register_operand" ""))]
2990 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
2993 [(lshiftrt:SI (match_dup 6) (match_dup 4))
2996 HOST_WIDE_INT temp = INTVAL (operands[3]);
2998 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
2999 operands[4] = GEN_INT (32 - temp);
;; Same split for sign_extract: the second shift is arithmetic so the
;; field is sign-extended.
3004 [(set (match_operand:SI 0 "s_register_operand" "")
3005 (match_operator:SI 1 "shiftable_operator"
3006 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3007 (match_operand:SI 3 "const_int_operand" "")
3008 (match_operand:SI 4 "const_int_operand" ""))
3009 (match_operand:SI 5 "s_register_operand" "")]))
3010 (clobber (match_operand:SI 6 "s_register_operand" ""))]
3012 [(set (match_dup 6) (ashift:SI (match_dup 2) (match_dup 3)))
3015 [(ashiftrt:SI (match_dup 6) (match_dup 4))
3018 HOST_WIDE_INT temp = INTVAL (operands[3]);
3020 operands[3] = GEN_INT (32 - temp - INTVAL (operands[4]));
3021 operands[4] = GEN_INT (32 - temp);
3025 ;;; ??? This pattern is bogus. If operand3 has bits outside the range
3026 ;;; represented by the bitfield, then this will produce incorrect results.
3027 ;;; Somewhere, the value needs to be truncated. On targets like the m68k,
3028 ;;; which have a real bit-field insert instruction, the truncation happens
3029 ;;; in the bit-field insert instruction itself. Since arm does not have a
3030 ;;; bit-field insert instruction, we would have to emit code here to truncate
3031 ;;; the value before we insert. This loses some of the advantage of having
3032 ;;; this insv pattern, so this pattern needs to be reevaluated.
;; Bit-field insert expander.  Visible strategy: on Thumb-2, byte-aligned
;; 16/32-bit stores to memory use the unaligned store insns, register
;; destinations use BFC (insv_zero) for a zero value or BFI (insv_t2)
;; otherwise; the generic path builds the result with mask/shift/or
;; sequences, using the rotate trick for masks that are not valid ARM
;; immediates.  SUBREG destinations are routed through a fresh SImode
;; subtarget.
;; NOTE(review): many interior lines (braces, else arms, DONE/FAIL
;; statements) are missing from this copy — consult upstream arm.md before
;; editing the C body.
3034 (define_expand "insv"
3035 [(set (zero_extract (match_operand 0 "nonimmediate_operand")
3036 (match_operand 1 "general_operand")
3037 (match_operand 2 "general_operand"))
3038 (match_operand 3 "reg_or_int_operand"))]
3039 "TARGET_ARM || arm_arch_thumb2"
3042 int start_bit = INTVAL (operands[2]);
3043 int width = INTVAL (operands[1]);
3044 HOST_WIDE_INT mask = (HOST_WIDE_INT_1 << width) - 1;
3045 rtx target, subtarget;
3047 if (arm_arch_thumb2)
3049 if (unaligned_access && MEM_P (operands[0])
3050 && s_register_operand (operands[3], GET_MODE (operands[3]))
3051 && (width == 16 || width == 32) && (start_bit % BITS_PER_UNIT) == 0)
3055 if (BYTES_BIG_ENDIAN)
3056 start_bit = GET_MODE_BITSIZE (GET_MODE (operands[3])) - width
3061 base_addr = adjust_address (operands[0], SImode,
3062 start_bit / BITS_PER_UNIT);
3063 emit_insn (gen_unaligned_storesi (base_addr, operands[3]));
3067 rtx tmp = gen_reg_rtx (HImode);
3069 base_addr = adjust_address (operands[0], HImode,
3070 start_bit / BITS_PER_UNIT);
3071 emit_move_insn (tmp, gen_lowpart (HImode, operands[3]));
3072 emit_insn (gen_unaligned_storehi (base_addr, tmp));
3076 else if (s_register_operand (operands[0], GET_MODE (operands[0])))
3078 bool use_bfi = TRUE;
3080 if (CONST_INT_P (operands[3]))
3082 HOST_WIDE_INT val = INTVAL (operands[3]) & mask;
3086 emit_insn (gen_insv_zero (operands[0], operands[1],
3091 /* See if the set can be done with a single orr instruction.  */
3092 if (val == mask && const_ok_for_arm (val << start_bit))
3098 if (!REG_P (operands[3]))
3099 operands[3] = force_reg (SImode, operands[3]);
3101 emit_insn (gen_insv_t2 (operands[0], operands[1], operands[2],
3110 if (!s_register_operand (operands[0], GET_MODE (operands[0])))
3113 target = copy_rtx (operands[0]);
3114 /* Avoid using a subreg as a subtarget, and avoid writing a paradoxical
3115 subreg as the final target.  */
3116 if (GET_CODE (target) == SUBREG)
3118 subtarget = gen_reg_rtx (SImode);
3119 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (target)))
3120 < GET_MODE_SIZE (SImode))
3121 target = SUBREG_REG (target);
3126 if (CONST_INT_P (operands[3]))
3128 /* Since we are inserting a known constant, we may be able to
3129 reduce the number of bits that we have to clear so that
3130 the mask becomes simple.  */
3131 /* ??? This code does not check to see if the new mask is actually
3132 simpler. It may not be.  */
3133 rtx op1 = gen_reg_rtx (SImode);
3134 /* ??? Truncate operand3 to fit in the bitfield. See comment before
3135 start of this pattern.  */
3136 HOST_WIDE_INT op3_value = mask & INTVAL (operands[3]);
3137 HOST_WIDE_INT mask2 = ((mask & ~op3_value) << start_bit);
3139 emit_insn (gen_andsi3 (op1, operands[0],
3140 gen_int_mode (~mask2, SImode)));
3141 emit_insn (gen_iorsi3 (subtarget, op1,
3142 gen_int_mode (op3_value << start_bit, SImode)));
3144 else if (start_bit == 0
3145 && !(const_ok_for_arm (mask)
3146 || const_ok_for_arm (~mask)))
3148 /* A Trick, since we are setting the bottom bits in the word,
3149 we can shift operand[3] up, operand[0] down, OR them together
3150 and rotate the result back again. This takes 3 insns, and
3151 the third might be mergeable into another op.  */
3152 /* The shift up copes with the possibility that operand[3] is
3153 wider than the bitfield.  */
3154 rtx op0 = gen_reg_rtx (SImode);
3155 rtx op1 = gen_reg_rtx (SImode);
3157 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3158 emit_insn (gen_lshrsi3 (op1, operands[0], operands[1]));
3159 emit_insn (gen_iorsi3 (op1, op1, op0));
3160 emit_insn (gen_rotlsi3 (subtarget, op1, operands[1]));
3162 else if ((width + start_bit == 32)
3163 && !(const_ok_for_arm (mask)
3164 || const_ok_for_arm (~mask)))
3166 /* Similar trick, but slightly less efficient.  */
3168 rtx op0 = gen_reg_rtx (SImode);
3169 rtx op1 = gen_reg_rtx (SImode);
3171 emit_insn (gen_ashlsi3 (op0, operands[3], GEN_INT (32 - width)));
3172 emit_insn (gen_ashlsi3 (op1, operands[0], operands[1]));
3173 emit_insn (gen_lshrsi3 (op1, op1, operands[1]));
3174 emit_insn (gen_iorsi3 (subtarget, op1, op0));
3178 rtx op0 = gen_int_mode (mask, SImode);
3179 rtx op1 = gen_reg_rtx (SImode);
3180 rtx op2 = gen_reg_rtx (SImode);
3182 if (!(const_ok_for_arm (mask) || const_ok_for_arm (~mask)))
3184 rtx tmp = gen_reg_rtx (SImode);
3186 emit_insn (gen_movsi (tmp, op0));
3190 /* Mask out any bits in operand[3] that are not needed.  */
3191 emit_insn (gen_andsi3 (op1, operands[3], op0));
3193 if (CONST_INT_P (op0)
3194 && (const_ok_for_arm (mask << start_bit)
3195 || const_ok_for_arm (~(mask << start_bit))))
3197 op0 = gen_int_mode (~(mask << start_bit), SImode);
3198 emit_insn (gen_andsi3 (op2, operands[0], op0));
3202 if (CONST_INT_P (op0))
3204 rtx tmp = gen_reg_rtx (SImode);
3206 emit_insn (gen_movsi (tmp, op0));
3211 emit_insn (gen_ashlsi3 (op0, op0, operands[2]));
3213 emit_insn (gen_andsi_notsi_si (op2, operands[0], op0));
3217 emit_insn (gen_ashlsi3 (op1, op1, operands[2]));
3219 emit_insn (gen_iorsi3 (subtarget, op1, op2));
3222 if (subtarget != target)
3224 /* If TARGET is still a SUBREG, then it must be wider than a word,
3225 so we must be careful only to set the subword we were asked to.  */
3226 if (GET_CODE (target) == SUBREG)
3227 emit_move_insn (target, subtarget)
3229 emit_move_insn (target, gen_lowpart (GET_MODE (target), subtarget));
;; Clear a bitfield in a register (BFC-style; the output template line is
;; missing from this copy).  Operands 1/2 are the width and position.
3236 (define_insn "insv_zero"
3237 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3238 (match_operand:SI 1 "const_int_M_operand" "M")
3239 (match_operand:SI 2 "const_int_M_operand" "M"))
3243 [(set_attr "length" "4")
3244 (set_attr "predicable" "yes")
3245 (set_attr "type" "bfm")]
;; Insert a register value into a bitfield with BFI: %2 = position,
;; %1 = width.
3248 (define_insn "insv_t2"
3249 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r")
3250 (match_operand:SI 1 "const_int_M_operand" "M")
3251 (match_operand:SI 2 "const_int_M_operand" "M"))
3252 (match_operand:SI 3 "s_register_operand" "r"))]
3254 "bfi%?\t%0, %3, %2, %1"
3255 [(set_attr "length" "4")
3256 (set_attr "predicable" "yes")
3257 (set_attr "type" "bfm")]
;; %0 = %1 AND NOT %2, i.e. the BIC instruction.  Note the operand
;; numbering: the complemented input is operand 2.
3260 (define_insn "andsi_notsi_si"
3261 [(set (match_operand:SI 0 "s_register_operand" "=r")
3262 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3263 (match_operand:SI 1 "s_register_operand" "r")))]
3265 "bic%?\\t%0, %1, %2"
3266 [(set_attr "predicable" "yes")
3267 (set_attr "type" "logic_reg")]
3270 (define_insn "andsi_not_shiftsi_si"
3271 [(set (match_operand:SI 0 "s_register_operand" "=r")
3272 (and:SI (not:SI (match_operator:SI 4 "shift_operator"
3273 [(match_operand:SI 2 "s_register_operand" "r")
3274 (match_operand:SI 3 "arm_rhs_operand" "rM")]))
3275 (match_operand:SI 1 "s_register_operand" "r")))]
3277 "bic%?\\t%0, %1, %2%S4"
3278 [(set_attr "predicable" "yes")
3279 (set_attr "shift" "2")
3280 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
3281 (const_string "logic_shift_imm")
3282 (const_string "logic_shift_reg")))]
3285 ;; Shifted bics pattern used to set up CC status register and not reusing
3286 ;; bics output. Pattern restricts Thumb2 shift operand as bics for Thumb2
3287 ;; does not support shift by register.
3288 (define_insn "andsi_not_shiftsi_si_scc_no_reuse"
3289 [(set (reg:CC_NOOV CC_REGNUM)
3291 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3292 [(match_operand:SI 1 "s_register_operand" "r")
3293 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3294 (match_operand:SI 3 "s_register_operand" "r"))
3296 (clobber (match_scratch:SI 4 "=r"))]
3297 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
3298 "bics%?\\t%4, %3, %1%S0"
3299 [(set_attr "predicable" "yes")
3300 (set_attr "conds" "set")
3301 (set_attr "shift" "1")
3302 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3303 (const_string "logic_shift_imm")
3304 (const_string "logic_shift_reg")))]
3307 ;; Same as andsi_not_shiftsi_si_scc_no_reuse, but the bics result is also
3308 ;; getting reused later.
3309 (define_insn "andsi_not_shiftsi_si_scc"
3310 [(parallel [(set (reg:CC_NOOV CC_REGNUM)
3312 (and:SI (not:SI (match_operator:SI 0 "shift_operator"
3313 [(match_operand:SI 1 "s_register_operand" "r")
3314 (match_operand:SI 2 "arm_rhs_operand" "rM")]))
3315 (match_operand:SI 3 "s_register_operand" "r"))
3317 (set (match_operand:SI 4 "s_register_operand" "=r")
3318 (and:SI (not:SI (match_op_dup 0
3322 "TARGET_ARM || (TARGET_THUMB2 && CONST_INT_P (operands[2]))"
3323 "bics%?\\t%4, %3, %1%S0"
3324 [(set_attr "predicable" "yes")
3325 (set_attr "conds" "set")
3326 (set_attr "shift" "1")
3327 (set (attr "type") (if_then_else (match_operand 2 "const_int_operand" "")
3328 (const_string "logic_shift_imm")
3329 (const_string "logic_shift_reg")))]
3332 (define_insn "*andsi_notsi_si_compare0"
3333 [(set (reg:CC_NOOV CC_REGNUM)
3335 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3336 (match_operand:SI 1 "s_register_operand" "r"))
3338 (set (match_operand:SI 0 "s_register_operand" "=r")
3339 (and:SI (not:SI (match_dup 2)) (match_dup 1)))]
3342 [(set_attr "conds" "set")
3343 (set_attr "type" "logics_shift_reg")]
3346 (define_insn "*andsi_notsi_si_compare0_scratch"
3347 [(set (reg:CC_NOOV CC_REGNUM)
3349 (and:SI (not:SI (match_operand:SI 2 "s_register_operand" "r"))
3350 (match_operand:SI 1 "s_register_operand" "r"))
3352 (clobber (match_scratch:SI 0 "=r"))]
3355 [(set_attr "conds" "set")
3356 (set_attr "type" "logics_shift_reg")]
;; SImode inclusive-OR: expander, ORR/ORN insn-and-split, a splitter for
;; inverted-encodable constants, and the flag-setting ORRS forms.
;; NOTE(review): this extraction has dropped lines (non-contiguous embedded
;; numbering); some conditions, templates and closing parens are absent.
;; Expander: for constant operand 2, either force it into a register
;; (when early splitting is not wanted) or expand through
;; arm_split_constant; Thumb-1 always forces a register.
3359 (define_expand "iorsi3"
3360 [(set (match_operand:SI 0 "s_register_operand")
3361 (ior:SI (match_operand:SI 1 "s_register_operand")
3362 (match_operand:SI 2 "reg_or_int_operand")))]
3365 if (CONST_INT_P (operands[2]))
3369 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), IOR))
3370 operands[2] = force_reg (SImode, operands[2]);
3373 arm_split_constant (IOR, SImode, NULL_RTX,
3374 INTVAL (operands[2]), operands[0],
3376 optimize && can_create_pseudo_p ());
3380 else /* TARGET_THUMB1 */
3382 rtx tmp = force_reg (SImode, operands[2]);
3383 if (rtx_equal_p (operands[0], operands[1]))
3387 operands[2] = operands[1];
;; ORR/ORN insn; the ?n alternative (arbitrary constant) is emitted as "#"
;; and split after the encodability check below via arm_split_constant.
3395 (define_insn_and_split "*iorsi3_insn"
3396 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r,r")
3397 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r,r")
3398 (match_operand:SI 2 "reg_or_int_operand" "I,l,K,r,?n")))]
3403 orn%?\\t%0, %1, #%B2
3407 && CONST_INT_P (operands[2])
3408 && !(const_ok_for_arm (INTVAL (operands[2]))
3409 || (TARGET_THUMB2 && const_ok_for_arm (~INTVAL (operands[2]))))"
3410 [(clobber (const_int 0))]
3412 arm_split_constant (IOR, SImode, curr_insn,
3413 INTVAL (operands[2]), operands[0], operands[1], 0);
3416 [(set_attr "length" "4,4,4,4,16")
3417 (set_attr "arch" "32,t2,t2,32,32")
3418 (set_attr "predicable" "yes")
3419 (set_attr "predicable_short_it" "no,yes,no,no,no")
3420 (set_attr "type" "logic_imm,logic_reg,logic_imm,logic_reg,logic_reg")]
;; Split IOR with a constant that is only encodable in inverted form:
;; materialize the constant into scratch 3 (movsi knows how), then ORR.
3424 [(match_scratch:SI 3 "r")
3425 (set (match_operand:SI 0 "arm_general_register_operand" "")
3426 (ior:SI (match_operand:SI 1 "arm_general_register_operand" "")
3427 (match_operand:SI 2 "const_int_operand" "")))]
3429 && !const_ok_for_arm (INTVAL (operands[2]))
3430 && const_ok_for_arm (~INTVAL (operands[2]))"
3431 [(set (match_dup 3) (match_dup 2))
3432 (set (match_dup 0) (ior:SI (match_dup 1) (match_dup 3)))]
;; ORRS: OR that also sets the condition codes, result kept.
3436 (define_insn "*iorsi3_compare0"
3437 [(set (reg:CC_NOOV CC_REGNUM)
3439 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3440 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3442 (set (match_operand:SI 0 "s_register_operand" "=r,l,r")
3443 (ior:SI (match_dup 1) (match_dup 2)))]
3445 "orrs%?\\t%0, %1, %2"
3446 [(set_attr "conds" "set")
3447 (set_attr "arch" "*,t2,*")
3448 (set_attr "length" "4,2,4")
3449 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
;; ORRS where only the flags are needed; result to a scratch.
3452 (define_insn "*iorsi3_compare0_scratch"
3453 [(set (reg:CC_NOOV CC_REGNUM)
3455 (ior:SI (match_operand:SI 1 "s_register_operand" "%r,0,r")
3456 (match_operand:SI 2 "arm_rhs_operand" "I,l,r"))
3458 (clobber (match_scratch:SI 0 "=r,l,r"))]
3460 "orrs%?\\t%0, %1, %2"
3461 [(set_attr "conds" "set")
3462 (set_attr "arch" "*,t2,*")
3463 (set_attr "length" "4,2,4")
3464 (set_attr "type" "logics_imm,logics_reg,logics_reg")]
;; SImode exclusive-OR (EOR/EORS), structured like the iorsi3 family:
;; expander + insn-and-split for non-encodable constants + flag-setting
;; variants.
;; NOTE(review): lines are missing from this extraction (non-contiguous
;; embedded numbering).
3467 (define_expand "xorsi3"
3468 [(set (match_operand:SI 0 "s_register_operand")
3469 (xor:SI (match_operand:SI 1 "s_register_operand")
3470 (match_operand:SI 2 "reg_or_int_operand")))]
3472 "if (CONST_INT_P (operands[2]))
3476 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[2]), XOR))
3477 operands[2] = force_reg (SImode, operands[2]);
3480 arm_split_constant (XOR, SImode, NULL_RTX,
3481 INTVAL (operands[2]), operands[0],
3483 optimize && can_create_pseudo_p ());
3487 else /* TARGET_THUMB1 */
3489 rtx tmp = force_reg (SImode, operands[2]);
3490 if (rtx_equal_p (operands[0], operands[1]))
3494 operands[2] = operands[1];
;; EOR insn; the ?n alternative splits through arm_split_constant when the
;; constant is not directly encodable.
3501 (define_insn_and_split "*arm_xorsi3"
3502 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r,r")
3503 (xor:SI (match_operand:SI 1 "s_register_operand" "%r,0,r,r")
3504 (match_operand:SI 2 "reg_or_int_operand" "I,l,r,?n")))]
3512 && CONST_INT_P (operands[2])
3513 && !const_ok_for_arm (INTVAL (operands[2]))"
3514 [(clobber (const_int 0))]
3516 arm_split_constant (XOR, SImode, curr_insn,
3517 INTVAL (operands[2]), operands[0], operands[1], 0);
3520 [(set_attr "length" "4,4,4,16")
3521 (set_attr "predicable" "yes")
3522 (set_attr "predicable_short_it" "no,yes,no,no")
3523 (set_attr "type" "logic_imm,logic_reg,logic_reg,multiple")]
;; EORS: XOR that sets the flags, result kept.
3526 (define_insn "*xorsi3_compare0"
3527 [(set (reg:CC_NOOV CC_REGNUM)
3528 (compare:CC_NOOV (xor:SI (match_operand:SI 1 "s_register_operand" "r,r")
3529 (match_operand:SI 2 "arm_rhs_operand" "I,r"))
3531 (set (match_operand:SI 0 "s_register_operand" "=r,r")
3532 (xor:SI (match_dup 1) (match_dup 2)))]
3534 "eors%?\\t%0, %1, %2"
3535 [(set_attr "conds" "set")
3536 (set_attr "type" "logics_imm,logics_reg")]
;; Flags-only variant (TEQ-style use); template line missing here.
3539 (define_insn "*xorsi3_compare0_scratch"
3540 [(set (reg:CC_NOOV CC_REGNUM)
3541 (compare:CC_NOOV (xor:SI (match_operand:SI 0 "s_register_operand" "r,r")
3542 (match_operand:SI 1 "arm_rhs_operand" "I,r"))
3546 [(set_attr "conds" "set")
3547 (set_attr "type" "logics_imm,logics_reg")]
3550 ; By splitting (IOR (AND (NOT A) (NOT B)) C) as D = AND (IOR A B) (NOT C),
3551 ; (NOT D) we can sometimes merge the final NOT into one of the following
;; (comment truncated by the extraction; the split itself follows.)
3555 [(set (match_operand:SI 0 "s_register_operand" "")
3556 (ior:SI (and:SI (not:SI (match_operand:SI 1 "s_register_operand" ""))
3557 (not:SI (match_operand:SI 2 "arm_rhs_operand" "")))
3558 (match_operand:SI 3 "arm_rhs_operand" "")))
3559 (clobber (match_operand:SI 4 "s_register_operand" ""))]
3561 [(set (match_dup 4) (and:SI (ior:SI (match_dup 1) (match_dup 2))
3562 (not:SI (match_dup 3))))
3563 (set (match_dup 0) (not:SI (match_dup 4)))]
;; (and (ior A B) (not C)): two instructions, ORR then BIC.  Emitted as "#"
;; and split after reload into the two sets below; the C body folds the NOT
;; of a constant operand 3 so the split never creates (not (const_int)).
3567 (define_insn_and_split "*andsi_iorsi3_notsi"
3568 [(set (match_operand:SI 0 "s_register_operand" "=&r,&r,&r")
3569 (and:SI (ior:SI (match_operand:SI 1 "s_register_operand" "%0,r,r")
3570 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI"))
3571 (not:SI (match_operand:SI 3 "arm_rhs_operand" "rI,rI,rI"))))]
3573 "#" ; "orr%?\\t%0, %1, %2\;bic%?\\t%0, %0, %3"
3574 "&& reload_completed"
3575 [(set (match_dup 0) (ior:SI (match_dup 1) (match_dup 2)))
3576 (set (match_dup 0) (and:SI (match_dup 4) (match_dup 5)))]
3578 /* If operands[3] is a constant make sure to fold the NOT into it
3579 to avoid creating a NOT of a CONST_INT. */
3580 rtx not_rtx = simplify_gen_unary (NOT, SImode, operands[3], SImode);
3581 if (CONST_INT_P (not_rtx))
3583 operands[4] = operands[0];
3584 operands[5] = not_rtx;
3588 operands[5] = operands[0];
3589 operands[4] = not_rtx;
3592 [(set_attr "length" "8")
3593 (set_attr "ce_count" "2")
3594 (set_attr "predicable" "yes")
3595 (set_attr "type" "multiple")]
3598 ; ??? Are these four splitters still beneficial when the Thumb-2 bitfield
3599 ; insns are available?
;; Four anonymous define_splits handling a logical op between a
;; zero_extract/sign_extract and a matching shifted logical op (the two
;; commuted operand orders for each extract kind).  Each rewrites the
;; extract as an explicit shift pair through scratch register 8; the C body
;; recomputes operand 4 as the left-shift count 32 - (width + start).
;; NOTE(review): the replacement-pattern RTL of every splitter is only
;; partially present in this extraction (lines dropped); the condition
;; INTVAL(op3) == 32 - INTVAL(op6) ties the extract width to the shift.
3601 [(set (match_operand:SI 0 "s_register_operand" "")
3602 (match_operator:SI 1 "logical_binary_operator"
3603 [(zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3604 (match_operand:SI 3 "const_int_operand" "")
3605 (match_operand:SI 4 "const_int_operand" ""))
3606 (match_operator:SI 9 "logical_binary_operator"
3607 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3608 (match_operand:SI 6 "const_int_operand" ""))
3609 (match_operand:SI 7 "s_register_operand" "")])]))
3610 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3612 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3613 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3616 [(ashift:SI (match_dup 2) (match_dup 4))
3620 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3623 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Same as above with the zero_extract as the second operand.
3627 [(set (match_operand:SI 0 "s_register_operand" "")
3628 (match_operator:SI 1 "logical_binary_operator"
3629 [(match_operator:SI 9 "logical_binary_operator"
3630 [(lshiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3631 (match_operand:SI 6 "const_int_operand" ""))
3632 (match_operand:SI 7 "s_register_operand" "")])
3633 (zero_extract:SI (match_operand:SI 2 "s_register_operand" "")
3634 (match_operand:SI 3 "const_int_operand" "")
3635 (match_operand:SI 4 "const_int_operand" ""))]))
3636 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3638 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3639 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3642 [(ashift:SI (match_dup 2) (match_dup 4))
3646 [(lshiftrt:SI (match_dup 8) (match_dup 6))
3649 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Signed variant: sign_extract paired with an arithmetic right shift.
3653 [(set (match_operand:SI 0 "s_register_operand" "")
3654 (match_operator:SI 1 "logical_binary_operator"
3655 [(sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3656 (match_operand:SI 3 "const_int_operand" "")
3657 (match_operand:SI 4 "const_int_operand" ""))
3658 (match_operator:SI 9 "logical_binary_operator"
3659 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3660 (match_operand:SI 6 "const_int_operand" ""))
3661 (match_operand:SI 7 "s_register_operand" "")])]))
3662 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3664 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3665 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3668 [(ashift:SI (match_dup 2) (match_dup 4))
3672 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3675 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
;; Signed variant, commuted operand order.
3679 [(set (match_operand:SI 0 "s_register_operand" "")
3680 (match_operator:SI 1 "logical_binary_operator"
3681 [(match_operator:SI 9 "logical_binary_operator"
3682 [(ashiftrt:SI (match_operand:SI 5 "s_register_operand" "")
3683 (match_operand:SI 6 "const_int_operand" ""))
3684 (match_operand:SI 7 "s_register_operand" "")])
3685 (sign_extract:SI (match_operand:SI 2 "s_register_operand" "")
3686 (match_operand:SI 3 "const_int_operand" "")
3687 (match_operand:SI 4 "const_int_operand" ""))]))
3688 (clobber (match_operand:SI 8 "s_register_operand" ""))]
3690 && GET_CODE (operands[1]) == GET_CODE (operands[9])
3691 && INTVAL (operands[3]) == 32 - INTVAL (operands[6])"
3694 [(ashift:SI (match_dup 2) (match_dup 4))
3698 [(ashiftrt:SI (match_dup 8) (match_dup 6))
3701 operands[4] = GEN_INT (32 - (INTVAL (operands[3]) + INTVAL (operands[4])));
3705 ;; Minimum and maximum insns
;; NOTE(review): lines are missing throughout this section (non-contiguous
;; embedded numbering): several conditions, the second compare operands,
;; and parts of the split patterns are absent.
;; smaxsi3: max(x, 0) and max(x, -1) have single-instruction forms (see
;; *smax_0 / *smax_m1 below) and need no CC clobber; anything else goes to
;; the generic compare+conditional-move insn.
3707 (define_expand "smaxsi3"
3709 (set (match_operand:SI 0 "s_register_operand")
3710 (smax:SI (match_operand:SI 1 "s_register_operand")
3711 (match_operand:SI 2 "arm_rhs_operand")))
3712 (clobber (reg:CC CC_REGNUM))])]
3715 if (operands[2] == const0_rtx || operands[2] == constm1_rtx)
3717 /* No need for a clobber of the condition code register here. */
3718 emit_insn (gen_rtx_SET (operands[0],
3719 gen_rtx_SMAX (SImode, operands[1],
;; max(x, 0) == x & ~(x >> 31): clear x when negative.
3725 (define_insn "*smax_0"
3726 [(set (match_operand:SI 0 "s_register_operand" "=r")
3727 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3730 "bic%?\\t%0, %1, %1, asr #31"
3731 [(set_attr "predicable" "yes")
3732 (set_attr "type" "logic_shift_reg")]
;; max(x, -1) == x | (x >> 31): force all-ones when negative.
3735 (define_insn "*smax_m1"
3736 [(set (match_operand:SI 0 "s_register_operand" "=r")
3737 (smax:SI (match_operand:SI 1 "s_register_operand" "r")
3740 "orr%?\\t%0, %1, %1, asr #31"
3741 [(set_attr "predicable" "yes")
3742 (set_attr "type" "logic_shift_reg")]
;; General signed max: compare then conditional move(s), split after reload
;; into a CC set plus an if_then_else on GE.
3745 (define_insn_and_split "*arm_smax_insn"
3746 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3747 (smax:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3748 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3749 (clobber (reg:CC CC_REGNUM))]
3752 ; cmp\\t%1, %2\;movlt\\t%0, %2
3753 ; cmp\\t%1, %2\;movge\\t%0, %1\;movlt\\t%0, %2"
3755 [(set (reg:CC CC_REGNUM)
3756 (compare:CC (match_dup 1) (match_dup 2)))
3758 (if_then_else:SI (ge:SI (reg:CC CC_REGNUM) (const_int 0))
3762 [(set_attr "conds" "clob")
3763 (set_attr "length" "8,12")
3764 (set_attr "type" "multiple")]
;; sminsi3: min(x, 0) has a single-instruction form; otherwise generic.
3767 (define_expand "sminsi3"
3769 (set (match_operand:SI 0 "s_register_operand")
3770 (smin:SI (match_operand:SI 1 "s_register_operand")
3771 (match_operand:SI 2 "arm_rhs_operand")))
3772 (clobber (reg:CC CC_REGNUM))])]
3775 if (operands[2] == const0_rtx)
3777 /* No need for a clobber of the condition code register here. */
3778 emit_insn (gen_rtx_SET (operands[0],
3779 gen_rtx_SMIN (SImode, operands[1],
;; min(x, 0) == x & (x >> 31): keep x only when negative.
3785 (define_insn "*smin_0"
3786 [(set (match_operand:SI 0 "s_register_operand" "=r")
3787 (smin:SI (match_operand:SI 1 "s_register_operand" "r")
3790 "and%?\\t%0, %1, %1, asr #31"
3791 [(set_attr "predicable" "yes")
3792 (set_attr "type" "logic_shift_reg")]
;; General signed min, mirror of *arm_smax_insn (LT branch).
3795 (define_insn_and_split "*arm_smin_insn"
3796 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3797 (smin:SI (match_operand:SI 1 "s_register_operand" "%0,?r")
3798 (match_operand:SI 2 "arm_rhs_operand" "rI,rI")))
3799 (clobber (reg:CC CC_REGNUM))]
3802 ; cmp\\t%1, %2\;movge\\t%0, %2
3803 ; cmp\\t%1, %2\;movlt\\t%0, %1\;movge\\t%0, %2"
3805 [(set (reg:CC CC_REGNUM)
3806 (compare:CC (match_dup 1) (match_dup 2)))
3808 (if_then_else:SI (lt:SI (reg:CC CC_REGNUM) (const_int 0))
3812 [(set_attr "conds" "clob")
3813 (set_attr "length" "8,12")
3814 (set_attr "type" "multiple,multiple")]
;; Unsigned max: no shortcut forms, just the compare+cond-move insn below.
3817 (define_expand "umaxsi3"
3819 (set (match_operand:SI 0 "s_register_operand")
3820 (umax:SI (match_operand:SI 1 "s_register_operand")
3821 (match_operand:SI 2 "arm_rhs_operand")))
3822 (clobber (reg:CC CC_REGNUM))])]
3827 (define_insn_and_split "*arm_umaxsi3"
3828 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3829 (umax:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3830 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3831 (clobber (reg:CC CC_REGNUM))]
3834 ; cmp\\t%1, %2\;movcc\\t%0, %2
3835 ; cmp\\t%1, %2\;movcs\\t%0, %1
3836 ; cmp\\t%1, %2\;movcs\\t%0, %1\;movcc\\t%0, %2"
3838 [(set (reg:CC CC_REGNUM)
3839 (compare:CC (match_dup 1) (match_dup 2)))
3841 (if_then_else:SI (geu:SI (reg:CC CC_REGNUM) (const_int 0))
3845 [(set_attr "conds" "clob")
3846 (set_attr "length" "8,8,12")
;; NOTE(review): "store_4" looks like the wrong type class for a
;; cmp + conditional-move sequence (cf. "multiple" on *arm_smax_insn) --
;; confirm against upstream arm.md.
3847 (set_attr "type" "store_4")]
;; Unsigned min, mirror of umax (LTU branch).
3850 (define_expand "uminsi3"
3852 (set (match_operand:SI 0 "s_register_operand")
3853 (umin:SI (match_operand:SI 1 "s_register_operand")
3854 (match_operand:SI 2 "arm_rhs_operand")))
3855 (clobber (reg:CC CC_REGNUM))])]
3860 (define_insn_and_split "*arm_uminsi3"
3861 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
3862 (umin:SI (match_operand:SI 1 "s_register_operand" "0,r,?r")
3863 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
3864 (clobber (reg:CC CC_REGNUM))]
3867 ; cmp\\t%1, %2\;movcs\\t%0, %2
3868 ; cmp\\t%1, %2\;movcc\\t%0, %1
3869 ; cmp\\t%1, %2\;movcc\\t%0, %1\;movcs\\t%0, %2"
3871 [(set (reg:CC CC_REGNUM)
3872 (compare:CC (match_dup 1) (match_dup 2)))
3874 (if_then_else:SI (ltu:SI (reg:CC CC_REGNUM) (const_int 0))
3878 [(set_attr "conds" "clob")
3879 (set_attr "length" "8,8,12")
;; NOTE(review): same "store_4" oddity as *arm_umaxsi3 above -- confirm.
3880 (set_attr "type" "store_4")]
;; Store of a min/max result straight to memory: compare, then two
;; conditional stores (with an IT/ITE block under Thumb-2, per the emitted
;; "ite" -- an is_thumb branch around line 3894 is missing from this
;; extraction).  Enabled only when optimizing for size and not under
;; -mrestrict-it.
3883 (define_insn "*store_minmaxsi"
3884 [(set (match_operand:SI 0 "memory_operand" "=m")
3885 (match_operator:SI 3 "minmax_operator"
3886 [(match_operand:SI 1 "s_register_operand" "r")
3887 (match_operand:SI 2 "s_register_operand" "r")]))
3888 (clobber (reg:CC CC_REGNUM))]
3889 "TARGET_32BIT && optimize_function_for_size_p (cfun) && !arm_restrict_it"
3891 operands[3] = gen_rtx_fmt_ee (minmax_code (operands[3]), SImode,
3892 operands[1], operands[2]);
3893 output_asm_insn (\"cmp\\t%1, %2\", operands);
3895 output_asm_insn (\"ite\t%d3\", operands);
3896 output_asm_insn (\"str%d3\\t%1, %0\", operands);
3897 output_asm_insn (\"str%D3\\t%2, %0\", operands);
3900 [(set_attr "conds" "clob")
3901 (set (attr "length")
3902 (if_then_else (eq_attr "is_thumb" "yes")
3905 (set_attr "type" "store_4")]
3908 ; Reject the frame pointer in operand[1], since reloading this after
3909 ; it has been eliminated can cause carnage.
;; min/max folded into a shiftable ALU op (%i4 prints the operator):
;; cmp then two conditional ALU ops; alternative 0 with op3 == 0 and a
;; PLUS/IOR/XOR outer op can skip one of them (x op 0 == x).
3910 (define_insn "*minmax_arithsi"
3911 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
3912 (match_operator:SI 4 "shiftable_operator"
3913 [(match_operator:SI 5 "minmax_operator"
3914 [(match_operand:SI 2 "s_register_operand" "r,r")
3915 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
3916 (match_operand:SI 1 "s_register_operand" "0,?r")]))
3917 (clobber (reg:CC CC_REGNUM))]
3918 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && !arm_restrict_it"
3921 enum rtx_code code = GET_CODE (operands[4]);
3924 if (which_alternative != 0 || operands[3] != const0_rtx
3925 || (code != PLUS && code != IOR && code != XOR))
3930 operands[5] = gen_rtx_fmt_ee (minmax_code (operands[5]), SImode,
3931 operands[2], operands[3]);
3932 output_asm_insn (\"cmp\\t%2, %3\", operands);
3936 output_asm_insn (\"ite\\t%d5\", operands);
3938 output_asm_insn (\"it\\t%d5\", operands);
3940 output_asm_insn (\"%i4%d5\\t%0, %1, %2\", operands);
3942 output_asm_insn (\"%i4%D5\\t%0, %1, %3\", operands);
3945 [(set_attr "conds" "clob")
3946 (set (attr "length")
3947 (if_then_else (eq_attr "is_thumb" "yes")
3950 (set_attr "type" "multiple")]
3953 ; Reject the frame pointer in operand[1], since reloading this after
3954 ; it has been eliminated can cause carnage.
;; Non-canonical form (min/max is the second arithmetic operand, apparently
;; of a minus -- the outer rtx line is missing here).  Split after reload
;; into compare + two cond_exec arms; the C body builds the condition and
;; its reverse, and pre-folds a constant op3 into a plus_constant.
3955 (define_insn_and_split "*minmax_arithsi_non_canon"
3956 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
3958 (match_operand:SI 1 "s_register_operand" "0,?Ts")
3959 (match_operator:SI 4 "minmax_operator"
3960 [(match_operand:SI 2 "s_register_operand" "Ts,Ts")
3961 (match_operand:SI 3 "arm_rhs_operand" "TsI,TsI")])))
3962 (clobber (reg:CC CC_REGNUM))]
3963 "TARGET_32BIT && !arm_eliminable_register (operands[1])
3964 && !(arm_restrict_it && CONST_INT_P (operands[3]))"
3966 "TARGET_32BIT && !arm_eliminable_register (operands[1]) && reload_completed"
3967 [(set (reg:CC CC_REGNUM)
3968 (compare:CC (match_dup 2) (match_dup 3)))
3970 (cond_exec (match_op_dup 4 [(reg:CC CC_REGNUM) (const_int 0)])
3972 (minus:SI (match_dup 1)
3974 (cond_exec (match_op_dup 5 [(reg:CC CC_REGNUM) (const_int 0)])
3978 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
3979 operands[2], operands[3]);
3980 enum rtx_code rc = minmax_code (operands[4]);
3981 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode,
3982 operands[2], operands[3]);
3984 if (mode == CCFPmode || mode == CCFPEmode)
3985 rc = reverse_condition_maybe_unordered (rc);
3987 rc = reverse_condition (rc);
3988 operands[5] = gen_rtx_fmt_ee (rc, SImode, operands[2], operands[3]);
3989 if (CONST_INT_P (operands[3]))
3990 operands[6] = plus_constant (SImode, operands[1], -INTVAL (operands[3]));
3992 operands[6] = gen_rtx_MINUS (SImode, operands[1], operands[3]);
3994 [(set_attr "conds" "clob")
3995 (set (attr "length")
3996 (if_then_else (eq_attr "is_thumb" "yes")
3999 (set_attr "type" "multiple")]
;; Saturation (SSAT/USAT, ARMv6+).  A saturate is matched as an smin/smax
;; pair; SATrev gives the opposite code for the inner operation, and
;; SATlo/SAThi pick which operand index holds the low/high bound so
;; arm_sat_operator_match can validate the bounds either way round.
4002 (define_code_iterator SAT [smin smax])
4003 (define_code_attr SATrev [(smin "smax") (smax "smin")])
4004 (define_code_attr SATlo [(smin "1") (smax "2")])
4005 (define_code_attr SAThi [(smin "2") (smax "1")])
;; Saturate a plain register; emits ssat or usat depending on whether the
;; matched bounds are a signed or unsigned range (signed_sat).
;; NOTE(review): the local-variable declarations and the unreachable/else
;; lines of the C body are missing from this extraction.
4007 (define_insn "*satsi_<SAT:code>"
4008 [(set (match_operand:SI 0 "s_register_operand" "=r")
4009 (SAT:SI (<SATrev>:SI (match_operand:SI 3 "s_register_operand" "r")
4010 (match_operand:SI 1 "const_int_operand" "i"))
4011 (match_operand:SI 2 "const_int_operand" "i")))]
4012 "TARGET_32BIT && arm_arch6
4013 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4017 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4018 &mask, &signed_sat))
4021 operands[1] = GEN_INT (mask);
4023 return "ssat%?\t%0, %1, %3";
4025 return "usat%?\t%0, %1, %3";
4027 [(set_attr "predicable" "yes")
4028 (set_attr "type" "alus_imm")]
;; Same, but saturating a shifted value (%4 shifted per operator 3, printed
;; with %S3).
4031 (define_insn "*satsi_<SAT:code>_shift"
4032 [(set (match_operand:SI 0 "s_register_operand" "=r")
4033 (SAT:SI (<SATrev>:SI (match_operator:SI 3 "sat_shift_operator"
4034 [(match_operand:SI 4 "s_register_operand" "r")
4035 (match_operand:SI 5 "const_int_operand" "i")])
4036 (match_operand:SI 1 "const_int_operand" "i"))
4037 (match_operand:SI 2 "const_int_operand" "i")))]
4038 "TARGET_32BIT && arm_arch6
4039 && arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>], NULL, NULL)"
4043 if (!arm_sat_operator_match (operands[<SAT:SATlo>], operands[<SAT:SAThi>],
4044 &mask, &signed_sat))
4047 operands[1] = GEN_INT (mask);
4049 return "ssat%?\t%0, %1, %4%S3";
4051 return "usat%?\t%0, %1, %4%S3";
4053 [(set_attr "predicable" "yes")
4054 (set_attr "shift" "3")
4055 (set_attr "type" "logic_shift_reg")])
4057 ;; Shift and rotation insns
;; NOTE(review): conditions and some closing braces are missing from the
;; expanders below (non-contiguous embedded numbering).
;; DImode shifts are lowered onto the 32-bit core registers by
;; arm_emit_coreregs_64bit_shift, which needs two SImode scratches.
4059 (define_expand "ashldi3"
4060 [(set (match_operand:DI 0 "s_register_operand")
4061 (ashift:DI (match_operand:DI 1 "s_register_operand")
4062 (match_operand:SI 2 "reg_or_int_operand")))]
4065 arm_emit_coreregs_64bit_shift (ASHIFT, operands[0], operands[1],
4066 operands[2], gen_reg_rtx (SImode),
4067 gen_reg_rtx (SImode));
;; SImode left shift: a constant count > 31 yields zero.
4071 (define_expand "ashlsi3"
4072 [(set (match_operand:SI 0 "s_register_operand")
4073 (ashift:SI (match_operand:SI 1 "s_register_operand")
4074 (match_operand:SI 2 "arm_rhs_operand")))]
4077 if (CONST_INT_P (operands[2])
4078 && (UINTVAL (operands[2])) > 31)
4080 emit_insn (gen_movsi (operands[0], const0_rtx))
4086 (define_expand "ashrdi3"
4087 [(set (match_operand:DI 0 "s_register_operand")
4088 (ashiftrt:DI (match_operand:DI 1 "s_register_operand")
4089 (match_operand:SI 2 "reg_or_int_operand")))]
4092 arm_emit_coreregs_64bit_shift (ASHIFTRT, operands[0], operands[1],
4093 operands[2], gen_reg_rtx (SImode),
4094 gen_reg_rtx (SImode));
;; Arithmetic right shift: counts > 31 are clamped to 31 (sign fill).
4098 (define_expand "ashrsi3"
4099 [(set (match_operand:SI 0 "s_register_operand")
4100 (ashiftrt:SI (match_operand:SI 1 "s_register_operand")
4101 (match_operand:SI 2 "arm_rhs_operand")))]
4104 if (CONST_INT_P (operands[2])
4105 && UINTVAL (operands[2]) > 31)
4106 operands[2] = GEN_INT (31);
4110 (define_expand "lshrdi3"
4111 [(set (match_operand:DI 0 "s_register_operand")
4112 (lshiftrt:DI (match_operand:DI 1 "s_register_operand")
4113 (match_operand:SI 2 "reg_or_int_operand")))]
4116 arm_emit_coreregs_64bit_shift (LSHIFTRT, operands[0], operands[1],
4117 operands[2], gen_reg_rtx (SImode),
4118 gen_reg_rtx (SImode));
;; Logical right shift: counts > 31 yield zero, like ashlsi3.
4122 (define_expand "lshrsi3"
4123 [(set (match_operand:SI 0 "s_register_operand")
4124 (lshiftrt:SI (match_operand:SI 1 "s_register_operand")
4125 (match_operand:SI 2 "arm_rhs_operand")))]
4128 if (CONST_INT_P (operands[2])
4129 && (UINTVAL (operands[2])) > 31)
4131 emit_insn (gen_movsi (operands[0], const0_rtx));
;; Rotate left is rewritten as rotate right (ARM only has ROR):
;; constant n becomes (32 - n) % 32; a variable count is subtracted from 32
;; through a scratch register.
4137 (define_expand "rotlsi3"
4138 [(set (match_operand:SI 0 "s_register_operand")
4139 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4140 (match_operand:SI 2 "reg_or_int_operand")))]
4143 if (CONST_INT_P (operands[2]))
4144 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) % 32);
4147 rtx reg = gen_reg_rtx (SImode);
4148 emit_insn (gen_subsi3 (reg, GEN_INT (32), operands[2]));
;; Rotate right: constant counts reduced mod 32; Thumb-1 needs the count
;; in a register.
4154 (define_expand "rotrsi3"
4155 [(set (match_operand:SI 0 "s_register_operand")
4156 (rotatert:SI (match_operand:SI 1 "s_register_operand")
4157 (match_operand:SI 2 "arm_rhs_operand")))]
4162 if (CONST_INT_P (operands[2])
4163 && UINTVAL (operands[2]) > 31)
4164 operands[2] = GEN_INT (INTVAL (operands[2]) % 32);
4166 else /* TARGET_THUMB1 */
4168 if (CONST_INT_P (operands [2]))
4169 operands [2] = force_reg (SImode, operands[2]);
;; Shift insn patterns.  Assembly is produced by arm_output_shift (second
;; argument 0 = plain, 1 = flag-setting form).
;; NOTE(review): some condition/template lines are missing in this
;; extraction (non-contiguous embedded numbering).
4174 (define_insn "*arm_shiftsi3"
4175 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r,r")
4176 (match_operator:SI 3 "shift_operator"
4177 [(match_operand:SI 1 "s_register_operand" "0,l,r,r")
4178 (match_operand:SI 2 "reg_or_int_operand" "l,M,M,r")]))]
4180 "* return arm_output_shift(operands, 0);"
4181 [(set_attr "predicable" "yes")
4182 (set_attr "arch" "t2,t2,*,*")
4183 (set_attr "predicable_short_it" "yes,yes,no,no")
4184 (set_attr "length" "4")
4185 (set_attr "shift" "1")
4186 (set_attr "type" "alu_shift_reg,alu_shift_imm,alu_shift_imm,alu_shift_reg")]
;; Shift that also sets the flags, result kept.
4189 (define_insn "*shiftsi3_compare0"
4190 [(set (reg:CC_NOOV CC_REGNUM)
4191 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4192 [(match_operand:SI 1 "s_register_operand" "r,r")
4193 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4195 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4196 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))]
4198 "* return arm_output_shift(operands, 1);"
4199 [(set_attr "conds" "set")
4200 (set_attr "shift" "1")
4201 (set_attr "type" "alus_shift_imm,alus_shift_reg")]
;; Flags-only shift; data result to a scratch.
4204 (define_insn "*shiftsi3_compare0_scratch"
4205 [(set (reg:CC_NOOV CC_REGNUM)
4206 (compare:CC_NOOV (match_operator:SI 3 "shift_operator"
4207 [(match_operand:SI 1 "s_register_operand" "r,r")
4208 (match_operand:SI 2 "arm_rhs_operand" "M,r")])
4210 (clobber (match_scratch:SI 0 "=r,r"))]
4212 "* return arm_output_shift(operands, 1);"
4213 [(set_attr "conds" "set")
4214 (set_attr "shift" "1")
4215 (set_attr "type" "shift_imm,shift_reg")]
;; MVN of a shifted operand (template line missing here; the flag-setting
;; variants below show the mvns form).
4218 (define_insn "*not_shiftsi"
4219 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4220 (not:SI (match_operator:SI 3 "shift_operator"
4221 [(match_operand:SI 1 "s_register_operand" "r,r")
4222 (match_operand:SI 2 "shift_amount_operand" "M,rM")])))]
4225 [(set_attr "predicable" "yes")
4226 (set_attr "shift" "1")
4227 (set_attr "arch" "32,a")
4228 (set_attr "type" "mvn_shift,mvn_shift_reg")])
;; MVNS: NOT-of-shift that sets the flags, result kept.
4230 (define_insn "*not_shiftsi_compare0"
4231 [(set (reg:CC_NOOV CC_REGNUM)
4233 (not:SI (match_operator:SI 3 "shift_operator"
4234 [(match_operand:SI 1 "s_register_operand" "r,r")
4235 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
4237 (set (match_operand:SI 0 "s_register_operand" "=r,r")
4238 (not:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])))]
4240 "mvns%?\\t%0, %1%S3"
4241 [(set_attr "conds" "set")
4242 (set_attr "shift" "1")
4243 (set_attr "arch" "32,a")
4244 (set_attr "type" "mvn_shift,mvn_shift_reg")])
;; MVNS, flags only; result to a scratch.
4246 (define_insn "*not_shiftsi_compare0_scratch"
4247 [(set (reg:CC_NOOV CC_REGNUM)
4249 (not:SI (match_operator:SI 3 "shift_operator"
4250 [(match_operand:SI 1 "s_register_operand" "r,r")
4251 (match_operand:SI 2 "shift_amount_operand" "M,rM")]))
4253 (clobber (match_scratch:SI 0 "=r,r"))]
4255 "mvns%?\\t%0, %1%S3"
4256 [(set_attr "conds" "set")
4257 (set_attr "shift" "1")
4258 (set_attr "arch" "32,a")
4259 (set_attr "type" "mvn_shift,mvn_shift_reg")])
4261 ;; We don't really have extzv, but defining this using shifts helps
4262 ;; to reduce register pressure later on.
;; extzv: unsigned bitfield extract.  On Thumb-2, byte-aligned 16/32-bit
;; memory fields use the unaligned-load patterns; register fields go
;; through extzv_t2 (per the gen_extzv_t2 call -- presumably UBFX, its
;; pattern is not visible here).  Thumb-1 uses a left/right shift pair
;; (extzv_t1 below).
;; NOTE(review): many lines of the C body (braces, FAIL/DONE statements,
;; some declarations) are missing from this extraction.
4264 (define_expand "extzv"
4265 [(set (match_operand 0 "s_register_operand")
4266 (zero_extract (match_operand 1 "nonimmediate_operand")
4267 (match_operand 2 "const_int_operand")
4268 (match_operand 3 "const_int_operand")))]
4269 "TARGET_THUMB1 || arm_arch_thumb2"
4272 HOST_WIDE_INT lshift = 32 - INTVAL (operands[2]) - INTVAL (operands[3]);
4273 HOST_WIDE_INT rshift = 32 - INTVAL (operands[2]);
4275 if (arm_arch_thumb2)
4277 HOST_WIDE_INT width = INTVAL (operands[2]);
4278 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4280 if (unaligned_access && MEM_P (operands[1])
4281 && (width == 16 || width == 32) && (bitpos % BITS_PER_UNIT) == 0)
4285 if (BYTES_BIG_ENDIAN)
4286 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width
4291 base_addr = adjust_address (operands[1], SImode,
4292 bitpos / BITS_PER_UNIT);
4293 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4297 rtx dest = operands[0];
4298 rtx tmp = gen_reg_rtx (SImode);
4300 /* We may get a paradoxical subreg here. Strip it off. */
4301 if (GET_CODE (dest) == SUBREG
4302 && GET_MODE (dest) == SImode
4303 && GET_MODE (SUBREG_REG (dest)) == HImode)
4304 dest = SUBREG_REG (dest);
4306 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4309 base_addr = adjust_address (operands[1], HImode,
4310 bitpos / BITS_PER_UNIT);
4311 emit_insn (gen_unaligned_loadhiu (tmp, base_addr));
4312 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4316 else if (s_register_operand (operands[1], GET_MODE (operands[1])))
4318 emit_insn (gen_extzv_t2 (operands[0], operands[1], operands[2],
4326 if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4329 operands[3] = GEN_INT (rshift);
4333 emit_insn (gen_lshrsi3 (operands[0], operands[1], operands[3]));
4337 emit_insn (gen_extzv_t1 (operands[0], operands[1], GEN_INT (lshift),
4338 operands[3], gen_reg_rtx (SImode)));
4343 ;; Helper for extzv, for the Thumb-1 register-shifts case.
;; extzv_t1: shift left by %2 into scratch 4, then logical shift right by
;; %3, isolating the field.
4345 (define_expand "extzv_t1"
4346 [(set (match_operand:SI 4 "s_register_operand")
4347 (ashift:SI (match_operand:SI 1 "nonimmediate_operand")
4348 (match_operand:SI 2 "const_int_operand")))
4349 (set (match_operand:SI 0 "s_register_operand")
4350 (lshiftrt:SI (match_dup 4)
4351 (match_operand:SI 3 "const_int_operand")))]
;; extv: signed bitfield extract; mirrors extzv but uses the signed
;; unaligned load (gen_unaligned_loadhis) and extv_regsi for registers.
;; NOTE(review): the condition line and parts of the C body are missing.
4355 (define_expand "extv"
4356 [(set (match_operand 0 "s_register_operand")
4357 (sign_extract (match_operand 1 "nonimmediate_operand")
4358 (match_operand 2 "const_int_operand")
4359 (match_operand 3 "const_int_operand")))]
4362 HOST_WIDE_INT width = INTVAL (operands[2]);
4363 HOST_WIDE_INT bitpos = INTVAL (operands[3]);
4365 if (unaligned_access && MEM_P (operands[1]) && (width == 16 || width == 32)
4366 && (bitpos % BITS_PER_UNIT) == 0)
4370 if (BYTES_BIG_ENDIAN)
4371 bitpos = GET_MODE_BITSIZE (GET_MODE (operands[0])) - width - bitpos;
4375 base_addr = adjust_address (operands[1], SImode,
4376 bitpos / BITS_PER_UNIT);
4377 emit_insn (gen_unaligned_loadsi (operands[0], base_addr));
4381 rtx dest = operands[0];
4382 rtx tmp = gen_reg_rtx (SImode);
4384 /* We may get a paradoxical subreg here. Strip it off. */
4385 if (GET_CODE (dest) == SUBREG
4386 && GET_MODE (dest) == SImode
4387 && GET_MODE (SUBREG_REG (dest)) == HImode)
4388 dest = SUBREG_REG (dest);
4390 if (GET_MODE_BITSIZE (GET_MODE (dest)) != width)
4393 base_addr = adjust_address (operands[1], HImode,
4394 bitpos / BITS_PER_UNIT);
4395 emit_insn (gen_unaligned_loadhis (tmp, base_addr));
4396 emit_move_insn (gen_lowpart (SImode, dest), tmp);
4401 else if (!s_register_operand (operands[1], GET_MODE (operands[1])))
4403 else if (GET_MODE (operands[0]) == SImode
4404 && GET_MODE (operands[1]) == SImode)
4406 emit_insn (gen_extv_regsi (operands[0], operands[1], operands[2],
4414 ; Helper to expand register forms of extv with the proper modes.
4416 (define_expand "extv_regsi"
4417 [(set (match_operand:SI 0 "s_register_operand")
4418 (sign_extract:SI (match_operand:SI 1 "s_register_operand")
4419 (match_operand 2 "const_int_operand")
4420 (match_operand 3 "const_int_operand")))]
4425 ; ARMv6+ unaligned load/store instructions (used for packed structure accesses).
;; Each pattern wraps the memory access in UNSPEC_UNALIGNED_LOAD /
;; UNSPEC_UNALIGNED_STORE so the RTL optimizers cannot fuse or re-split the
;; access in ways that would reintroduce an alignment fault.
;; NOTE(review): the extraction has dropped several lines here (enabling
;; conditions, "@" alternatives markers and braces) between the original
;; line numbers shown -- consult the full arm.md before modifying.
4427 (define_insn "unaligned_loaddi"
4428 [(set (match_operand:DI 0 "s_register_operand" "=r")
4429 (unspec:DI [(match_operand:DI 1 "memory_operand" "m")]
4430 UNSPEC_UNALIGNED_LOAD))]
4431 "TARGET_32BIT && TARGET_LDRD"
4433 return output_move_double (operands, true, NULL);
4435 [(set_attr "length" "8")
4436 (set_attr "type" "load_8")])
;; SImode unaligned load: three alternatives -- Thumb-1 (t1), Thumb-2 (t2)
;; and 32-bit ARM -- matching the arch/length/predicable attrs below.
4438 (define_insn "unaligned_loadsi"
4439 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4440 (unspec:SI [(match_operand:SI 1 "memory_operand" "m,Uw,m")]
4441 UNSPEC_UNALIGNED_LOAD))]
4444 ldr\t%0, %1\t@ unaligned
4445 ldr%?\t%0, %1\t@ unaligned
4446 ldr%?\t%0, %1\t@ unaligned"
4447 [(set_attr "arch" "t1,t2,32")
4448 (set_attr "length" "2,2,4")
4449 (set_attr "predicable" "no,yes,yes")
4450 (set_attr "predicable_short_it" "no,yes,no")
4451 (set_attr "type" "load_4")])
4453 ;; The 16-bit Thumb1 variant of ldrsh requires two registers in the
4454 ;; address (there's no immediate format).  That's tricky to support
4455 ;; here and we don't really need this pattern for that case, so only
4456 ;; enable for 32-bit ISAs.
;; Sign-extending unaligned halfword load (ldrsh).
4457 (define_insn "unaligned_loadhis"
4458 [(set (match_operand:SI 0 "s_register_operand" "=r")
4460 (unspec:HI [(match_operand:HI 1 "memory_operand" "Uh")]
4461 UNSPEC_UNALIGNED_LOAD)))]
4462 "unaligned_access && TARGET_32BIT"
4463 "ldrsh%?\t%0, %1\t@ unaligned"
4464 [(set_attr "predicable" "yes")
4465 (set_attr "type" "load_byte")])
;; Zero-extending unaligned halfword load (ldrh); same three-way
;; t1/t2/32 alternative scheme as unaligned_loadsi.
4467 (define_insn "unaligned_loadhiu"
4468 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
4470 (unspec:HI [(match_operand:HI 1 "memory_operand" "m,Uw,m")]
4471 UNSPEC_UNALIGNED_LOAD)))]
4474 ldrh\t%0, %1\t@ unaligned
4475 ldrh%?\t%0, %1\t@ unaligned
4476 ldrh%?\t%0, %1\t@ unaligned"
4477 [(set_attr "arch" "t1,t2,32")
4478 (set_attr "length" "2,2,4")
4479 (set_attr "predicable" "no,yes,yes")
4480 (set_attr "predicable_short_it" "no,yes,no")
4481 (set_attr "type" "load_byte")])
;; Store counterparts of the patterns above.
4483 (define_insn "unaligned_storedi"
4484 [(set (match_operand:DI 0 "memory_operand" "=m")
4485 (unspec:DI [(match_operand:DI 1 "s_register_operand" "r")]
4486 UNSPEC_UNALIGNED_STORE))]
4487 "TARGET_32BIT && TARGET_LDRD"
4489 return output_move_double (operands, true, NULL);
4491 [(set_attr "length" "8")
4492 (set_attr "type" "store_8")])
4494 (define_insn "unaligned_storesi"
4495 [(set (match_operand:SI 0 "memory_operand" "=m,Uw,m")
4496 (unspec:SI [(match_operand:SI 1 "s_register_operand" "l,l,r")]
4497 UNSPEC_UNALIGNED_STORE))]
4500 str\t%1, %0\t@ unaligned
4501 str%?\t%1, %0\t@ unaligned
4502 str%?\t%1, %0\t@ unaligned"
4503 [(set_attr "arch" "t1,t2,32")
4504 (set_attr "length" "2,2,4")
4505 (set_attr "predicable" "no,yes,yes")
4506 (set_attr "predicable_short_it" "no,yes,no")
4507 (set_attr "type" "store_4")])
4509 (define_insn "unaligned_storehi"
4510 [(set (match_operand:HI 0 "memory_operand" "=m,Uw,m")
4511 (unspec:HI [(match_operand:HI 1 "s_register_operand" "l,l,r")]
4512 UNSPEC_UNALIGNED_STORE))]
4515 strh\t%1, %0\t@ unaligned
4516 strh%?\t%1, %0\t@ unaligned
4517 strh%?\t%1, %0\t@ unaligned"
4518 [(set_attr "arch" "t1,t2,32")
4519 (set_attr "length" "2,2,4")
4520 (set_attr "predicable" "no,yes,yes")
4521 (set_attr "predicable_short_it" "no,yes,no")
4522 (set_attr "type" "store_4")])
;; Register bitfield extracts: sbfx (signed) and ubfx (unsigned).
;; Operand 2 is the field width, operand 3 the least-significant bit;
;; the IN_RANGE checks keep lsb in [0,31] and width in [1, 32-lsb] so the
;; field stays inside a 32-bit register.
;; NOTE(review): the enabling-condition lines preceding the IN_RANGE
;; clauses (original 4530 / 4544) are missing from this extraction.
4525 (define_insn "*extv_reg"
4526 [(set (match_operand:SI 0 "s_register_operand" "=r")
4527 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4528 (match_operand:SI 2 "const_int_operand" "n")
4529 (match_operand:SI 3 "const_int_operand" "n")))]
4531 && IN_RANGE (INTVAL (operands[3]), 0, 31)
4532 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
4533 "sbfx%?\t%0, %1, %3, %2"
4534 [(set_attr "length" "4")
4535 (set_attr "predicable" "yes")
4536 (set_attr "type" "bfm")]
;; Unsigned variant of the pattern above; named so the extzv expander can
;; generate it directly.
4539 (define_insn "extzv_t2"
4540 [(set (match_operand:SI 0 "s_register_operand" "=r")
4541 (zero_extract:SI (match_operand:SI 1 "s_register_operand" "r")
4542 (match_operand:SI 2 "const_int_operand" "n")
4543 (match_operand:SI 3 "const_int_operand" "n")))]
4545 && IN_RANGE (INTVAL (operands[3]), 0, 31)
4546 && IN_RANGE (INTVAL (operands[2]), 1, 32 - INTVAL (operands[3]))"
4547 "ubfx%?\t%0, %1, %3, %2"
4548 [(set_attr "length" "4")
4549 (set_attr "predicable" "yes")
4550 (set_attr "type" "bfm")]
4554 ;; Division instructions
;; Signed and unsigned 32-bit hardware division.  Two alternatives:
;; generic 32-bit ISA and ARMv8-M Baseline (v8mb), per the arch attr.
;; NOTE(review): the enabling conditions and output templates (original
;; lines 4559-4562 / 4572-4575) are missing from this extraction.
4555 (define_insn "divsi3"
4556 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4557 (div:SI (match_operand:SI 1 "s_register_operand" "r,r")
4558 (match_operand:SI 2 "s_register_operand" "r,r")))]
4563 [(set_attr "arch" "32,v8mb")
4564 (set_attr "predicable" "yes")
4565 (set_attr "type" "sdiv")]
4568 (define_insn "udivsi3"
4569 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4570 (udiv:SI (match_operand:SI 1 "s_register_operand" "r,r")
4571 (match_operand:SI 2 "s_register_operand" "r,r")))]
4576 [(set_attr "arch" "32,v8mb")
4577 (set_attr "predicable" "yes")
4578 (set_attr "type" "udiv")]
4582 ;; Unary arithmetic insns
;; negv{si,di}3: negation with signed-overflow check.  Each emits a
;; flag-setting negate then branches on the V flag (CC_Vmode) via
;; arm_gen_unlikely_cbranch; operand 2 is the overflow branch target label.
4584 (define_expand "negvsi3"
4585 [(match_operand:SI 0 "register_operand")
4586 (match_operand:SI 1 "register_operand")
4587 (match_operand 2 "")]
4590 emit_insn (gen_subsi3_compare (operands[0], const0_rtx, operands[1]));
4591 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
4596 (define_expand "negvdi3"
4597 [(match_operand:DI 0 "s_register_operand")
4598 (match_operand:DI 1 "s_register_operand")
4599 (match_operand 2 "")]
4602 emit_insn (gen_negdi2_compare (operands[0], operands[1]));
4603 arm_gen_unlikely_cbranch (NE, CC_Vmode, operands[2]);
;; 64-bit flag-setting negate: rsbs/rscs on ARM (alternative "a"),
;; rsbs/sbcs on Thumb-2 (no rsc in Thumb-2).
;; NOTE(review): lines 4611-4612 (the compare rtx head) are missing here.
4609 (define_insn "negdi2_compare"
4610 [(set (reg:CC CC_REGNUM)
4613 (match_operand:DI 1 "register_operand" "r,r")))
4614 (set (match_operand:DI 0 "register_operand" "=&r,&r")
4615 (minus:DI (const_int 0) (match_dup 1)))]
4618 rsbs\\t%Q0, %Q1, #0;rscs\\t%R0, %R1, #0
4619 rsbs\\t%Q0, %Q1, #0;sbcs\\t%R0, %R1, %R1, lsl #1"
4620 [(set_attr "conds" "set")
4621 (set_attr "arch" "a,t2")
4622 (set_attr "length" "8")
4623 (set_attr "type" "multiple")]
;; Plain 32-bit negate expander and its ARM/Thumb-2 insn (rsb rd, rn, #0).
4626 (define_expand "negsi2"
4627 [(set (match_operand:SI 0 "s_register_operand")
4628 (neg:SI (match_operand:SI 1 "s_register_operand")))]
4633 (define_insn "*arm_negsi2"
4634 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4635 (neg:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4637 "rsb%?\\t%0, %1, #0"
4638 [(set_attr "predicable" "yes")
4639 (set_attr "predicable_short_it" "yes,no")
4640 (set_attr "arch" "t2,*")
4641 (set_attr "length" "4")
4642 (set_attr "type" "alu_imm")]
4645 ;; To keep the comparison in canonical form we express it as (~reg cmp ~0)
4646 ;; rather than (0 cmp reg).  This gives the same results for unsigned
4647 ;; and equality compares which is what we mostly need here.
4648 (define_insn "negsi2_0compare"
4649 [(set (reg:CC_RSB CC_REGNUM)
4650 (compare:CC_RSB (not:SI (match_operand:SI 1 "s_register_operand" "l,r"))
4652 (set (match_operand:SI 0 "s_register_operand" "=l,r")
4653 (neg:SI (match_dup 1)))]
4658 [(set_attr "conds" "set")
4659 (set_attr "arch" "t2,*")
4660 (set_attr "length" "2,*")
4661 (set_attr "type" "alus_imm")]
;; Negate that also consumes a borrow (operand 2) -- the sbc form for
;; Thumb-2 uses %1 twice because rsc is unavailable there.
4664 (define_insn "negsi2_carryin"
4665 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
4666 (minus:SI (neg:SI (match_operand:SI 1 "s_register_operand" "r,r"))
4667 (match_operand:SI 2 "arm_borrow_operation" "")))]
4671 sbc\\t%0, %1, %1, lsl #1"
4672 [(set_attr "conds" "use")
4673 (set_attr "arch" "a,t2")
4674 (set_attr "type" "adc_imm,adc_reg")]
;; Floating-point negation expanders; the actual insns live in the VFP
;; patterns (not visible in this chunk).  DFmode additionally requires
;; double-precision VFP hardware.
4677 (define_expand "negsf2"
4678 [(set (match_operand:SF 0 "s_register_operand")
4679 (neg:SF (match_operand:SF 1 "s_register_operand")))]
4680 "TARGET_32BIT && TARGET_HARD_FLOAT"
4684 (define_expand "negdf2"
4685 [(set (match_operand:DF 0 "s_register_operand")
4686 (neg:DF (match_operand:DF 1 "s_register_operand")))]
4687 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
4690 ;; abssi2 doesn't really clobber the condition codes if a different register
4691 ;; is being set.  To keep things simple, assume during rtl manipulations that
4692 ;; it does, but tell the final scan operator the truth.  Similarly for
;; The expander chooses operand 2: a SCRATCH when CC can be preserved, or
;; the hard CC register when it really is clobbered.
4695 (define_expand "abssi2"
4697 [(set (match_operand:SI 0 "s_register_operand")
4698 (abs:SI (match_operand:SI 1 "s_register_operand")))
4699 (clobber (match_dup 2))])]
4703 operands[2] = gen_rtx_SCRATCH (SImode);
4705 operands[2] = gen_rtx_REG (CCmode, CC_REGNUM);
;; abs split after reload.  Alternative 0 (dest == src): cmp + conditional
;; rsb.  Alternative 1 (dest != src): eor/sub with asr #31 mask, which
;; avoids touching the flags.
;; NOTE(review): the split C body below has lost lines (closing parens,
;; const0_rtx arguments) to the extraction -- treat as read-only context.
4708 (define_insn_and_split "*arm_abssi2"
4709 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4710 (abs:SI (match_operand:SI 1 "s_register_operand" "0,r")))
4711 (clobber (reg:CC CC_REGNUM))]
4714 "&& reload_completed"
4717 /* if (which_alternative == 0) */
4718 if (REGNO(operands[0]) == REGNO(operands[1]))
4720 /* Emit the pattern:
4721 cmp\\t%0, #0\;rsblt\\t%0, %0, #0
4722 [(set (reg:CC CC_REGNUM)
4723 (compare:CC (match_dup 0) (const_int 0)))
4724 (cond_exec (lt:CC (reg:CC CC_REGNUM) (const_int 0))
4725 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 1))))]
4727 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4728 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4729 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4730 (gen_rtx_LT (SImode,
4731 gen_rtx_REG (CCmode, CC_REGNUM),
4733 (gen_rtx_SET (operands[0],
4734 (gen_rtx_MINUS (SImode,
4741 /* Emit the pattern:
4742 alt1: eor%?\\t%0, %1, %1, asr #31\;sub%?\\t%0, %0, %1, asr #31
4744 (xor:SI (match_dup 1)
4745 (ashiftrt:SI (match_dup 1) (const_int 31))))
4747 (minus:SI (match_dup 0)
4748 (ashiftrt:SI (match_dup 1) (const_int 31))))]
4750 emit_insn (gen_rtx_SET (operands[0],
4751 gen_rtx_XOR (SImode,
4752 gen_rtx_ASHIFTRT (SImode,
4756 emit_insn (gen_rtx_SET (operands[0],
4757 gen_rtx_MINUS (SImode,
4759 gen_rtx_ASHIFTRT (SImode,
4765 [(set_attr "conds" "clob,*")
4766 (set_attr "shift" "1")
4767 (set_attr "predicable" "no, yes")
4768 (set_attr "length" "8")
4769 (set_attr "type" "multiple")]
;; -abs(x): mirror of *arm_abssi2.  Alternative 0 uses cmp + rsbgt (negate
;; when positive); alternative 1 uses eor then rsb with the asr #31 mask.
4772 (define_insn_and_split "*arm_neg_abssi2"
4773 [(set (match_operand:SI 0 "s_register_operand" "=r,&r")
4774 (neg:SI (abs:SI (match_operand:SI 1 "s_register_operand" "0,r"))))
4775 (clobber (reg:CC CC_REGNUM))]
4778 "&& reload_completed"
4781 /* if (which_alternative == 0) */
4782 if (REGNO (operands[0]) == REGNO (operands[1]))
4784 /* Emit the pattern:
4785 cmp\\t%0, #0\;rsbgt\\t%0, %0, #0
4787 emit_insn (gen_rtx_SET (gen_rtx_REG (CCmode, CC_REGNUM),
4788 gen_rtx_COMPARE (CCmode, operands[0], const0_rtx)));
4789 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
4791 gen_rtx_REG (CCmode, CC_REGNUM),
4793 gen_rtx_SET (operands[0],
4794 (gen_rtx_MINUS (SImode,
4800 /* Emit the pattern:
4801 eor%?\\t%0, %1, %1, asr #31\;rsb%?\\t%0, %0, %1, asr #31
4803 emit_insn (gen_rtx_SET (operands[0],
4804 gen_rtx_XOR (SImode,
4805 gen_rtx_ASHIFTRT (SImode,
4809 emit_insn (gen_rtx_SET (operands[0],
4810 gen_rtx_MINUS (SImode,
4811 gen_rtx_ASHIFTRT (SImode,
4818 [(set_attr "conds" "clob,*")
4819 (set_attr "shift" "1")
4820 (set_attr "predicable" "no, yes")
4821 (set_attr "length" "8")
4822 (set_attr "type" "multiple")]
;; Floating-point abs and sqrt expanders; matched by VFP insns defined
;; elsewhere.  DFmode variants are gated on double-precision VFP support.
4825 (define_expand "abssf2"
4826 [(set (match_operand:SF 0 "s_register_operand")
4827 (abs:SF (match_operand:SF 1 "s_register_operand")))]
4828 "TARGET_32BIT && TARGET_HARD_FLOAT"
4831 (define_expand "absdf2"
4832 [(set (match_operand:DF 0 "s_register_operand")
4833 (abs:DF (match_operand:DF 1 "s_register_operand")))]
4834 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4837 (define_expand "sqrtsf2"
4838 [(set (match_operand:SF 0 "s_register_operand")
4839 (sqrt:SF (match_operand:SF 1 "s_register_operand")))]
4840 "TARGET_32BIT && TARGET_HARD_FLOAT"
4843 (define_expand "sqrtdf2"
4844 [(set (match_operand:DF 0 "s_register_operand")
4845 (sqrt:DF (match_operand:DF 1 "s_register_operand")))]
4846 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
;; One's complement (mvn) patterns, including the flag-setting variants
;; that compare the complemented value against zero (CC_NOOV).
;; NOTE(review): output templates/conditions between the original line
;; numbers shown are missing from this extraction.
4849 (define_expand "one_cmplsi2"
4850 [(set (match_operand:SI 0 "s_register_operand")
4851 (not:SI (match_operand:SI 1 "s_register_operand")))]
4856 (define_insn "*arm_one_cmplsi2"
4857 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
4858 (not:SI (match_operand:SI 1 "s_register_operand" "l,r")))]
4861 [(set_attr "predicable" "yes")
4862 (set_attr "predicable_short_it" "yes,no")
4863 (set_attr "arch" "t2,*")
4864 (set_attr "length" "4")
4865 (set_attr "type" "mvn_reg")]
;; mvns: sets flags and writes the result.
4868 (define_insn "*notsi_compare0"
4869 [(set (reg:CC_NOOV CC_REGNUM)
4870 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4872 (set (match_operand:SI 0 "s_register_operand" "=r")
4873 (not:SI (match_dup 1)))]
4876 [(set_attr "conds" "set")
4877 (set_attr "type" "mvn_reg")]
;; Flags-only variant: the result register is a scratch.
4880 (define_insn "*notsi_compare0_scratch"
4881 [(set (reg:CC_NOOV CC_REGNUM)
4882 (compare:CC_NOOV (not:SI (match_operand:SI 1 "s_register_operand" "r"))
4884 (clobber (match_scratch:SI 0 "=r"))]
4887 [(set_attr "conds" "set")
4888 (set_attr "type" "mvn_reg")]
4891 ;; Fixed <--> Floating conversion insns
;; HFmode conversions have no direct int<->half instructions here, so they
;; go via SFmode: int -> SF -> HF on the way in, HF -> SF -> int out.
4893 (define_expand "floatsihf2"
4894 [(set (match_operand:HF 0 "general_operand")
4895 (float:HF (match_operand:SI 1 "general_operand")))]
4899 rtx op1 = gen_reg_rtx (SFmode);
4900 expand_float (op1, operands[1], 0);
4901 op1 = convert_to_mode (HFmode, op1, 0);
4902 emit_move_insn (operands[0], op1);
4907 (define_expand "floatdihf2"
4908 [(set (match_operand:HF 0 "general_operand")
4909 (float:HF (match_operand:DI 1 "general_operand")))]
4913 rtx op1 = gen_reg_rtx (SFmode);
4914 expand_float (op1, operands[1], 0);
4915 op1 = convert_to_mode (HFmode, op1, 0);
4916 emit_move_insn (operands[0], op1);
;; SF/DF int-to-float expanders map directly onto VFP insns elsewhere.
4921 (define_expand "floatsisf2"
4922 [(set (match_operand:SF 0 "s_register_operand")
4923 (float:SF (match_operand:SI 1 "s_register_operand")))]
4924 "TARGET_32BIT && TARGET_HARD_FLOAT"
4928 (define_expand "floatsidf2"
4929 [(set (match_operand:DF 0 "s_register_operand")
4930 (float:DF (match_operand:SI 1 "s_register_operand")))]
4931 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4935 (define_expand "fix_trunchfsi2"
4936 [(set (match_operand:SI 0 "general_operand")
4937 (fix:SI (fix:HF (match_operand:HF 1 "general_operand"))))]
4941 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4942 expand_fix (operands[0], op1, 0);
4947 (define_expand "fix_trunchfdi2"
4948 [(set (match_operand:DI 0 "general_operand")
4949 (fix:DI (fix:HF (match_operand:HF 1 "general_operand"))))]
4953 rtx op1 = convert_to_mode (SFmode, operands[1], 0);
4954 expand_fix (operands[0], op1, 0);
4959 (define_expand "fix_truncsfsi2"
4960 [(set (match_operand:SI 0 "s_register_operand")
4961 (fix:SI (fix:SF (match_operand:SF 1 "s_register_operand"))))]
4962 "TARGET_32BIT && TARGET_HARD_FLOAT"
4966 (define_expand "fix_truncdfsi2"
4967 [(set (match_operand:SI 0 "s_register_operand")
4968 (fix:SI (fix:DF (match_operand:DF 1 "s_register_operand"))))]
4969 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4975 (define_expand "truncdfsf2"
4976 [(set (match_operand:SF 0 "s_register_operand")
4978 (match_operand:DF 1 "s_register_operand")))]
4979 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
4983 ;; DFmode to HFmode conversions on targets without a single-step hardware
4984 ;; instruction for it would have to go through SFmode.  This is dangerous
4985 ;; as it introduces double rounding.
4987 ;; Disable this pattern unless we are in an unsafe math mode, or we have
4988 ;; a single-step instruction.
4990 (define_expand "truncdfhf2"
4991 [(set (match_operand:HF 0 "s_register_operand")
4993 (match_operand:DF 1 "s_register_operand")))]
4994 "(TARGET_EITHER && flag_unsafe_math_optimizations)
4995 || (TARGET_32BIT && TARGET_FP16_TO_DOUBLE)"
4997 /* We don't have a direct instruction for this, so we must be in
4998 an unsafe math mode, and going via SFmode.  */
5000 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
5003 op1 = convert_to_mode (SFmode, operands[1], 0);
5004 op1 = convert_to_mode (HFmode, op1, 0);
5005 emit_move_insn (operands[0], op1);
5008 /* Otherwise, we will pick this up as a single instruction with
5009 no intermediary rounding.  */
5013 ;; Zero and sign extension instructions.
;; QI/HI/SI -> DI extensions are expanded as two SImode halves: extend (or
;; copy) into the low word, then set the high word to 0 (zero-extend) or to
;; low >> 31 (sign-extend).  Before register allocation fresh pseudos are
;; used; afterwards the destination's own subwords serve as temporaries.
5015 (define_expand "zero_extend<mode>di2"
5016 [(set (match_operand:DI 0 "s_register_operand" "")
5017 (zero_extend:DI (match_operand:QHSI 1 "<qhs_zextenddi_op>" "")))]
5018 "TARGET_32BIT <qhs_zextenddi_cond>"
5020 rtx res_lo, res_hi, op0_lo, op0_hi;
5021 res_lo = gen_lowpart (SImode, operands[0]);
5022 res_hi = gen_highpart (SImode, operands[0]);
5023 if (can_create_pseudo_p ())
5025 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5026 op0_hi = gen_reg_rtx (SImode);
5030 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5033 if (<MODE>mode != SImode)
5034 emit_insn (gen_rtx_SET (op0_lo,
5035 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5036 emit_insn (gen_movsi (op0_hi, const0_rtx));
5037 if (res_lo != op0_lo)
5038 emit_move_insn (res_lo, op0_lo);
5039 if (res_hi != op0_hi)
5040 emit_move_insn (res_hi, op0_hi);
5045 (define_expand "extend<mode>di2"
5046 [(set (match_operand:DI 0 "s_register_operand" "")
5047 (sign_extend:DI (match_operand:QHSI 1 "<qhs_extenddi_op>" "")))]
5048 "TARGET_32BIT <qhs_sextenddi_cond>"
5050 rtx res_lo, res_hi, op0_lo, op0_hi;
5051 res_lo = gen_lowpart (SImode, operands[0]);
5052 res_hi = gen_highpart (SImode, operands[0]);
5053 if (can_create_pseudo_p ())
5055 op0_lo = <MODE>mode == SImode ? operands[1] : gen_reg_rtx (SImode);
5056 op0_hi = gen_reg_rtx (SImode);
5060 op0_lo = <MODE>mode == SImode ? operands[1] : res_lo;
5063 if (<MODE>mode != SImode)
5064 emit_insn (gen_rtx_SET (op0_lo,
5065 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
;; High word of a sign-extension is the low word shifted right
;; arithmetically by 31.
5066 emit_insn (gen_ashrsi3 (op0_hi, op0_lo, GEN_INT (31)));
5067 if (res_lo != op0_lo)
5068 emit_move_insn (res_lo, op0_lo);
5069 if (res_hi != op0_hi)
5070 emit_move_insn (res_hi, op0_hi);
5075 ;; Splits for all extensions to DImode
;; NOTE(review): the define_split headers and some condition lines
;; (original 5076, 5079, 5094, 5097 etc.) are missing from this extraction.
5077 [(set (match_operand:DI 0 "s_register_operand" "")
5078 (zero_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5080 [(set (match_dup 0) (match_dup 1))]
5082 rtx lo_part = gen_lowpart (SImode, operands[0]);
5083 machine_mode src_mode = GET_MODE (operands[1]);
5085 if (src_mode == SImode)
5086 emit_move_insn (lo_part, operands[1]);
5088 emit_insn (gen_rtx_SET (lo_part,
5089 gen_rtx_ZERO_EXTEND (SImode, operands[1])));
5090 operands[0] = gen_highpart (SImode, operands[0]);
5091 operands[1] = const0_rtx;
5095 [(set (match_operand:DI 0 "s_register_operand" "")
5096 (sign_extend:DI (match_operand 1 "nonimmediate_operand" "")))]
5098 [(set (match_dup 0) (ashiftrt:SI (match_dup 1) (const_int 31)))]
5100 rtx lo_part = gen_lowpart (SImode, operands[0]);
5101 machine_mode src_mode = GET_MODE (operands[1]);
5103 if (src_mode == SImode)
5104 emit_move_insn (lo_part, operands[1]);
5106 emit_insn (gen_rtx_SET (lo_part,
5107 gen_rtx_SIGN_EXTEND (SImode, operands[1])));
5108 operands[1] = lo_part;
5109 operands[0] = gen_highpart (SImode, operands[0]);
;; HI -> SI zero-extension.  Pre-ARMv4 has no ldrh, so memory sources use
;; movhi_bytes; pre-ARMv6 register sources (no uxth) use shift-left-16 /
;; logical-shift-right-16.  The split below reproduces the shift pair after
;; combine for the !arm_arch6 case.
5112 (define_expand "zero_extendhisi2"
5113 [(set (match_operand:SI 0 "s_register_operand")
5114 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5117 if (TARGET_ARM && !arm_arch4 && MEM_P (operands[1]))
5119 emit_insn (gen_movhi_bytes (operands[0], operands[1]));
5122 if (!arm_arch6 && !MEM_P (operands[1]))
5124 rtx t = gen_lowpart (SImode, operands[1]);
5125 rtx tmp = gen_reg_rtx (SImode);
5126 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5127 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (16)));
;; NOTE(review): the define_split header line (original ~5131-5132) is
;; missing from this extraction.
5133 [(set (match_operand:SI 0 "s_register_operand" "")
5134 (zero_extend:SI (match_operand:HI 1 "s_register_operand" "")))]
5135 "!TARGET_THUMB2 && !arm_arch6"
5136 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5137 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 16)))]
5139 operands[2] = gen_lowpart (SImode, operands[1]);
;; ARMv4 (no uxth): register form needs the two-shift sequence; memory
;; form is a plain ldrh.
5142 (define_insn "*arm_zero_extendhisi2"
5143 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5144 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,m")))]
5145 "TARGET_ARM && arm_arch4 && !arm_arch6"
5149 [(set_attr "type" "alu_shift_reg,load_byte")
5150 (set_attr "predicable" "yes")]
;; ARMv6+: single-instruction uxth for registers, ldrh for memory.
5153 (define_insn "*arm_zero_extendhisi2_v6"
5154 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5155 (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5156 "TARGET_ARM && arm_arch6"
5160 [(set_attr "predicable" "yes")
5161 (set_attr "type" "extend,load_byte")]
;; Fused extend-and-add: uxtah rd, rn, rm.
5164 (define_insn "*arm_zero_extendhisi2addsi"
5165 [(set (match_operand:SI 0 "s_register_operand" "=r")
5166 (plus:SI (zero_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5167 (match_operand:SI 2 "s_register_operand" "r")))]
5169 "uxtah%?\\t%0, %2, %1"
5170 [(set_attr "type" "alu_shift_reg")
5171 (set_attr "predicable" "yes")]
;; QI -> SI zero-extension.  ARM without uxtb masks with AND #255; Thumb
;; without uxtb uses the shift-left-24 / logical-shift-right-24 pair.
5174 (define_expand "zero_extendqisi2"
5175 [(set (match_operand:SI 0 "s_register_operand")
5176 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand")))]
5179 if (TARGET_ARM && !arm_arch6 && !MEM_P (operands[1]))
5181 emit_insn (gen_andsi3 (operands[0],
5182 gen_lowpart (SImode, operands[1]),
5186 if (!arm_arch6 && !MEM_P (operands[1]))
5188 rtx t = gen_lowpart (SImode, operands[1]);
5189 rtx tmp = gen_reg_rtx (SImode);
5190 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5191 emit_insn (gen_lshrsi3 (operands[0], tmp, GEN_INT (24)));
;; NOTE(review): split headers/conditions around original lines 5195-5210
;; are missing from this extraction.
5197 [(set (match_operand:SI 0 "s_register_operand" "")
5198 (zero_extend:SI (match_operand:QI 1 "s_register_operand" "")))]
5200 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5201 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 24)))]
5203 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
5206 emit_insn (gen_andsi3 (operands[0], operands[2], GEN_INT (255)));
;; Pre-v6 ARM: two-instruction register form, single ldrb for memory.
5211 (define_insn "*arm_zero_extendqisi2"
5212 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5213 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,m")))]
5214 "TARGET_ARM && !arm_arch6"
5217 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5218 [(set_attr "length" "8,4")
5219 (set_attr "type" "alu_shift_reg,load_byte")
5220 (set_attr "predicable" "yes")]
;; ARMv6+: uxtb for registers, ldrb for memory.
5223 (define_insn "*arm_zero_extendqisi2_v6"
5224 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5225 (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "r,Uh")))]
5226 "TARGET_ARM && arm_arch6"
5229 ldrb%?\\t%0, %1\\t%@ zero_extendqisi2"
5230 [(set_attr "type" "extend,load_byte")
5231 (set_attr "predicable" "yes")]
;; Fused extend-and-add: uxtab rd, rn, rm.
5234 (define_insn "*arm_zero_extendqisi2addsi"
5235 [(set (match_operand:SI 0 "s_register_operand" "=r")
5236 (plus:SI (zero_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5237 (match_operand:SI 2 "s_register_operand" "r")))]
5239 "uxtab%?\\t%0, %2, %1"
5240 [(set_attr "predicable" "yes")
5241 (set_attr "type" "alu_shift_reg")]
;; Splits turning a zero_extend of the low-byte subreg of an SImode value
;; into copy + AND #255; subreg byte 0 on little-endian, byte 3 on
;; big-endian selects the same (least-significant) byte.
5245 [(set (match_operand:SI 0 "s_register_operand" "")
5246 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 0)))
5247 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5248 "TARGET_32BIT && (!MEM_P (operands[1])) && ! BYTES_BIG_ENDIAN"
5249 [(set (match_dup 2) (match_dup 1))
5250 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
5255 [(set (match_operand:SI 0 "s_register_operand" "")
5256 (zero_extend:SI (subreg:QI (match_operand:SI 1 "" "") 3)))
5257 (clobber (match_operand:SI 2 "s_register_operand" ""))]
5258 "TARGET_32BIT && (!MEM_P (operands[1])) && BYTES_BIG_ENDIAN"
5259 [(set (match_dup 2) (match_dup 1))
5260 (set (match_dup 0) (and:SI (match_dup 2) (const_int 255)))]
;; Split (ior/xor of a masked shifted value with an extended lowpart) back
;; into shift-combine followed by zero_extend, when the AND mask (operand 3)
;; is exactly the mode mask of the narrow mode shifted by operand 2 --
;; i.e. the AND is redundant after the final zero_extend.
;; NOTE(review): the define_split header and parts of the condition
;; (original ~5262-5274, 5280) are missing from this extraction.
5266 [(set (match_operand:SI 0 "s_register_operand" "")
5267 (IOR_XOR:SI (and:SI (ashift:SI
5268 (match_operand:SI 1 "s_register_operand" "")
5269 (match_operand:SI 2 "const_int_operand" ""))
5270 (match_operand:SI 3 "const_int_operand" ""))
5272 (match_operator 5 "subreg_lowpart_operator"
5273 [(match_operand:SI 4 "s_register_operand" "")]))))]
5275 && (UINTVAL (operands[3])
5276 == (GET_MODE_MASK (GET_MODE (operands[5]))
5277 & (GET_MODE_MASK (GET_MODE (operands[5]))
5278 << (INTVAL (operands[2])))))"
5279 [(set (match_dup 0) (IOR_XOR:SI (ashift:SI (match_dup 1) (match_dup 2))
5281 (set (match_dup 0) (zero_extend:SI (match_dup 5)))]
5282 "operands[5] = gen_lowpart (GET_MODE (operands[5]), operands[0]);"
;; Compare a QImode value against zero (equality only: CC_Z).
5285 (define_insn "*compareqi_eq0"
5286 [(set (reg:CC_Z CC_REGNUM)
5287 (compare:CC_Z (match_operand:QI 0 "s_register_operand" "r")
5291 [(set_attr "conds" "set")
5292 (set_attr "predicable" "yes")
5293 (set_attr "type" "logic_imm")]
;; HI -> SI sign-extension.  Thumb-1 goes through thumb1_extendhisi2;
;; pre-ARMv4 memory sources (no ldrsh) use extendhisi2_mem; pre-ARMv6
;; register sources use the shift-left-16 / arithmetic-shift-right-16 pair.
5296 (define_expand "extendhisi2"
5297 [(set (match_operand:SI 0 "s_register_operand")
5298 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand")))]
5303 emit_insn (gen_thumb1_extendhisi2 (operands[0], operands[1]));
5306 if (MEM_P (operands[1]) && TARGET_ARM && !arm_arch4)
5308 emit_insn (gen_extendhisi2_mem (operands[0], operands[1]));
5312 if (!arm_arch6 && !MEM_P (operands[1]))
5314 rtx t = gen_lowpart (SImode, operands[1]);
5315 rtx tmp = gen_reg_rtx (SImode);
5316 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (16)));
5317 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (16)));
;; NOTE(review): define_split headers/conditions around original
;; lines 5320-5330 and 5369-5377 are missing from this extraction.
5324 [(set (match_operand:SI 0 "register_operand" "")
5325 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))
5326 (clobber (match_scratch:SI 2 ""))])]
5328 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5329 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5331 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
5334 ;; This pattern will only be used when ldsh is not available
;; Synthesises a signed halfword load from two byte loads: the byte
;; holding the sign is shifted up and arithmetic-shifted back down, then
;; OR-ed with the other byte.  Operands 4/5 select which temporary is the
;; high byte depending on endianness.
5335 (define_expand "extendhisi2_mem"
5336 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
5338 (zero_extend:SI (match_dup 7)))
5339 (set (match_dup 6) (ashift:SI (match_dup 4) (const_int 24)))
5340 (set (match_operand:SI 0 "" "")
5341 (ior:SI (ashiftrt:SI (match_dup 6) (const_int 16)) (match_dup 5)))]
5346 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
5348 mem1 = change_address (operands[1], QImode, addr);
5349 mem2 = change_address (operands[1], QImode,
5350 plus_constant (Pmode, addr, 1));
5351 operands[0] = gen_lowpart (SImode, operands[0]);
5353 operands[2] = gen_reg_rtx (SImode);
5354 operands[3] = gen_reg_rtx (SImode);
5355 operands[6] = gen_reg_rtx (SImode);
5358 if (BYTES_BIG_ENDIAN)
5360 operands[4] = operands[2];
5361 operands[5] = operands[3];
5365 operands[4] = operands[3];
5366 operands[5] = operands[2];
5372 [(set (match_operand:SI 0 "register_operand" "")
5373 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
5375 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 16)))
5376 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 16)))]
5378 operands[2] = simplify_gen_subreg (SImode, operands[1], HImode, 0);
;; ARMv4 (no sxth): two-shift register form, ldrsh for memory.
5381 (define_insn "*arm_extendhisi2"
5382 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5383 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5384 "TARGET_ARM && arm_arch4 && !arm_arch6"
5388 [(set_attr "length" "8,4")
5389 (set_attr "type" "alu_shift_reg,load_byte")
5390 (set_attr "predicable" "yes")]
5393 ;; ??? Check Thumb-2 pool range
5394 (define_insn "*arm_extendhisi2_v6"
5395 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5396 (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "r,Uh")))]
5397 "TARGET_32BIT && arm_arch6"
5401 [(set_attr "type" "extend,load_byte")
5402 (set_attr "predicable" "yes")]
;; Fused sign-extend-and-add: sxtah rd, rn, rm.
5405 (define_insn "*arm_extendhisi2addsi"
5406 [(set (match_operand:SI 0 "s_register_operand" "=r")
5407 (plus:SI (sign_extend:SI (match_operand:HI 1 "s_register_operand" "r"))
5408 (match_operand:SI 2 "s_register_operand" "r")))]
5410 "sxtah%?\\t%0, %2, %1"
5411 [(set_attr "type" "alu_shift_reg")]
;; QI -> HI sign-extension: done in SImode via shift-left then
;; arithmetic-shift-right, with a direct ldrsb fast path on ARMv4+ for
;; memory sources.
;; NOTE(review): parts of the RTL template and conditions (original
;; 5415, 5417, 5420-5423 etc.) are missing from this extraction.
5414 (define_expand "extendqihi2"
5416 (ashift:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")
5418 (set (match_operand:HI 0 "s_register_operand")
5419 (ashiftrt:SI (match_dup 2)
5424 if (arm_arch4 && MEM_P (operands[1]))
5426 emit_insn (gen_rtx_SET (operands[0],
5427 gen_rtx_SIGN_EXTEND (HImode, operands[1])));
5430 if (!s_register_operand (operands[1], QImode))
5431 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5432 operands[0] = gen_lowpart (SImode, operands[0]);
5433 operands[1] = gen_lowpart (SImode, operands[1]);
5434 operands[2] = gen_reg_rtx (SImode);
;; ldrsb directly into an HImode destination.
5438 (define_insn "*arm_extendqihi_insn"
5439 [(set (match_operand:HI 0 "s_register_operand" "=r")
5440 (sign_extend:HI (match_operand:QI 1 "arm_extendqisi_mem_op" "Uq")))]
5441 "TARGET_ARM && arm_arch4"
5443 [(set_attr "type" "load_byte")
5444 (set_attr "predicable" "yes")]
;; QI -> SI sign-extension; same strategy as extendhisi2 with 24-bit shifts.
5447 (define_expand "extendqisi2"
5448 [(set (match_operand:SI 0 "s_register_operand")
5449 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op")))]
5452 if (!arm_arch4 && MEM_P (operands[1]))
5453 operands[1] = copy_to_mode_reg (QImode, operands[1]);
5455 if (!arm_arch6 && !MEM_P (operands[1]))
5457 rtx t = gen_lowpart (SImode, operands[1]);
5458 rtx tmp = gen_reg_rtx (SImode);
5459 emit_insn (gen_ashlsi3 (tmp, t, GEN_INT (24)));
5460 emit_insn (gen_ashrsi3 (operands[0], tmp, GEN_INT (24)));
;; NOTE(review): the define_split header (original ~5464-5468) is missing.
5466 [(set (match_operand:SI 0 "register_operand" "")
5467 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
5469 [(set (match_dup 0) (ashift:SI (match_dup 2) (const_int 24)))
5470 (set (match_dup 0) (ashiftrt:SI (match_dup 0) (const_int 24)))]
5472 operands[2] = simplify_gen_subreg (SImode, operands[1], QImode, 0);
;; ARMv4 (no sxtb): two-shift register form, ldrsb for memory.
5475 (define_insn "*arm_extendqisi"
5476 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5477 (sign_extend:SI (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5478 "TARGET_ARM && arm_arch4 && !arm_arch6"
5482 [(set_attr "length" "8,4")
5483 (set_attr "type" "alu_shift_reg,load_byte")
5484 (set_attr "predicable" "yes")]
5487 (define_insn "*arm_extendqisi_v6"
5488 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
5490 (match_operand:QI 1 "arm_reg_or_extendqisi_mem_op" "r,Uq")))]
5491 "TARGET_ARM && arm_arch6"
5495 [(set_attr "type" "extend,load_byte")
5496 (set_attr "predicable" "yes")]
;; Fused sign-extend-and-add: sxtab rd, rn, rm.
5499 (define_insn "*arm_extendqisi2addsi"
5500 [(set (match_operand:SI 0 "s_register_operand" "=r")
5501 (plus:SI (sign_extend:SI (match_operand:QI 1 "s_register_operand" "r"))
5502 (match_operand:SI 2 "s_register_operand" "r")))]
5504 "sxtab%?\\t%0, %2, %1"
5505 [(set_attr "type" "alu_shift_reg")
5506 (set_attr "predicable" "yes")]
;; ARM DSP/SIMD32 intrinsic instructions, expressed as unspecs over
;; register operands.  The <sup>/<simd32_op> iterators generate the
;; signed/unsigned and per-operation variants.
;; NOTE(review): the unspec wrapper lines and enabling conditions
;; (original 5511, 5513, 5520, 5523, 5530, 5534, 5541, 5545) are missing
;; from this extraction.
5509 (define_insn "arm_<sup>xtb16"
5510 [(set (match_operand:SI 0 "s_register_operand" "=r")
5512 [(match_operand:SI 1 "s_register_operand" "r")] USXTB16))]
5514 "<sup>xtb16%?\\t%0, %1"
5515 [(set_attr "predicable" "yes")
5516 (set_attr "type" "alu_dsp_reg")])
;; Two-register SIMD32 binary ops that do not set the GE flags.
5518 (define_insn "arm_<simd32_op>"
5519 [(set (match_operand:SI 0 "s_register_operand" "=r")
5521 [(match_operand:SI 1 "s_register_operand" "r")
5522 (match_operand:SI 2 "s_register_operand" "r")] SIMD32_NOGE_BINOP))]
5524 "<simd32_op>%?\\t%0, %1, %2"
5525 [(set_attr "predicable" "yes")
5526 (set_attr "type" "alu_dsp_reg")])
;; usada8: accumulate sum of absolute byte differences into operand 3.
5528 (define_insn "arm_usada8"
5529 [(set (match_operand:SI 0 "s_register_operand" "=r")
5531 [(match_operand:SI 1 "s_register_operand" "r")
5532 (match_operand:SI 2 "s_register_operand" "r")
5533 (match_operand:SI 3 "s_register_operand" "r")] UNSPEC_USADA8))]
5535 "usada8%?\\t%0, %1, %2, %3"
5536 [(set_attr "predicable" "yes")
5537 (set_attr "type" "alu_dsp_reg")])
;; 64-bit-accumulator variants (e.g. smlald family): operand 3 is tied to
;; the DImode output ("0" constraint) as the running accumulator.
5539 (define_insn "arm_<simd32_op>"
5540 [(set (match_operand:DI 0 "s_register_operand" "=r")
5542 [(match_operand:SI 1 "s_register_operand" "r")
5543 (match_operand:SI 2 "s_register_operand" "r")
5544 (match_operand:DI 3 "s_register_operand" "0")] SIMD32_DIMODE))]
5546 "<simd32_op>%?\\t%Q0, %R0, %1, %2"
5547 [(set_attr "predicable" "yes")
5548 (set_attr "type" "smlald")])
;; SFmode -> DFmode float extension; only available when double-precision
;; VFP hardware is present (!TARGET_VFP_SINGLE).
5550 (define_expand "extendsfdf2"
5551 [(set (match_operand:DF 0 "s_register_operand")
5552 (float_extend:DF (match_operand:SF 1 "s_register_operand")))]
5553 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
5557 ;; HFmode -> DFmode conversions where we don't have an instruction for it
5558 ;; must go through SFmode.
5560 ;; This is always safe for an extend.
5562 (define_expand "extendhfdf2"
5563 [(set (match_operand:DF 0 "s_register_operand")
5564 (float_extend:DF (match_operand:HF 1 "s_register_operand")))]
5567 /* We don't have a direct instruction for this, so go via SFmode. */
5568 if (!(TARGET_32BIT && TARGET_FP16_TO_DOUBLE))
;; Two-step widening: HF -> SF -> DF, then a plain DF move into the target.
5571 op1 = convert_to_mode (SFmode, operands[1], 0);
5572 op1 = convert_to_mode (DFmode, op1, 0);
5573 emit_insn (gen_movdf (operands[0], op1));
5576 /* Otherwise, we're done producing RTL and will pick up the correct
5577 pattern to do this with one rounding-step in a single instruction. */
5581 ;; Move insns (including loads and stores)
5583 ;; XXX Just some ideas about movti.
5584 ;; I don't think these are a good idea on the arm, there just aren't enough
5586 ;;(define_expand "loadti"
5587 ;; [(set (match_operand:TI 0 "s_register_operand")
5588 ;; (mem:TI (match_operand:SI 1 "address_operand")))]
5591 ;;(define_expand "storeti"
5592 ;; [(set (mem:TI (match_operand:TI 0 "address_operand"))
5593 ;; (match_operand:TI 1 "s_register_operand"))]
5596 ;;(define_expand "movti"
5597 ;; [(set (match_operand:TI 0 "general_operand")
5598 ;; (match_operand:TI 1 "general_operand"))]
5604 ;; if (MEM_P (operands[0]) && MEM_P (operands[1]))
5605 ;; operands[1] = copy_to_reg (operands[1]);
5606 ;; if (MEM_P (operands[0]))
5607 ;; insn = gen_storeti (XEXP (operands[0], 0), operands[1]);
5608 ;; else if (MEM_P (operands[1]))
5609 ;; insn = gen_loadti (operands[0], XEXP (operands[1], 0));
5613 ;; emit_insn (insn);
5617 ;; Recognize garbage generated above.
5620 ;; [(set (match_operand:TI 0 "general_operand" "=r,r,r,<,>,m")
5621 ;; (match_operand:TI 1 "general_operand" "<,>,m,r,r,r"))]
5625 ;; register mem = (which_alternative < 3);
5626 ;; register const char *template;
5628 ;; operands[mem] = XEXP (operands[mem], 0);
5629 ;; switch (which_alternative)
5631 ;; case 0: template = \"ldmdb\\t%1!, %M0\"; break;
5632 ;; case 1: template = \"ldmia\\t%1!, %M0\"; break;
5633 ;; case 2: template = \"ldmia\\t%1, %M0\"; break;
5634 ;; case 3: template = \"stmdb\\t%0!, %M1\"; break;
5635 ;; case 4: template = \"stmia\\t%0!, %M1\"; break;
5636 ;; case 5: template = \"stmia\\t%0, %M1\"; break;
5638 ;; output_asm_insn (template, operands);
;; DImode move expander.  Beyond the ordinary mem/const legalisation it
;; works around LDRD/STRD register-pair restrictions in ARM state: a
;; 64-bit value may land in a register pair that hard_regno_mode_ok
;; rejects (odd-numbered base), in which case the move is split into two
;; SImode moves, with an extra legal-pair temporary for volatile memory
;; so the access is not torn.
5642 (define_expand "movdi"
5643 [(set (match_operand:DI 0 "general_operand")
5644 (match_operand:DI 1 "general_operand"))]
5647 gcc_checking_assert (aligned_operand (operands[0], DImode));
5648 gcc_checking_assert (aligned_operand (operands[1], DImode));
5649 if (can_create_pseudo_p ())
5651 if (!REG_P (operands[0]))
5652 operands[1] = force_reg (DImode, operands[1]);
5654 if (REG_P (operands[0]) && REGNO (operands[0]) <= LAST_ARM_REGNUM
5655 && !targetm.hard_regno_mode_ok (REGNO (operands[0]), DImode))
5657 /* Avoid LDRD's into an odd-numbered register pair in ARM state
5658 when expanding function calls. */
5659 gcc_assert (can_create_pseudo_p ());
5660 if (MEM_P (operands[1]) && MEM_VOLATILE_P (operands[1]))
5662 /* Perform load into legal reg pair first, then move. */
5663 rtx reg = gen_reg_rtx (DImode);
5664 emit_insn (gen_movdi (reg, operands[1]));
;; Non-volatile case: move the two 32-bit halves independently.
5667 emit_move_insn (gen_lowpart (SImode, operands[0]),
5668 gen_lowpart (SImode, operands[1]));
5669 emit_move_insn (gen_highpart (SImode, operands[0]),
5670 gen_highpart (SImode, operands[1]));
5673 else if (REG_P (operands[1]) && REGNO (operands[1]) <= LAST_ARM_REGNUM
5674 && !targetm.hard_regno_mode_ok (REGNO (operands[1]), DImode))
5676 /* Avoid STRD's from an odd-numbered register pair in ARM state
5677 when expanding function prologue. */
5678 gcc_assert (can_create_pseudo_p ());
5679 rtx split_dest = (MEM_P (operands[0]) && MEM_VOLATILE_P (operands[0]))
5680 ? gen_reg_rtx (DImode)
5682 emit_move_insn (gen_lowpart (SImode, split_dest),
5683 gen_lowpart (SImode, operands[1]));
5684 emit_move_insn (gen_highpart (SImode, split_dest),
5685 gen_highpart (SImode, operands[1]));
5686 if (split_dest != operands[0])
5687 emit_insn (gen_movdi (operands[0], split_dest));
;; Core-register DImode move (soft-float path).  Constant alternatives
;; Da/Db/Dc are split by length (8/12/16 bytes); memory alternatives use
;; output_move_double, unless the literal pool is disabled, in which case
;; constants are rebuilt with MOV/MOVT via a split.
5693 (define_insn "*arm_movdi"
5694 [(set (match_operand:DI 0 "nonimmediate_di_operand" "=r, r, r, r, m")
5695 (match_operand:DI 1 "di_operand" "rDa,Db,Dc,mi,r"))]
5697 && !(TARGET_HARD_FLOAT)
5699 && ( register_operand (operands[0], DImode)
5700 || register_operand (operands[1], DImode))"
5702 switch (which_alternative)
5709 /* Cannot load it directly, split to load it via MOV / MOVT. */
5710 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
5714 return output_move_double (operands, true, NULL);
5717 [(set_attr "length" "8,12,16,8,8")
5718 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
5719 (set_attr "arm_pool_range" "*,*,*,1020,*")
5720 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
5721 (set_attr "thumb2_pool_range" "*,*,*,4094,*")
5722 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
;; Split a 64-bit immediate move into two independent 32-bit constant
;; synthesis sequences (arm_split_constant per half) when inlining the
;; constant is no more expensive than a pool load, or the pool is disabled.
[(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5727 (match_operand:ANY64 1 "immediate_operand" ""))]
5730 && (arm_disable_literal_pool
5731 || (arm_const_double_inline_cost (operands[1])
5732 <= arm_max_const_double_inline_cost ()))"
5735 arm_split_constant (SET, SImode, curr_insn,
5736 INTVAL (gen_lowpart (SImode, operands[1])),
5737 gen_lowpart (SImode, operands[0]), NULL_RTX, 0);
5738 arm_split_constant (SET, SImode, curr_insn,
5739 INTVAL (gen_highpart_mode (SImode,
5740 GET_MODE (operands[0]),
5742 gen_highpart (SImode, operands[0]), NULL_RTX, 0);
5747 ; If optimizing for size, or if we have load delay slots, then
5748 ; we want to split the constant into two separate operations.
5749 ; In both cases this may split a trivial part into a single data op
5750 ; leaving a single complex constant to load. We can also get longer
5751 ; offsets in a LDR which means we get better chances of sharing the pool
5752 ; entries. Finally, we can normally do a better job of scheduling
5753 ; LDR instructions than we can with LDM.
5754 ; This pattern will only match if the one above did not.
;; Fallback split (post-reload): break the const_double into two SImode
;; sets, low half first.
[(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5757 (match_operand:ANY64 1 "const_double_operand" ""))]
5758 "TARGET_ARM && reload_completed
5759 && arm_const_double_by_parts (operands[1])"
5760 [(set (match_dup 0) (match_dup 1))
5761 (set (match_dup 2) (match_dup 3))]
5763 operands[2] = gen_highpart (SImode, operands[0]);
5764 operands[3] = gen_highpart_mode (SImode, GET_MODE (operands[0]),
5766 operands[0] = gen_lowpart (SImode, operands[0]);
5767 operands[1] = gen_lowpart (SImode, operands[1]);
;; Post-reload split of a 64-bit register-to-register move into two SImode
;; moves, swapping the order when the first move would clobber the source
;; of the second (partial overlap of the register pairs).
[(set (match_operand:ANY64 0 "arm_general_register_operand" "")
5773 (match_operand:ANY64 1 "arm_general_register_operand" ""))]
5774 "TARGET_EITHER && reload_completed"
5775 [(set (match_dup 0) (match_dup 1))
5776 (set (match_dup 2) (match_dup 3))]
5778 operands[2] = gen_highpart (SImode, operands[0]);
5779 operands[3] = gen_highpart (SImode, operands[1]);
5780 operands[0] = gen_lowpart (SImode, operands[0]);
5781 operands[1] = gen_lowpart (SImode, operands[1]);
5783 /* Handle a partial overlap. */
5784 if (rtx_equal_p (operands[0], operands[3]))
5786 rtx tmp0 = operands[0];
5787 rtx tmp1 = operands[1];
5789 operands[0] = operands[2];
5790 operands[1] = operands[3];
5797 ;; We can't actually do base+index doubleword loads if the index and
5798 ;; destination overlap. Split here so that we at least have chance to
;; Split a DI load from [base + index] whose destination overlaps both
;; address registers: compute the address into the low destination
;; register first, then load through it.
[(set (match_operand:DI 0 "s_register_operand" "")
5802 (mem:DI (plus:SI (match_operand:SI 1 "s_register_operand" "")
5803 (match_operand:SI 2 "s_register_operand" ""))))]
5805 && reg_overlap_mentioned_p (operands[0], operands[1])
5806 && reg_overlap_mentioned_p (operands[0], operands[2])"
5808 (plus:SI (match_dup 1)
5811 (mem:DI (match_dup 4)))]
5813 operands[4] = gen_rtx_REG (SImode, REGNO(operands[0]));
;; SImode move expander.  Legalises mem=const / mem=mem, synthesises hard
;; constants via arm_split_constant, splits symbol+offset when the constant
;; cannot go in a pool, and routes TLS and PIC references through their
;; dedicated legitimisation helpers.
5817 (define_expand "movsi"
5818 [(set (match_operand:SI 0 "general_operand")
5819 (match_operand:SI 1 "general_operand"))]
5823 rtx base, offset, tmp;
5825 gcc_checking_assert (aligned_operand (operands[0], SImode));
5826 gcc_checking_assert (aligned_operand (operands[1], SImode));
5827 if (TARGET_32BIT || TARGET_HAVE_MOVT)
5829 /* Everything except mem = const or mem = mem can be done easily. */
5830 if (MEM_P (operands[0]))
5831 operands[1] = force_reg (SImode, operands[1]);
5832 if (arm_general_register_operand (operands[0], SImode)
5833 && CONST_INT_P (operands[1])
5834 && !(const_ok_for_arm (INTVAL (operands[1]))
5835 || const_ok_for_arm (~INTVAL (operands[1]))))
5837 if (DONT_EARLY_SPLIT_CONSTANT (INTVAL (operands[1]), SET))
;; Defer splitting: emit the raw SET and let a later split handle it.
5839 emit_insn (gen_rtx_SET (operands[0], operands[1]));
5844 arm_split_constant (SET, SImode, NULL_RTX,
5845 INTVAL (operands[1]), operands[0], NULL_RTX,
5846 optimize && can_create_pseudo_p ());
5851 else /* Target doesn't have MOVT... */
5853 if (can_create_pseudo_p ())
5855 if (!REG_P (operands[0]))
5856 operands[1] = force_reg (SImode, operands[1]);
;; Constants that cannot be forced to memory (e.g. TLS references with an
;; offset): move the base, then add the offset separately.
5860 split_const (operands[1], &base, &offset);
5861 if (INTVAL (offset) != 0
5862 && targetm.cannot_force_const_mem (SImode, operands[1]))
5864 tmp = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];
5865 emit_move_insn (tmp, base);
5866 emit_insn (gen_addsi3 (operands[0], tmp, offset));
5870 tmp = can_create_pseudo_p () ? NULL_RTX : operands[0];
5872 /* Recognize the case where operand[1] is a reference to thread-local
5873 data and load its address to a register. Offsets have been split off
5875 if (arm_tls_referenced_p (operands[1]))
5876 operands[1] = legitimize_tls_address (operands[1], tmp);
5878 && (CONSTANT_P (operands[1])
5879 || symbol_mentioned_p (operands[1])
5880 || label_mentioned_p (operands[1])))
5882 legitimize_pic_address (operands[1], SImode, tmp, NULL_RTX, false);
5887 ;; The ARM LO_SUM and HIGH are backwards - HIGH sets the low bits, and
5888 ;; LO_SUM adds in the high bits. Fortunately these are opaque operations
5889 ;; so this does not matter.
5890 (define_insn "*arm_movt"
5891 [(set (match_operand:SI 0 "nonimmediate_operand" "=r,r")
5892 (lo_sum:SI (match_operand:SI 1 "nonimmediate_operand" "0,0")
5893 (match_operand:SI 2 "general_operand" "i,i")))]
5894 "TARGET_HAVE_MOVT && arm_valid_symbolic_address_p (operands[2])"
5896 movt%?\t%0, #:upper16:%c2
5897 movt\t%0, #:upper16:%c2"
5898 [(set_attr "arch" "32,v8mb")
5899 (set_attr "predicable" "yes")
5900 (set_attr "length" "4")
5901 (set_attr "type" "alu_sreg")]
;; Basic ARM-state SImode move: reg/imm/inverted-imm (MVN)/MOVW (v6t2 'j'
;; constraint)/pool load/store.  Soft-float and non-iWMMXt only.
5904 (define_insn "*arm_movsi_insn"
5905 [(set (match_operand:SI 0 "nonimmediate_operand" "=rk,r,r,r,rk,m")
5906 (match_operand:SI 1 "general_operand" "rk, I,K,j,mi,rk"))]
5907 "TARGET_ARM && !TARGET_IWMMXT && !TARGET_HARD_FLOAT
5908 && ( register_operand (operands[0], SImode)
5909 || register_operand (operands[1], SImode))"
5917 [(set_attr "type" "mov_reg,mov_imm,mvn_imm,mov_imm,load_4,store_4")
5918 (set_attr "predicable" "yes")
5919 (set_attr "arch" "*,*,*,v6t2,*,*")
5920 (set_attr "pool_range" "*,*,*,*,4096,*")
5921 (set_attr "neg_pool_range" "*,*,*,*,4084,*")]
;; Split a register = hard-constant move into a synthesis sequence
;; (arm_split_constant); matches only constants that neither MOV nor MVN
;; can encode directly.
[(set (match_operand:SI 0 "arm_general_register_operand" "")
5926 (match_operand:SI 1 "const_int_operand" ""))]
5927 "(TARGET_32BIT || TARGET_HAVE_MOVT)
5928 && (!(const_ok_for_arm (INTVAL (operands[1]))
5929 || const_ok_for_arm (~INTVAL (operands[1]))))"
5930 [(clobber (const_int 0))]
5932 arm_split_constant (SET, SImode, NULL_RTX,
5933 INTVAL (operands[1]), operands[0], NULL_RTX, 0);
5938 ;; A normal way to do (symbol + offset) requires three instructions at least
5939 ;; (depends on how big the offset is) as below:
5940 ;; movw r0, #:lower16:g
5941 ;; movt r0, #:upper16:g
5944 ;; A better way would be:
5945 ;; movw r0, #:lower16:g+4
5946 ;; movt r0, #:upper16:g+4
5948 ;; The limitation of this way is that the length of offset should be a 16-bit
5949 ;; signed value, because current assembler only supports REL type relocation for
5950 ;; such case. If the more powerful RELA type is supported in future, we should
5951 ;; update this pattern to go with better way.
;; Split reg = (symbol + offset) when the literal pool is disabled.
;; Small offsets (16-bit signed) are folded into the MOVW/MOVT relocation
;; via arm_emit_movpair; larger ones need a separate ADD.
[(set (match_operand:SI 0 "arm_general_register_operand" "")
5954 (const:SI (plus:SI (match_operand:SI 1 "general_operand" "")
5955 (match_operand:SI 2 "const_int_operand" ""))))]
5958 && arm_disable_literal_pool
5960 && GET_CODE (operands[1]) == SYMBOL_REF"
5961 [(clobber (const_int 0))]
5963 int offset = INTVAL (operands[2]);
5965 if (offset < -0x8000 || offset > 0x7fff)
5967 arm_emit_movpair (operands[0], operands[1]);
5968 emit_insn (gen_rtx_SET (operands[0],
5969 gen_rtx_PLUS (SImode, operands[0], operands[2])));
5973 rtx op = gen_rtx_CONST (SImode,
5974 gen_rtx_PLUS (SImode, operands[1], operands[2]));
5975 arm_emit_movpair (operands[0], op);
5980 ;; Split symbol_refs at the later stage (after cprop), instead of generating
5981 ;; movt/movw pair directly at expand. Otherwise corresponding high_sum
5982 ;; and lo_sum would be merged back into memory load at cprop. However,
5983 ;; if the default is to prefer movt/movw rather than a load from the constant
5984 ;; pool, the performance is better.
;; Late (post-cprop) split of reg = symbol into a MOVW/MOVT pair; excluded
;; for TLS symbols and when word relocations are disallowed.
[(set (match_operand:SI 0 "arm_general_register_operand" "")
5987 (match_operand:SI 1 "general_operand" ""))]
5988 "TARGET_USE_MOVT && GET_CODE (operands[1]) == SYMBOL_REF
5989 && !target_word_relocations
5990 && !arm_tls_referenced_p (operands[1])"
5991 [(clobber (const_int 0))]
5993 arm_emit_movpair (operands[0], operands[1]);
5997 ;; When generating pic, we need to load the symbol offset into a register.
5998 ;; So that the optimizer does not confuse this with a normal symbol load
5999 ;; we use an unspec. The offset will be loaded from a constant pool entry,
6000 ;; since that is the only type of relocation we can use.
6002 ;; Wrap calculation of the whole PIC address in a single pattern for the
6003 ;; benefit of optimizers, particularly, PRE and HOIST. Calculation of
6004 ;; a PIC address involves two loads from memory, so we want to CSE it
6005 ;; as often as possible.
6006 ;; This pattern will be split into one of the pic_load_addr_* patterns
6007 ;; and a move after GCSE optimizations.
6009 ;; Note: Update arm.c: legitimize_pic_address() when changing this pattern.
;; Whole PIC address computation as a single pattern (load of the GOT
;; entry at PIC base + unspec offset) so PRE/HOIST can CSE it; split after
;; GCSE into a pic_load_addr_* and the actual memory load.
(define_expand "calculate_pic_address"
6011 [(set (match_operand:SI 0 "register_operand")
6012 (mem:SI (plus:SI (match_operand:SI 1 "register_operand")
6013 (unspec:SI [(match_operand:SI 2 "" "")]
6018 ;; Split calculate_pic_address into pic_load_addr_* and a move.
[(set (match_operand:SI 0 "register_operand" "")
6021 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "")
6022 (unspec:SI [(match_operand:SI 2 "" "")]
6025 [(set (match_dup 3) (unspec:SI [(match_dup 2)] UNSPEC_PIC_SYM))
6026 (set (match_dup 0) (mem:SI (plus:SI (match_dup 1) (match_dup 3))))]
6027 "operands[3] = can_create_pseudo_p () ? gen_reg_rtx (SImode) : operands[0];"
6030 ;; operand1 is the memory address to go into
6031 ;; pic_load_addr_32bit.
6032 ;; operand2 is the PIC label to be emitted
6033 ;; from pic_add_dot_plus_eight.
6034 ;; We do this to allow hoisting of the entire insn.
;; Unified PIC address load kept as one insn so the whole computation can
;; be hoisted; split after reload into the pool load (UNSPEC_PIC_SYM) and
;; the pc-relative add (UNSPEC_PIC_BASE).  The pc offset is 4 for Thumb,
;; 8 for ARM, reflecting the differing pc read-ahead.
(define_insn_and_split "pic_load_addr_unified"
6036 [(set (match_operand:SI 0 "s_register_operand" "=r,r,l")
6037 (unspec:SI [(match_operand:SI 1 "" "mX,mX,mX")
6038 (match_operand:SI 2 "" "")]
6039 UNSPEC_PIC_UNIFIED))]
6042 "&& reload_completed"
6043 [(set (match_dup 0) (unspec:SI [(match_dup 1)] UNSPEC_PIC_SYM))
6044 (set (match_dup 0) (unspec:SI [(match_dup 0) (match_dup 3)
6045 (match_dup 2)] UNSPEC_PIC_BASE))]
6046 "operands[3] = TARGET_THUMB ? GEN_INT (4) : GEN_INT (8);"
6047 [(set_attr "type" "load_4,load_4,load_4")
6048 (set_attr "pool_range" "4096,4094,1022")
6049 (set_attr "neg_pool_range" "4084,0,0")
6050 (set_attr "arch" "a,t2,t1")
6051 (set_attr "length" "8,6,4")]
6054 ;; The rather odd constraints on the following are to force reload to leave
6055 ;; the insn alone, and to force the minipool generation pass to then move
6056 ;; the GOT symbol to memory.
;; Load a GOT symbol address from the constant pool (32-bit ISAs).  The
;; "mX" constraint forces reload to leave the operand alone so the
;; minipool pass places the symbol in memory itself.
(define_insn "pic_load_addr_32bit"
6059 [(set (match_operand:SI 0 "s_register_operand" "=r")
6060 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6061 "TARGET_32BIT && flag_pic"
6063 [(set_attr "type" "load_4")
6064 (set (attr "pool_range")
6065 (if_then_else (eq_attr "is_thumb" "no")
6068 (set (attr "neg_pool_range")
6069 (if_then_else (eq_attr "is_thumb" "no")
;; Thumb-1 variant: low registers only, short positive-only pool range.
(define_insn "pic_load_addr_thumb1"
6075 [(set (match_operand:SI 0 "s_register_operand" "=l")
6076 (unspec:SI [(match_operand:SI 1 "" "mX")] UNSPEC_PIC_SYM))]
6077 "TARGET_THUMB1 && flag_pic"
6079 [(set_attr "type" "load_4")
6080 (set (attr "pool_range") (const_int 1018))]
;; Thumb PIC base fixup: emit the local LPICn label here and add pc
;; (dot + 4) into the accumulated PIC offset.  Operand 2 is the label
;; number.
(define_insn "pic_add_dot_plus_four"
6084 [(set (match_operand:SI 0 "register_operand" "=r")
6085 (unspec:SI [(match_operand:SI 1 "register_operand" "0")
6087 (match_operand 2 "" "")]
6091 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6092 INTVAL (operands[2]));
6093 return \"add\\t%0, %|pc\";
6095 [(set_attr "length" "2")
6096 (set_attr "type" "alu_sreg")]
;; ARM-state equivalent: pc reads as dot + 8, hence the different name.
(define_insn "pic_add_dot_plus_eight"
6100 [(set (match_operand:SI 0 "register_operand" "=r")
6101 (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6103 (match_operand 2 "" "")]
6107 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6108 INTVAL (operands[2]));
6109 return \"add%?\\t%0, %|pc, %1\";
6111 [(set_attr "predicable" "yes")
6112 (set_attr "type" "alu_sreg")]
;; Fused form of pic_add_dot_plus_eight followed by a load, produced by
;; the peephole below: a single pc-relative LDR.
(define_insn "tls_load_dot_plus_eight"
6116 [(set (match_operand:SI 0 "register_operand" "=r")
6117 (mem:SI (unspec:SI [(match_operand:SI 1 "register_operand" "r")
6119 (match_operand 2 "" "")]
6123 (*targetm.asm_out.internal_label) (asm_out_file, \"LPIC\",
6124 INTVAL (operands[2]));
6125 return \"ldr%?\\t%0, [%|pc, %1]\t\t@ tls_load_dot_plus_eight\";
6127 [(set_attr "predicable" "yes")
6128 (set_attr "type" "load_4")]
6131 ;; PIC references to local variables can generate pic_add_dot_plus_eight
6132 ;; followed by a load. These sequences can be crunched down to
6133 ;; tls_load_dot_plus_eight by a peephole.
;; Peephole: collapse pic_add_dot_plus_eight + dependent load into one
;; tls_load_dot_plus_eight, valid only when the intermediate address
;; register dies after the load.
[(set (match_operand:SI 0 "register_operand" "")
6137 (unspec:SI [(match_operand:SI 3 "register_operand" "")
6139 (match_operand 1 "" "")]
6141 (set (match_operand:SI 2 "arm_general_register_operand" "")
6142 (mem:SI (match_dup 0)))]
6143 "TARGET_ARM && peep2_reg_dead_p (2, operands[0])"
6145 (mem:SI (unspec:SI [(match_dup 3)
;; VxWorks RTP PIC: load through the GOT using an UNSPEC_PIC_OFFSET
;; index so the optimizers do not mistake it for an ordinary symbol load.
(define_insn "pic_offset_arm"
6153 [(set (match_operand:SI 0 "register_operand" "=r")
6154 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
6155 (unspec:SI [(match_operand:SI 2 "" "X")]
6156 UNSPEC_PIC_OFFSET))))]
6157 "TARGET_VXWORKS_RTP && TARGET_ARM && flag_pic"
6158 "ldr%?\\t%0, [%1,%2]"
6159 [(set_attr "type" "load_4")]
;; Re-establish the PIC register after a longjmp; r3 (mask 1UL << 3) is
;; used as scratch because set/longjmp already clobber it.
6162 (define_expand "builtin_setjmp_receiver"
6163 [(label_ref (match_operand 0 "" ""))]
6167 /* r3 is clobbered by set/longjmp, so we can use it as a scratch
6169 if (arm_pic_register != INVALID_REGNUM)
6170 arm_load_pic_register (1UL << 3, NULL_RTX);
6174 ;; If copying one reg to another we can set the condition codes according to
6175 ;; its value. Such a move is common after a return from subroutine and the
6176 ;; result is being tested against zero.
;; Register copy that also sets the condition codes against zero
;; (SUBS %0, %1, #0) — common after a call whose result is tested.
(define_insn "*movsi_compare0"
6179 [(set (reg:CC CC_REGNUM)
6180 (compare:CC (match_operand:SI 1 "s_register_operand" "0,r")
6182 (set (match_operand:SI 0 "s_register_operand" "=r,r")
6187 subs%?\\t%0, %1, #0"
6188 [(set_attr "conds" "set")
6189 (set_attr "type" "alus_imm,alus_imm")]
6192 ;; Subroutine to store a half word from a register into memory.
6193 ;; Operand 0 is the source register (HImode)
6194 ;; Operand 1 is the destination address in a register (SImode)
6196 ;; In both this routine and the next, we must be careful not to spill
6197 ;; a memory address of reg+large_const into a separate PLUS insn, since this
6198 ;; can generate unrecognizable rtl.
;; Store an HImode register to memory as two byte stores (little-endian:
;; low byte at offset 0, high byte — extracted with an 8-bit shift — at
;; offset 1).  Used on targets without a halfword store.
6200 (define_expand "storehi"
6201 [;; store the low byte
6202 (set (match_operand 1 "" "") (match_dup 3))
6203 ;; extract the high byte
6205 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6206 ;; store the high byte
6207 (set (match_dup 4) (match_dup 5))]
6211 rtx op1 = operands[1];
6212 rtx addr = XEXP (op1, 0);
6213 enum rtx_code code = GET_CODE (addr);
;; Force complex addresses into a register so reload cannot spill a
;; reg+large_const address into a separate, unrecognizable PLUS insn.
6215 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6217 op1 = replace_equiv_address (operands[1], force_reg (SImode, addr));
6219 operands[4] = adjust_address (op1, QImode, 1);
6220 operands[1] = adjust_address (operands[1], QImode, 0);
6221 operands[3] = gen_lowpart (QImode, operands[0]);
6222 operands[0] = gen_lowpart (SImode, operands[0]);
6223 operands[2] = gen_reg_rtx (SImode);
6224 operands[5] = gen_lowpart (QImode, operands[2]);
;; Big-endian variant of storehi: high byte goes to offset 1 slot first,
;; byte order mirrored relative to the little-endian expander above.
6228 (define_expand "storehi_bigend"
6229 [(set (match_dup 4) (match_dup 3))
6231 (ashiftrt:SI (match_operand 0 "" "") (const_int 8)))
6232 (set (match_operand 1 "" "") (match_dup 5))]
6236 rtx op1 = operands[1];
6237 rtx addr = XEXP (op1, 0);
6238 enum rtx_code code = GET_CODE (addr);
6240 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6242 op1 = replace_equiv_address (op1, force_reg (SImode, addr));
6244 operands[4] = adjust_address (op1, QImode, 1);
6245 operands[1] = adjust_address (operands[1], QImode, 0);
6246 operands[3] = gen_lowpart (QImode, operands[0]);
6247 operands[0] = gen_lowpart (SImode, operands[0]);
6248 operands[2] = gen_reg_rtx (SImode);
6249 operands[5] = gen_lowpart (QImode, operands[2]);
6253 ;; Subroutine to store a half word integer constant into memory.
;; Store a halfword *constant* to memory as two byte stores, honouring
;; endianness; when both bytes of the constant are equal the second
;; register load is reused instead of materialising a new constant.
6254 (define_expand "storeinthi"
6255 [(set (match_operand 0 "" "")
6256 (match_operand 1 "" ""))
6257 (set (match_dup 3) (match_dup 2))]
6261 HOST_WIDE_INT value = INTVAL (operands[1]);
6262 rtx addr = XEXP (operands[0], 0);
6263 rtx op0 = operands[0];
6264 enum rtx_code code = GET_CODE (addr);
6266 if ((code == PLUS && !CONST_INT_P (XEXP (addr, 1)))
6268 op0 = replace_equiv_address (op0, force_reg (SImode, addr));
6270 operands[1] = gen_reg_rtx (SImode);
6271 if (BYTES_BIG_ENDIAN)
6273 emit_insn (gen_movsi (operands[1], GEN_INT ((value >> 8) & 255)));
6274 if ((value & 255) == ((value >> 8) & 255))
6275 operands[2] = operands[1];
6278 operands[2] = gen_reg_rtx (SImode);
6279 emit_insn (gen_movsi (operands[2], GEN_INT (value & 255)));
;; Little-endian: low byte first, high byte second.
6284 emit_insn (gen_movsi (operands[1], GEN_INT (value & 255)));
6285 if ((value & 255) == ((value >> 8) & 255))
6286 operands[2] = operands[1];
6289 operands[2] = gen_reg_rtx (SImode);
6290 emit_insn (gen_movsi (operands[2], GEN_INT ((value >> 8) & 255)));
6294 operands[3] = adjust_address (op0, QImode, 1);
6295 operands[0] = adjust_address (operands[0], QImode, 0);
6296 operands[2] = gen_lowpart (QImode, operands[2]);
6297 operands[1] = gen_lowpart (QImode, operands[1]);
;; Single-instruction halfword store (STRH), available from ARMv4; the
;; source is forced into a register first.
6301 (define_expand "storehi_single_op"
6302 [(set (match_operand:HI 0 "memory_operand")
6303 (match_operand:HI 1 "general_operand"))]
6304 "TARGET_32BIT && arm_arch4"
6306 if (!s_register_operand (operands[1], HImode))
6307 operands[1] = copy_to_mode_reg (HImode, operands[1]);
;; HImode move expander.  Strategy depends on the target: ARMv4+ can use
;; STRH/LDRH (via storehi_single_op / zero-extending loads); pre-v4 ARM
;; must synthesise halfword accesses from byte or aligned word operations
;; (storehi*, movhi_bytes, widened SImode loads); Thumb-2 and Thumb-1
;; mainly need constants moved through an SImode register.
6311 (define_expand "movhi"
6312 [(set (match_operand:HI 0 "general_operand")
6313 (match_operand:HI 1 "general_operand"))]
6316 gcc_checking_assert (aligned_operand (operands[0], HImode));
6317 gcc_checking_assert (aligned_operand (operands[1], HImode));
6320 if (can_create_pseudo_p ())
6322 if (MEM_P (operands[0]))
6326 emit_insn (gen_storehi_single_op (operands[0], operands[1]));
6329 if (CONST_INT_P (operands[1]))
6330 emit_insn (gen_storeinthi (operands[0], operands[1]));
6333 if (MEM_P (operands[1]))
6334 operands[1] = force_reg (HImode, operands[1]);
6335 if (BYTES_BIG_ENDIAN)
6336 emit_insn (gen_storehi_bigend (operands[1], operands[0]));
6338 emit_insn (gen_storehi (operands[1], operands[0]));
6342 /* Sign extend a constant, and keep it in an SImode reg. */
6343 else if (CONST_INT_P (operands[1]))
6345 rtx reg = gen_reg_rtx (SImode);
6346 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6348 /* If the constant is already valid, leave it alone. */
6349 if (!const_ok_for_arm (val))
6351 /* If setting all the top bits will make the constant
6352 loadable in a single instruction, then set them.
6353 Otherwise, sign extend the number. */
6355 if (const_ok_for_arm (~(val | ~0xffff)))
6357 else if (val & 0x8000)
6361 emit_insn (gen_movsi (reg, GEN_INT (val)));
6362 operands[1] = gen_lowpart (HImode, reg);
6364 else if (arm_arch4 && optimize && can_create_pseudo_p ()
6365 && MEM_P (operands[1]))
6367 rtx reg = gen_reg_rtx (SImode);
6369 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6370 operands[1] = gen_lowpart (HImode, reg);
6372 else if (!arm_arch4)
6374 if (MEM_P (operands[1]))
6377 rtx offset = const0_rtx;
6378 rtx reg = gen_reg_rtx (SImode);
;; Pre-v4 load: if the address is a word-aligned base (possibly plus an
;; even constant offset), widen to an aligned SImode access and shift out
;; the wanted halfword; otherwise fall back to byte loads (movhi_bytes).
6380 if ((REG_P (base = XEXP (operands[1], 0))
6381 || (GET_CODE (base) == PLUS
6382 && (CONST_INT_P (offset = XEXP (base, 1)))
6383 && ((INTVAL(offset) & 1) != 1)
6384 && REG_P (base = XEXP (base, 0))))
6385 && REGNO_POINTER_ALIGN (REGNO (base)) >= 32
6389 new_rtx = widen_memory_access (operands[1], SImode,
6390 ((INTVAL (offset) & ~3)
6391 - INTVAL (offset)));
6392 emit_insn (gen_movsi (reg, new_rtx));
6393 if (((INTVAL (offset) & 2) != 0)
6394 ^ (BYTES_BIG_ENDIAN ? 1 : 0))
6396 rtx reg2 = gen_reg_rtx (SImode);
6398 emit_insn (gen_lshrsi3 (reg2, reg, GEN_INT (16)));
6403 emit_insn (gen_movhi_bytes (reg, operands[1]));
6405 operands[1] = gen_lowpart (HImode, reg);
6409 /* Handle loading a large integer during reload. */
6410 else if (CONST_INT_P (operands[1])
6411 && !const_ok_for_arm (INTVAL (operands[1]))
6412 && !const_ok_for_arm (~INTVAL (operands[1])))
6414 /* Writing a constant to memory needs a scratch, which should
6415 be handled with SECONDARY_RELOADs. */
6416 gcc_assert (REG_P (operands[0]));
6418 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6419 emit_insn (gen_movsi (operands[0], operands[1]));
6423 else if (TARGET_THUMB2)
6425 /* Thumb-2 can do everything except mem=mem and mem=const easily. */
6426 if (can_create_pseudo_p ())
6428 if (!REG_P (operands[0]))
6429 operands[1] = force_reg (HImode, operands[1]);
6430 /* Zero extend a constant, and keep it in an SImode reg. */
6431 else if (CONST_INT_P (operands[1]))
6433 rtx reg = gen_reg_rtx (SImode);
6434 HOST_WIDE_INT val = INTVAL (operands[1]) & 0xffff;
6436 emit_insn (gen_movsi (reg, GEN_INT (val)));
6437 operands[1] = gen_lowpart (HImode, reg);
6441 else /* TARGET_THUMB1 */
6443 if (can_create_pseudo_p ())
6445 if (CONST_INT_P (operands[1]))
6447 rtx reg = gen_reg_rtx (SImode);
6449 emit_insn (gen_movsi (reg, operands[1]));
6450 operands[1] = gen_lowpart (HImode, reg);
6453 /* ??? We shouldn't really get invalid addresses here, but this can
6454 happen if we are passed a SP (never OK for HImode/QImode) or
6455 virtual register (also rejected as illegitimate for HImode/QImode)
6456 relative address. */
6457 /* ??? This should perhaps be fixed elsewhere, for instance, in
6458 fixup_stack_1, by checking for other kinds of invalid addresses,
6459 e.g. a bare reference to a virtual register. This may confuse the
6460 alpha though, which must handle this case differently. */
6461 if (MEM_P (operands[0])
6462 && !memory_address_p (GET_MODE (operands[0]),
6463 XEXP (operands[0], 0)))
6465 = replace_equiv_address (operands[0],
6466 copy_to_reg (XEXP (operands[0], 0)));
6468 if (MEM_P (operands[1])
6469 && !memory_address_p (GET_MODE (operands[1]),
6470 XEXP (operands[1], 0)))
6472 = replace_equiv_address (operands[1],
6473 copy_to_reg (XEXP (operands[1], 0)));
6475 if (MEM_P (operands[1]) && optimize > 0)
6477 rtx reg = gen_reg_rtx (SImode);
6479 emit_insn (gen_zero_extendhisi2 (reg, operands[1]));
6480 operands[1] = gen_lowpart (HImode, reg);
6483 if (MEM_P (operands[0]))
6484 operands[1] = force_reg (HImode, operands[1]);
6486 else if (CONST_INT_P (operands[1])
6487 && !satisfies_constraint_I (operands[1]))
6489 /* Handle loading a large integer during reload. */
6491 /* Writing a constant to memory needs a scratch, which should
6492 be handled with SECONDARY_RELOADs. */
6493 gcc_assert (REG_P (operands[0]));
6495 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6496 emit_insn (gen_movsi (operands[0], operands[1]));
;; Load an HImode value as two zero-extended byte loads combined with a
;; shift-and-OR; operands 4/5 select which byte becomes the high half
;; according to endianness.
6503 (define_expand "movhi_bytes"
6504 [(set (match_dup 2) (zero_extend:SI (match_operand:HI 1 "" "")))
6506 (zero_extend:SI (match_dup 6)))
6507 (set (match_operand:SI 0 "" "")
6508 (ior:SI (ashift:SI (match_dup 4) (const_int 8)) (match_dup 5)))]
6513 rtx addr = copy_to_mode_reg (SImode, XEXP (operands[1], 0));
6515 mem1 = change_address (operands[1], QImode, addr);
6516 mem2 = change_address (operands[1], QImode,
6517 plus_constant (Pmode, addr, 1));
6518 operands[0] = gen_lowpart (SImode, operands[0]);
6520 operands[2] = gen_reg_rtx (SImode);
6521 operands[3] = gen_reg_rtx (SImode);
6524 if (BYTES_BIG_ENDIAN)
6526 operands[4] = operands[2];
6527 operands[5] = operands[3];
6531 operands[4] = operands[3];
6532 operands[5] = operands[2];
;; Big-endian HImode load: load the containing word, rotate the wanted
;; halfword into the top, arithmetic-shift it down by 16, then take the
;; low halfword of the result.
6537 (define_expand "movhi_bigend"
6539 (rotate:SI (subreg:SI (match_operand:HI 1 "memory_operand") 0)
6542 (ashiftrt:SI (match_dup 2) (const_int 16)))
6543 (set (match_operand:HI 0 "s_register_operand")
6547 operands[2] = gen_reg_rtx (SImode);
6548 operands[3] = gen_reg_rtx (SImode);
6549 operands[4] = gen_lowpart (HImode, operands[3]);
6553 ;; Pattern to recognize insn generated default case above
;; ARMv4 HImode move: MOV/MVN of encodable constants, MOVW (v6t2 'n'
;; alternative) for arbitrary 16-bit immediates, STRH/LDRH for memory.
(define_insn "*movhi_insn_arch4"
6555 [(set (match_operand:HI 0 "nonimmediate_operand" "=r,r,r,m,r")
6556 (match_operand:HI 1 "general_operand" "rIk,K,n,r,mi"))]
6558 && arm_arch4 && !TARGET_HARD_FLOAT
6559 && (register_operand (operands[0], HImode)
6560 || register_operand (operands[1], HImode))"
6562 mov%?\\t%0, %1\\t%@ movhi
6563 mvn%?\\t%0, #%B1\\t%@ movhi
6564 movw%?\\t%0, %L1\\t%@ movhi
6565 strh%?\\t%1, %0\\t%@ movhi
6566 ldrh%?\\t%0, %1\\t%@ movhi"
6567 [(set_attr "predicable" "yes")
6568 (set_attr "pool_range" "*,*,*,*,256")
6569 (set_attr "neg_pool_range" "*,*,*,*,244")
6570 (set_attr "arch" "*,*,v6t2,*,*")
;; First alternative's cost class depends on whether the operand is an
;; immediate or a register.
6571 (set_attr_alternative "type"
6572 [(if_then_else (match_operand 1 "const_int_operand" "")
6573 (const_string "mov_imm" )
6574 (const_string "mov_reg"))
6575 (const_string "mvn_imm")
6576 (const_string "mov_imm")
6577 (const_string "store_4")
6578 (const_string "load_4")])]
;; Register/immediate HImode move for pre-halfword-instruction targets:
;; plain MOV or MVN of the (inverted) constant.
(define_insn "*movhi_bytes"
6582 [(set (match_operand:HI 0 "s_register_operand" "=r,r,r")
6583 (match_operand:HI 1 "arm_rhs_operand" "I,rk,K"))]
6584 "TARGET_ARM && !TARGET_HARD_FLOAT"
6586 mov%?\\t%0, %1\\t%@ movhi
6587 mov%?\\t%0, %1\\t%@ movhi
6588 mvn%?\\t%0, #%B1\\t%@ movhi"
6589 [(set_attr "predicable" "yes")
6590 (set_attr "type" "mov_imm,mov_reg,mvn_imm")]
6593 ;; We use a DImode scratch because we may occasionally need an additional
6594 ;; temporary if the address isn't offsettable -- push_reload doesn't seem
6595 ;; to take any notice of the "o" constraints on reload_memory_operand operand.
6596 ;; The reload_in<m> and reload_out<m> patterns require special constraints
6597 ;; to be correctly handled in default_secondary_reload function.
;; Secondary-reload helpers for HImode memory accesses; operand 2 is a
;; DImode scratch pair (an extra temporary may be needed when the address
;; is not offsettable).  The Thumb path of reload_inhi deliberately
;; reuses thumb_reload_out_hi, matching the upstream implementation.
6598 (define_expand "reload_outhi"
6599 [(parallel [(match_operand:HI 0 "arm_reload_memory_operand" "=o")
6600 (match_operand:HI 1 "s_register_operand" "r")
6601 (match_operand:DI 2 "s_register_operand" "=&l")])]
6604 arm_reload_out_hi (operands);
6606 thumb_reload_out_hi (operands);
6611 (define_expand "reload_inhi"
6612 [(parallel [(match_operand:HI 0 "s_register_operand" "=r")
6613 (match_operand:HI 1 "arm_reload_memory_operand" "o")
6614 (match_operand:DI 2 "s_register_operand" "=&r")])]
6618 arm_reload_in_hi (operands);
6620 thumb_reload_out_hi (operands);
;; QImode move expander.  Constants go through an SImode register (masked
;; to 8 bits for Thumb so MOVS can be used); illegitimate SP/virtual-reg
;; relative addresses are copied into a register; memory loads are widened
;; to zero-extending SImode loads when optimizing.
6624 (define_expand "movqi"
6625 [(set (match_operand:QI 0 "general_operand")
6626 (match_operand:QI 1 "general_operand"))]
6629 /* Everything except mem = const or mem = mem can be done easily */
6631 if (can_create_pseudo_p ())
6633 if (CONST_INT_P (operands[1]))
6635 rtx reg = gen_reg_rtx (SImode);
6637 /* For thumb we want an unsigned immediate, then we are more likely
6638 to be able to use a movs insn. */
6640 operands[1] = GEN_INT (INTVAL (operands[1]) & 255);
6642 emit_insn (gen_movsi (reg, operands[1]));
6643 operands[1] = gen_lowpart (QImode, reg);
6648 /* ??? We shouldn't really get invalid addresses here, but this can
6649 happen if we are passed a SP (never OK for HImode/QImode) or
6650 virtual register (also rejected as illegitimate for HImode/QImode)
6651 relative address. */
6652 /* ??? This should perhaps be fixed elsewhere, for instance, in
6653 fixup_stack_1, by checking for other kinds of invalid addresses,
6654 e.g. a bare reference to a virtual register. This may confuse the
6655 alpha though, which must handle this case differently. */
6656 if (MEM_P (operands[0])
6657 && !memory_address_p (GET_MODE (operands[0]),
6658 XEXP (operands[0], 0)))
6660 = replace_equiv_address (operands[0],
6661 copy_to_reg (XEXP (operands[0], 0)));
6662 if (MEM_P (operands[1])
6663 && !memory_address_p (GET_MODE (operands[1]),
6664 XEXP (operands[1], 0)))
6666 = replace_equiv_address (operands[1],
6667 copy_to_reg (XEXP (operands[1], 0)));
6670 if (MEM_P (operands[1]) && optimize > 0)
6672 rtx reg = gen_reg_rtx (SImode);
6674 emit_insn (gen_zero_extendqisi2 (reg, operands[1]));
6675 operands[1] = gen_lowpart (QImode, reg);
6678 if (MEM_P (operands[0]))
6679 operands[1] = force_reg (QImode, operands[1]);
6681 else if (TARGET_THUMB
6682 && CONST_INT_P (operands[1])
6683 && !satisfies_constraint_I (operands[1]))
6685 /* Handle loading a large integer during reload. */
6687 /* Writing a constant to memory needs a scratch, which should
6688 be handled with SECONDARY_RELOADs. */
6689 gcc_assert (REG_P (operands[0]));
6691 operands[0] = gen_rtx_SUBREG (SImode, operands[0], 0);
6692 emit_insn (gen_movsi (operands[0], operands[1]));
;; QImode move insn (ARM and Thumb-2 alternatives): register moves,
;; immediates, mvn of the inverted immediate, and byte loads/stores
;; (Uu/Uh memory constraints).  At least one of operands 0/1 must be a
;; register, per the condition.  NOTE(review): the insn condition's
;; first line and the output templates (orig. 6701, 6704-6713) are
;; elided in this chunk.
6698 (define_insn "*arm_movqi_insn"
6699 [(set (match_operand:QI 0 "nonimmediate_operand" "=r,r,r,l,r,l,Uu,r,m")
6700 (match_operand:QI 1 "general_operand" "rk,rk,I,Py,K,Uu,l,Uh,r"))]
6702 && ( register_operand (operands[0], QImode)
6703 || register_operand (operands[1], QImode))"
6714 [(set_attr "type" "mov_reg,mov_reg,mov_imm,mov_imm,mvn_imm,load_4,store_4,load_4,store_4")
6715 (set_attr "predicable" "yes")
6716 (set_attr "predicable_short_it" "yes,yes,no,yes,no,no,no,no,no")
6717 (set_attr "arch" "t2,any,any,t2,any,t2,t2,any,any")
6718 (set_attr "length" "2,4,4,2,4,2,2,4,4")]
;; movhf expander (__fp16/half): asserts both operands are aligned,
;; forces the source into a register when storing to memory, and on the
;; Thumb-1 path forces a register source whenever the destination is
;; not a register.  NOTE(review): condition string and some braces
;; (orig. 6725-6726, 6729-6744 gaps) are elided here.
6722 (define_expand "movhf"
6723 [(set (match_operand:HF 0 "general_operand")
6724 (match_operand:HF 1 "general_operand"))]
6727 gcc_checking_assert (aligned_operand (operands[0], HFmode));
6728 gcc_checking_assert (aligned_operand (operands[1], HFmode));
6731 if (MEM_P (operands[0]))
6732 operands[1] = force_reg (HFmode, operands[1]);
6734 else /* TARGET_THUMB1 */
6736 if (can_create_pseudo_p ())
6738 if (!REG_P (operands[0]))
6739 operands[1] = force_reg (HFmode, operands[1]);
;; HFmode move for 32-bit soft-float cores: ldrh/strh for memory,
;; mov for register-register, and for a constant the __fp16 bit image
;; is materialised with movw (Thumb-2 capable cores) or a mov/orr pair.
;; NOTE(review): several interior lines of the constant case (orig.
;; 6760-6764, 6766, 6774, 6776-6781) are elided in this chunk.
6745 (define_insn "*arm32_movhf"
6746 [(set (match_operand:HF 0 "nonimmediate_operand" "=r,m,r,r")
6747 (match_operand:HF 1 "general_operand" " m,r,r,F"))]
6748 "TARGET_32BIT && !TARGET_HARD_FLOAT
6749 && ( s_register_operand (operands[0], HFmode)
6750 || s_register_operand (operands[1], HFmode))"
6752 switch (which_alternative)
6754 case 0: /* ARM register from memory */
6755 return \"ldrh%?\\t%0, %1\\t%@ __fp16\";
6756 case 1: /* memory from ARM register */
6757 return \"strh%?\\t%1, %0\\t%@ __fp16\";
6758 case 2: /* ARM register from ARM register */
6759 return \"mov%?\\t%0, %1\\t%@ __fp16\";
6760 case 3: /* ARM register from constant */
6765 bits = real_to_target (NULL, CONST_DOUBLE_REAL_VALUE (operands[1]),
6767 ops[0] = operands[0];
6768 ops[1] = GEN_INT (bits);
6769 ops[2] = GEN_INT (bits & 0xff00);
6770 ops[3] = GEN_INT (bits & 0x00ff);
6772 if (arm_arch_thumb2)
6773 output_asm_insn (\"movw%?\\t%0, %1\", ops);
6775 output_asm_insn (\"mov%?\\t%0, %2\;orr%?\\t%0, %0, %3\", ops);
6782 [(set_attr "conds" "unconditional")
6783 (set_attr "type" "load_4,store_4,mov_reg,multiple")
6784 (set_attr "length" "4,4,4,8")
6785 (set_attr "predicable" "yes")]
;; movsf expander: alignment asserts, force-reg for memory stores, and
;; -- when the literal pool is disabled -- routes unrepresentable
;; CONST_DOUBLEs through gen_no_literal_pool_sf_immediate so the value
;; can be built in a GPR with MOV/MOVT and transferred.
;; NOTE(review): condition string and trailing lines of the emit call
;; (orig. 6791-6792, 6819-6824 area) are elided here.
6788 (define_expand "movsf"
6789 [(set (match_operand:SF 0 "general_operand")
6790 (match_operand:SF 1 "general_operand"))]
6793 gcc_checking_assert (aligned_operand (operands[0], SFmode));
6794 gcc_checking_assert (aligned_operand (operands[1], SFmode));
6797 if (MEM_P (operands[0]))
6798 operands[1] = force_reg (SFmode, operands[1]);
6800 else /* TARGET_THUMB1 */
6802 if (can_create_pseudo_p ())
6804 if (!REG_P (operands[0]))
6805 operands[1] = force_reg (SFmode, operands[1]);
6809 /* Cannot load it directly, generate a load with clobber so that it can be
6810 loaded via GPR with MOV / MOVT. */
6811 if (arm_disable_literal_pool
6812 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6813 && CONST_DOUBLE_P (operands[1])
6814 && TARGET_HARD_FLOAT
6815 && !vfp3_const_double_rtx (operands[1]))
6817 rtx clobreg = gen_reg_rtx (SFmode);
6818 emit_insn (gen_no_literal_pool_sf_immediate (operands[0], operands[1],
6825 ;; Transform a floating-point move of a constant into a core register into
6826 ;; an SImode operation.
;; define_split: rewrites (set SF-gpr SF-const) as the equivalent SImode
;; set via gen_lowpart; bails (FAIL path elided) if either lowpart is
;; null.  The split condition's leading lines (orig. 6830-6831) are
;; elided in this chunk.
6828 [(set (match_operand:SF 0 "arm_general_register_operand" "")
6829 (match_operand:SF 1 "immediate_operand" ""))]
6832 && CONST_DOUBLE_P (operands[1])"
6833 [(set (match_dup 2) (match_dup 3))]
6835 operands[2] = gen_lowpart (SImode, operands[0]);
6836 operands[3] = gen_lowpart (SImode, operands[1]);
6837 if (operands[2] == 0 || operands[3] == 0)
;; SFmode move for soft-float: mov / ldr / str.  Alternative 1 FAILs
;; (falls through to the splitter below) when the literal pool is
;; disabled and the source is not a MEM.
;; NOTE(review): insn condition's first line (orig. 6845) and the
;; alternative-1 body lines around 6853/6856 are elided here.
6842 (define_insn "*arm_movsf_soft_insn"
6843 [(set (match_operand:SF 0 "nonimmediate_operand" "=r,r,m")
6844 (match_operand:SF 1 "general_operand" "r,mE,r"))]
6846 && TARGET_SOFT_FLOAT
6847 && (!MEM_P (operands[0])
6848 || register_operand (operands[1], SFmode))"
6850 switch (which_alternative)
6852 case 0: return \"mov%?\\t%0, %1\";
6854 /* Cannot load it directly, split to load it via MOV / MOVT. */
6855 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6857 return \"ldr%?\\t%0, %1\\t%@ float\";
6858 case 2: return \"str%?\\t%1, %0\\t%@ float\";
6859 default: gcc_unreachable ();
6862 [(set_attr "predicable" "yes")
6863 (set_attr "type" "mov_reg,load_4,store_4")
6864 (set_attr "arm_pool_range" "*,4096,*")
6865 (set_attr "thumb2_pool_range" "*,4094,*")
6866 (set_attr "arm_neg_pool_range" "*,4084,*")
6867 (set_attr "thumb2_neg_pool_range" "*,0,*")]
6870 ;; Splitter for the above.
;; When the literal pool is disabled on soft-float, convert the SF
;; constant to its 32-bit target image with real_to_target and move it
;; as an SImode immediate into the SImode subreg of the destination.
6872 [(set (match_operand:SF 0 "s_register_operand")
6873 (match_operand:SF 1 "const_double_operand"))]
6874 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
6878 real_to_target (&buf, CONST_DOUBLE_REAL_VALUE (operands[1]), SFmode);
6879 rtx cst = gen_int_mode (buf, SImode);
6880 emit_move_insn (simplify_gen_subreg (SImode, operands[0], SFmode, 0), cst);
;; movdf expander: same shape as movsf above, but additionally excludes
;; constants representable directly (arm_const_double_rtx, or VFP
;; double-precision immediates) before falling back to the
;; no-literal-pool MOV/MOVT path.  NOTE(review): condition string and
;; trailing lines of the emit call (orig. 6888-6889, 6917-6922) are
;; elided here.
6885 (define_expand "movdf"
6886 [(set (match_operand:DF 0 "general_operand")
6887 (match_operand:DF 1 "general_operand"))]
6890 gcc_checking_assert (aligned_operand (operands[0], DFmode));
6891 gcc_checking_assert (aligned_operand (operands[1], DFmode));
6894 if (MEM_P (operands[0]))
6895 operands[1] = force_reg (DFmode, operands[1]);
6897 else /* TARGET_THUMB */
6899 if (can_create_pseudo_p ())
6901 if (!REG_P (operands[0]))
6902 operands[1] = force_reg (DFmode, operands[1]);
6906 /* Cannot load it directly, generate a load with clobber so that it can be
6907 loaded via GPR with MOV / MOVT. */
6908 if (arm_disable_literal_pool
6909 && (REG_P (operands[0]) || SUBREG_P (operands[0]))
6910 && CONSTANT_P (operands[1])
6911 && TARGET_HARD_FLOAT
6912 && !arm_const_double_rtx (operands[1])
6913 && !(TARGET_VFP_DOUBLE && vfp3_const_double_rtx (operands[1]))
6915 rtx clobreg = gen_reg_rtx (DFmode);
6916 emit_insn (gen_no_literal_pool_df_immediate (operands[0], operands[1],
6923 ;; Reloading a df mode value stored in integer regs to memory can require a
6925 ;; Another reload_out<m> pattern that requires special constraints.
;; Secondary reload for DF stores via core registers (operand 2 is an
;; SI scratch).  Simple address codes reuse the base register directly;
;; POST_INC/PRE_DEC become a DImode move; PRE_INC pre-adjusts the base
;; by 8; POST_DEC stores then subtracts 8; otherwise the address is
;; computed into the scratch first.  NOTE(review): many interior lines
;; (orig. 6930-6966 gaps, including the PLUS case setup at 6953-6955
;; context) are elided -- treat branch boundaries with care.
6926 (define_expand "reload_outdf"
6927 [(match_operand:DF 0 "arm_reload_memory_operand" "=o")
6928 (match_operand:DF 1 "s_register_operand" "r")
6929 (match_operand:SI 2 "s_register_operand" "=&r")]
6933 enum rtx_code code = GET_CODE (XEXP (operands[0], 0));
6936 operands[2] = XEXP (operands[0], 0);
6937 else if (code == POST_INC || code == PRE_DEC)
6939 operands[0] = gen_rtx_SUBREG (DImode, operands[0], 0);
6940 operands[1] = gen_rtx_SUBREG (DImode, operands[1], 0);
6941 emit_insn (gen_movdi (operands[0], operands[1]));
6944 else if (code == PRE_INC)
6946 rtx reg = XEXP (XEXP (operands[0], 0), 0);
6948 emit_insn (gen_addsi3 (reg, reg, GEN_INT (8)));
6951 else if (code == POST_DEC)
6952 operands[2] = XEXP (XEXP (operands[0], 0), 0);
6954 emit_insn (gen_addsi3 (operands[2], XEXP (XEXP (operands[0], 0), 0),
6955 XEXP (XEXP (operands[0], 0), 1)));
6957 emit_insn (gen_rtx_SET (replace_equiv_address (operands[0], operands[2]),
6960 if (code == POST_DEC)
6961 emit_insn (gen_addsi3 (operands[2], operands[2], GEN_INT (-8)));
;; DFmode move for 32-bit soft-float: core-register pairs (Da/Db/Dc
;; constant classes), memory loads/stores via output_move_double.
;; FAILs to the splitter below for non-MEM sources when the literal
;; pool is disabled.  NOTE(review): the case labels between 6974 and
;; 6981 and the surrounding braces are elided in this chunk.
6967 (define_insn "*movdf_soft_insn"
6968 [(set (match_operand:DF 0 "nonimmediate_soft_df_operand" "=r,r,r,r,m")
6969 (match_operand:DF 1 "soft_df_operand" "rDa,Db,Dc,mF,r"))]
6970 "TARGET_32BIT && TARGET_SOFT_FLOAT
6971 && ( register_operand (operands[0], DFmode)
6972 || register_operand (operands[1], DFmode))"
6974 switch (which_alternative)
6981 /* Cannot load it directly, split to load it via MOV / MOVT. */
6982 if (!MEM_P (operands[1]) && arm_disable_literal_pool)
6986 return output_move_double (operands, true, NULL);
6989 [(set_attr "length" "8,12,16,8,8")
6990 (set_attr "type" "multiple,multiple,multiple,load_8,store_8")
6991 (set_attr "arm_pool_range" "*,*,*,1020,*")
6992 (set_attr "thumb2_pool_range" "*,*,*,1018,*")
6993 (set_attr "arm_neg_pool_range" "*,*,*,1004,*")
6994 (set_attr "thumb2_neg_pool_range" "*,*,*,0,*")]
6997 ;; Splitter for the above.
;; Build the 64-bit target image of the DF constant (word order chosen
;; by BYTES_BIG_ENDIAN) and move it as a DImode immediate into the
;; DImode subreg of the destination.
6999 [(set (match_operand:DF 0 "s_register_operand")
7000 (match_operand:DF 1 "const_double_operand"))]
7001 "arm_disable_literal_pool && TARGET_SOFT_FLOAT"
7005 int order = BYTES_BIG_ENDIAN ? 1 : 0;
7006 real_to_target (buf, CONST_DOUBLE_REAL_VALUE (operands[1]), DFmode);
7007 unsigned HOST_WIDE_INT ival = zext_hwi (buf[order], 32);
7008 ival |= (zext_hwi (buf[1 - order], 32) << 32);
7009 rtx cst = gen_int_mode (ival, DImode);
7010 emit_move_insn (simplify_gen_subreg (DImode, operands[0], DFmode, 0), cst);
7016 ;; load- and store-multiple insns
7017 ;; The arm can load/store any set of registers, provided that they are in
7018 ;; ascending order, but these expanders assume a contiguous set.
;; ldm expander: validates a constant count in [2, MAX_LDM_STM_OPS],
;; a MEM source, and a REG destination whose contiguous run stays at or
;; below LAST_ARM_REGNUM, then builds the parallel via
;; arm_gen_load_multiple.  FAIL paths (orig. 7036-7038 area) elided.
7020 (define_expand "load_multiple"
7021 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7022 (match_operand:SI 1 "" ""))
7023 (use (match_operand:SI 2 "" ""))])]
7026 HOST_WIDE_INT offset = 0;
7028 /* Support only fixed point registers. */
7029 if (!CONST_INT_P (operands[2])
7030 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7031 || INTVAL (operands[2]) < 2
7032 || !MEM_P (operands[1])
7033 || !REG_P (operands[0])
7034 || REGNO (operands[0]) > (LAST_ARM_REGNUM - 1)
7035 || REGNO (operands[0]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7039 = arm_gen_load_multiple (arm_regs_in_sequence + REGNO (operands[0]),
7040 INTVAL (operands[2]),
7041 force_reg (SImode, XEXP (operands[1], 0)),
7042 FALSE, operands[1], &offset);
;; stm expander: mirror image of load_multiple -- REG source, MEM
;; destination, same contiguous-range validation, built with
;; arm_gen_store_multiple.
7045 (define_expand "store_multiple"
7046 [(match_par_dup 3 [(set (match_operand:SI 0 "" "")
7047 (match_operand:SI 1 "" ""))
7048 (use (match_operand:SI 2 "" ""))])]
7051 HOST_WIDE_INT offset = 0;
7053 /* Support only fixed point registers. */
7054 if (!CONST_INT_P (operands[2])
7055 || INTVAL (operands[2]) > MAX_LDM_STM_OPS
7056 || INTVAL (operands[2]) < 2
7057 || !REG_P (operands[1])
7058 || !MEM_P (operands[0])
7059 || REGNO (operands[1]) > (LAST_ARM_REGNUM - 1)
7060 || REGNO (operands[1]) + INTVAL (operands[2]) > LAST_ARM_REGNUM)
7064 = arm_gen_store_multiple (arm_regs_in_sequence + REGNO (operands[1]),
7065 INTVAL (operands[2]),
7066 force_reg (SImode, XEXP (operands[0], 0)),
7067 FALSE, operands[0], &offset);
;; setmemsi expander: delegates entirely to arm_gen_setmem; DONE/FAIL
;; plumbing (orig. 7079-7084) is elided in this chunk.
7071 (define_expand "setmemsi"
7072 [(match_operand:BLK 0 "general_operand")
7073 (match_operand:SI 1 "const_int_operand")
7074 (match_operand:SI 2 "const_int_operand")
7075 (match_operand:SI 3 "const_int_operand")]
7078 if (arm_gen_setmem (operands))
7085 ;; Move a block of memory if it is word aligned and MORE than 2 words long.
7086 ;; We could let this apply for blocks of less than this, but it clobbers so
7087 ;; many registers that there is then probably a better way.
;; cpymemqi expander: prefers the ldrd/strd expansion when tuned for it
;; and not optimising for size, otherwise arm_gen_cpymemqi; Thumb-1
;; requires word alignment (operand 3 == 4) and length <= 48.
7089 (define_expand "cpymemqi"
7090 [(match_operand:BLK 0 "general_operand")
7091 (match_operand:BLK 1 "general_operand")
7092 (match_operand:SI 2 "const_int_operand")
7093 (match_operand:SI 3 "const_int_operand")]
7098 if (TARGET_LDRD && current_tune->prefer_ldrd_strd
7099 && !optimize_function_for_size_p (cfun))
7101 if (gen_cpymem_ldrd_strd (operands))
7106 if (arm_gen_cpymemqi (operands))
7110 else /* TARGET_THUMB1 */
7112 if ( INTVAL (operands[3]) != 4
7113 || INTVAL (operands[2]) > 48)
7116 thumb_expand_cpymemqi (operands);
7123 ;; Compare & branch insns
7124 ;; The range calculations are based as follows:
7125 ;; For forward branches, the address calculation returns the address of
7126 ;; the next instruction. This is 2 beyond the branch instruction.
7127 ;; For backward branches, the address calculation returns the address of
7128 ;; the first instruction in this pattern (cmp). This is 2 before the branch
7129 ;; instruction for the shortest sequence, and 4 before the branch instruction
7130 ;; if we have to jump around an unconditional branch.
7131 ;; To the basic branch range the PC offset must be added (this is +4).
7132 ;; So for forward branches we have
7133 ;; (pos_range - pos_base_offs + pc_offs) = (pos_range - 2 + 4).
7134 ;; And for backward branches we have
7135 ;; (neg_range - neg_base_offs + pc_offs) = (neg_range - (-2 or -4) + 4).
7137 ;; For a 'b' pos_range = 2046, neg_range = -2048 giving (-2040->2048).
7138 ;; For a 'b<cond>' pos_range = 254, neg_range = -256 giving (-250 ->256).
;; cbranchsi4: validates the comparison, then (32-bit path) emits a
;; cbranch_cc; Thumb-1 handles negatable immediates specially and
;; otherwise legitimises operand 2.  Interior DONE/FAIL lines elided.
7140 (define_expand "cbranchsi4"
7141 [(set (pc) (if_then_else
7142 (match_operator 0 "expandable_comparison_operator"
7143 [(match_operand:SI 1 "s_register_operand")
7144 (match_operand:SI 2 "nonmemory_operand")])
7145 (label_ref (match_operand 3 "" ""))
7151 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7153 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7157 if (thumb1_cmpneg_operand (operands[2], SImode))
7159 emit_jump_insn (gen_cbranchsi4_scratch (NULL, operands[1], operands[2],
7160 operands[3], operands[0]));
7163 if (!thumb1_cmp_operand (operands[2], SImode))
7164 operands[2] = force_reg (SImode, operands[2]);
;; SF compare-and-branch: expands straight to cbranch_cc.
7167 (define_expand "cbranchsf4"
7168 [(set (pc) (if_then_else
7169 (match_operator 0 "expandable_comparison_operator"
7170 [(match_operand:SF 1 "s_register_operand")
7171 (match_operand:SF 2 "vfp_compare_operand")])
7172 (label_ref (match_operand 3 "" ""))
7174 "TARGET_32BIT && TARGET_HARD_FLOAT"
7175 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7176 operands[3])); DONE;"
;; DF compare-and-branch: as above, double-precision VFP only.
7179 (define_expand "cbranchdf4"
7180 [(set (pc) (if_then_else
7181 (match_operator 0 "expandable_comparison_operator"
7182 [(match_operand:DF 1 "s_register_operand")
7183 (match_operand:DF 2 "vfp_compare_operand")])
7184 (label_ref (match_operand 3 "" ""))
7186 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7187 "emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7188 operands[3])); DONE;"
;; DI compare-and-branch: validates, then cbranch_cc (tail elided).
7191 (define_expand "cbranchdi4"
7192 [(set (pc) (if_then_else
7193 (match_operator 0 "expandable_comparison_operator"
7194 [(match_operand:DI 1 "s_register_operand")
7195 (match_operand:DI 2 "reg_or_int_operand")])
7196 (label_ref (match_operand 3 "" ""))
7200 if (!arm_validize_comparison (&operands[0], &operands[1], &operands[2]))
7202 emit_jump_insn (gen_cbranch_cc (operands[0], operands[1], operands[2],
7208 ;; Comparison and test insns
;; SImode compare: cmp with register, add-range (I) or negated-range
;; (L, emitted as cmn) immediates; first two alternatives are 16-bit
;; Thumb-2 encodings.  Output templates (orig. 7214-7220) elided.
7210 (define_insn "*arm_cmpsi_insn"
7211 [(set (reg:CC CC_REGNUM)
7212 (compare:CC (match_operand:SI 0 "s_register_operand" "l,r,r,r,r")
7213 (match_operand:SI 1 "arm_add_operand" "Py,r,r,I,L")))]
7221 [(set_attr "conds" "set")
7222 (set_attr "arch" "t2,t2,any,any,any")
7223 (set_attr "length" "2,2,4,4,4")
7224 (set_attr "predicable" "yes")
7225 (set_attr "predicable_short_it" "yes,yes,yes,no,no")
7226 (set_attr "type" "alus_imm,alus_sreg,alus_sreg,alus_imm,alus_imm")]
;; Compare register against a shifted register (shift by immediate or
;; by register; the register-shift form is ARM-state only per "a").
7229 (define_insn "*cmpsi_shiftsi"
7230 [(set (reg:CC CC_REGNUM)
7231 (compare:CC (match_operand:SI 0 "s_register_operand" "r,r,r")
7232 (match_operator:SI 3 "shift_operator"
7233 [(match_operand:SI 1 "s_register_operand" "r,r,r")
7234 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])))]
7237 [(set_attr "conds" "set")
7238 (set_attr "shift" "1")
7239 (set_attr "arch" "32,a,a")
7240 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; As above but operands swapped, producing a CC_SWP-mode comparison.
7242 (define_insn "*cmpsi_shiftsi_swp"
7243 [(set (reg:CC_SWP CC_REGNUM)
7244 (compare:CC_SWP (match_operator:SI 3 "shift_operator"
7245 [(match_operand:SI 1 "s_register_operand" "r,r,r")
7246 (match_operand:SI 2 "shift_amount_operand" "M,r,M")])
7247 (match_operand:SI 0 "s_register_operand" "r,r,r")))]
7250 [(set_attr "conds" "set")
7251 (set_attr "shift" "1")
7252 (set_attr "arch" "32,a,a")
7253 (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; Zero-compare of a register against a negated shifted register
;; (matches a cmn with shift); type attribute picks the imm/reg-shift
;; variant from operand 3.  Template line (orig. 7262-7263) elided.
7255 (define_insn "*arm_cmpsi_negshiftsi_si"
7256 [(set (reg:CC_Z CC_REGNUM)
7258 (neg:SI (match_operator:SI 1 "shift_operator"
7259 [(match_operand:SI 2 "s_register_operand" "r")
7260 (match_operand:SI 3 "reg_or_int_operand" "rM")]))
7261 (match_operand:SI 0 "s_register_operand" "r")))]
7264 [(set_attr "conds" "set")
7265 (set (attr "type") (if_then_else (match_operand 3 "const_int_operand" "")
7266 (const_string "alus_shift_imm")
7267 (const_string "alus_shift_reg")))
7268 (set_attr "predicable" "yes")]
7271 ; This insn allows redundant compares to be removed by cse, nothing should
7272 ; ever appear in the output file since (set (reg x) (reg x)) is a no-op that
7273 ; is deleted later on. The match_dup will match the mode here, so that
7274 ; mode changes of the condition codes aren't lost by this even though we don't
7275 ; specify what they are.
7277 (define_insn "*deleted_compare"
7278 [(set (match_operand 0 "cc_register" "") (match_dup 0))]
7280 "\\t%@ deleted compare"
7281 [(set_attr "conds" "set")
7282 (set_attr "length" "0")
7283 (set_attr "type" "no_insn")]
7287 ;; Conditional branch insns
;; cbranch_cc: shared expander that materialises the comparison into
;; the CC register via arm_gen_compare_reg and rewrites the operator to
;; compare against zero.
7289 (define_expand "cbranch_cc"
7291 (if_then_else (match_operator 0 "" [(match_operand 1 "" "")
7292 (match_operand 2 "" "")])
7293 (label_ref (match_operand 3 "" ""))
7296 "operands[1] = arm_gen_compare_reg (GET_CODE (operands[0]),
7297 operands[1], operands[2], NULL_RTX);
7298 operands[2] = const0_rtx;"
7302 ;; Patterns to match conditional branch insns.
;; b<cond> to the label; the ccfsm interaction suppresses the branch
;; when conditional-execution conversion has absorbed it.  Length 2 for
;; short-range Thumb-2 branches, else 4 (tail of the length attr is
;; elided in this chunk).
7305 (define_insn "arm_cond_branch"
7307 (if_then_else (match_operator 1 "arm_comparison_operator"
7308 [(match_operand 2 "cc_register" "") (const_int 0)])
7309 (label_ref (match_operand 0 "" ""))
7313 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7315 arm_ccfsm_state += 2;
7318 return \"b%d1\\t%l0\";
7320 [(set_attr "conds" "use")
7321 (set_attr "type" "branch")
7322 (set (attr "length")
7324 (and (match_test "TARGET_THUMB2")
7325 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7326 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; Reversed-sense twin of the above: branches when the condition is
;; false (%D1), i.e. the label is the else-arm of the if_then_else.
7331 (define_insn "*arm_cond_branch_reversed"
7333 (if_then_else (match_operator 1 "arm_comparison_operator"
7334 [(match_operand 2 "cc_register" "") (const_int 0)])
7336 (label_ref (match_operand 0 "" ""))))]
7339 if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7341 arm_ccfsm_state += 2;
7344 return \"b%D1\\t%l0\";
7346 [(set_attr "conds" "use")
7347 (set_attr "type" "branch")
7348 (set (attr "length")
7350 (and (match_test "TARGET_THUMB2")
7351 (and (ge (minus (match_dup 0) (pc)) (const_int -250))
7352 (le (minus (match_dup 0) (pc)) (const_int 256))))
;; cstore_cc: analogue of cbranch_cc for store-flag patterns -- build
;; the CC value, then compare it against zero.
7361 (define_expand "cstore_cc"
7362 [(set (match_operand:SI 0 "s_register_operand")
7363 (match_operator:SI 1 "" [(match_operand 2 "" "")
7364 (match_operand 3 "" "")]))]
7366 "operands[2] = arm_gen_compare_reg (GET_CODE (operands[1]),
7367 operands[2], operands[3], NULL_RTX);
7368 operands[3] = const0_rtx;"
;; Store-flag (scc): split after reload into a conditional move of
;; 1/0 on the condition (split replacement arms partly elided).
7371 (define_insn_and_split "*mov_scc"
7372 [(set (match_operand:SI 0 "s_register_operand" "=r")
7373 (match_operator:SI 1 "arm_comparison_operator_mode"
7374 [(match_operand 2 "cc_register" "") (const_int 0)]))]
7376 "#" ; "mov%D1\\t%0, #0\;mov%d1\\t%0, #1"
7379 (if_then_else:SI (match_dup 1)
7383 [(set_attr "conds" "use")
7384 (set_attr "length" "8")
7385 (set_attr "type" "multiple")]
;; Negated borrow flag as a value: a single sbc-style insn (template
;; elided); kept separate so *mov_negscc below can exclude it.
7388 (define_insn "*negscc_borrow"
7389 [(set (match_operand:SI 0 "s_register_operand" "=r")
7390 (neg:SI (match_operand:SI 1 "arm_borrow_operation" "")))]
7393 [(set_attr "conds" "use")
7394 (set_attr "length" "4")
7395 (set_attr "type" "adc_reg")]
;; Negated scc: conditional move of ~0 / 0, split after the condition
;; holds; excludes borrow operations handled by *negscc_borrow.
7398 (define_insn_and_split "*mov_negscc"
7399 [(set (match_operand:SI 0 "s_register_operand" "=r")
7400 (neg:SI (match_operator:SI 1 "arm_comparison_operator_mode"
7401 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7402 "TARGET_ARM && !arm_borrow_operation (operands[1], SImode)"
7403 "#" ; "mov%D1\\t%0, #0\;mvn%d1\\t%0, #0"
7406 (if_then_else:SI (match_dup 1)
7410 operands[3] = GEN_INT (~0);
7412 [(set_attr "conds" "use")
7413 (set_attr "length" "8")
7414 (set_attr "type" "multiple")]
;; Complemented scc: conditional move of ~1 / ~0 (i.e. mvn of 1 or 0).
7417 (define_insn_and_split "*mov_notscc"
7418 [(set (match_operand:SI 0 "s_register_operand" "=r")
7419 (not:SI (match_operator:SI 1 "arm_comparison_operator"
7420 [(match_operand 2 "cc_register" "") (const_int 0)])))]
7422 "#" ; "mvn%D1\\t%0, #0\;mvn%d1\\t%0, #1"
7425 (if_then_else:SI (match_dup 1)
7429 operands[3] = GEN_INT (~1);
7430 operands[4] = GEN_INT (~0);
7432 [(set_attr "conds" "use")
7433 (set_attr "length" "8")
7434 (set_attr "type" "multiple")]
;; cstoresi4: store-flag for SImode.  The 32-bit path delegates to
;; cstore_cc; the Thumb-1 path open-codes each comparison with
;; add/ior/shift/sub sequences and the thumb1 scc helpers.  Interior
;; case labels, DONE/FAIL lines, and some closing braces are elided
;; throughout -- the visible per-code sequences are annotated below.
7437 (define_expand "cstoresi4"
7438 [(set (match_operand:SI 0 "s_register_operand")
7439 (match_operator:SI 1 "expandable_comparison_operator"
7440 [(match_operand:SI 2 "s_register_operand")
7441 (match_operand:SI 3 "reg_or_int_operand")]))]
7442 "TARGET_32BIT || TARGET_THUMB1"
7444 rtx op3, scratch, scratch2;
7448 if (!arm_add_operand (operands[3], SImode))
7449 operands[3] = force_reg (SImode, operands[3]);
7450 emit_insn (gen_cstore_cc (operands[0], operands[1],
7451 operands[2], operands[3]));
7455 if (operands[3] == const0_rtx)
7457 switch (GET_CODE (operands[1]))
7460 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], operands[2]));
7464 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], operands[2]));
;; x <= 0  ==>  ((x | (x - 1)) >> 31) as unsigned.
7468 scratch = expand_binop (SImode, add_optab, operands[2], constm1_rtx,
7469 NULL_RTX, 0, OPTAB_WIDEN);
7470 scratch = expand_binop (SImode, ior_optab, operands[2], scratch,
7471 NULL_RTX, 0, OPTAB_WIDEN);
7472 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7473 operands[0], 1, OPTAB_WIDEN);
;; x >= 0  ==>  (~x) >> 31.
7477 scratch = expand_unop (SImode, one_cmpl_optab, operands[2],
7479 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31),
7480 NULL_RTX, 1, OPTAB_WIDEN);
;; x > 0  ==>  ((x >> 31) - x) >> 31, arithmetic then logical shift.
7484 scratch = expand_binop (SImode, ashr_optab, operands[2],
7485 GEN_INT (31), NULL_RTX, 0, OPTAB_WIDEN);
7486 scratch = expand_binop (SImode, sub_optab, scratch, operands[2],
7487 NULL_RTX, 0, OPTAB_WIDEN);
7488 expand_binop (SImode, lshr_optab, scratch, GEN_INT (31), operands[0],
7492 /* LT is handled by generic code. No need for unsigned with 0. */
7499 switch (GET_CODE (operands[1]))
;; EQ with nonzero operand: subtract then test-for-zero helper.
7502 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7503 NULL_RTX, 0, OPTAB_WIDEN);
7504 emit_insn (gen_cstoresi_eq0_thumb1 (operands[0], scratch));
;; NE with nonzero operand: subtract then test-for-nonzero helper.
7508 scratch = expand_binop (SImode, sub_optab, operands[2], operands[3],
7509 NULL_RTX, 0, OPTAB_WIDEN);
7510 emit_insn (gen_cstoresi_ne0_thumb1 (operands[0], scratch));
;; Signed <=/>= style comparisons via sign bits + add-with-carry.
7514 op3 = force_reg (SImode, operands[3]);
7516 scratch = expand_binop (SImode, lshr_optab, operands[2], GEN_INT (31),
7517 NULL_RTX, 1, OPTAB_WIDEN);
7518 scratch2 = expand_binop (SImode, ashr_optab, op3, GEN_INT (31),
7519 NULL_RTX, 0, OPTAB_WIDEN);
7520 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
7526 if (!thumb1_cmp_operand (op3, SImode))
7527 op3 = force_reg (SImode, op3);
7528 scratch = expand_binop (SImode, ashr_optab, operands[2], GEN_INT (31),
7529 NULL_RTX, 0, OPTAB_WIDEN);
7530 scratch2 = expand_binop (SImode, lshr_optab, op3, GEN_INT (31),
7531 NULL_RTX, 1, OPTAB_WIDEN);
7532 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch2,
;; Unsigned GEU/LEU style: zero scratch + add-with-carry helper.
7537 op3 = force_reg (SImode, operands[3]);
7538 scratch = force_reg (SImode, const0_rtx);
7539 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
7545 if (!thumb1_cmp_operand (op3, SImode))
7546 op3 = force_reg (SImode, op3);
7547 scratch = force_reg (SImode, const0_rtx);
7548 emit_insn (gen_thumb1_addsi3_addgeu (operands[0], scratch, scratch,
;; LTU/GTU via the dedicated thumb1 ltu helper (operands swapped for
;; the GTU case).
7554 if (!thumb1_cmp_operand (op3, SImode))
7555 op3 = force_reg (SImode, op3);
7556 scratch = gen_reg_rtx (SImode);
7557 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], operands[2], op3));
7561 op3 = force_reg (SImode, operands[3]);
7562 scratch = gen_reg_rtx (SImode);
7563 emit_insn (gen_cstoresi_ltu_thumb1 (operands[0], op3, operands[2]));
7566 /* No good sequences for GT, LT. */
;; HF store-flag: validates the FP comparison then emits cstore_cc
;; (FAIL arm and DONE elided).
7573 (define_expand "cstorehf4"
7574 [(set (match_operand:SI 0 "s_register_operand")
7575 (match_operator:SI 1 "expandable_comparison_operator"
7576 [(match_operand:HF 2 "s_register_operand")
7577 (match_operand:HF 3 "vfp_compare_operand")]))]
7578 "TARGET_VFP_FP16INST"
7580 if (!arm_validize_comparison (&operands[1],
7585 emit_insn (gen_cstore_cc (operands[0], operands[1],
7586 operands[2], operands[3]));
;; SF store-flag: direct cstore_cc.
7591 (define_expand "cstoresf4"
7592 [(set (match_operand:SI 0 "s_register_operand")
7593 (match_operator:SI 1 "expandable_comparison_operator"
7594 [(match_operand:SF 2 "s_register_operand")
7595 (match_operand:SF 3 "vfp_compare_operand")]))]
7596 "TARGET_32BIT && TARGET_HARD_FLOAT"
7597 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7598 operands[2], operands[3])); DONE;"
;; DF store-flag: direct cstore_cc, double-precision VFP only.
7601 (define_expand "cstoredf4"
7602 [(set (match_operand:SI 0 "s_register_operand")
7603 (match_operator:SI 1 "expandable_comparison_operator"
7604 [(match_operand:DF 2 "s_register_operand")
7605 (match_operand:DF 3 "vfp_compare_operand")]))]
7606 "TARGET_32BIT && TARGET_HARD_FLOAT && !TARGET_VFP_SINGLE"
7607 "emit_insn (gen_cstore_cc (operands[0], operands[1],
7608 operands[2], operands[3])); DONE;"
;; DI store-flag: validates then cstore_cc (condition and tail elided).
7611 (define_expand "cstoredi4"
7612 [(set (match_operand:SI 0 "s_register_operand")
7613 (match_operator:SI 1 "expandable_comparison_operator"
7614 [(match_operand:DI 2 "s_register_operand")
7615 (match_operand:DI 3 "reg_or_int_operand")]))]
7618 if (!arm_validize_comparison (&operands[1],
7622 emit_insn (gen_cstore_cc (operands[0], operands[1], operands[2],
7629 ;; Conditional move insns
;; movsicc: validate the comparison, build the CC value with
;; arm_gen_compare_reg, and rewrite operand 1 as <code> (CC, 0) so the
;; *movsicc_insn pattern below can match.  FAIL arm elided.
7631 (define_expand "movsicc"
7632 [(set (match_operand:SI 0 "s_register_operand")
7633 (if_then_else:SI (match_operand 1 "expandable_comparison_operator")
7634 (match_operand:SI 2 "arm_not_operand")
7635 (match_operand:SI 3 "arm_not_operand")))]
7642 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7643 &XEXP (operands[1], 1)))
7646 code = GET_CODE (operands[1]);
7647 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7648 XEXP (operands[1], 1), NULL_RTX);
7649 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; movhfcc: identical shape for HFmode, gated on FP16 insns.
7653 (define_expand "movhfcc"
7654 [(set (match_operand:HF 0 "s_register_operand")
7655 (if_then_else:HF (match_operand 1 "arm_cond_move_operator")
7656 (match_operand:HF 2 "s_register_operand")
7657 (match_operand:HF 3 "s_register_operand")))]
7658 "TARGET_VFP_FP16INST"
7661 enum rtx_code code = GET_CODE (operands[1]);
7664 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7665 &XEXP (operands[1], 1)))
7668 code = GET_CODE (operands[1]);
7669 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7670 XEXP (operands[1], 1), NULL_RTX);
7671 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; movsfcc: identical shape for SFmode.
7675 (define_expand "movsfcc"
7676 [(set (match_operand:SF 0 "s_register_operand")
7677 (if_then_else:SF (match_operand 1 "arm_cond_move_operator")
7678 (match_operand:SF 2 "s_register_operand")
7679 (match_operand:SF 3 "s_register_operand")))]
7680 "TARGET_32BIT && TARGET_HARD_FLOAT"
7683 enum rtx_code code = GET_CODE (operands[1]);
7686 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7687 &XEXP (operands[1], 1)))
7690 code = GET_CODE (operands[1]);
7691 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7692 XEXP (operands[1], 1), NULL_RTX);
7693 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; movdfcc: identical shape for DFmode, double-precision VFP only.
7697 (define_expand "movdfcc"
7698 [(set (match_operand:DF 0 "s_register_operand")
7699 (if_then_else:DF (match_operand 1 "arm_cond_move_operator")
7700 (match_operand:DF 2 "s_register_operand")
7701 (match_operand:DF 3 "s_register_operand")))]
7702 "TARGET_32BIT && TARGET_HARD_FLOAT && TARGET_VFP_DOUBLE"
7705 enum rtx_code code = GET_CODE (operands[1]);
7708 if (!arm_validize_comparison (&operands[1], &XEXP (operands[1], 0),
7709 &XEXP (operands[1], 1)))
7711 code = GET_CODE (operands[1]);
7712 ccreg = arm_gen_compare_reg (code, XEXP (operands[1], 0),
7713 XEXP (operands[1], 1), NULL_RTX);
7714 operands[1] = gen_rtx_fmt_ee (code, VOIDmode, ccreg, const0_rtx);
;; VSEL conditional move for SF/DF (SDF iterator) on VFPv5: emits
;; vsel<cond> directly, or the reversed form with operands 3/4 swapped
;; when the direct condition is not encodable; the selecting logic
;; between the two returns (orig. 7730-7746 gaps) is elided.
7718 (define_insn "*cmov<mode>"
7719 [(set (match_operand:SDF 0 "s_register_operand" "=<F_constraint>")
7720 (if_then_else:SDF (match_operator 1 "arm_vsel_comparison_operator"
7721 [(match_operand 2 "cc_register" "") (const_int 0)])
7722 (match_operand:SDF 3 "s_register_operand"
7724 (match_operand:SDF 4 "s_register_operand"
7725 "<F_constraint>")))]
7726 "TARGET_HARD_FLOAT && TARGET_VFP5 <vfp_double_cond>"
7729 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7736 return \"vsel%d1.<V_if_elem>\\t%<V_reg>0, %<V_reg>3, %<V_reg>4\";
7741 return \"vsel%D1.<V_if_elem>\\t%<V_reg>0, %<V_reg>4, %<V_reg>3\";
7747 [(set_attr "conds" "use")
7748 (set_attr "type" "fcsel")]
;; HF analogue of *cmov<mode>, using vsel.f16.
7751 (define_insn "*cmovhf"
7752 [(set (match_operand:HF 0 "s_register_operand" "=t")
7753 (if_then_else:HF (match_operator 1 "arm_vsel_comparison_operator"
7754 [(match_operand 2 "cc_register" "") (const_int 0)])
7755 (match_operand:HF 3 "s_register_operand" "t")
7756 (match_operand:HF 4 "s_register_operand" "t")))]
7757 "TARGET_VFP_FP16INST"
7760 enum arm_cond_code code = maybe_get_arm_condition_code (operands[1]);
7767 return \"vsel%d1.f16\\t%0, %3, %4\";
7772 return \"vsel%D1.f16\\t%0, %4, %3\";
7778 [(set_attr "conds" "use")
7779 (set_attr "type" "fcsel")]
;; SImode conditional move on a CC register, 8 alternatives combining
;; register/immediate (I) / inverted-immediate (K) arms.  Splits after
;; reload into one or two COND_EXEC sets, reversing the condition for
;; the else-arm (with the FP-aware reversal for CCFP/CCFPE modes).
;; NOTE(review): templates for alts 0-3 and some split glue (orig.
;; 7789-7798, 7804-7829 gaps) are elided in this chunk.
7782 (define_insn_and_split "*movsicc_insn"
7783 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r,r,r,r,r")
7785 (match_operator 3 "arm_comparison_operator"
7786 [(match_operand 4 "cc_register" "") (const_int 0)])
7787 (match_operand:SI 1 "arm_not_operand" "0,0,rI,K,rI,rI,K,K")
7788 (match_operand:SI 2 "arm_not_operand" "rI,K,0,0,rI,K,rI,K")))]
7799 ; alt4: mov%d3\\t%0, %1\;mov%D3\\t%0, %2
7800 ; alt5: mov%d3\\t%0, %1\;mvn%D3\\t%0, #%B2
7801 ; alt6: mvn%d3\\t%0, #%B1\;mov%D3\\t%0, %2
7802 ; alt7: mvn%d3\\t%0, #%B1\;mvn%D3\\t%0, #%B2"
7803 "&& reload_completed"
7806 enum rtx_code rev_code;
7810 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7812 gen_rtx_SET (operands[0], operands[1])));
7814 rev_code = GET_CODE (operands[3]);
7815 mode = GET_MODE (operands[4]);
7816 if (mode == CCFPmode || mode == CCFPEmode)
7817 rev_code = reverse_condition_maybe_unordered (rev_code);
7819 rev_code = reverse_condition (rev_code);
7821 rev_cond = gen_rtx_fmt_ee (rev_code,
7825 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
7827 gen_rtx_SET (operands[0], operands[2])));
7830 [(set_attr "length" "4,4,4,4,8,8,8,8")
7831 (set_attr "conds" "use")
7832 (set_attr_alternative "type"
7833 [(if_then_else (match_operand 2 "const_int_operand" "")
7834 (const_string "mov_imm")
7835 (const_string "mov_reg"))
7836 (const_string "mvn_imm")
7837 (if_then_else (match_operand 1 "const_int_operand" "")
7838 (const_string "mov_imm")
7839 (const_string "mov_reg"))
7840 (const_string "mvn_imm")
7841 (const_string "multiple")
7842 (const_string "multiple")
7843 (const_string "multiple")
7844 (const_string "multiple")])]
;; Conditional move of SFmode when there is no hardware FP
;; (TARGET_SOFT_FLOAT): the SF value lives in core registers, so a
;; predicated integer mov suffices.  The two alternatives tie the
;; destination to whichever input already occupies it.
7847 (define_insn "*movsfcc_soft_insn"
7848   [(set (match_operand:SF 0 "s_register_operand" "=r,r")
7849 	(if_then_else:SF (match_operator 3 "arm_comparison_operator"
7850 			  [(match_operand 4 "cc_register" "") (const_int 0)])
7851 			 (match_operand:SF 1 "s_register_operand" "0,r")
7852 			 (match_operand:SF 2 "s_register_operand" "r,0")))]
7853   "TARGET_ARM && TARGET_SOFT_FLOAT"
7857   [(set_attr "conds" "use")
7858    (set_attr "type" "mov_reg")]
7862 ;; Jump and linkage insns
7864 (define_expand "jump"
7866 	(label_ref (match_operand 0 "" "")))]
;; Unconditional branch.  The ccfsm state check cooperates with the
;; ARM final-pass conditional-execution state machine: states 1 and 2
;; mean this branch is being absorbed into a predicated sequence, so
;; the state is advanced instead of emitting a real branch here.
;; NOTE(review): some lines of these patterns are missing from this
;; extract.
7871 (define_insn "*arm_jump"
7873 	(label_ref (match_operand 0 "" "")))]
7877     if (arm_ccfsm_state == 1 || arm_ccfsm_state == 2)
7879 	arm_ccfsm_state += 2;
7882     return \"b%?\\t%l0\";
7885   [(set_attr "predicable" "yes")
7886    (set (attr "length")
7888 	   (and (match_test "TARGET_THUMB2")
7889 		(and (ge (minus (match_dup 0) (pc)) (const_int -2044))
7890 		     (le (minus (match_dup 0) (pc)) (const_int 2048))))
7893    (set_attr "type" "branch")]
;; "call" expander: handles long calls (forcing the callee address into
;; a register), FDPIC indirect calls (loading the function descriptor
;; and restoring r9 afterwards), and CMSE non-secure calls.
7896 (define_expand "call"
7897   [(parallel [(call (match_operand 0 "memory_operand")
7898 	            (match_operand 1 "general_operand"))
7899 	      (use (match_operand 2 "" ""))
7900 	      (clobber (reg:SI LR_REGNUM))])]
7905     tree addr = MEM_EXPR (operands[0]);
7907     /* In an untyped call, we can get NULL for operand 2.  */
7908     if (operands[2] == NULL_RTX)
7909       operands[2] = const0_rtx;
7911     /* Decide if we should generate indirect calls by loading the
7912        32-bit address of the callee into a register before performing the
7914     callee = XEXP (operands[0], 0);
7915     if (GET_CODE (callee) == SYMBOL_REF
7916 	? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
7918       XEXP (operands[0], 0) = force_reg (Pmode, callee);
7920     if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[0], 0)))
7921 	/* Indirect call: set r9 with FDPIC value of callee.  */
7922 	XEXP (operands[0], 0)
7923 	  = arm_load_function_descriptor (XEXP (operands[0], 0));
7925     if (detect_cmse_nonsecure_call (addr))
7927 	pat = gen_nonsecure_call_internal (operands[0], operands[1],
7929 	emit_call_insn (pat);
7933 	pat = gen_call_internal (operands[0], operands[1], operands[2]);
7934 	arm_emit_call_insn (pat, XEXP (operands[0], 0), false);
7937     /* Restore FDPIC register (r9) after call.  */
7940 	rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
7941 	rtx initial_fdpic_reg
7942 	    = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
7944 	emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
7945 							initial_fdpic_reg));
;; Reload the PIC/FDPIC register (r9) from its incoming value after an
;; indirect FDPIC call may have clobbered it.  Operand 0 is read and
;; written (match_dup inside the unspec).
7952 (define_insn "restore_pic_register_after_call"
7953   [(set (match_operand:SI 0 "s_register_operand" "+r,r")
7954         (unspec:SI [(match_dup 0)
7955                     (match_operand:SI 1 "nonimmediate_operand" "r,m")]
7956                    UNSPEC_PIC_RESTORE))]
;; Plain internal call parallel used by the "call" expander above.
7963 (define_expand "call_internal"
7964   [(parallel [(call (match_operand 0 "memory_operand")
7965 	            (match_operand 1 "general_operand"))
7966 	      (use (match_operand 2 "" ""))
7967 	      (clobber (reg:SI LR_REGNUM))])])
;; CMSE non-secure call: the callee address is wrapped in
;; UNSPEC_NONSECURE_MEM and forced into r4, as required by the
;; Armv8-M Security Extensions calling sequence (BLXNS).
7969 (define_expand "nonsecure_call_internal"
7970   [(parallel [(call (unspec:SI [(match_operand 0 "memory_operand")]
7971 			       UNSPEC_NONSECURE_MEM)
7972 		    (match_operand 1 "general_operand"))
7973 	      (use (match_operand 2 "" ""))
7974 	      (clobber (reg:SI LR_REGNUM))])]
7979     tmp = copy_to_suggested_reg (XEXP (operands[0], 0),
7980 				 gen_rtx_REG (SImode, R4_REGNUM),
7983     operands[0] = replace_equiv_address (operands[0], tmp);
;; Register-indirect call on ARMv5T+ (has BLX reg).
7986 (define_insn "*call_reg_armv5"
7987   [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7988          (match_operand 1 "" ""))
7989    (use (match_operand 2 "" ""))
7990    (clobber (reg:SI LR_REGNUM))]
7991   "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
7993   [(set_attr "type" "call")]
;; Register-indirect call on pre-ARMv5T: no BLX, so output_call emits a
;; mov lr, pc / branch sequence.  Length 12 is the worst case.
7996 (define_insn "*call_reg_arm"
7997   [(call (mem:SI (match_operand:SI 0 "s_register_operand" "r"))
7998          (match_operand 1 "" ""))
7999    (use (match_operand 2 "" ""))
8000    (clobber (reg:SI LR_REGNUM))]
8001   "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8003     return output_call (operands);
8005   ;; length is worst case, normally it is only two
8006   [(set_attr "length" "12")
8007    (set_attr "type" "call")]
;; "call_value" expander: mirror of "call" for calls that return a
;; value (operand 0 receives the result, operand 1 is the callee MEM).
;; Handles long calls, FDPIC descriptor loads and r9 restore, and CMSE
;; non-secure calls, exactly as the "call" expander does.
8011 (define_expand "call_value"
8012   [(parallel [(set (match_operand       0 "" "")
8013 	           (call (match_operand 1 "memory_operand")
8014 	                 (match_operand 2 "general_operand")))
8015 	      (use (match_operand 3 "" ""))
8016 	      (clobber (reg:SI LR_REGNUM))])]
8021     tree addr = MEM_EXPR (operands[1]);
8023     /* In an untyped call, we can get NULL for operand 2.  */
8024     if (operands[3] == 0)
8025       operands[3] = const0_rtx;
8027     /* Decide if we should generate indirect calls by loading the
8028        32-bit address of the callee into a register before performing the
8030     callee = XEXP (operands[1], 0);
8031     if (GET_CODE (callee) == SYMBOL_REF
8032 	? arm_is_long_call_p (SYMBOL_REF_DECL (callee))
8034       XEXP (operands[1], 0) = force_reg (Pmode, callee);
8036     if (TARGET_FDPIC && !SYMBOL_REF_P (XEXP (operands[1], 0)))
8037 	/* Indirect call: set r9 with FDPIC value of callee.  */
8038 	XEXP (operands[1], 0)
8039 	  = arm_load_function_descriptor (XEXP (operands[1], 0));
8041     if (detect_cmse_nonsecure_call (addr))
8043 	pat = gen_nonsecure_call_value_internal (operands[0], operands[1],
8044 						 operands[2], operands[3]);
8045 	emit_call_insn (pat);
8049 	pat = gen_call_value_internal (operands[0], operands[1],
8050 				       operands[2], operands[3]);
8051 	arm_emit_call_insn (pat, XEXP (operands[1], 0), false);
8054     /* Restore FDPIC register (r9) after call.  */
8057 	rtx fdpic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8058 	rtx initial_fdpic_reg
8059 	    = get_hard_reg_initial_val (Pmode, FDPIC_REGNUM);
8061 	emit_insn (gen_restore_pic_register_after_call (fdpic_reg,
8062 							initial_fdpic_reg));
;; Plain internal call-with-value parallel used by "call_value" above.
8069 (define_expand "call_value_internal"
8070   [(parallel [(set (match_operand       0 "" "")
8071 	           (call (match_operand 1 "memory_operand")
8072 	                 (match_operand 2 "general_operand")))
8073 	      (use (match_operand 3 "" ""))
8074 	      (clobber (reg:SI LR_REGNUM))])])
;; CMSE non-secure call returning a value; callee address forced into
;; r4 as in nonsecure_call_internal.
8076 (define_expand "nonsecure_call_value_internal"
8077   [(parallel [(set (match_operand       0 "" "")
8078 	           (call (unspec:SI [(match_operand 1 "memory_operand")]
8079 			            UNSPEC_NONSECURE_MEM)
8080 	                 (match_operand 2 "general_operand")))
8081 	      (use (match_operand 3 "" ""))
8082 	      (clobber (reg:SI LR_REGNUM))])]
8087     tmp = copy_to_suggested_reg (XEXP (operands[1], 0),
8088 				 gen_rtx_REG (SImode, R4_REGNUM),
8091     operands[1] = replace_equiv_address (operands[1], tmp);
;; Value-returning register-indirect call, ARMv5T+ (BLX reg).
8094 (define_insn "*call_value_reg_armv5"
8095   [(set (match_operand 0 "" "")
8096         (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8097 	      (match_operand 2 "" "")))
8098    (use (match_operand 3 "" ""))
8099    (clobber (reg:SI LR_REGNUM))]
8100   "TARGET_ARM && arm_arch5t && !SIBLING_CALL_P (insn)"
8102   [(set_attr "type" "call")]
;; Value-returning register-indirect call, pre-ARMv5T; output_call is
;; given &operands[1] so the callee register is its operand 0.
8105 (define_insn "*call_value_reg_arm"
8106   [(set (match_operand 0 "" "")
8107         (call (mem:SI (match_operand:SI 1 "s_register_operand" "r"))
8108 	      (match_operand 2 "" "")))
8109    (use (match_operand 3 "" ""))
8110    (clobber (reg:SI LR_REGNUM))]
8111   "TARGET_ARM && !arm_arch5t && !SIBLING_CALL_P (insn)"
8113     return output_call (&operands[1]);
8115   [(set_attr "length" "12")
8116    (set_attr "type" "call")]
8119 ;; Allow calls to SYMBOL_REFs specially as they are not valid general addresses
8120 ;; The 'a' causes the operand to be treated as an address, i.e. no '#' output.
;; Direct call to a symbol (not a long call).  When the callee is a
;; local function whose instruction-set mode may differ, BLX is used
;; (ARMv5T+) so the mode switch happens in the call itself; otherwise
;; plain BL, with (PLT) annotation when PLT relocations are needed.
8122 (define_insn "*call_symbol"
8123   [(call (mem:SI (match_operand:SI 0 "" ""))
8124 	 (match_operand 1 "" ""))
8125    (use (match_operand 2 "" ""))
8126    (clobber (reg:SI LR_REGNUM))]
8128    && !SIBLING_CALL_P (insn)
8129    && (GET_CODE (operands[0]) == SYMBOL_REF)
8130    && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[0]))"
8133     rtx op = operands[0];
8135     /* Switch mode now when possible.  */
8136     if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8137         && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8138       return NEED_PLT_RELOC ? \"blx%?\\t%a0(PLT)\" : \"blx%?\\t(%a0)\";
8140     return NEED_PLT_RELOC ? \"bl%?\\t%a0(PLT)\" : \"bl%?\\t%a0\";
8142   [(set_attr "type" "call")]
;; Value-returning variant of *call_symbol; operand 1 is the callee
;; symbol, same BLX/BL selection logic.
8145 (define_insn "*call_value_symbol"
8146   [(set (match_operand 0 "" "")
8147 	(call (mem:SI (match_operand:SI 1 "" ""))
8148 	(match_operand:SI 2 "" "")))
8149    (use (match_operand 3 "" ""))
8150    (clobber (reg:SI LR_REGNUM))]
8152    && !SIBLING_CALL_P (insn)
8153    && (GET_CODE (operands[1]) == SYMBOL_REF)
8154    && !arm_is_long_call_p (SYMBOL_REF_DECL (operands[1]))"
8157     rtx op = operands[1];
8159     /* Switch mode now when possible.  */
8160     if (SYMBOL_REF_DECL (op) && !TREE_PUBLIC (SYMBOL_REF_DECL (op))
8161         && arm_arch5t && arm_change_mode_p (SYMBOL_REF_DECL (op)))
8162       return NEED_PLT_RELOC ? \"blx%?\\t%a1(PLT)\" : \"blx%?\\t(%a1)\";
8164     return NEED_PLT_RELOC ? \"bl%?\\t%a1(PLT)\" : \"bl%?\\t%a1\";
8166   [(set_attr "type" "call")]
;; Internal parallel for a sibling (tail) call; no LR clobber since the
;; caller's return address is reused.
8169 (define_expand "sibcall_internal"
8170   [(parallel [(call (match_operand 0 "memory_operand")
8171 		    (match_operand 1 "general_operand"))
8173 	      (use (match_operand 2 "" ""))])])
8175 ;; We may also be able to do sibcalls for Thumb, but it's much harder...
;; "sibcall" expander: forces the callee address into a register unless
;; it is already a register or a non-long-call SYMBOL_REF, then emits
;; the sibcall parallel via arm_emit_call_insn (sibcall=true).
8176 (define_expand "sibcall"
8177   [(parallel [(call (match_operand 0 "memory_operand")
8178 		    (match_operand 1 "general_operand"))
8180 	      (use (match_operand 2 "" ""))])]
8186     if ((!REG_P (XEXP (operands[0], 0))
8187 	 && GET_CODE (XEXP (operands[0], 0)) != SYMBOL_REF)
8188 	|| (GET_CODE (XEXP (operands[0], 0)) == SYMBOL_REF
8189 	    && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[0], 0)))))
8190      XEXP (operands[0], 0) = force_reg (SImode, XEXP (operands[0], 0));
8192     if (operands[2] == NULL_RTX)
8193       operands[2] = const0_rtx;
8195     pat = gen_sibcall_internal (operands[0], operands[1], operands[2]);
8196     arm_emit_call_insn (pat, operands[0], true);
;; Internal parallel for a value-returning sibling call.
8201 (define_expand "sibcall_value_internal"
8202   [(parallel [(set (match_operand 0 "" "")
8203 		   (call (match_operand 1 "memory_operand")
8204 			 (match_operand 2 "general_operand")))
8206 	      (use (match_operand 3 "" ""))])])
;; "sibcall_value" expander: same callee-address legitimization as
;; "sibcall", for calls whose result is used.
8208 (define_expand "sibcall_value"
8209   [(parallel [(set (match_operand 0 "" "")
8210 		   (call (match_operand 1 "memory_operand")
8211 			 (match_operand 2 "general_operand")))
8213 	      (use (match_operand 3 "" ""))])]
8219     if ((!REG_P (XEXP (operands[1], 0))
8220 	 && GET_CODE (XEXP (operands[1], 0)) != SYMBOL_REF)
8221 	|| (GET_CODE (XEXP (operands[1], 0)) == SYMBOL_REF
8222 	    && arm_is_long_call_p (SYMBOL_REF_DECL (XEXP (operands[1], 0)))))
8223      XEXP (operands[1], 0) = force_reg (SImode, XEXP (operands[1], 0));
8225     if (operands[3] == NULL_RTX)
8226       operands[3] = const0_rtx;
8228     pat = gen_sibcall_value_internal (operands[0], operands[1],
8229                                       operands[2], operands[3]);
8230     arm_emit_call_insn (pat, operands[1], true);
;; Sibling-call insn.  Alternative 0 ("Cs") is a register-indirect tail
;; call via BX (ARMv4T/v5T+) or mov pc; alternative 1 ("US") is a direct
;; branch to a symbol, with (PLT) when required.
8235 (define_insn "*sibcall_insn"
8236  [(call (mem:SI (match_operand:SI 0 "call_insn_operand" "Cs, US"))
8237 	(match_operand 1 "" ""))
8239   (use (match_operand 2 "" ""))]
8240   "TARGET_32BIT && SIBLING_CALL_P (insn)"
8242     if (which_alternative == 1)
8243       return NEED_PLT_RELOC ? \"b%?\\t%a0(PLT)\" : \"b%?\\t%a0\";
8246 	if (arm_arch5t || arm_arch4t)
8247 	  return \"bx%?\\t%0\\t%@ indirect register sibling call\";
8249 	  return \"mov%?\\t%|pc, %0\\t%@ indirect register sibling call\";
8252   [(set_attr "type" "call")]
;; Value-returning variant of *sibcall_insn; same BX / b selection.
8255 (define_insn "*sibcall_value_insn"
8256  [(set (match_operand 0 "" "")
8257        (call (mem:SI (match_operand:SI 1 "call_insn_operand" "Cs,US"))
8258 	     (match_operand 2 "" "")))
8260   (use (match_operand 3 "" ""))]
8261   "TARGET_32BIT && SIBLING_CALL_P (insn)"
8263     if (which_alternative == 1)
8264      return NEED_PLT_RELOC ? \"b%?\\t%a1(PLT)\" : \"b%?\\t%a1\";
8267 	if (arm_arch5t || arm_arch4t)
8268 	  return \"bx%?\\t%1\";
8270 	  return \"mov%?\\t%|pc, %1\\t@ indirect sibling call \";
8273   [(set_attr "type" "call")]
;; Function-return patterns.  <return_str> is an iterator-substituted
;; prefix (plain and "simple_" variants); Thumb-2 returns go through
;; thumb2_expand_return.
8276 (define_expand "<return_str>return"
8278   "(TARGET_ARM || (TARGET_THUMB2
8279                    && ARM_FUNC_TYPE (arm_current_func_type ()) == ARM_FT_NORMAL
8280                    && !IS_STACKALIGN (arm_current_func_type ())))
8281     <return_cond_false>"
8286     thumb2_expand_return (<return_simple_p>);
8293 ;; Often the return insn will be the same as loading from memory, so set attr
;; Unconditional ARM-mode return when a single return insn suffices.
;; The ccfsm check lets final absorb the return into a predicated block.
8294 (define_insn "*arm_return"
8296   "TARGET_ARM && USE_RETURN_INSN (FALSE)"
8299     if (arm_ccfsm_state == 2)
8301         arm_ccfsm_state += 2;
8304     return output_return_instruction (const_true_rtx, true, false, false);
8306   [(set_attr "type" "load_4")
8307    (set_attr "length" "12")
8308    (set_attr "predicable" "yes")]
;; Conditional return: returns when the comparison in operand 0 holds.
8311 (define_insn "*cond_<return_str>return"
8313 	(if_then_else (match_operator 0 "arm_comparison_operator"
8314 		       [(match_operand 1 "cc_register" "") (const_int 0)])
8317   "TARGET_ARM  <return_cond_true>"
8320     if (arm_ccfsm_state == 2)
8322         arm_ccfsm_state += 2;
8325     return output_return_instruction (operands[0], true, false,
8328   [(set_attr "conds" "use")
8329    (set_attr "length" "12")
8330    (set_attr "type" "load_4")]
;; As above but with the branch arms swapped, so the condition passed
;; to output_return_instruction is inverted (third argument true).
8333 (define_insn "*cond_<return_str>return_inverted"
8335 	(if_then_else (match_operator 0 "arm_comparison_operator"
8336 		       [(match_operand 1 "cc_register" "") (const_int 0)])
8339   "TARGET_ARM <return_cond_true>"
8342     if (arm_ccfsm_state == 2)
8344         arm_ccfsm_state += 2;
8347     return output_return_instruction (operands[0], true, true,
8350   [(set_attr "conds" "use")
8351    (set_attr "length" "12")
8352    (set_attr "type" "load_4")]
;; Simple return (no epilogue work needed): single 4-byte instruction.
8355 (define_insn "*arm_simple_return"
8360     if (arm_ccfsm_state == 2)
8362         arm_ccfsm_state += 2;
8365     return output_return_instruction (const_true_rtx, true, false, true);
8367   [(set_attr "type" "branch")
8368    (set_attr "length" "4")
8369    (set_attr "predicable" "yes")]
8372 ;; Generate a sequence of instructions to determine if the processor is
8373 ;; in 26-bit or 32-bit mode, and return the appropriate return address
;; Sets operand 0 to the PC mask for the detected mode: 0x03fffffc for
;; 26-bit (PC holds flags in the top bits) or all-ones for 32-bit.
8376 (define_expand "return_addr_mask"
8378 	 (compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8380       (set (match_operand:SI 0 "s_register_operand")
8381            (if_then_else:SI (eq (match_dup 1) (const_int 0))
8383 			    (const_int 67108860)))] ; 0x03fffffc
8386     operands[1] = gen_rtx_REG (CC_NOOVmode, CC_REGNUM);
;; teq pc, pc sets Z iff the two reads agree, distinguishing 26-bit
;; from 32-bit PC behaviour; the first teq ensures a defined CC state.
8389 (define_insn "*check_arch2"
8390   [(set (match_operand:CC_NOOV 0 "cc_register" "")
8391 	(compare:CC_NOOV (unspec [(const_int 0)] UNSPEC_CHECK_ARCH)
8394   "teq\\t%|r0, %|r0\;teq\\t%|pc, %|pc"
8395   [(set_attr "length" "8")
8396    (set_attr "conds" "set")
8397    (set_attr "type" "multiple")]
8400 ;; Call subroutine returning any type.
;; Expands __builtin_apply: calls operand 0, then stores every possible
;; result register (described by the parallel in operand 2) into the
;; result block at operand 1.  r0 is widened to TImode so all four
;; core result registers are captured via a store-multiple.
8402 (define_expand "untyped_call"
8403   [(parallel [(call (match_operand 0 "" "")
8405 	      (match_operand 1 "" "")
8406 	      (match_operand 2 "" "")])]
8407   "TARGET_EITHER && !TARGET_FDPIC"
8411     rtx par = gen_rtx_PARALLEL (VOIDmode,
8412 				rtvec_alloc (XVECLEN (operands[2], 0)));
8413     rtx addr = gen_reg_rtx (Pmode);
8417     emit_move_insn (addr, XEXP (operands[1], 0));
8418     mem = change_address (operands[1], BLKmode, addr);
8420     for (i = 0; i < XVECLEN (operands[2], 0); i++)
8422 	rtx src = SET_SRC (XVECEXP (operands[2], 0, i));
8424 	/* Default code only uses r0 as a return value, but we could
8425 	   be using anything up to 4 registers.  */
8426 	if (REGNO (src) == R0_REGNUM)
8427 	  src = gen_rtx_REG (TImode, R0_REGNUM);
8429         XVECEXP (par, 0, i) = gen_rtx_EXPR_LIST (VOIDmode, src,
8431 	size += GET_MODE_SIZE (GET_MODE (src));
8434     emit_call_insn (gen_call_value (par, operands[0], const0_rtx, NULL));
8438     for (i = 0; i < XVECLEN (par, 0); i++)
8440 	HOST_WIDE_INT offset = 0;
8441 	rtx reg = XEXP (XVECEXP (par, 0, i), 0);
8444 	  emit_move_insn (addr, plus_constant (Pmode, addr, size));
8446 	mem = change_address (mem, GET_MODE (reg), NULL);
8447 	if (REGNO (reg) == R0_REGNUM)
8449 	    /* On thumb we have to use a write-back instruction.  */
8450 	    emit_insn (arm_gen_store_multiple (arm_regs_in_sequence, 4, addr,
8451 		       TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8452 	    size = TARGET_ARM ? 16 : 0;
8456 	    emit_move_insn (mem, reg);
8457 	    size = GET_MODE_SIZE (GET_MODE (reg));
8461     /* The optimizer does not know that the call sets the function value
8462        registers we stored in the result block.  We avoid problems by
8463        claiming that all hard registers are used and clobbered at this
8465     emit_insn (gen_blockage ());
;; Expands __builtin_return: reloads every result register from the
;; result block built by untyped_call (operand 0), emits USEs so the
;; values are live across the return, then performs a naked return.
;; r0 is reloaded with a load-multiple, mirroring untyped_call's store.
8471 (define_expand "untyped_return"
8472   [(match_operand:BLK 0 "memory_operand")
8473    (match_operand 1 "" "")]
8474   "TARGET_EITHER && !TARGET_FDPIC"
8478     rtx addr = gen_reg_rtx (Pmode);
8482     emit_move_insn (addr, XEXP (operands[0], 0));
8483     mem = change_address (operands[0], BLKmode, addr);
8485     for (i = 0; i < XVECLEN (operands[1], 0); i++)
8487 	HOST_WIDE_INT offset = 0;
8488 	rtx reg = SET_DEST (XVECEXP (operands[1], 0, i));
8491 	  emit_move_insn (addr, plus_constant (Pmode, addr, size));
8493 	mem = change_address (mem, GET_MODE (reg), NULL);
8494 	if (REGNO (reg) == R0_REGNUM)
8496 	    /* On thumb we have to use a write-back instruction.  */
8497 	    emit_insn (arm_gen_load_multiple (arm_regs_in_sequence, 4, addr,
8498 		       TARGET_THUMB ? TRUE : FALSE, mem, &offset));
8499 	    size = TARGET_ARM ? 16 : 0;
8503 	    emit_move_insn (reg, mem);
8504 	    size = GET_MODE_SIZE (GET_MODE (reg));
8508     /* Emit USE insns before the return.  */
8509     for (i = 0; i < XVECLEN (operands[1], 0); i++)
8510       emit_use (SET_DEST (XVECEXP (operands[1], 0, i)));
8512     /* Construct the return.  */
8513     expand_naked_return ();
8519 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
8520 ;; all of memory.  This blocks insns from being moved across this point.
;; Zero-length scheduling barrier; emits no code.
8522 (define_insn "blockage"
8523   [(unspec_volatile [(const_int 0)] VUNSPEC_BLOCKAGE)]
8526   [(set_attr "length" "0")
8527    (set_attr "type" "block")]
8530 ;; Since we hard code r0 here use the 'o' constraint to prevent
8531 ;; provoking undefined behaviour in the hardware with putting out
8532 ;; auto-increment operations with potentially r0 as the base register.
;; Single stack-probe store used by -fstack-clash-protection / probing.
8533 (define_insn "probe_stack"
8534   [(set (match_operand:SI 0 "memory_operand" "=o")
8535         (unspec:SI [(const_int 0)] UNSPEC_PROBE_STACK))]
8538   [(set_attr "type" "store_4")
8539    (set_attr "predicable" "yes")]
;; Probe a range of stack pages; the loop is emitted by
;; output_probe_stack_range (operand 1 is tied to operand 0).
8542 (define_insn "probe_stack_range"
8543   [(set (match_operand:SI 0 "register_operand" "=r")
8544 	(unspec_volatile:SI [(match_operand:SI 1 "register_operand" "0")
8545 			     (match_operand:SI 2 "register_operand" "r")]
8546 			     VUNSPEC_PROBE_STACK_RANGE))]
8549   return output_probe_stack_range (operands[0], operands[2]);
8551   [(set_attr "type" "multiple")
8552    (set_attr "conds" "clob")]
8555 ;; Named patterns for stack smashing protection.
;; Copies the stack-protector guard value into the canary slot
;; (operand 0).  Kept as one combined insn until after register
;; allocation so LRA cannot spill/reload the guard address.
8556 (define_expand "stack_protect_combined_set"
8558     [(set (match_operand:SI 0 "memory_operand")
8559 	  (unspec:SI [(match_operand:SI 1 "guard_operand")]
8561      (clobber (match_scratch:SI 2 ""))
8562      (clobber (match_scratch:SI 3 ""))])]
8567 ;; Use a separate insn from the above expand to be able to have the mem outside
8568 ;; the operand #1 when register allocation comes.  This is needed to avoid LRA
8569 ;; try to reload the guard since we need to control how PIC access is done in
8570 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
8571 ;; legitimize_pic_address ()).
;; Split computes the guard address into scratch operand 2 (through
;; legitimize_pic_address for PIC/FDPIC, or a constant-pool load),
;; then hands off to the simple *stack_protect_set_insn below.
8572 (define_insn_and_split "*stack_protect_combined_set_insn"
8573   [(set (match_operand:SI 0 "memory_operand" "=m,m")
8574 	(unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
8576    (clobber (match_scratch:SI 2 "=&l,&r"))
8577    (clobber (match_scratch:SI 3 "=&l,&r"))]
8581   [(parallel [(set (match_dup 0) (unspec:SI [(mem:SI (match_dup 2))]
8583 	      (clobber (match_dup 2))])]
8591 	pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8593 	pic_reg = operands[3];
8595       /* Forces recomputing of GOT base now.  */
8596       legitimize_pic_address (operands[1], SImode, operands[2], pic_reg,
8597 			      true /*compute_now*/);
8601       if (address_operand (operands[1], SImode))
8602 	operands[2] = operands[1];
8605 	  rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
8606 	  emit_move_insn (operands[2], mem);
8610  [(set_attr "arch" "t1,32")]
8613 ;; DO NOT SPLIT THIS INSN.  It's important for security reasons that the
8614 ;; canary value does not live beyond the life of this sequence.
;; Loads the guard through operand 1, stores it to the canary slot,
;; then immediately zeroes the register so the secret does not linger.
8615 (define_insn "*stack_protect_set_insn"
8616   [(set (match_operand:SI 0 "memory_operand" "=m,m")
8617 	(unspec:SI [(mem:SI (match_operand:SI 1 "register_operand" "+&l,&r"))]
8619    (clobber (match_dup 1))]
8622    ldr\\t%1, [%1]\;str\\t%1, %0\;movs\t%1, #0
8623    ldr\\t%1, [%1]\;str\\t%1, %0\;mov\t%1, #0"
8624   [(set_attr "length" "8,12")
8625    (set_attr "conds" "clob,nocond")
8626    (set_attr "type" "multiple")
8627    (set_attr "arch" "t1,32")]
;; Compare the canary slot (operand 0) against the live guard value and
;; branch to operand 2 on equality; kept combined until after RA for
;; the same PIC-control reason as stack_protect_combined_set.
8630 (define_expand "stack_protect_combined_test"
8634 	 (eq (match_operand:SI 0 "memory_operand")
8635 	     (unspec:SI [(match_operand:SI 1 "guard_operand")]
8637 	 (label_ref (match_operand 2))
8639      (clobber (match_scratch:SI 3 ""))
8640      (clobber (match_scratch:SI 4 ""))
8641      (clobber (reg:CC CC_REGNUM))])]
8646 ;; Use a separate insn from the above expand to be able to have the mem outside
8647 ;; the operand #1 when register allocation comes.  This is needed to avoid LRA
8648 ;; try to reload the guard since we need to control how PIC access is done in
8649 ;; the -fpic/-fPIC case (see COMPUTE_NOW parameter when calling
8650 ;; legitimize_pic_address ()).
;; Split computes the guard address into scratch operand 3, performs
;; the comparison via arm_stack_protect_test_insn (32-bit, CC-based) or
;; thumb1_stack_protect_test_insn (register result + cbranch), then
;; emits the conditional branch to operand 2.
8651 (define_insn_and_split "*stack_protect_combined_test_insn"
8654 	 (eq (match_operand:SI 0 "memory_operand" "m,m")
8655 	     (unspec:SI [(mem:SI (match_operand:SI 1 "guard_addr_operand" "X,X"))]
8657 	 (label_ref (match_operand 2))
8659    (clobber (match_scratch:SI 3 "=&l,&r"))
8660    (clobber (match_scratch:SI 4 "=&l,&r"))
8661    (clobber (reg:CC CC_REGNUM))]
8674 	pic_reg = gen_rtx_REG (Pmode, FDPIC_REGNUM);
8676 	pic_reg = operands[4];
8678       /* Forces recomputing of GOT base now.  */
8679       legitimize_pic_address (operands[1], SImode, operands[3], pic_reg,
8680 			      true /*compute_now*/);
8684       if (address_operand (operands[1], SImode))
8685 	operands[3] = operands[1];
8688 	  rtx mem = XEXP (force_const_mem (SImode, operands[1]), 0);
8689 	  emit_move_insn (operands[3], mem);
8694       emit_insn (gen_arm_stack_protect_test_insn (operands[4], operands[0],
8696       rtx cc_reg = gen_rtx_REG (CC_Zmode, CC_REGNUM);
8697       eq = gen_rtx_EQ (CC_Zmode, cc_reg, const0_rtx);
8698       emit_jump_insn (gen_arm_cond_branch (operands[2], eq, cc_reg));
8702       emit_insn (gen_thumb1_stack_protect_test_insn (operands[4], operands[0],
8704       eq = gen_rtx_EQ (VOIDmode, operands[4], const0_rtx);
8705       emit_jump_insn (gen_cbranchsi4 (eq, operands[4], const0_rtx,
8710   [(set_attr "arch" "t1,32")]
;; Load guard and canary, XOR them into a scratch setting the Z flag;
;; Z set means the canary is intact.  Operand 2 (guard pointer) is
;; clobbered so the guard's address does not outlive the test.
8713 (define_insn "arm_stack_protect_test_insn"
8714   [(set (reg:CC_Z CC_REGNUM)
8715 	(compare:CC_Z (unspec:SI [(match_operand:SI 1 "memory_operand" "m,m")
8716 				  (mem:SI (match_operand:SI 2 "register_operand" "+l,r"))]
8719    (clobber (match_operand:SI 0 "register_operand" "=&l,&r"))
8720    (clobber (match_dup 2))]
8722   "ldr\t%0, [%2]\;ldr\t%2, %1\;eors\t%0, %2, %0"
8723   [(set_attr "length" "8,12")
8724    (set_attr "conds" "set")
8725    (set_attr "type" "multiple")
8726    (set_attr "arch" "t,32")]
;; Switch-statement dispatch.  Biases the index by the lower bound when
;; nonzero, then selects the appropriate internal casesi pattern for
;; ARM / Thumb-1 PIC / Thumb-2 (PIC or not).
8729 (define_expand "casesi"
8730   [(match_operand:SI 0 "s_register_operand")	; index to jump on
8731    (match_operand:SI 1 "const_int_operand")	; lower bound
8732    (match_operand:SI 2 "const_int_operand")	; total range
8733    (match_operand:SI 3 "" "")			; table label
8734    (match_operand:SI 4 "" "")]			; Out of range label
8735   "(TARGET_32BIT || optimize_size || flag_pic) && !target_pure_code"
8738     enum insn_code code;
8739     if (operands[1] != const0_rtx)
8741 	rtx reg = gen_reg_rtx (SImode);
8743 	emit_insn (gen_addsi3 (reg, operands[0],
8744 			       gen_int_mode (-INTVAL (operands[1]),
8750       code = CODE_FOR_arm_casesi_internal;
8751     else if (TARGET_THUMB1)
8752       code = CODE_FOR_thumb1_casesi_internal_pic;
8754       code = CODE_FOR_thumb2_casesi_internal_pic;
8756       code = CODE_FOR_thumb2_casesi_internal;
8758     if (!insn_data[(int) code].operand[1].predicate(operands[2], SImode))
8759       operands[2] = force_reg (SImode, operands[2]);
8761     emit_jump_insn (GEN_FCN ((int) code) (operands[0], operands[2],
8762 					  operands[3], operands[4]));
8767 ;; The USE in this pattern is needed to tell flow analysis that this is
8768 ;; a CASESI insn.  It has no other purpose.
;; ARM-mode casesi parallel: builds the table-lookup MEM
;; (index * 4 + table label) and marks it read-only and non-trapping.
8769 (define_expand "arm_casesi_internal"
8770   [(parallel [(set (pc)
8772 		    (leu (match_operand:SI 0 "s_register_operand")
8773 			 (match_operand:SI 1 "arm_rhs_operand"))
8775 		   (label_ref:SI (match_operand 3 ""))))
8776 	      (clobber (reg:CC CC_REGNUM))
8777 	      (use (label_ref:SI (match_operand 2 "")))])]
8780     operands[4] = gen_rtx_MULT (SImode, operands[0], GEN_INT (4));
8781     operands[4] = gen_rtx_PLUS (SImode, operands[4],
8782 				gen_rtx_LABEL_REF (SImode, operands[2]));
8783     operands[4] = gen_rtx_MEM (SImode, operands[4]);
8784     MEM_READONLY_P (operands[4]) = 1;
8785     MEM_NOTRAP_P (operands[4]) = 1;
;; Emits the 3-insn dispatch: compare index to range, then either an
;; addls into pc (branch-table of branches) or an ldrls of pc from the
;; jump table, falling through to the out-of-range branch %l3.
8788 (define_insn "*arm_casesi_internal"
8789   [(parallel [(set (pc)
8791 		    (leu (match_operand:SI 0 "s_register_operand" "r")
8792 			 (match_operand:SI 1 "arm_rhs_operand" "rI"))
8793 		    (mem:SI (plus:SI (mult:SI (match_dup 0) (const_int 4))
8794 				     (label_ref:SI (match_operand 2 "" ""))))
8795 		    (label_ref:SI (match_operand 3 "" ""))))
8796 	      (clobber (reg:CC CC_REGNUM))
8797 	      (use (label_ref:SI (match_dup 2)))])]
8801     return \"cmp\\t%0, %1\;addls\\t%|pc, %|pc, %0, asl #2\;b\\t%l3\";
8802   return \"cmp\\t%0, %1\;ldrls\\t%|pc, [%|pc, %0, asl #2]\;b\\t%l3\";
8804   [(set_attr "conds" "clob")
8805    (set_attr "length" "12")
8806    (set_attr "type" "multiple")]
;; Computed goto.  Thumb-2 has no "mov pc, reg", so the low (Thumb)
;; bit is OR-ed into the target and BX is used instead.
8809 (define_expand "indirect_jump"
8811 	(match_operand:SI 0 "s_register_operand"))]
8814     /* Thumb-2 doesn't have mov pc, reg.  Explicitly set the low bit of the
8815        address and use bx.  */
8819 	  tmp = gen_reg_rtx (SImode);
8820 	  emit_insn (gen_iorsi3 (tmp, operands[0], GEN_INT(1)));
8826 ;; NB Never uses BX.
;; ARM-mode register-indirect jump via mov pc (never BX, per note).
8827 (define_insn "*arm_indirect_jump"
8829 	(match_operand:SI 0 "s_register_operand" "r"))]
8831   "mov%?\\t%|pc, %0\\t%@ indirect register jump"
8832   [(set_attr "predicable" "yes")
8833    (set_attr "type" "branch")]
;; Indirect jump loading the target straight from memory into pc.
8836 (define_insn "*load_indirect_jump"
8838 	(match_operand:SI 0 "memory_operand" "m"))]
8840   "ldr%?\\t%|pc, %0\\t%@ indirect memory jump"
8841   [(set_attr "type" "load_4")
8842    (set_attr "pool_range" "4096")
8843    (set_attr "neg_pool_range" "4084")
8844    (set_attr "predicable" "yes")]
8854 [(set (attr "length")
8855 (if_then_else (eq_attr "is_thumb" "yes")
8858 (set_attr "type" "mov_reg")]
8862 [(trap_if (const_int 1) (const_int 0))]
8866 return \".inst\\t0xe7f000f0\";
8868 return \".inst\\t0xdeff\";
8870 [(set (attr "length")
8871 (if_then_else (eq_attr "is_thumb" "yes")
8874 (set_attr "type" "trap")
8875 (set_attr "conds" "unconditional")]
8879 ;; Patterns to allow combination of arithmetic, cond code and shifts
;; <arith_shift_insn> iterates over shiftable ALU ops.  A multiply by a
;; power of two is emitted as the op with an LSL-shifted operand
;; (%b3 prints log2 of the constant).
8881 (define_insn "*<arith_shift_insn>_multsi"
8882   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8884 	 (mult:SI (match_operand:SI 2 "s_register_operand" "r,r")
8885 		  (match_operand:SI 3 "power_of_two_operand" ""))
8886 	 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>")))]
8888   "<arith_shift_insn>%?\\t%0, %1, %2, lsl %b3"
8889   [(set_attr "predicable" "yes")
8890    (set_attr "shift" "2")
8891    (set_attr "arch" "a,t2")
8892    (set_attr "type" "alu_shift_imm")])
;; ALU op with a shifted register operand (immediate or register shift
;; amount); MULT is excluded because *_multsi above handles it.
8894 (define_insn "*<arith_shift_insn>_shiftsi"
8895   [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8897 	 (match_operator:SI 2 "shift_nomul_operator"
8898 	  [(match_operand:SI 3 "s_register_operand" "r,r,r")
8899 	   (match_operand:SI 4 "shift_amount_operand" "M,M,r")])
8900 	 (match_operand:SI 1 "s_register_operand" "rk,<t2_binop0>,rk")))]
8901   "TARGET_32BIT && GET_CODE (operands[2]) != MULT"
8902   "<arith_shift_insn>%?\\t%0, %1, %3%S2"
8903   [(set_attr "predicable" "yes")
8904    (set_attr "shift" "3")
8905    (set_attr "arch" "a,t2,a")
8906    (set_attr "type" "alu_shift_imm,alu_shift_imm,alu_shift_reg")])
8909 [(set (match_operand:SI 0 "s_register_operand" "")
8910 (match_operator:SI 1 "shiftable_operator"
8911 [(match_operator:SI 2 "shiftable_operator"
8912 [(match_operator:SI 3 "shift_operator"
8913 [(match_operand:SI 4 "s_register_operand" "")
8914 (match_operand:SI 5 "reg_or_int_operand" "")])
8915 (match_operand:SI 6 "s_register_operand" "")])
8916 (match_operand:SI 7 "arm_rhs_operand" "")]))
8917 (clobber (match_operand:SI 8 "s_register_operand" ""))]
8920 (match_op_dup 2 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8923 (match_op_dup 1 [(match_dup 8) (match_dup 7)]))]
;; Flag-setting variant of the ALU+shift patterns: performs the op and
;; compares the result with zero (CC_NOOV), writing the result too.
;; %i1 prints the operator mnemonic; %S3 the shift.
8926 (define_insn "*arith_shiftsi_compare0"
8927   [(set (reg:CC_NOOV CC_REGNUM)
8929 	 (match_operator:SI 1 "shiftable_operator"
8930 	   [(match_operator:SI 3 "shift_operator"
8931 	     [(match_operand:SI 4 "s_register_operand" "r,r")
8932 	      (match_operand:SI 5 "shift_amount_operand" "M,r")])
8933 	    (match_operand:SI 2 "s_register_operand" "r,r")])
8935    (set (match_operand:SI 0 "s_register_operand" "=r,r")
8936 	(match_op_dup 1 [(match_op_dup 3 [(match_dup 4) (match_dup 5)])
8939   "%i1s%?\\t%0, %2, %4%S3"
8940   [(set_attr "conds" "set")
8941    (set_attr "shift" "4")
8942    (set_attr "arch" "32,a")
8943    (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; As above but only the flags are wanted; the result register is a
;; scratch.
8945 (define_insn "*arith_shiftsi_compare0_scratch"
8946   [(set (reg:CC_NOOV CC_REGNUM)
8948 	 (match_operator:SI 1 "shiftable_operator"
8949 	   [(match_operator:SI 3 "shift_operator"
8950 	     [(match_operand:SI 4 "s_register_operand" "r,r")
8951 	      (match_operand:SI 5 "shift_amount_operand" "M,r")])
8952 	    (match_operand:SI 2 "s_register_operand" "r,r")])
8954    (clobber (match_scratch:SI 0 "=r,r"))]
8956   "%i1s%?\\t%0, %2, %4%S3"
8957   [(set_attr "conds" "set")
8958    (set_attr "shift" "4")
8959    (set_attr "arch" "32,a")
8960    (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; Subtract with a shifted-register subtrahend (SUB is not commutative,
;; so it needs its own pattern rather than the shiftable_operator one).
8962 (define_insn "*sub_shiftsi"
8963   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
8964 	(minus:SI (match_operand:SI 1 "s_register_operand" "r,r")
8965 		  (match_operator:SI 2 "shift_operator"
8966 		   [(match_operand:SI 3 "s_register_operand" "r,r")
8967 		    (match_operand:SI 4 "shift_amount_operand" "M,r")])))]
8969   "sub%?\\t%0, %1, %3%S2"
8970   [(set_attr "predicable" "yes")
8971    (set_attr "predicable_short_it" "no")
8972    (set_attr "shift" "3")
8973    (set_attr "arch" "32,a")
8974    (set_attr "type" "alus_shift_imm,alus_shift_reg")])
;; Flag-setting subtract with shifted subtrahend, result kept.
8976 (define_insn "*sub_shiftsi_compare0"
8977   [(set (reg:CC_NOOV CC_REGNUM)
8979 	 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8980 		   (match_operator:SI 2 "shift_operator"
8981 		    [(match_operand:SI 3 "s_register_operand" "r,r,r")
8982 		     (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
8984    (set (match_operand:SI 0 "s_register_operand" "=r,r,r")
8985 	(minus:SI (match_dup 1)
8986 		  (match_op_dup 2 [(match_dup 3) (match_dup 4)])))]
8988   "subs%?\\t%0, %1, %3%S2"
8989   [(set_attr "conds" "set")
8990    (set_attr "shift" "3")
8991    (set_attr "arch" "32,a,a")
8992    (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; Flag-setting subtract with shifted subtrahend, result discarded
;; (scratch destination).
8994 (define_insn "*sub_shiftsi_compare0_scratch"
8995   [(set (reg:CC_NOOV CC_REGNUM)
8997 	 (minus:SI (match_operand:SI 1 "s_register_operand" "r,r,r")
8998 		   (match_operator:SI 2 "shift_operator"
8999 		    [(match_operand:SI 3 "s_register_operand" "r,r,r")
9000 		     (match_operand:SI 4 "shift_amount_operand" "M,r,M")]))
9002    (clobber (match_scratch:SI 0 "=r,r,r"))]
9004   "subs%?\\t%0, %1, %3%S2"
9005   [(set_attr "conds" "set")
9006    (set_attr "shift" "3")
9007    (set_attr "arch" "32,a,a")
9008    (set_attr "type" "alus_shift_imm,alus_shift_reg,alus_shift_imm")])
;; AND of a store-flag value with a register.  Split after reload into
;; two cond_execs: set 0 on the reversed condition, else AND-in bit 0.
;; FP condition modes use the maybe-unordered reversal.
9011 (define_insn_and_split "*and_scc"
9012   [(set (match_operand:SI 0 "s_register_operand" "=r")
9013 	(and:SI (match_operator:SI 1 "arm_comparison_operator"
9014 		 [(match_operand 2 "cc_register" "") (const_int 0)])
9015 		(match_operand:SI 3 "s_register_operand" "r")))]
9017   "#"   ; "mov%D1\\t%0, #0\;and%d1\\t%0, %3, #1"
9018   "&& reload_completed"
9019   [(cond_exec (match_dup 5) (set (match_dup 0) (const_int 0)))
9020    (cond_exec (match_dup 4) (set (match_dup 0)
9021 			         (and:SI (match_dup 3) (const_int 1))))]
9023     machine_mode mode = GET_MODE (operands[2]);
9024     enum rtx_code rc = GET_CODE (operands[1]);
9026     /* Note that operands[4] is the same as operands[1],
9027        but with VOIDmode as the result. */
9028     operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9029     if (mode == CCFPmode || mode == CCFPEmode)
9030       rc = reverse_condition_maybe_unordered (rc);
9032       rc = reverse_condition (rc);
9033     operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9035   [(set_attr "conds" "use")
9036    (set_attr "type" "multiple")
9037    (set_attr "length" "8")]
;; IOR of a store-flag value with a register.  Alternative 0 (dest tied
;; to operand 3) needs only a single conditional ORR; the split below
;; handles the untied case with a conditional move plus conditional ORR.
9040 (define_insn_and_split "*ior_scc"
9041   [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9042 	(ior:SI (match_operator:SI 1 "arm_comparison_operator"
9043 		 [(match_operand 2 "cc_register" "") (const_int 0)])
9044 		(match_operand:SI 3 "s_register_operand" "0,?r")))]
9049   "&& reload_completed
9050    && REGNO (operands [0]) != REGNO (operands[3])"
9051   ;; && which_alternative == 1
9052   ; mov%D1\\t%0, %3\;orr%d1\\t%0, %3, #1
9053   [(cond_exec (match_dup 5) (set (match_dup 0) (match_dup 3)))
9054    (cond_exec (match_dup 4) (set (match_dup 0)
9055 			         (ior:SI (match_dup 3) (const_int 1))))]
9057     machine_mode mode = GET_MODE (operands[2]);
9058     enum rtx_code rc = GET_CODE (operands[1]);
9060     /* Note that operands[4] is the same as operands[1],
9061        but with VOIDmode as the result. */
9062     operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9063     if (mode == CCFPmode || mode == CCFPEmode)
9064       rc = reverse_condition_maybe_unordered (rc);
9066       rc = reverse_condition (rc);
9067     operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[2], const0_rtx);
9069   [(set_attr "conds" "use")
9070    (set_attr "length" "4,8")
9071    (set_attr "type" "logic_imm,multiple")]
9074 ; A series of splitters for the compare_scc pattern below. Note that
9075 ; order is important.
;; NOTE(review): the "(define_split" header lines appear to have been lost in
;; this extract; each group below is one splitter. Code kept byte-for-byte.
;; (x < 0) -> logical shift right by 31 (sign bit becomes the 0/1 result).
9077 [(set (match_operand:SI 0 "s_register_operand" "")
9078 (lt:SI (match_operand:SI 1 "s_register_operand" "")
9080 (clobber (reg:CC CC_REGNUM))]
9081 "TARGET_32BIT && reload_completed"
9082 [(set (match_dup 0) (lshiftrt:SI (match_dup 1) (const_int 31)))])
;; (x >= 0) -> invert, then shift the (now inverted) sign bit down.
9085 [(set (match_operand:SI 0 "s_register_operand" "")
9086 (ge:SI (match_operand:SI 1 "s_register_operand" "")
9088 (clobber (reg:CC CC_REGNUM))]
9089 "TARGET_32BIT && reload_completed"
9090 [(set (match_dup 0) (not:SI (match_dup 1)))
9091 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 31)))])
;; (x == 0) with ARMv5T+: CLZ yields 32 only for zero; bit 5 is the answer.
9094 [(set (match_operand:SI 0 "s_register_operand" "")
9095 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9097 (clobber (reg:CC CC_REGNUM))]
9098 "arm_arch5t && TARGET_32BIT"
9099 [(set (match_dup 0) (clz:SI (match_dup 1)))
9100 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
;; (x == 0) without CLZ: subs 1-x sets carry iff x is 0 or 1; the cond_exec
;; then zeroes the result unless x was exactly 0.
9104 [(set (match_operand:SI 0 "s_register_operand" "")
9105 (eq:SI (match_operand:SI 1 "s_register_operand" "")
9107 (clobber (reg:CC CC_REGNUM))]
9108 "TARGET_32BIT && reload_completed"
9110 [(set (reg:CC CC_REGNUM)
9111 (compare:CC (const_int 1) (match_dup 1)))
9113 (minus:SI (const_int 1) (match_dup 1)))])
9114 (cond_exec (ltu:CC (reg:CC CC_REGNUM) (const_int 0))
9115 (set (match_dup 0) (const_int 0)))])
;; (x != const): adds x, -const then conditionally force the result to 1;
;; operands[3] is the negated constant built in the preparation statement.
9118 [(set (match_operand:SI 0 "s_register_operand" "")
9119 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9120 (match_operand:SI 2 "const_int_operand" "")))
9121 (clobber (reg:CC CC_REGNUM))]
9122 "TARGET_32BIT && reload_completed"
9124 [(set (reg:CC CC_REGNUM)
9125 (compare:CC (match_dup 1) (match_dup 2)))
9126 (set (match_dup 0) (plus:SI (match_dup 1) (match_dup 3)))])
9127 (cond_exec (ne:CC (reg:CC CC_REGNUM) (const_int 0))
9128 (set (match_dup 0) (const_int 1)))]
9130 operands[3] = gen_int_mode (-INTVAL (operands[2]), SImode);
;; (x != y) general form: subs then conditionally set 1 when non-zero.
9134 [(set (match_operand:SI 0 "s_register_operand" "")
9135 (ne:SI (match_operand:SI 1 "s_register_operand" "")
9136 (match_operand:SI 2 "arm_add_operand" "")))
9137 (clobber (reg:CC CC_REGNUM))]
9138 "TARGET_32BIT && reload_completed"
9140 [(set (reg:CC_NOOV CC_REGNUM)
9141 (compare:CC_NOOV (minus:SI (match_dup 1) (match_dup 2))
9143 (set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))])
9144 (cond_exec (ne:CC_NOOV (reg:CC_NOOV CC_REGNUM) (const_int 0))
9145 (set (match_dup 0) (const_int 1)))])
;; Generic store-condition-code: op0 = (op2 <cmp> op3) as 0/1. Splits after
;; reload into a compare followed by two predicated moves of 0 and 1.
;; SELECT_CC_MODE picks the CC mode; operands[4] is the reversed condition
;; (via reverse_condition_maybe_unordered for FP modes), operands[5] the
;; original. NOTE(review): extract drops some lines; code kept byte-for-byte.
9147 (define_insn_and_split "*compare_scc"
9148 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9149 (match_operator:SI 1 "arm_comparison_operator"
9150 [(match_operand:SI 2 "s_register_operand" "r,r")
9151 (match_operand:SI 3 "arm_add_operand" "rI,L")]))
9152 (clobber (reg:CC CC_REGNUM))]
9155 "&& reload_completed"
9156 [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 3)))
9157 (cond_exec (match_dup 4) (set (match_dup 0) (const_int 0)))
9158 (cond_exec (match_dup 5) (set (match_dup 0) (const_int 1)))]
9161 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
9162 operands[2], operands[3]);
9163 enum rtx_code rc = GET_CODE (operands[1]);
9165 tmp1 = gen_rtx_REG (mode, CC_REGNUM);
9167 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9168 if (mode == CCFPmode || mode == CCFPEmode)
9169 rc = reverse_condition_maybe_unordered (rc);
9171 rc = reverse_condition (rc);
9172 operands[4] = gen_rtx_fmt_ee (rc, VOIDmode, tmp1, const0_rtx);
9174 [(set_attr "type" "multiple")]
9177 ;; Attempt to improve the sequence generated by the compare_scc splitters
9178 ;; not to use conditional execution.
;; NOTE(review): the "(define_peephole2" header lines appear to have been
;; lost in this extract; each group below is one peephole. Code untouched.
;; Rd = (reg1 == 0), ARMv5T+: replace cmp + two cond_exec moves with
;; CLZ + LSR #5, valid only when CC dies after the matched sequence.
9180 ;; Rd = (eq (reg1) (const_int0)) // ARMv5
9184 [(set (reg:CC CC_REGNUM)
9185 (compare:CC (match_operand:SI 1 "register_operand" "")
9187 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9188 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9189 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9190 (set (match_dup 0) (const_int 1)))]
9191 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9192 [(set (match_dup 0) (clz:SI (match_dup 1)))
9193 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
;; Rd = (reg1 == 0), no CLZ: rsbs scratch, reg1, #0 then adc-style sum of
;; reg1, -reg1 and the carry -- yields 1 only when reg1 was zero.
9196 ;; Rd = (eq (reg1) (const_int0)) // !ARMv5
9200 [(set (reg:CC CC_REGNUM)
9201 (compare:CC (match_operand:SI 1 "register_operand" "")
9203 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9204 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9205 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9206 (set (match_dup 0) (const_int 1)))
9207 (match_scratch:SI 2 "r")]
9208 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9210 [(set (reg:CC CC_REGNUM)
9211 (compare:CC (const_int 0) (match_dup 1)))
9212 (set (match_dup 2) (minus:SI (const_int 0) (match_dup 1)))])
9214 (plus:SI (plus:SI (match_dup 1) (match_dup 2))
9215 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
;; Rd = (reg1 == reg2/imm), ARMv5T+ optimising for speed:
;; sub; clz; lsr #5 -- three unconditional insns, no IT block needed.
9218 ;; Rd = (eq (reg1) (reg2/imm)) // ARMv5 and optimising for speed.
9219 ;; sub Rd, Reg1, reg2
9223 [(set (reg:CC CC_REGNUM)
9224 (compare:CC (match_operand:SI 1 "register_operand" "")
9225 (match_operand:SI 2 "arm_rhs_operand" "")))
9226 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9227 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9228 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9229 (set (match_dup 0) (const_int 1)))]
9230 "arm_arch5t && TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)
9231 && !(TARGET_THUMB2 && optimize_insn_for_size_p ())"
9232 [(set (match_dup 0) (minus:SI (match_dup 1) (match_dup 2)))
9233 (set (match_dup 0) (clz:SI (match_dup 0)))
9234 (set (match_dup 0) (lshiftrt:SI (match_dup 0) (const_int 5)))]
;; Rd = (reg1 == reg2), no CLZ or optimising for size: form the difference
;; in a scratch (operands[4], built below as MINUS or plus_constant), then
;; the rsbs/adc trick as above.
9238 ;; Rd = (eq (reg1) (reg2)) // ! ARMv5 or optimising for size.
9239 ;; sub T1, Reg1, reg2
9243 [(set (reg:CC CC_REGNUM)
9244 (compare:CC (match_operand:SI 1 "register_operand" "")
9245 (match_operand:SI 2 "arm_rhs_operand" "")))
9246 (cond_exec (ne (reg:CC CC_REGNUM) (const_int 0))
9247 (set (match_operand:SI 0 "register_operand" "") (const_int 0)))
9248 (cond_exec (eq (reg:CC CC_REGNUM) (const_int 0))
9249 (set (match_dup 0) (const_int 1)))
9250 (match_scratch:SI 3 "r")]
9251 "TARGET_32BIT && peep2_regno_dead_p (3, CC_REGNUM)"
9252 [(set (match_dup 3) (match_dup 4))
9254 [(set (reg:CC CC_REGNUM)
9255 (compare:CC (const_int 0) (match_dup 3)))
9256 (set (match_dup 0) (minus:SI (const_int 0) (match_dup 3)))])
9258 (plus:SI (plus:SI (match_dup 0) (match_dup 3))
9259 (geu:SI (reg:CC CC_REGNUM) (const_int 0))))]
9261 if (CONST_INT_P (operands[2]))
9262 operands[4] = plus_constant (SImode, operands[1], -INTVAL (operands[2]));
9264 operands[4] = gen_rtx_MINUS (SImode, operands[1], operands[2]);
;; Conditional move based on an equality test of a comparison already in a
;; CC register: op0 = op1 if (op4 <eq/ne> 0) else op2. The output code
;; emits mov%d4/mov%D4 pairs, skipping whichever move is a no-op because the
;; source is tied to the destination (alternatives 0 and 1).
;; NOTE(review): extract appears to drop some lines; code kept byte-for-byte.
9267 (define_insn "*cond_move"
9268 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
9269 (if_then_else:SI (match_operator 3 "equality_operator"
9270 [(match_operator 4 "arm_comparison_operator"
9271 [(match_operand 5 "cc_register" "") (const_int 0)])
9273 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
9274 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))]
9277 if (GET_CODE (operands[3]) == NE)
9279 if (which_alternative != 1)
9280 output_asm_insn (\"mov%D4\\t%0, %2\", operands);
9281 if (which_alternative != 0)
9282 output_asm_insn (\"mov%d4\\t%0, %1\", operands);
9285 if (which_alternative != 0)
9286 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9287 if (which_alternative != 1)
9288 output_asm_insn (\"mov%d4\\t%0, %2\", operands);
9291 [(set_attr "conds" "use")
9292 (set_attr_alternative "type"
9293 [(if_then_else (match_operand 2 "const_int_operand" "")
9294 (const_string "mov_imm")
9295 (const_string "mov_reg"))
9296 (if_then_else (match_operand 1 "const_int_operand" "")
9297 (const_string "mov_imm")
9298 (const_string "mov_reg"))
9299 (const_string "multiple")])
9300 (set_attr "length" "4,4,8")]
;; op0 = op1 <op5> (op2 <cmp4> op3), where op5 is a shiftable operator and
;; the comparison result is used as a 0/1 value. Special-cases (x < 0) as a
;; single insn using the sign bit (lsr #31); otherwise emits cmp plus
;; predicated instructions chosen by the operator (AND zeroes, MINUS negates).
;; NOTE(review): extract appears to drop some lines; code kept byte-for-byte.
9303 (define_insn "*cond_arith"
9304 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9305 (match_operator:SI 5 "shiftable_operator"
9306 [(match_operator:SI 4 "arm_comparison_operator"
9307 [(match_operand:SI 2 "s_register_operand" "r,r")
9308 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
9309 (match_operand:SI 1 "s_register_operand" "0,?r")]))
9310 (clobber (reg:CC CC_REGNUM))]
9313 if (GET_CODE (operands[4]) == LT && operands[3] == const0_rtx)
9314 return \"%i5\\t%0, %1, %2, lsr #31\";
9316 output_asm_insn (\"cmp\\t%2, %3\", operands);
9317 if (GET_CODE (operands[5]) == AND)
9318 output_asm_insn (\"mov%D4\\t%0, #0\", operands);
9319 else if (GET_CODE (operands[5]) == MINUS)
9320 output_asm_insn (\"rsb%D4\\t%0, %1, #0\", operands);
9321 else if (which_alternative != 0)
9322 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9323 return \"%i5%d4\\t%0, %1, #1\";
9325 [(set_attr "conds" "clob")
9326 (set_attr "length" "12")
9327 (set_attr "type" "multiple")]
;; op0 = op1 - (op2 <cmp4> op3): compare, optionally copy op1 into op0 when
;; they are not tied (alternative 1), then conditionally subtract 1.
;; NOTE(review): extract appears to drop some lines; code kept byte-for-byte.
9330 (define_insn "*cond_sub"
9331 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
9332 (minus:SI (match_operand:SI 1 "s_register_operand" "0,?r")
9333 (match_operator:SI 4 "arm_comparison_operator"
9334 [(match_operand:SI 2 "s_register_operand" "r,r")
9335 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
9336 (clobber (reg:CC CC_REGNUM))]
9339 output_asm_insn (\"cmp\\t%2, %3\", operands);
9340 if (which_alternative != 0)
9341 output_asm_insn (\"mov%D4\\t%0, %1\", operands);
9342 return \"sub%d4\\t%0, %1, #1\";
9344 [(set_attr "conds" "clob")
9345 (set_attr "length" "8,12")
9346 (set_attr "type" "multiple")]
;; Combine two comparisons into one dominant CC result (if-then-else with a
;; zero arm). Emits a compare plus a conditional compare (cmp or cmn chosen
;; per alternative by cmp_idx); `swap` selects which comparison dominates,
;; and Thumb-2 additionally emits an IT instruction between them.
;; NOTE(review): extract appears to drop many lines (NUM_OF_COND_CMP
;; definitions, cmp2/ite table entries, lengths); code kept byte-for-byte.
9349 (define_insn "*cmp_ite0"
9350 [(set (match_operand 6 "dominant_cc_register" "")
9353 (match_operator 4 "arm_comparison_operator"
9354 [(match_operand:SI 0 "s_register_operand"
9355 "l,l,l,r,r,r,r,r,r")
9356 (match_operand:SI 1 "arm_add_operand"
9357 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9358 (match_operator:SI 5 "arm_comparison_operator"
9359 [(match_operand:SI 2 "s_register_operand"
9360 "l,r,r,l,l,r,r,r,r")
9361 (match_operand:SI 3 "arm_add_operand"
9362 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9368 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9370 {\"cmp%d5\\t%0, %1\",
9371 \"cmp%d4\\t%2, %3\"},
9372 {\"cmn%d5\\t%0, #%n1\",
9373 \"cmp%d4\\t%2, %3\"},
9374 {\"cmp%d5\\t%0, %1\",
9375 \"cmn%d4\\t%2, #%n3\"},
9376 {\"cmn%d5\\t%0, #%n1\",
9377 \"cmn%d4\\t%2, #%n3\"}
9379 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9384 \"cmn\\t%0, #%n1\"},
9385 {\"cmn\\t%2, #%n3\",
9387 {\"cmn\\t%2, #%n3\",
9390 static const char * const ite[2] =
9395 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9396 CMP_CMP, CMN_CMP, CMP_CMP,
9397 CMN_CMP, CMP_CMN, CMN_CMN};
9399 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9401 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9402 if (TARGET_THUMB2) {
9403 output_asm_insn (ite[swap], operands);
9405 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9408 [(set_attr "conds" "set")
9409 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9410 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
9411 (set_attr "type" "multiple")
9412 (set_attr_alternative "length"
9418 (if_then_else (eq_attr "is_thumb" "no")
9421 (if_then_else (eq_attr "is_thumb" "no")
9424 (if_then_else (eq_attr "is_thumb" "no")
9427 (if_then_else (eq_attr "is_thumb" "no")
;; As *cmp_ite0 but the dominance test reverses the first comparison
;; (operands[4]); the second compare therefore uses the %D (reversed)
;; condition suffix in cmp2, and cmp1 is emitted first.
;; NOTE(review): extract appears to drop many lines; code kept byte-for-byte.
9432 (define_insn "*cmp_ite1"
9433 [(set (match_operand 6 "dominant_cc_register" "")
9436 (match_operator 4 "arm_comparison_operator"
9437 [(match_operand:SI 0 "s_register_operand"
9438 "l,l,l,r,r,r,r,r,r")
9439 (match_operand:SI 1 "arm_add_operand"
9440 "lPy,lPy,lPy,rI,L,rI,L,rI,L")])
9441 (match_operator:SI 5 "arm_comparison_operator"
9442 [(match_operand:SI 2 "s_register_operand"
9443 "l,r,r,l,l,r,r,r,r")
9444 (match_operand:SI 3 "arm_add_operand"
9445 "lPy,rI,L,lPy,lPy,rI,rI,L,L")])
9451 static const char * const cmp1[NUM_OF_COND_CMP][2] =
9455 {\"cmn\\t%0, #%n1\",
9458 \"cmn\\t%2, #%n3\"},
9459 {\"cmn\\t%0, #%n1\",
9462 static const char * const cmp2[NUM_OF_COND_CMP][2] =
9464 {\"cmp%d4\\t%2, %3\",
9465 \"cmp%D5\\t%0, %1\"},
9466 {\"cmp%d4\\t%2, %3\",
9467 \"cmn%D5\\t%0, #%n1\"},
9468 {\"cmn%d4\\t%2, #%n3\",
9469 \"cmp%D5\\t%0, %1\"},
9470 {\"cmn%d4\\t%2, #%n3\",
9471 \"cmn%D5\\t%0, #%n1\"}
9473 static const char * const ite[2] =
9478 static const int cmp_idx[9] = {CMP_CMP, CMP_CMP, CMP_CMN,
9479 CMP_CMP, CMN_CMP, CMP_CMP,
9480 CMN_CMP, CMP_CMN, CMN_CMN};
9482 comparison_dominates_p (GET_CODE (operands[5]),
9483 reverse_condition (GET_CODE (operands[4])));
9485 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9486 if (TARGET_THUMB2) {
9487 output_asm_insn (ite[swap], operands);
9489 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9492 [(set_attr "conds" "set")
9493 (set_attr "arch" "t2,t2,t2,t2,t2,any,any,any,any")
9494 (set_attr "enabled_for_short_it" "yes,no,no,no,no,no,no,no,no")
9495 (set_attr_alternative "length"
9501 (if_then_else (eq_attr "is_thumb" "no")
9504 (if_then_else (eq_attr "is_thumb" "no")
9507 (if_then_else (eq_attr "is_thumb" "no")
9510 (if_then_else (eq_attr "is_thumb" "no")
9513 (set_attr "type" "multiple")]
;; AND of two comparisons folded into a single dominant-CC compare/cond-compare
;; pair (10 alternatives, including a register-register one). Same cmp1/cmp2/
;; ite table scheme as *cmp_ite0; marked non-predicable since it sets CC.
;; NOTE(review): extract appears to drop many lines; code kept byte-for-byte.
9516 (define_insn "*cmp_and"
9517 [(set (match_operand 6 "dominant_cc_register" "")
9520 (match_operator 4 "arm_comparison_operator"
9521 [(match_operand:SI 0 "s_register_operand"
9522 "l,l,l,r,r,r,r,r,r,r")
9523 (match_operand:SI 1 "arm_add_operand"
9524 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
9525 (match_operator:SI 5 "arm_comparison_operator"
9526 [(match_operand:SI 2 "s_register_operand"
9527 "l,r,r,l,l,r,r,r,r,r")
9528 (match_operand:SI 3 "arm_add_operand"
9529 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
9534 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9536 {\"cmp%d5\\t%0, %1\",
9537 \"cmp%d4\\t%2, %3\"},
9538 {\"cmn%d5\\t%0, #%n1\",
9539 \"cmp%d4\\t%2, %3\"},
9540 {\"cmp%d5\\t%0, %1\",
9541 \"cmn%d4\\t%2, #%n3\"},
9542 {\"cmn%d5\\t%0, #%n1\",
9543 \"cmn%d4\\t%2, #%n3\"}
9545 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9550 \"cmn\\t%0, #%n1\"},
9551 {\"cmn\\t%2, #%n3\",
9553 {\"cmn\\t%2, #%n3\",
9556 static const char *const ite[2] =
9561 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
9562 CMP_CMP, CMN_CMP, CMP_CMP,
9563 CMP_CMP, CMN_CMP, CMP_CMN,
9566 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9568 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9569 if (TARGET_THUMB2) {
9570 output_asm_insn (ite[swap], operands);
9572 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9575 [(set_attr "conds" "set")
9576 (set_attr "predicable" "no")
9577 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
9578 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
9579 (set_attr_alternative "length"
9586 (if_then_else (eq_attr "is_thumb" "no")
9589 (if_then_else (eq_attr "is_thumb" "no")
9592 (if_then_else (eq_attr "is_thumb" "no")
9595 (if_then_else (eq_attr "is_thumb" "no")
9598 (set_attr "type" "multiple")]
;; IOR of two comparisons folded into a dominant-CC compare/cond-compare pair.
;; Unlike *cmp_and, the second compare uses the %D (reversed) suffix, since
;; the follow-up compare only runs when the first comparison failed.
;; NOTE(review): extract appears to drop many lines; code kept byte-for-byte.
9601 (define_insn "*cmp_ior"
9602 [(set (match_operand 6 "dominant_cc_register" "")
9605 (match_operator 4 "arm_comparison_operator"
9606 [(match_operand:SI 0 "s_register_operand"
9607 "l,l,l,r,r,r,r,r,r,r")
9608 (match_operand:SI 1 "arm_add_operand"
9609 "lPy,lPy,lPy,rI,L,r,rI,L,rI,L")])
9610 (match_operator:SI 5 "arm_comparison_operator"
9611 [(match_operand:SI 2 "s_register_operand"
9612 "l,r,r,l,l,r,r,r,r,r")
9613 (match_operand:SI 3 "arm_add_operand"
9614 "lPy,rI,L,lPy,lPy,r,rI,rI,L,L")]))
9619 static const char *const cmp1[NUM_OF_COND_CMP][2] =
9623 {\"cmn\\t%0, #%n1\",
9626 \"cmn\\t%2, #%n3\"},
9627 {\"cmn\\t%0, #%n1\",
9630 static const char *const cmp2[NUM_OF_COND_CMP][2] =
9632 {\"cmp%D4\\t%2, %3\",
9633 \"cmp%D5\\t%0, %1\"},
9634 {\"cmp%D4\\t%2, %3\",
9635 \"cmn%D5\\t%0, #%n1\"},
9636 {\"cmn%D4\\t%2, #%n3\",
9637 \"cmp%D5\\t%0, %1\"},
9638 {\"cmn%D4\\t%2, #%n3\",
9639 \"cmn%D5\\t%0, #%n1\"}
9641 static const char *const ite[2] =
9646 static const int cmp_idx[] = {CMP_CMP, CMP_CMP, CMP_CMN,
9647 CMP_CMP, CMN_CMP, CMP_CMP,
9648 CMP_CMP, CMN_CMP, CMP_CMN,
9651 comparison_dominates_p (GET_CODE (operands[5]), GET_CODE (operands[4]));
9653 output_asm_insn (cmp1[cmp_idx[which_alternative]][swap], operands);
9654 if (TARGET_THUMB2) {
9655 output_asm_insn (ite[swap], operands);
9657 output_asm_insn (cmp2[cmp_idx[which_alternative]][swap], operands);
9661 [(set_attr "conds" "set")
9662 (set_attr "arch" "t2,t2,t2,t2,t2,t2,any,any,any,any")
9663 (set_attr "enabled_for_short_it" "yes,no,no,no,no,yes,no,no,no,no")
9664 (set_attr_alternative "length"
9671 (if_then_else (eq_attr "is_thumb" "no")
9674 (if_then_else (eq_attr "is_thumb" "no")
9677 (if_then_else (eq_attr "is_thumb" "no")
9680 (if_then_else (eq_attr "is_thumb" "no")
9683 (set_attr "type" "multiple")]
;; op0 = (op1 cmp3 op2) | (op4 cmp6 op5): only valid when the two comparisons
;; share a dominance CC mode (DOM_CC_X_OR_Y). Splits after reload into one
;; combined compare setting the dominant CC register (operands[7], built in
;; the preparation statement) followed by a ne-based store-flag.
;; NOTE(review): extract appears to drop some lines; code kept byte-for-byte.
9686 (define_insn_and_split "*ior_scc_scc"
9687 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9688 (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9689 [(match_operand:SI 1 "s_register_operand" "l,r")
9690 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9691 (match_operator:SI 6 "arm_comparison_operator"
9692 [(match_operand:SI 4 "s_register_operand" "l,r")
9693 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9694 (clobber (reg:CC CC_REGNUM))]
9696 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_OR_Y)
9699 "TARGET_32BIT && reload_completed"
9703 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9704 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9706 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9708 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9711 [(set_attr "conds" "clob")
9712 (set_attr "enabled_for_short_it" "yes,no")
9713 (set_attr "length" "16")
9714 (set_attr "type" "multiple")]
9717 ; If the above pattern is followed by a CMP insn, then the compare is
9718 ; redundant, since we can rework the conditional instruction that follows.
;; Variant of *ior_scc_scc that also produces the CC result directly
;; (operand 0 is the dominant CC register, operand 7 the SI store-flag).
;; NOTE(review): extract appears to drop some lines; code kept byte-for-byte.
9719 (define_insn_and_split "*ior_scc_scc_cmp"
9720 [(set (match_operand 0 "dominant_cc_register" "")
9721 (compare (ior:SI (match_operator:SI 3 "arm_comparison_operator"
9722 [(match_operand:SI 1 "s_register_operand" "l,r")
9723 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9724 (match_operator:SI 6 "arm_comparison_operator"
9725 [(match_operand:SI 4 "s_register_operand" "l,r")
9726 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9728 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9729 (ior:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9730 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9733 "TARGET_32BIT && reload_completed"
9737 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9738 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9740 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9742 [(set_attr "conds" "set")
9743 (set_attr "enabled_for_short_it" "yes,no")
9744 (set_attr "length" "16")
9745 (set_attr "type" "multiple")]
;; op0 = (op1 cmp3 op2) & (op4 cmp6 op5): AND counterpart of *ior_scc_scc,
;; requiring a DOM_CC_X_AND_Y dominance CC mode. Splits into one combined
;; compare of the dominant CC register (operands[7]) plus a ne store-flag.
;; NOTE(review): extract appears to drop some lines; code kept byte-for-byte.
9748 (define_insn_and_split "*and_scc_scc"
9749 [(set (match_operand:SI 0 "s_register_operand" "=Ts,Ts")
9750 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9751 [(match_operand:SI 1 "s_register_operand" "l,r")
9752 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9753 (match_operator:SI 6 "arm_comparison_operator"
9754 [(match_operand:SI 4 "s_register_operand" "l,r")
9755 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")])))
9756 (clobber (reg:CC CC_REGNUM))]
9758 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9761 "TARGET_32BIT && reload_completed
9762 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9767 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9768 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9770 (set (match_dup 0) (ne:SI (match_dup 7) (const_int 0)))]
9772 = gen_rtx_REG (arm_select_dominance_cc_mode (operands[3], operands[6],
9775 [(set_attr "conds" "clob")
9776 (set_attr "enabled_for_short_it" "yes,no")
9777 (set_attr "length" "16")
9778 (set_attr "type" "multiple")]
9781 ; If the above pattern is followed by a CMP insn, then the compare is
9782 ; redundant, since we can rework the conditional instruction that follows.
;; Variant of *and_scc_scc that also produces the CC result directly
;; (operand 0 is the dominant CC register, operand 7 the SI store-flag).
;; NOTE(review): extract appears to drop some lines; code kept byte-for-byte.
9783 (define_insn_and_split "*and_scc_scc_cmp"
9784 [(set (match_operand 0 "dominant_cc_register" "")
9785 (compare (and:SI (match_operator:SI 3 "arm_comparison_operator"
9786 [(match_operand:SI 1 "s_register_operand" "l,r")
9787 (match_operand:SI 2 "arm_add_operand" "lPy,rIL")])
9788 (match_operator:SI 6 "arm_comparison_operator"
9789 [(match_operand:SI 4 "s_register_operand" "l,r")
9790 (match_operand:SI 5 "arm_add_operand" "lPy,rIL")]))
9792 (set (match_operand:SI 7 "s_register_operand" "=Ts,Ts")
9793 (and:SI (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9794 (match_op_dup 6 [(match_dup 4) (match_dup 5)])))]
9797 "TARGET_32BIT && reload_completed"
9801 (match_op_dup 3 [(match_dup 1) (match_dup 2)])
9802 (match_op_dup 6 [(match_dup 4) (match_dup 5)]))
9804 (set (match_dup 7) (ne:SI (match_dup 0) (const_int 0)))]
9806 [(set_attr "conds" "set")
9807 (set_attr "enabled_for_short_it" "yes,no")
9808 (set_attr "length" "16")
9809 (set_attr "type" "multiple")]
9812 ;; If there is no dominance in the comparison, then we can still save an
9813 ;; instruction in the AND case, since we can know that the second compare
9814 ;; need only zero the value if false (if true, then the value is already
;; No-dominance AND: first store-flag into op0, then compare op4/op5 into a
;; fresh CC mode (operands[7]/[8], built in the preparation statements) and
;; conditionally keep or zero the result.
;; NOTE(review): extract appears to drop some lines; code kept byte-for-byte.
9816 (define_insn_and_split "*and_scc_scc_nodom"
9817 [(set (match_operand:SI 0 "s_register_operand" "=&Ts,&Ts,&Ts")
9818 (and:SI (match_operator:SI 3 "arm_comparison_operator"
9819 [(match_operand:SI 1 "s_register_operand" "r,r,0")
9820 (match_operand:SI 2 "arm_add_operand" "rIL,0,rIL")])
9821 (match_operator:SI 6 "arm_comparison_operator"
9822 [(match_operand:SI 4 "s_register_operand" "r,r,r")
9823 (match_operand:SI 5 "arm_add_operand" "rIL,rIL,rIL")])))
9824 (clobber (reg:CC CC_REGNUM))]
9826 && (arm_select_dominance_cc_mode (operands[3], operands[6], DOM_CC_X_AND_Y)
9829 "TARGET_32BIT && reload_completed"
9830 [(parallel [(set (match_dup 0)
9831 (match_op_dup 3 [(match_dup 1) (match_dup 2)]))
9832 (clobber (reg:CC CC_REGNUM))])
9833 (set (match_dup 7) (match_op_dup 8 [(match_dup 4) (match_dup 5)]))
9835 (if_then_else:SI (match_op_dup 6 [(match_dup 7) (const_int 0)])
9838 "operands[7] = gen_rtx_REG (SELECT_CC_MODE (GET_CODE (operands[6]),
9839 operands[4], operands[5]),
9841 operands[8] = gen_rtx_COMPARE (GET_MODE (operands[7]), operands[4],
9843 [(set_attr "conds" "clob")
9844 (set_attr "length" "20")
9845 (set_attr "type" "multiple")]
;; Two splitters for a CC_NOOV compare of (x & 1) | (cmp), in either operand
;; order: compute the IOR into the clobbered scratch (operand 4), then
;; compare its low bit. The "(define_split" header lines appear to have been
;; lost in this extract; code below is kept byte-for-byte.
9849 [(set (reg:CC_NOOV CC_REGNUM)
9850 (compare:CC_NOOV (ior:SI
9851 (and:SI (match_operand:SI 0 "s_register_operand" "")
9853 (match_operator:SI 1 "arm_comparison_operator"
9854 [(match_operand:SI 2 "s_register_operand" "")
9855 (match_operand:SI 3 "arm_add_operand" "")]))
9857 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9860 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9862 (set (reg:CC_NOOV CC_REGNUM)
9863 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
;; Same as above with the IOR operands commuted.
9868 [(set (reg:CC_NOOV CC_REGNUM)
9869 (compare:CC_NOOV (ior:SI
9870 (match_operator:SI 1 "arm_comparison_operator"
9871 [(match_operand:SI 2 "s_register_operand" "")
9872 (match_operand:SI 3 "arm_add_operand" "")])
9873 (and:SI (match_operand:SI 0 "s_register_operand" "")
9876 (clobber (match_operand:SI 4 "s_register_operand" ""))]
9879 (ior:SI (match_op_dup 1 [(match_dup 2) (match_dup 3)])
9881 (set (reg:CC_NOOV CC_REGNUM)
9882 (compare:CC_NOOV (and:SI (match_dup 4) (const_int 1))
9885 ;; ??? The conditional patterns above need checking for Thumb-2 usefulness
;; op0 = -(op1 cmp3 op2), i.e. 0 or all-ones. Splits after reload into:
;; (x < 0)  -> single asr #31;
;; (x != y) -> subs/cmpsi2_addneg then a predicated mvn;
;; otherwise cmp followed by predicated mov #0 / mvn #0.
;; NOTE(review): extract appears to drop some lines (including parts of the
;; emitted COND_EXEC bodies); code kept byte-for-byte.
9887 (define_insn_and_split "*negscc"
9888 [(set (match_operand:SI 0 "s_register_operand" "=r")
9889 (neg:SI (match_operator 3 "arm_comparison_operator"
9890 [(match_operand:SI 1 "s_register_operand" "r")
9891 (match_operand:SI 2 "arm_rhs_operand" "rI")])))
9892 (clobber (reg:CC CC_REGNUM))]
9895 "&& reload_completed"
9898 rtx cc_reg = gen_rtx_REG (CCmode, CC_REGNUM);
9900 if (GET_CODE (operands[3]) == LT && operands[2] == const0_rtx)
9902 /* Emit mov\\t%0, %1, asr #31 */
9903 emit_insn (gen_rtx_SET (operands[0],
9904 gen_rtx_ASHIFTRT (SImode,
9909 else if (GET_CODE (operands[3]) == NE)
9911 /* Emit subs\\t%0, %1, %2\;mvnne\\t%0, #0 */
9912 if (CONST_INT_P (operands[2]))
9913 emit_insn (gen_cmpsi2_addneg (operands[0], operands[1], operands[2],
9914 gen_int_mode (-INTVAL (operands[2]),
9917 emit_insn (gen_subsi3_compare (operands[0], operands[1], operands[2]));
9919 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9923 gen_rtx_SET (operands[0],
9929 /* Emit: cmp\\t%1, %2\;mov%D3\\t%0, #0\;mvn%d3\\t%0, #0 */
9930 emit_insn (gen_rtx_SET (cc_reg,
9931 gen_rtx_COMPARE (CCmode, operands[1], operands[2])));
9932 enum rtx_code rc = GET_CODE (operands[3]);
9934 rc = reverse_condition (rc);
9935 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9940 gen_rtx_SET (operands[0], const0_rtx)));
9941 rc = GET_CODE (operands[3]);
9942 emit_insn (gen_rtx_COND_EXEC (VOIDmode,
9947 gen_rtx_SET (operands[0],
9953 [(set_attr "conds" "clob")
9954 (set_attr "length" "12")
9955 (set_attr "type" "multiple")]
;; Conditional move where the condition tests (op3 + op4): splits after
;; reload into a CC_NOOV-setting add-compare plus predicated moves.
;; If op2 is not the destination the condition is reversed and op1/op2
;; swapped so only one predicated move is needed; FP CC modes are asserted
;; impossible here. NOTE(review): extract drops some lines; code untouched.
9958 (define_insn_and_split "movcond_addsi"
9959 [(set (match_operand:SI 0 "s_register_operand" "=r,l,r")
9961 (match_operator 5 "comparison_operator"
9962 [(plus:SI (match_operand:SI 3 "s_register_operand" "r,r,r")
9963 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL"))
9965 (match_operand:SI 1 "arm_rhs_operand" "rI,rPy,r")
9966 (match_operand:SI 2 "arm_rhs_operand" "rI,rPy,r")))
9967 (clobber (reg:CC CC_REGNUM))]
9970 "&& reload_completed"
9971 [(set (reg:CC_NOOV CC_REGNUM)
9973 (plus:SI (match_dup 3)
9976 (set (match_dup 0) (match_dup 1))
9977 (cond_exec (match_dup 6)
9978 (set (match_dup 0) (match_dup 2)))]
9981 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[5]),
9982 operands[3], operands[4]);
9983 enum rtx_code rc = GET_CODE (operands[5]);
9984 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
9985 gcc_assert (!(mode == CCFPmode || mode == CCFPEmode));
9986 if (!REG_P (operands[2]) || REGNO (operands[2]) != REGNO (operands[0]))
9987 rc = reverse_condition (rc);
9989 std::swap (operands[1], operands[2]);
9991 operands[6] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
9994 [(set_attr "conds" "clob")
9995 (set_attr "enabled_for_short_it" "no,yes,yes")
9996 (set_attr "type" "multiple")]
;; General conditional move with an explicit comparison. Special-cases
;; LT/GE against zero when one arm is a register, using and/bic with the
;; sign bit (asr #31 / asr #32 + predicated mov). The general path emits
;; cmn for negated-immediate comparisons, else cmp, then up to two
;; predicated moves. NOTE(review): extract drops some lines; code untouched.
9999 (define_insn "movcond"
10000 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10002 (match_operator 5 "arm_comparison_operator"
10003 [(match_operand:SI 3 "s_register_operand" "r,r,r")
10004 (match_operand:SI 4 "arm_add_operand" "rIL,rIL,rIL")])
10005 (match_operand:SI 1 "arm_rhs_operand" "0,rI,?rI")
10006 (match_operand:SI 2 "arm_rhs_operand" "rI,0,rI")))
10007 (clobber (reg:CC CC_REGNUM))]
10010 if (GET_CODE (operands[5]) == LT
10011 && (operands[4] == const0_rtx))
10013 if (which_alternative != 1 && REG_P (operands[1]))
10015 if (operands[2] == const0_rtx)
10016 return \"and\\t%0, %1, %3, asr #31\";
10017 return \"ands\\t%0, %1, %3, asr #32\;movcc\\t%0, %2\";
10019 else if (which_alternative != 0 && REG_P (operands[2]))
10021 if (operands[1] == const0_rtx)
10022 return \"bic\\t%0, %2, %3, asr #31\";
10023 return \"bics\\t%0, %2, %3, asr #32\;movcs\\t%0, %1\";
10025 /* The only case that falls through to here is when both ops 1 & 2
10029 if (GET_CODE (operands[5]) == GE
10030 && (operands[4] == const0_rtx))
10032 if (which_alternative != 1 && REG_P (operands[1]))
10034 if (operands[2] == const0_rtx)
10035 return \"bic\\t%0, %1, %3, asr #31\";
10036 return \"bics\\t%0, %1, %3, asr #32\;movcs\\t%0, %2\";
10038 else if (which_alternative != 0 && REG_P (operands[2]))
10040 if (operands[1] == const0_rtx)
10041 return \"and\\t%0, %2, %3, asr #31\";
10042 return \"ands\\t%0, %2, %3, asr #32\;movcc\\t%0, %1\";
10044 /* The only case that falls through to here is when both ops 1 & 2
10047 if (CONST_INT_P (operands[4])
10048 && !const_ok_for_arm (INTVAL (operands[4])))
10049 output_asm_insn (\"cmn\\t%3, #%n4\", operands);
10051 output_asm_insn (\"cmp\\t%3, %4\", operands);
10052 if (which_alternative != 0)
10053 output_asm_insn (\"mov%d5\\t%0, %1\", operands);
10054 if (which_alternative != 1)
10055 output_asm_insn (\"mov%D5\\t%0, %2\", operands);
10058 [(set_attr "conds" "clob")
10059 (set_attr "length" "8,8,12")
10060 (set_attr "type" "multiple")]
10063 ;; ??? The patterns below need checking for Thumb-2 usefulness.
;; Conditional (op2 + op3) vs. a fallback value. The *ifcompare_* form does
;; its own compare (clobbering CC); the *if_* form reuses a CC register set
;; earlier and emits predicated add/sub (sub #-n for the L alternatives),
;; with an extra predicated mov when op1 is not tied to op0.
;; NOTE(review): extract appears to drop some lines; code kept byte-for-byte.
10065 (define_insn "*ifcompare_plus_move"
10066 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10067 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10068 [(match_operand:SI 4 "s_register_operand" "r,r")
10069 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10071 (match_operand:SI 2 "s_register_operand" "r,r")
10072 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))
10073 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10074 (clobber (reg:CC CC_REGNUM))]
10077 [(set_attr "conds" "clob")
10078 (set_attr "length" "8,12")
10079 (set_attr "type" "multiple")]
10082 (define_insn "*if_plus_move"
10083 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10085 (match_operator 4 "arm_comparison_operator"
10086 [(match_operand 5 "cc_register" "") (const_int 0)])
10088 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10089 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))
10090 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")))]
10093 add%d4\\t%0, %2, %3
10094 sub%d4\\t%0, %2, #%n3
10095 add%d4\\t%0, %2, %3\;mov%D4\\t%0, %1
10096 sub%d4\\t%0, %2, #%n3\;mov%D4\\t%0, %1"
10097 [(set_attr "conds" "use")
10098 (set_attr "length" "4,4,8,8")
10099 (set_attr_alternative "type"
10100 [(if_then_else (match_operand 3 "const_int_operand" "")
10101 (const_string "alu_imm" )
10102 (const_string "alu_sreg"))
10103 (const_string "alu_imm")
10104 (const_string "multiple")
10105 (const_string "multiple")])]
;; Mirror images of the plus_move patterns above: the addition is in the
;; else-arm, so the *if_* form uses the reversed condition suffix (%D4) on
;; the add/sub and the direct one (%d4) on the fallback mov.
;; NOTE(review): extract appears to drop some lines; code kept byte-for-byte.
10108 (define_insn "*ifcompare_move_plus"
10109 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10110 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10111 [(match_operand:SI 4 "s_register_operand" "r,r")
10112 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10113 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10115 (match_operand:SI 2 "s_register_operand" "r,r")
10116 (match_operand:SI 3 "arm_add_operand" "rIL,rIL"))))
10117 (clobber (reg:CC CC_REGNUM))]
10120 [(set_attr "conds" "clob")
10121 (set_attr "length" "8,12")
10122 (set_attr "type" "multiple")]
10125 (define_insn "*if_move_plus"
10126 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r,r")
10128 (match_operator 4 "arm_comparison_operator"
10129 [(match_operand 5 "cc_register" "") (const_int 0)])
10130 (match_operand:SI 1 "arm_rhs_operand" "0,0,?rI,?rI")
10132 (match_operand:SI 2 "s_register_operand" "r,r,r,r")
10133 (match_operand:SI 3 "arm_add_operand" "rI,L,rI,L"))))]
10136 add%D4\\t%0, %2, %3
10137 sub%D4\\t%0, %2, #%n3
10138 add%D4\\t%0, %2, %3\;mov%d4\\t%0, %1
10139 sub%D4\\t%0, %2, #%n3\;mov%d4\\t%0, %1"
10140 [(set_attr "conds" "use")
10141 (set_attr "length" "4,4,8,8")
10142 (set_attr_alternative "type"
10143 [(if_then_else (match_operand 3 "const_int_operand" "")
10144 (const_string "alu_imm" )
10145 (const_string "alu_sreg"))
10146 (const_string "alu_imm")
10147 (const_string "multiple")
10148 (const_string "multiple")])]
10151 (define_insn "*ifcompare_arith_arith"
10152 [(set (match_operand:SI 0 "s_register_operand" "=r")
10153 (if_then_else:SI (match_operator 9 "arm_comparison_operator"
10154 [(match_operand:SI 5 "s_register_operand" "r")
10155 (match_operand:SI 6 "arm_add_operand" "rIL")])
10156 (match_operator:SI 8 "shiftable_operator"
10157 [(match_operand:SI 1 "s_register_operand" "r")
10158 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10159 (match_operator:SI 7 "shiftable_operator"
10160 [(match_operand:SI 3 "s_register_operand" "r")
10161 (match_operand:SI 4 "arm_rhs_operand" "rI")])))
10162 (clobber (reg:CC CC_REGNUM))]
10165 [(set_attr "conds" "clob")
10166 (set_attr "length" "12")
10167 (set_attr "type" "multiple")]
;; Conditional select between the results of two shiftable (data-processing)
;; operations, with the condition already computed into a CC register
;; (operand 8).  Emitted as two mutually exclusive predicated instructions:
;; %I6 predicated on %d5 and %I7 on the inverse %D5 -- hence length 8 and
;; conds "use" (the flags are consumed, not clobbered).
10170 (define_insn "*if_arith_arith"
10171 [(set (match_operand:SI 0 "s_register_operand" "=r")
10172 (if_then_else:SI (match_operator 5 "arm_comparison_operator"
10173 [(match_operand 8 "cc_register" "") (const_int 0)])
10174 (match_operator:SI 6 "shiftable_operator"
10175 [(match_operand:SI 1 "s_register_operand" "r")
10176 (match_operand:SI 2 "arm_rhs_operand" "rI")])
10177 (match_operator:SI 7 "shiftable_operator"
10178 [(match_operand:SI 3 "s_register_operand" "r")
10179 (match_operand:SI 4 "arm_rhs_operand" "rI")])))]
10181 "%I6%d5\\t%0, %1, %2\;%I7%D5\\t%0, %3, %4"
10182 [(set_attr "conds" "use")
10183 (set_attr "length" "8")
10184 (set_attr "type" "multiple")]
10187 (define_insn "*ifcompare_arith_move"
10188 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10189 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10190 [(match_operand:SI 2 "s_register_operand" "r,r")
10191 (match_operand:SI 3 "arm_add_operand" "rIL,rIL")])
10192 (match_operator:SI 7 "shiftable_operator"
10193 [(match_operand:SI 4 "s_register_operand" "r,r")
10194 (match_operand:SI 5 "arm_rhs_operand" "rI,rI")])
10195 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))
10196 (clobber (reg:CC CC_REGNUM))]
10199 /* If we have an operation where (op x 0) is the identity operation and
10200 the conditional operator is LT or GE and we are comparing against zero and
10201 everything is in registers then we can do this in two instructions. */
10202 if (operands[3] == const0_rtx
10203 && GET_CODE (operands[7]) != AND
10204 && REG_P (operands[5])
10205 && REG_P (operands[1])
10206 && REGNO (operands[1]) == REGNO (operands[4])
10207 && REGNO (operands[4]) != REGNO (operands[0]))
10209 if (GET_CODE (operands[6]) == LT)
10210 return \"and\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10211 else if (GET_CODE (operands[6]) == GE)
10212 return \"bic\\t%0, %5, %2, asr #31\;%I7\\t%0, %4, %0\";
10214 if (CONST_INT_P (operands[3])
10215 && !const_ok_for_arm (INTVAL (operands[3])))
10216 output_asm_insn (\"cmn\\t%2, #%n3\", operands);
10218 output_asm_insn (\"cmp\\t%2, %3\", operands);
10219 output_asm_insn (\"%I7%d6\\t%0, %4, %5\", operands);
10220 if (which_alternative != 0)
10221 return \"mov%D6\\t%0, %1\";
10224 [(set_attr "conds" "clob")
10225 (set_attr "length" "8,12")
10226 (set_attr "type" "multiple")]
10229 (define_insn "*if_arith_move"
10230 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10231 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
10232 [(match_operand 6 "cc_register" "") (const_int 0)])
10233 (match_operator:SI 5 "shiftable_operator"
10234 [(match_operand:SI 2 "s_register_operand" "r,r")
10235 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])
10236 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")))]
10239 %I5%d4\\t%0, %2, %3
10240 %I5%d4\\t%0, %2, %3\;mov%D4\\t%0, %1"
10241 [(set_attr "conds" "use")
10242 (set_attr "length" "4,8")
10243 (set_attr_alternative "type"
10244 [(if_then_else (match_operand 3 "const_int_operand" "")
10245 (const_string "alu_shift_imm" )
10246 (const_string "alu_shift_reg"))
10247 (const_string "multiple")])]
10250 (define_insn "*ifcompare_move_arith"
10251 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10252 (if_then_else:SI (match_operator 6 "arm_comparison_operator"
10253 [(match_operand:SI 4 "s_register_operand" "r,r")
10254 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10255 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10256 (match_operator:SI 7 "shiftable_operator"
10257 [(match_operand:SI 2 "s_register_operand" "r,r")
10258 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))
10259 (clobber (reg:CC CC_REGNUM))]
10262 /* If we have an operation where (op x 0) is the identity operation and
10263 the conditional operator is LT or GE and we are comparing against zero and
10264 everything is in registers then we can do this in two instructions */
10265 if (operands[5] == const0_rtx
10266 && GET_CODE (operands[7]) != AND
10267 && REG_P (operands[3])
10268 && REG_P (operands[1])
10269 && REGNO (operands[1]) == REGNO (operands[2])
10270 && REGNO (operands[2]) != REGNO (operands[0]))
10272 if (GET_CODE (operands[6]) == GE)
10273 return \"and\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10274 else if (GET_CODE (operands[6]) == LT)
10275 return \"bic\\t%0, %3, %4, asr #31\;%I7\\t%0, %2, %0\";
10278 if (CONST_INT_P (operands[5])
10279 && !const_ok_for_arm (INTVAL (operands[5])))
10280 output_asm_insn (\"cmn\\t%4, #%n5\", operands);
10282 output_asm_insn (\"cmp\\t%4, %5\", operands);
10284 if (which_alternative != 0)
10285 output_asm_insn (\"mov%d6\\t%0, %1\", operands);
10286 return \"%I7%D6\\t%0, %2, %3\";
10288 [(set_attr "conds" "clob")
10289 (set_attr "length" "8,12")
10290 (set_attr "type" "multiple")]
10293 (define_insn "*if_move_arith"
10294 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10296 (match_operator 4 "arm_comparison_operator"
10297 [(match_operand 6 "cc_register" "") (const_int 0)])
10298 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
10299 (match_operator:SI 5 "shiftable_operator"
10300 [(match_operand:SI 2 "s_register_operand" "r,r")
10301 (match_operand:SI 3 "arm_rhs_operand" "rI,rI")])))]
10304 %I5%D4\\t%0, %2, %3
10305 %I5%D4\\t%0, %2, %3\;mov%d4\\t%0, %1"
10306 [(set_attr "conds" "use")
10307 (set_attr "length" "4,8")
10308 (set_attr_alternative "type"
10309 [(if_then_else (match_operand 3 "const_int_operand" "")
10310 (const_string "alu_shift_imm" )
10311 (const_string "alu_shift_reg"))
10312 (const_string "multiple")])]
10315 (define_insn "*ifcompare_move_not"
10316 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10318 (match_operator 5 "arm_comparison_operator"
10319 [(match_operand:SI 3 "s_register_operand" "r,r")
10320 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10321 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10323 (match_operand:SI 2 "s_register_operand" "r,r"))))
10324 (clobber (reg:CC CC_REGNUM))]
10327 [(set_attr "conds" "clob")
10328 (set_attr "length" "8,12")
10329 (set_attr "type" "multiple")]
;; Conditional select between operand 1 (register, valid immediate, or an
;; immediate whose bitwise NOT is valid -- "arm_not_operand") and the
;; bitwise NOT of operand 2, with the condition already in a CC register
;; (operand 3; conds "use").  Per the length attribute, alternative 0 is a
;; single conditional mvn; alternatives 1 and 2 need a leading predicated
;; mov / mvn-of-%B1 and are therefore two instructions.
10332 (define_insn "*if_move_not"
10333 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10335 (match_operator 4 "arm_comparison_operator"
10336 [(match_operand 3 "cc_register" "") (const_int 0)])
10337 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10338 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))))]
10342 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2
10343 mvn%d4\\t%0, #%B1\;mvn%D4\\t%0, %2"
10344 [(set_attr "conds" "use")
;; Fix: removed a duplicate bare (set_attr "type" "mvn_reg") that stood
;; before "length".  The per-alternative "type" below already carries the
;; correct values, and the single-valued duplicate mis-described
;; alternatives 1 and 2, which are two-instruction sequences ("multiple").
;; Sibling pattern *if_not_move has only the per-alternative form.
10346 (set_attr "length" "4,8,8")
10347 (set_attr "type" "mvn_reg,multiple,multiple")]
10350 (define_insn "*ifcompare_not_move"
10351 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10353 (match_operator 5 "arm_comparison_operator"
10354 [(match_operand:SI 3 "s_register_operand" "r,r")
10355 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10357 (match_operand:SI 2 "s_register_operand" "r,r"))
10358 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10359 (clobber (reg:CC CC_REGNUM))]
10362 [(set_attr "conds" "clob")
10363 (set_attr "length" "8,12")
10364 (set_attr "type" "multiple")]
;; Mirror image of *if_move_not: the NOT of operand 2 is the "then" arm
;; and operand 1 the "else" arm, so the predicates on the emitted mvn/mov
;; are swapped (%d4 on the mvn, %D4 on the mov).  Condition flags are
;; consumed from CC register operand 3 (conds "use").  Lengths 4,8,8 show
;; alternative 0 is a single insn; alternatives 1 and 2 are pairs.
10367 (define_insn "*if_not_move"
10368 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10370 (match_operator 4 "arm_comparison_operator"
10371 [(match_operand 3 "cc_register" "") (const_int 0)])
10372 (not:SI (match_operand:SI 2 "s_register_operand" "r,r,r"))
10373 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10377 mov%D4\\t%0, %1\;mvn%d4\\t%0, %2
10378 mvn%D4\\t%0, #%B1\;mvn%d4\\t%0, %2"
10379 [(set_attr "conds" "use")
10380 (set_attr "type" "mvn_reg,multiple,multiple")
10381 (set_attr "length" "4,8,8")]
10384 (define_insn "*ifcompare_shift_move"
10385 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10387 (match_operator 6 "arm_comparison_operator"
10388 [(match_operand:SI 4 "s_register_operand" "r,r")
10389 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10390 (match_operator:SI 7 "shift_operator"
10391 [(match_operand:SI 2 "s_register_operand" "r,r")
10392 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])
10393 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10394 (clobber (reg:CC CC_REGNUM))]
10397 [(set_attr "conds" "clob")
10398 (set_attr "length" "8,12")
10399 (set_attr "type" "multiple")]
10402 (define_insn "*if_shift_move"
10403 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10405 (match_operator 5 "arm_comparison_operator"
10406 [(match_operand 6 "cc_register" "") (const_int 0)])
10407 (match_operator:SI 4 "shift_operator"
10408 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10409 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])
10410 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")))]
10414 mov%D5\\t%0, %1\;mov%d5\\t%0, %2%S4
10415 mvn%D5\\t%0, #%B1\;mov%d5\\t%0, %2%S4"
10416 [(set_attr "conds" "use")
10417 (set_attr "shift" "2")
10418 (set_attr "length" "4,8,8")
10419 (set_attr_alternative "type"
10420 [(if_then_else (match_operand 3 "const_int_operand" "")
10421 (const_string "mov_shift" )
10422 (const_string "mov_shift_reg"))
10423 (const_string "multiple")
10424 (const_string "multiple")])]
10427 (define_insn "*ifcompare_move_shift"
10428 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10430 (match_operator 6 "arm_comparison_operator"
10431 [(match_operand:SI 4 "s_register_operand" "r,r")
10432 (match_operand:SI 5 "arm_add_operand" "rIL,rIL")])
10433 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10434 (match_operator:SI 7 "shift_operator"
10435 [(match_operand:SI 2 "s_register_operand" "r,r")
10436 (match_operand:SI 3 "arm_rhs_operand" "rM,rM")])))
10437 (clobber (reg:CC CC_REGNUM))]
10440 [(set_attr "conds" "clob")
10441 (set_attr "length" "8,12")
10442 (set_attr "type" "multiple")]
10445 (define_insn "*if_move_shift"
10446 [(set (match_operand:SI 0 "s_register_operand" "=r,r,r")
10448 (match_operator 5 "arm_comparison_operator"
10449 [(match_operand 6 "cc_register" "") (const_int 0)])
10450 (match_operand:SI 1 "arm_not_operand" "0,?rI,K")
10451 (match_operator:SI 4 "shift_operator"
10452 [(match_operand:SI 2 "s_register_operand" "r,r,r")
10453 (match_operand:SI 3 "arm_rhs_operand" "rM,rM,rM")])))]
10457 mov%d5\\t%0, %1\;mov%D5\\t%0, %2%S4
10458 mvn%d5\\t%0, #%B1\;mov%D5\\t%0, %2%S4"
10459 [(set_attr "conds" "use")
10460 (set_attr "shift" "2")
10461 (set_attr "length" "4,8,8")
10462 (set_attr_alternative "type"
10463 [(if_then_else (match_operand 3 "const_int_operand" "")
10464 (const_string "mov_shift" )
10465 (const_string "mov_shift_reg"))
10466 (const_string "multiple")
10467 (const_string "multiple")])]
10470 (define_insn "*ifcompare_shift_shift"
10471 [(set (match_operand:SI 0 "s_register_operand" "=r")
10473 (match_operator 7 "arm_comparison_operator"
10474 [(match_operand:SI 5 "s_register_operand" "r")
10475 (match_operand:SI 6 "arm_add_operand" "rIL")])
10476 (match_operator:SI 8 "shift_operator"
10477 [(match_operand:SI 1 "s_register_operand" "r")
10478 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10479 (match_operator:SI 9 "shift_operator"
10480 [(match_operand:SI 3 "s_register_operand" "r")
10481 (match_operand:SI 4 "arm_rhs_operand" "rM")])))
10482 (clobber (reg:CC CC_REGNUM))]
10485 [(set_attr "conds" "clob")
10486 (set_attr "length" "12")
10487 (set_attr "type" "multiple")]
;; Conditional select between two shifted values, condition already in a
;; CC register (operand 8).  Two predicated mov-with-shift instructions:
;; %S6 applies shift operator 6 to the taken arm, %S7 operator 7 to the
;; other.  The "type" attribute distinguishes immediate-shift from
;; register-shift forms: mov_shift only when BOTH shift amounts
;; (operands 2 and 4) are constants.
10490 (define_insn "*if_shift_shift"
10491 [(set (match_operand:SI 0 "s_register_operand" "=r")
10493 (match_operator 5 "arm_comparison_operator"
10494 [(match_operand 8 "cc_register" "") (const_int 0)])
10495 (match_operator:SI 6 "shift_operator"
10496 [(match_operand:SI 1 "s_register_operand" "r")
10497 (match_operand:SI 2 "arm_rhs_operand" "rM")])
10498 (match_operator:SI 7 "shift_operator"
10499 [(match_operand:SI 3 "s_register_operand" "r")
10500 (match_operand:SI 4 "arm_rhs_operand" "rM")])))]
10502 "mov%d5\\t%0, %1%S6\;mov%D5\\t%0, %3%S7"
10503 [(set_attr "conds" "use")
10504 (set_attr "shift" "1")
10505 (set_attr "length" "8")
10506 (set (attr "type") (if_then_else
10507 (and (match_operand 2 "const_int_operand" "")
10508 (match_operand 4 "const_int_operand" ""))
10509 (const_string "mov_shift")
10510 (const_string "mov_shift_reg")))]
10513 (define_insn "*ifcompare_not_arith"
10514 [(set (match_operand:SI 0 "s_register_operand" "=r")
10516 (match_operator 6 "arm_comparison_operator"
10517 [(match_operand:SI 4 "s_register_operand" "r")
10518 (match_operand:SI 5 "arm_add_operand" "rIL")])
10519 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10520 (match_operator:SI 7 "shiftable_operator"
10521 [(match_operand:SI 2 "s_register_operand" "r")
10522 (match_operand:SI 3 "arm_rhs_operand" "rI")])))
10523 (clobber (reg:CC CC_REGNUM))]
10526 [(set_attr "conds" "clob")
10527 (set_attr "length" "12")
10528 (set_attr "type" "multiple")]
;; Conditional select between NOT of operand 1 and a shiftable-operator
;; result, condition already in a CC register (operand 4).  Emits a
;; predicated mvn plus the inversely predicated data-processing insn
;; (length 8).
;; NOTE(review): length is 8 (two instructions) yet "type" is the
;; single-insn class "mvn_reg"; the sibling *if_arith_not below uses
;; "multiple" for the same shape -- possibly intentional upstream, but
;; worth confirming against the scheduler descriptions.
10531 (define_insn "*if_not_arith"
10532 [(set (match_operand:SI 0 "s_register_operand" "=r")
10534 (match_operator 5 "arm_comparison_operator"
10535 [(match_operand 4 "cc_register" "") (const_int 0)])
10536 (not:SI (match_operand:SI 1 "s_register_operand" "r"))
10537 (match_operator:SI 6 "shiftable_operator"
10538 [(match_operand:SI 2 "s_register_operand" "r")
10539 (match_operand:SI 3 "arm_rhs_operand" "rI")])))]
10541 "mvn%d5\\t%0, %1\;%I6%D5\\t%0, %2, %3"
10542 [(set_attr "conds" "use")
10543 (set_attr "type" "mvn_reg")
10544 (set_attr "length" "8")]
10547 (define_insn "*ifcompare_arith_not"
10548 [(set (match_operand:SI 0 "s_register_operand" "=r")
10550 (match_operator 6 "arm_comparison_operator"
10551 [(match_operand:SI 4 "s_register_operand" "r")
10552 (match_operand:SI 5 "arm_add_operand" "rIL")])
10553 (match_operator:SI 7 "shiftable_operator"
10554 [(match_operand:SI 2 "s_register_operand" "r")
10555 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10556 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))
10557 (clobber (reg:CC CC_REGNUM))]
10560 [(set_attr "conds" "clob")
10561 (set_attr "length" "12")
10562 (set_attr "type" "multiple")]
;; Mirror of *if_not_arith: the shiftable operation is the "then" arm and
;; the NOT the "else" arm, so the mvn takes the inverse predicate %D5 and
;; the data-processing insn the direct predicate %d5.  Flags consumed from
;; CC register operand 4; two instructions (length 8, type "multiple").
10565 (define_insn "*if_arith_not"
10566 [(set (match_operand:SI 0 "s_register_operand" "=r")
10568 (match_operator 5 "arm_comparison_operator"
10569 [(match_operand 4 "cc_register" "") (const_int 0)])
10570 (match_operator:SI 6 "shiftable_operator"
10571 [(match_operand:SI 2 "s_register_operand" "r")
10572 (match_operand:SI 3 "arm_rhs_operand" "rI")])
10573 (not:SI (match_operand:SI 1 "s_register_operand" "r"))))]
10575 "mvn%D5\\t%0, %1\;%I6%d5\\t%0, %2, %3"
10576 [(set_attr "conds" "use")
10577 (set_attr "type" "multiple")
10578 (set_attr "length" "8")]
10581 (define_insn "*ifcompare_neg_move"
10582 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10584 (match_operator 5 "arm_comparison_operator"
10585 [(match_operand:SI 3 "s_register_operand" "r,r")
10586 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10587 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))
10588 (match_operand:SI 1 "arm_not_operand" "0,?rIK")))
10589 (clobber (reg:CC CC_REGNUM))]
10592 [(set_attr "conds" "clob")
10593 (set_attr "length" "8,12")
10594 (set_attr "type" "multiple")]
;; Conditionally negate: if the condition (already in CC register
;; operand 3) holds, dest := -operand 2, else keep operand 1 (tied to the
;; destination via the "0,0" constraints).  After reload this splits into
;; a single cond_exec'd neg, reusing the original comparison operator
;; directly (match_op_dup 4) since the "then" arm is the negation.
;; Alternative 0 ("l" regs, enabled_for_short_it) targets Thumb-2 short
;; IT blocks; alternative 1 is the generic 32-bit form.
10597 (define_insn_and_split "*if_neg_move"
10598 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
10600 (match_operator 4 "arm_comparison_operator"
10601 [(match_operand 3 "cc_register" "") (const_int 0)])
10602 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))
10603 (match_operand:SI 1 "s_register_operand" "0,0")))]
10606 "&& reload_completed"
10607 [(cond_exec (match_op_dup 4 [(match_dup 3) (const_int 0)])
10608 (set (match_dup 0) (neg:SI (match_dup 2))))]
10610 [(set_attr "conds" "use")
10611 (set_attr "length" "4")
10612 (set_attr "arch" "t2,32")
10613 (set_attr "enabled_for_short_it" "yes,no")
10614 (set_attr "type" "logic_shift_imm")]
10617 (define_insn "*ifcompare_move_neg"
10618 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
10620 (match_operator 5 "arm_comparison_operator"
10621 [(match_operand:SI 3 "s_register_operand" "r,r")
10622 (match_operand:SI 4 "arm_add_operand" "rIL,rIL")])
10623 (match_operand:SI 1 "arm_not_operand" "0,?rIK")
10624 (neg:SI (match_operand:SI 2 "s_register_operand" "r,r"))))
10625 (clobber (reg:CC CC_REGNUM))]
10628 [(set_attr "conds" "clob")
10629 (set_attr "length" "8,12")
10630 (set_attr "type" "multiple")]
;; Mirror of *if_neg_move: the negation sits on the "else" arm, so the
;; post-reload split must predicate the neg on the REVERSED condition.
;; The C preparation code builds that reversed comparison into
;; operands[5], using reverse_condition_maybe_unordered for the FP CC
;; modes (where plain reversal would mishandle unordered results) and
;; reverse_condition otherwise.
10633 (define_insn_and_split "*if_move_neg"
10634 [(set (match_operand:SI 0 "s_register_operand" "=l,r")
10636 (match_operator 4 "arm_comparison_operator"
10637 [(match_operand 3 "cc_register" "") (const_int 0)])
10638 (match_operand:SI 1 "s_register_operand" "0,0")
10639 (neg:SI (match_operand:SI 2 "s_register_operand" "l,r"))))]
10642 "&& reload_completed"
10643 [(cond_exec (match_dup 5)
10644 (set (match_dup 0) (neg:SI (match_dup 2))))]
10646 machine_mode mode = GET_MODE (operands[3]);
10647 rtx_code rc = GET_CODE (operands[4]);
10649 if (mode == CCFPmode || mode == CCFPEmode)
10650 rc = reverse_condition_maybe_unordered (rc);
10652 rc = reverse_condition (rc);
10654 operands[5] = gen_rtx_fmt_ee (rc, VOIDmode, operands[3], const0_rtx);
10656 [(set_attr "conds" "use")
10657 (set_attr "length" "4")
10658 (set_attr "arch" "t2,32")
10659 (set_attr "enabled_for_short_it" "yes,no")
10660 (set_attr "type" "logic_shift_imm")]
10663 (define_insn "*arith_adjacentmem"
10664 [(set (match_operand:SI 0 "s_register_operand" "=r")
10665 (match_operator:SI 1 "shiftable_operator"
10666 [(match_operand:SI 2 "memory_operand" "m")
10667 (match_operand:SI 3 "memory_operand" "m")]))
10668 (clobber (match_scratch:SI 4 "=r"))]
10669 "TARGET_ARM && adjacent_mem_locations (operands[2], operands[3])"
10675 HOST_WIDE_INT val1 = 0, val2 = 0;
10677 if (REGNO (operands[0]) > REGNO (operands[4]))
10679 ldm[1] = operands[4];
10680 ldm[2] = operands[0];
10684 ldm[1] = operands[0];
10685 ldm[2] = operands[4];
10688 base_reg = XEXP (operands[2], 0);
10690 if (!REG_P (base_reg))
10692 val1 = INTVAL (XEXP (base_reg, 1));
10693 base_reg = XEXP (base_reg, 0);
10696 if (!REG_P (XEXP (operands[3], 0)))
10697 val2 = INTVAL (XEXP (XEXP (operands[3], 0), 1));
10699 arith[0] = operands[0];
10700 arith[3] = operands[1];
10714 if (val1 !=0 && val2 != 0)
10718 if (val1 == 4 || val2 == 4)
10719 /* Other val must be 8, since we know they are adjacent and neither
10721 output_asm_insn (\"ldmib%?\\t%0, {%1, %2}\", ldm);
10722 else if (const_ok_for_arm (val1) || const_ok_for_arm (-val1))
10724 ldm[0] = ops[0] = operands[4];
10726 ops[2] = GEN_INT (val1);
10727 output_add_immediate (ops);
10729 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10731 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10735 /* Offset is out of range for a single add, so use two ldr. */
10738 ops[2] = GEN_INT (val1);
10739 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10741 ops[2] = GEN_INT (val2);
10742 output_asm_insn (\"ldr%?\\t%0, [%1, %2]\", ops);
10745 else if (val1 != 0)
10748 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10750 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10755 output_asm_insn (\"ldmia%?\\t%0, {%1, %2}\", ldm);
10757 output_asm_insn (\"ldmda%?\\t%0, {%1, %2}\", ldm);
10759 output_asm_insn (\"%I3%?\\t%0, %1, %2\", arith);
10762 [(set_attr "length" "12")
10763 (set_attr "predicable" "yes")
10764 (set_attr "type" "load_4")]
10767 ; This pattern is never tried by combine, so do it as a peephole
10770 [(set (match_operand:SI 0 "arm_general_register_operand" "")
10771 (match_operand:SI 1 "arm_general_register_operand" ""))
10772 (set (reg:CC CC_REGNUM)
10773 (compare:CC (match_dup 1) (const_int 0)))]
10775 [(parallel [(set (reg:CC CC_REGNUM) (compare:CC (match_dup 1) (const_int 0)))
10776 (set (match_dup 0) (match_dup 1))])]
10781 [(set (match_operand:SI 0 "s_register_operand" "")
10782 (and:SI (ge:SI (match_operand:SI 1 "s_register_operand" "")
10784 (neg:SI (match_operator:SI 2 "arm_comparison_operator"
10785 [(match_operand:SI 3 "s_register_operand" "")
10786 (match_operand:SI 4 "arm_rhs_operand" "")]))))
10787 (clobber (match_operand:SI 5 "s_register_operand" ""))]
10789 [(set (match_dup 5) (not:SI (ashiftrt:SI (match_dup 1) (const_int 31))))
10790 (set (match_dup 0) (and:SI (match_op_dup 2 [(match_dup 3) (match_dup 4)])
10795 ;; This split can be used because CC_Z mode implies that the following
10796 ;; branch will be an equality, or an unsigned inequality, so the sign
10797 ;; extension is not needed.
10800 [(set (reg:CC_Z CC_REGNUM)
10802 (ashift:SI (subreg:SI (match_operand:QI 0 "memory_operand" "") 0)
10804 (match_operand 1 "const_int_operand" "")))
10805 (clobber (match_scratch:SI 2 ""))]
10807 && ((UINTVAL (operands[1]))
10808 == ((UINTVAL (operands[1])) >> 24) << 24)"
10809 [(set (match_dup 2) (zero_extend:SI (match_dup 0)))
10810 (set (reg:CC CC_REGNUM) (compare:CC (match_dup 2) (match_dup 1)))]
10812 operands[1] = GEN_INT (((unsigned long) INTVAL (operands[1])) >> 24);
10815 ;; ??? Check the patterns above for Thumb-2 usefulness
10817 (define_expand "prologue"
10818 [(clobber (const_int 0))]
10821 arm_expand_prologue ();
10823 thumb1_expand_prologue ();
10828 (define_expand "epilogue"
10829 [(clobber (const_int 0))]
10832 if (crtl->calls_eh_return)
10833 emit_insn (gen_force_register_use (gen_rtx_REG (Pmode, 2)));
10836 thumb1_expand_epilogue ();
10837 emit_jump_insn (gen_rtx_UNSPEC_VOLATILE (VOIDmode,
10838 gen_rtvec (1, ret_rtx), VUNSPEC_EPILOGUE));
10840 else if (HAVE_return)
10842 /* HAVE_return is testing for USE_RETURN_INSN (FALSE). Hence,
10843 no need for explicit testing again. */
10844 emit_jump_insn (gen_return ());
10846 else if (TARGET_32BIT)
10848 arm_expand_epilogue (true);
10854 ;; Note - although unspec_volatile's USE all hard registers,
10855 ;; USEs are ignored after reload has completed. Thus we need
10856 ;; to add an unspec of the link register to ensure that flow
10857 ;; does not think that it is unused by the sibcall branch that
10858 ;; will replace the standard function epilogue.
10859 (define_expand "sibcall_epilogue"
10860 [(parallel [(unspec:SI [(reg:SI LR_REGNUM)] UNSPEC_REGISTER_USE)
10861 (unspec_volatile [(return)] VUNSPEC_EPILOGUE)])]
10864 arm_expand_epilogue (false);
10869 (define_expand "eh_epilogue"
10870 [(use (match_operand:SI 0 "register_operand"))
10871 (use (match_operand:SI 1 "register_operand"))
10872 (use (match_operand:SI 2 "register_operand"))]
10876 cfun->machine->eh_epilogue_sp_ofs = operands[1];
10877 if (!REG_P (operands[2]) || REGNO (operands[2]) != 2)
10879 rtx ra = gen_rtx_REG (Pmode, 2);
10881 emit_move_insn (ra, operands[2]);
10884 /* This is a hack -- we may have crystalized the function type too
10886 cfun->machine->func_type = 0;
10890 ;; This split is only used during output to reduce the number of patterns
10891 ;; that need assembler instructions adding to them. We allowed the setting
10892 ;; of the conditions to be implicit during rtl generation so that
10893 ;; the conditional compare patterns would work. However this conflicts to
10894 ;; some extent with the conditional data operations, so we have to split them
10897 ;; ??? Need to audit these splitters for Thumb-2. Why isn't normal
10898 ;; conditional execution sufficient?
10901 [(set (match_operand:SI 0 "s_register_operand" "")
10902 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10903 [(match_operand 2 "" "") (match_operand 3 "" "")])
10905 (match_operand 4 "" "")))
10906 (clobber (reg:CC CC_REGNUM))]
10907 "TARGET_ARM && reload_completed"
10908 [(set (match_dup 5) (match_dup 6))
10909 (cond_exec (match_dup 7)
10910 (set (match_dup 0) (match_dup 4)))]
10913 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10914 operands[2], operands[3]);
10915 enum rtx_code rc = GET_CODE (operands[1]);
10917 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10918 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10919 if (mode == CCFPmode || mode == CCFPEmode)
10920 rc = reverse_condition_maybe_unordered (rc);
10922 rc = reverse_condition (rc);
10924 operands[7] = gen_rtx_fmt_ee (rc, VOIDmode, operands[5], const0_rtx);
10929 [(set (match_operand:SI 0 "s_register_operand" "")
10930 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10931 [(match_operand 2 "" "") (match_operand 3 "" "")])
10932 (match_operand 4 "" "")
10934 (clobber (reg:CC CC_REGNUM))]
10935 "TARGET_ARM && reload_completed"
10936 [(set (match_dup 5) (match_dup 6))
10937 (cond_exec (match_op_dup 1 [(match_dup 5) (const_int 0)])
10938 (set (match_dup 0) (match_dup 4)))]
10941 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10942 operands[2], operands[3]);
10944 operands[5] = gen_rtx_REG (mode, CC_REGNUM);
10945 operands[6] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10950 [(set (match_operand:SI 0 "s_register_operand" "")
10951 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10952 [(match_operand 2 "" "") (match_operand 3 "" "")])
10953 (match_operand 4 "" "")
10954 (match_operand 5 "" "")))
10955 (clobber (reg:CC CC_REGNUM))]
10956 "TARGET_ARM && reload_completed"
10957 [(set (match_dup 6) (match_dup 7))
10958 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10959 (set (match_dup 0) (match_dup 4)))
10960 (cond_exec (match_dup 8)
10961 (set (match_dup 0) (match_dup 5)))]
10964 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10965 operands[2], operands[3]);
10966 enum rtx_code rc = GET_CODE (operands[1]);
10968 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
10969 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
10970 if (mode == CCFPmode || mode == CCFPEmode)
10971 rc = reverse_condition_maybe_unordered (rc);
10973 rc = reverse_condition (rc);
10975 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
10980 [(set (match_operand:SI 0 "s_register_operand" "")
10981 (if_then_else:SI (match_operator 1 "arm_comparison_operator"
10982 [(match_operand:SI 2 "s_register_operand" "")
10983 (match_operand:SI 3 "arm_add_operand" "")])
10984 (match_operand:SI 4 "arm_rhs_operand" "")
10986 (match_operand:SI 5 "s_register_operand" ""))))
10987 (clobber (reg:CC CC_REGNUM))]
10988 "TARGET_ARM && reload_completed"
10989 [(set (match_dup 6) (match_dup 7))
10990 (cond_exec (match_op_dup 1 [(match_dup 6) (const_int 0)])
10991 (set (match_dup 0) (match_dup 4)))
10992 (cond_exec (match_dup 8)
10993 (set (match_dup 0) (not:SI (match_dup 5))))]
10996 machine_mode mode = SELECT_CC_MODE (GET_CODE (operands[1]),
10997 operands[2], operands[3]);
10998 enum rtx_code rc = GET_CODE (operands[1]);
11000 operands[6] = gen_rtx_REG (mode, CC_REGNUM);
11001 operands[7] = gen_rtx_COMPARE (mode, operands[2], operands[3]);
11002 if (mode == CCFPmode || mode == CCFPEmode)
11003 rc = reverse_condition_maybe_unordered (rc);
11005 rc = reverse_condition (rc);
11007 operands[8] = gen_rtx_fmt_ee (rc, VOIDmode, operands[6], const0_rtx);
;; Conditional select of operand 1 versus NOT operand 2, condition
;; already in CC register operand 3.  Alternative 0 (operand 1 tied to
;; the destination) is one predicated mvn; alternative 1 needs a leading
;; mov%d4 and hence length 8 / type "multiple".
11011 (define_insn "*cond_move_not"
11012 [(set (match_operand:SI 0 "s_register_operand" "=r,r")
11013 (if_then_else:SI (match_operator 4 "arm_comparison_operator"
11014 [(match_operand 3 "cc_register" "") (const_int 0)])
11015 (match_operand:SI 1 "arm_rhs_operand" "0,?rI")
11017 (match_operand:SI 2 "s_register_operand" "r,r"))))]
11021 mov%d4\\t%0, %1\;mvn%D4\\t%0, %2"
11022 [(set_attr "conds" "use")
11023 (set_attr "type" "mvn_reg,multiple")
11024 (set_attr "length" "4,8")]
11027 ;; The next two patterns occur when an AND operation is followed by a
11028 ;; scc insn sequence
11030 (define_insn "*sign_extract_onebit"
11031 [(set (match_operand:SI 0 "s_register_operand" "=r")
11032 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11034 (match_operand:SI 2 "const_int_operand" "n")))
11035 (clobber (reg:CC CC_REGNUM))]
11038 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11039 output_asm_insn (\"ands\\t%0, %1, %2\", operands);
11040 return \"mvnne\\t%0, #0\";
11042 [(set_attr "conds" "clob")
11043 (set_attr "length" "8")
11044 (set_attr "type" "multiple")]
11047 (define_insn "*not_signextract_onebit"
11048 [(set (match_operand:SI 0 "s_register_operand" "=r")
11050 (sign_extract:SI (match_operand:SI 1 "s_register_operand" "r")
11052 (match_operand:SI 2 "const_int_operand" "n"))))
11053 (clobber (reg:CC CC_REGNUM))]
11056 operands[2] = GEN_INT (1 << INTVAL (operands[2]));
11057 output_asm_insn (\"tst\\t%1, %2\", operands);
11058 output_asm_insn (\"mvneq\\t%0, #0\", operands);
11059 return \"movne\\t%0, #0\";
11061 [(set_attr "conds" "clob")
11062 (set_attr "length" "12")
11063 (set_attr "type" "multiple")]
11065 ;; ??? The above patterns need auditing for Thumb-2
11067 ;; Push multiple registers to the stack. Registers are in parallel (use ...)
11068 ;; expressions. For simplicity, the first register is also in the unspec
11070 ;; To avoid the usage of GNU extension, the length attribute is computed
11071 ;; in a C function arm_attr_length_push_multi.
11072 (define_insn "*push_multi"
11073 [(match_parallel 2 "multi_register_push"
11074 [(set (match_operand:BLK 0 "push_mult_memory_operand" "")
11075 (unspec:BLK [(match_operand:SI 1 "s_register_operand" "")]
11076 UNSPEC_PUSH_MULT))])]
11080 int num_saves = XVECLEN (operands[2], 0);
11082 /* For the StrongARM at least it is faster to
11083 use STR to store only a single register.
11084 In Thumb mode always use push, and the assembler will pick
11085 something appropriate. */
11086 if (num_saves == 1 && TARGET_ARM)
11087 output_asm_insn (\"str%?\\t%1, [%m0, #-4]!\", operands);
11094 strcpy (pattern, \"push%?\\t{%1\");
11096 strcpy (pattern, \"push\\t{%1\");
11098 for (i = 1; i < num_saves; i++)
11100 strcat (pattern, \", %|\");
11102 reg_names[REGNO (XEXP (XVECEXP (operands[2], 0, i), 0))]);
11105 strcat (pattern, \"}\");
11106 output_asm_insn (pattern, operands);
11111 [(set_attr "type" "store_16")
11112 (set (attr "length")
11113 (symbol_ref "arm_attr_length_push_multi (operands[2], operands[1])"))]
;; stack_tie: a zero-length (length "0") insn whose pattern writes a
;; wildcard BLK memory from an unspec of two stack-related registers
;; ("rk" constraints allow sp).  Emits no code; presumably it exists to
;; create a memory dependence that stops the scheduler moving stack
;; accesses across stack-pointer adjustments -- the condition/template
;; lines are not visible in this chunk, so confirm against the full file.
11116 (define_insn "stack_tie"
11117 [(set (mem:BLK (scratch))
11118 (unspec:BLK [(match_operand:SI 0 "s_register_operand" "rk")
11119 (match_operand:SI 1 "s_register_operand" "rk")]
11123 [(set_attr "length" "0")
11124 (set_attr "type" "block")]
11127 ;; Pop (as used in epilogue RTL)
;; LDM with base-register writeback: the match_parallel head matches the
;; base update (base += const I-range offset) plus one representative
;; register load; load_multiple_operation validates the remaining elements.
;; Output is produced by arm_output_multireg_pop; the length attribute is
;; computed in C because the encoding differs between ARM and Thumb-2.
;; NOTE(review): several lines of each pattern (return insn element,
;; closing parens, trailing arguments) are missing from this extraction.
11129 (define_insn "*load_multiple_with_writeback"
11130 [(match_parallel 0 "load_multiple_operation"
11131 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11132 (plus:SI (match_dup 1)
11133 (match_operand:SI 2 "const_int_I_operand" "I")))
11134 (set (match_operand:SI 3 "s_register_operand" "=rk")
11135 (mem:SI (match_dup 1)))
;; Only valid once register allocation has decided the register list.
11137 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11140 arm_output_multireg_pop (operands, /*return_pc=*/false,
11141 /*cond=*/const_true_rtx,
11147 [(set_attr "type" "load_16")
11148 (set_attr "predicable" "yes")
11149 (set (attr "length")
11150 (symbol_ref "arm_attr_length_pop_multi (operands,
11151 /*return_pc=*/false,
11152 /*write_back_p=*/true)"))]
11155 ;; Pop with return (as used in epilogue RTL)
11157 ;; This instruction is generated when the registers are popped at the end of
11158 ;; epilogue. Here, instead of popping the value into LR and then generating
11159 ;; jump to LR, value is popped into PC directly. Hence, the pattern is combined
;; Same as above but return_pc=true: the final loaded register is PC, so
;; the pop itself performs the function return.
11161 (define_insn "*pop_multiple_with_writeback_and_return"
11162 [(match_parallel 0 "pop_multiple_return"
11164 (set (match_operand:SI 1 "s_register_operand" "+rk")
11165 (plus:SI (match_dup 1)
11166 (match_operand:SI 2 "const_int_I_operand" "I")))
11167 (set (match_operand:SI 3 "s_register_operand" "=rk")
11168 (mem:SI (match_dup 1)))
11170 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11173 arm_output_multireg_pop (operands, /*return_pc=*/true,
11174 /*cond=*/const_true_rtx,
11180 [(set_attr "type" "load_16")
11181 (set_attr "predicable" "yes")
11182 (set (attr "length")
11183 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11184 /*write_back_p=*/true)"))]
;; Pop-with-return variant WITHOUT base writeback (write_back_p=false):
;; the base register is read but not updated.
11187 (define_insn "*pop_multiple_with_return"
11188 [(match_parallel 0 "pop_multiple_return"
11190 (set (match_operand:SI 2 "s_register_operand" "=rk")
11191 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11193 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11196 arm_output_multireg_pop (operands, /*return_pc=*/true,
11197 /*cond=*/const_true_rtx,
11203 [(set_attr "type" "load_16")
11204 (set_attr "predicable" "yes")
11205 (set (attr "length")
11206 (symbol_ref "arm_attr_length_pop_multi (operands, /*return_pc=*/true,
11207 /*write_back_p=*/false)"))]
11210 ;; Load into PC and return
;; Single-register return: load PC from [rN] with post-increment by 4,
;; i.e. pop one word straight into the program counter.
11211 (define_insn "*ldr_with_return"
11213 (set (reg:SI PC_REGNUM)
11214 (mem:SI (post_inc:SI (match_operand:SI 0 "s_register_operand" "+rk"))))]
11215 "TARGET_32BIT && (reload_in_progress || reload_completed)"
11216 "ldr%?\t%|pc, [%0], #4"
11217 [(set_attr "type" "load_4")
11218 (set_attr "predicable" "yes")]
11220 ;; Pop for floating point registers (as used in epilogue RTL)
;; Matches a PARALLEL of DF-mode loads from a writeback base and prints a
;; "vldm <base>!, {dN[-dM]}" string.  Element 0 of the PARALLEL is the base
;; update; elements 1..num_regs-1 are the register loads, so the pop count
;; is num_regs - 1.  %P prints a DF register as its D-register name.
;; NOTE(review): declarations of `pattern` and `op_list`, and some braces,
;; are missing from this extraction.
11221 (define_insn "*vfp_pop_multiple_with_writeback"
11222 [(match_parallel 0 "pop_multiple_fp"
11223 [(set (match_operand:SI 1 "s_register_operand" "+rk")
11224 (plus:SI (match_dup 1)
11225 (match_operand:SI 2 "const_int_I_operand" "I")))
11226 (set (match_operand:DF 3 "vfp_hard_register_operand" "")
11227 (mem:DF (match_dup 1)))])]
11228 "TARGET_32BIT && TARGET_HARD_FLOAT"
11231 int num_regs = XVECLEN (operands[0], 0);
11234 strcpy (pattern, \"vldm\\t\");
11235 strcat (pattern, reg_names[REGNO (SET_DEST (XVECEXP (operands[0], 0, 0)))]);
11236 strcat (pattern, \"!, {\");
11237 op_list[0] = XEXP (XVECEXP (operands[0], 0, 1), 0);
11238 strcat (pattern, \"%P0\");
;; More than one FP register popped: print a first-last register range.
11239 if ((num_regs - 1) > 1)
11241 strcat (pattern, \"-%P1\");
11242 op_list [1] = XEXP (XVECEXP (operands[0], 0, num_regs - 1), 0);
11245 strcat (pattern, \"}\");
11246 output_asm_insn (pattern, op_list);
;; vldm cannot be conditionally executed here.
11250 [(set_attr "type" "load_16")
11251 (set_attr "conds" "unconditional")
11252 (set_attr "predicable" "no")]
11255 ;; Special patterns for dealing with the constant pool
;; align_4 / align_8: emit directives aligning the pool to 32/64 bits.
;; consttable_end: marks the end of a pool; clears the global flag the
;; consttable_* patterns set while assembling pool entries.
11257 (define_insn "align_4"
11258 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN)]
11261 assemble_align (32);
11264 [(set_attr "type" "no_insn")]
11267 (define_insn "align_8"
11268 [(unspec_volatile [(const_int 0)] VUNSPEC_ALIGN8)]
11271 assemble_align (64);
11274 [(set_attr "type" "no_insn")]
11277 (define_insn "consttable_end"
11278 [(unspec_volatile [(const_int 0)] VUNSPEC_POOL_END)]
11281 making_const_table = FALSE;
11284 [(set_attr "type" "no_insn")]
;; consttable_N: emit an N-byte entry into the constant pool.  Each entry
;; is padded out to its slot size (consttable_1 pads with 3 zero bytes,
;; consttable_2 with 2) so the "length" attribute stays a multiple of 4.
;; NOTE(review): the switch-case labels and several closing braces of
;; these C bodies are missing from this extraction.
11287 (define_insn "consttable_1"
11288 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_1)]
11291 making_const_table = TRUE;
11292 assemble_integer (operands[0], 1, BITS_PER_WORD, 1);
11293 assemble_zeros (3);
11296 [(set_attr "length" "4")
11297 (set_attr "type" "no_insn")]
;; 2-byte entry: HFmode constants go through arm_emit_fp16_const, other
;; modes are emitted as a 2-byte integer plus 2 bytes of padding.
11300 (define_insn "consttable_2"
11301 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_2)]
11305 rtx x = operands[0];
11306 making_const_table = TRUE;
11307 switch (GET_MODE_CLASS (GET_MODE (x)))
11310 arm_emit_fp16_const (x);
11313 assemble_integer (operands[0], 2, BITS_PER_WORD, 1);
11314 assemble_zeros (2);
11319 [(set_attr "length" "4")
11320 (set_attr "type" "no_insn")]
;; 4-byte entry: floats via assemble_real; a HIGH wrapper is stripped
;; first (see the XXX comment below).
11323 (define_insn "consttable_4"
11324 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_4)]
11328 rtx x = operands[0];
11329 making_const_table = TRUE;
11330 scalar_float_mode float_mode;
11331 if (is_a <scalar_float_mode> (GET_MODE (x), &float_mode))
11332 assemble_real (*CONST_DOUBLE_REAL_VALUE (x), float_mode, BITS_PER_WORD);
11335 /* XXX: Sometimes gcc does something really dumb and ends up with
11336 a HIGH in a constant pool entry, usually because it's trying to
11337 load into a VFP register. We know this will always be used in
11338 combination with a LO_SUM which ignores the high bits, so just
11339 strip off the HIGH. */
11340 if (GET_CODE (x) == HIGH)
11342 assemble_integer (x, 4, BITS_PER_WORD, 1);
11343 mark_symbol_refs_as_used (x);
11347 [(set_attr "length" "4")
11348 (set_attr "type" "no_insn")]
;; 8-byte entry (e.g. DFmode / DImode constants).
11351 (define_insn "consttable_8"
11352 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_8)]
11356 making_const_table = TRUE;
11357 scalar_float_mode float_mode;
11358 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11359 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11360 float_mode, BITS_PER_WORD);
11362 assemble_integer (operands[0], 8, BITS_PER_WORD, 1);
11365 [(set_attr "length" "8")
11366 (set_attr "type" "no_insn")]
;; 16-byte entry (vector constants).
11369 (define_insn "consttable_16"
11370 [(unspec_volatile [(match_operand 0 "" "")] VUNSPEC_POOL_16)]
11374 making_const_table = TRUE;
11375 scalar_float_mode float_mode;
11376 if (is_a <scalar_float_mode> (GET_MODE (operands[0]), &float_mode))
11377 assemble_real (*CONST_DOUBLE_REAL_VALUE (operands[0]),
11378 float_mode, BITS_PER_WORD);
11380 assemble_integer (operands[0], 16, BITS_PER_WORD, 1);
11383 [(set_attr "length" "16")
11384 (set_attr "type" "no_insn")]
11387 ;; V5 Instructions,
;; clzsi2: count leading zeros (CLZ, available from ARMv5T).
;; NOTE(review): the output template line is missing from this extraction.
11389 (define_insn "clzsi2"
11390 [(set (match_operand:SI 0 "s_register_operand" "=r")
11391 (clz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11392 "TARGET_32BIT && arm_arch5t"
11394 [(set_attr "predicable" "yes")
11395 (set_attr "type" "clz")])
;; rbitsi2: bit-reverse (RBIT), kept as an UNSPEC; requires Thumb-2-era
;; architectures.  Uses the "clz" scheduling type.
11397 (define_insn "rbitsi2"
11398 [(set (match_operand:SI 0 "s_register_operand" "=r")
11399 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")] UNSPEC_RBIT))]
11400 "TARGET_32BIT && arm_arch_thumb2"
11402 [(set_attr "predicable" "yes")
11403 (set_attr "type" "clz")])
11405 ;; Keep this as a CTZ expression until after reload and then split
11406 ;; into RBIT + CLZ. Since RBIT is represented as an UNSPEC it is unlikely
11407 ;; to fold with any other expression.
;; ctzsi2: count trailing zeros = clz(rbit(x)); operand 0 is used as the
;; temporary for the intermediate RBIT result.
11409 (define_insn_and_split "ctzsi2"
11410 [(set (match_operand:SI 0 "s_register_operand" "=r")
11411 (ctz:SI (match_operand:SI 1 "s_register_operand" "r")))]
11412 "TARGET_32BIT && arm_arch_thumb2"
11414 "&& reload_completed"
11417 emit_insn (gen_rbitsi2 (operands[0], operands[1]));
11418 emit_insn (gen_clzsi2 (operands[0], operands[0]));
11422 ;; V5E instructions.
;; prefetch: standard named pattern (PLD); operands 1 (rw) and 2 (locality)
;; are accepted but the output template is not visible in this extraction.
11424 (define_insn "prefetch"
11425 [(prefetch (match_operand:SI 0 "address_operand" "p")
11426 (match_operand:SI 1 "" "")
11427 (match_operand:SI 2 "" ""))]
11428 "TARGET_32BIT && arm_arch5te"
11430 [(set_attr "type" "load_4")]
11433 ;; General predication pattern
;; define_cond_exec-style wrapper matching any comparison against a CC
;; register; TARGET_NO_VOLATILE_CE blocks conditionalising insns that
;; touch volatile memory.  NOTE(review): the surrounding form's opening
;; line is missing from this extraction.
11436 [(match_operator 0 "arm_comparison_operator"
11437 [(match_operand 1 "cc_register" "")
11440 && (!TARGET_NO_VOLATILE_CE || !volatile_refs_p (PATTERN (insn)))"
11442 [(set_attr "predicated" "yes")]
;; force_register_use: zero-length insn that marks a register as used so
;; dataflow does not delete earlier definitions of it.
11445 (define_insn "force_register_use"
11446 [(unspec:SI [(match_operand:SI 0 "register_operand" "")] UNSPEC_REGISTER_USE)]
11449 [(set_attr "length" "0")
11450 (set_attr "type" "no_insn")]
11454 ;; Patterns for exception handling
;; eh_return: expands to the ARM or Thumb-1 specific pattern depending on
;; target mode (the if/else around the two emits is missing here).
11456 (define_expand "eh_return"
11457 [(use (match_operand 0 "general_operand"))]
11462 emit_insn (gen_arm_eh_return (operands[0]));
11464 emit_insn (gen_thumb_eh_return (operands[0]));
11469 ;; We can't expand this before we know where the link register is stored.
;; arm_eh_return: after reload, stores the EH handler address (operand 0)
;; into the return-address slot using scratch register operand 1.
11470 (define_insn_and_split "arm_eh_return"
11471 [(unspec_volatile [(match_operand:SI 0 "s_register_operand" "r")]
11473 (clobber (match_scratch:SI 1 "=&r"))]
11476 "&& reload_completed"
11480 arm_set_return_address (operands[0], operands[1]);
;; TLS support.
;; load_tp_hard: read the thread pointer from CP15 (TPIDRURO) with
;; "mrc p15, 0, rD, c13, c0, 3".
11488 (define_insn "load_tp_hard"
11489 [(set (match_operand:SI 0 "register_operand" "=r")
11490 (unspec:SI [(const_int 0)] UNSPEC_TLS))]
11492 "mrc%?\\tp15, 0, %0, c13, c0, 3\\t@ load_tp_hard"
11493 [(set_attr "predicable" "yes")
11494 (set_attr "type" "mrs")]
11497 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
;; Soft thread pointer via the __aeabi_read_tp helper call; FDPIC variant
;; additionally clobbers the FDPIC register (r9).
11498 (define_insn "load_tp_soft_fdpic"
11499 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11500 (clobber (reg:SI FDPIC_REGNUM))
11501 (clobber (reg:SI LR_REGNUM))
11502 (clobber (reg:SI IP_REGNUM))
11503 (clobber (reg:CC CC_REGNUM))]
11504 "TARGET_SOFT_TP && TARGET_FDPIC"
11505 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11506 [(set_attr "conds" "clob")
11507 (set_attr "type" "branch")]
11510 ;; Doesn't clobber R1-R3. Must use r0 for the first operand.
;; Non-FDPIC soft thread pointer: same helper call, no r9 clobber.
11511 (define_insn "load_tp_soft"
11512 [(set (reg:SI 0) (unspec:SI [(const_int 0)] UNSPEC_TLS))
11513 (clobber (reg:SI LR_REGNUM))
11514 (clobber (reg:SI IP_REGNUM))
11515 (clobber (reg:CC CC_REGNUM))]
11516 "TARGET_SOFT_TP && !TARGET_FDPIC"
11517 "bl\\t__aeabi_read_tp\\t@ load_tp_soft"
11518 [(set_attr "conds" "clob")
11519 (set_attr "type" "branch")]
11522 ;; tls descriptor call
;; tlscall: GNU2 TLS descriptor call.  Emits the "LPIC<n>" local label
;; used by the linker relaxation and then "bl sym(tlscall)"; the result
;; arrives in r0, with r1/lr/CC clobbered.
11523 (define_insn "tlscall"
11524 [(set (reg:SI R0_REGNUM)
11525 (unspec:SI [(reg:SI R0_REGNUM)
11526 (match_operand:SI 0 "" "X")
11527 (match_operand 1 "" "")] UNSPEC_TLS))
11528 (clobber (reg:SI R1_REGNUM))
11529 (clobber (reg:SI LR_REGNUM))
11530 (clobber (reg:SI CC_REGNUM))]
11533 targetm.asm_out.internal_label (asm_out_file, "LPIC",
11534 INTVAL (operands[1]));
11535 return "bl\\t%c0(tlscall)";
11537 [(set_attr "conds" "clob")
11538 (set_attr "length" "4")
11539 (set_attr "type" "branch")]
11542 ;; For thread pointer builtin
;; get_thread_pointersi: __builtin_thread_pointer; arm_load_tp picks the
;; hard or soft pattern above.
11543 (define_expand "get_thread_pointersi"
11544 [(match_operand:SI 0 "s_register_operand")]
11548 arm_load_tp (operands[0]);
11554 ;; We only care about the lower 16 bits of the constant
11555 ;; being inserted into the upper 16 bits of the register.
;; *arm_movtas_ze: MOVT — zero_extract insert of a 16-bit immediate into
;; the top half of a register.  NOTE(review): the zero_extract size and
;; position operands and the output templates are missing here.
11556 (define_insn "*arm_movtas_ze"
11557 [(set (zero_extract:SI (match_operand:SI 0 "s_register_operand" "+r,r")
11560 (match_operand:SI 1 "const_int_operand" ""))]
11565 [(set_attr "arch" "32,v8mb")
11566 (set_attr "predicable" "yes")
11567 (set_attr "length" "4")
11568 (set_attr "type" "alu_sreg")]
;; *arm_rev: 32-bit byte swap (REV) with Thumb-1/Thumb-2/ARM alternatives.
11571 (define_insn "*arm_rev"
11572 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11573 (bswap:SI (match_operand:SI 1 "s_register_operand" "l,l,r")))]
11579 [(set_attr "arch" "t1,t2,32")
11580 (set_attr "length" "2,2,4")
11581 (set_attr "predicable" "no,yes,yes")
11582 (set_attr "type" "rev")]
;; arm_legacy_rev: pre-ARMv6 open-coded byte swap using eor/shift/and
;; (shift amounts and several closing parens missing from this extraction).
11585 (define_expand "arm_legacy_rev"
11586 [(set (match_operand:SI 2 "s_register_operand")
11587 (xor:SI (rotatert:SI (match_operand:SI 1 "s_register_operand")
11591 (lshiftrt:SI (match_dup 2)
11593 (set (match_operand:SI 3 "s_register_operand")
11594 (rotatert:SI (match_dup 1)
11597 (and:SI (match_dup 2)
11598 (const_int -65281)))
11599 (set (match_operand:SI 0 "s_register_operand")
11600 (xor:SI (match_dup 3)
11606 ;; Reuse temporaries to keep register pressure down.
;; thumb_legacy_rev: Thumb-1 open-coded byte swap; operands 2-5 are
;; caller-supplied scratch registers.
11607 (define_expand "thumb_legacy_rev"
11608 [(set (match_operand:SI 2 "s_register_operand")
11609 (ashift:SI (match_operand:SI 1 "s_register_operand")
11611 (set (match_operand:SI 3 "s_register_operand")
11612 (lshiftrt:SI (match_dup 1)
11615 (ior:SI (match_dup 3)
11617 (set (match_operand:SI 4 "s_register_operand")
11619 (set (match_operand:SI 5 "s_register_operand")
11620 (rotatert:SI (match_dup 1)
11623 (ashift:SI (match_dup 5)
11626 (lshiftrt:SI (match_dup 5)
11629 (ior:SI (match_dup 5)
11632 (rotatert:SI (match_dup 5)
11634 (set (match_operand:SI 0 "s_register_operand")
11635 (ior:SI (match_dup 5)
11641 ;; ARM-specific expansion of signed mod by power of 2
11642 ;; using conditional negate.
11643 ;; For r0 % n where n is a power of 2 produce:
11645 ;; and r0, r0, #(n - 1)
11646 ;; and r1, r1, #(n - 1)
11647 ;; rsbpl r0, r1, #0
;; modsi3: only handles positive power-of-two divisors (FAIL otherwise —
;; the FAIL lines are not visible in this extraction).  Two cases:
;; n == 2 uses a compare + IF_THEN_ELSE negate of the masked value; the
;; general case negates first, masks both, then conditionally selects.
11649 (define_expand "modsi3"
11650 [(match_operand:SI 0 "register_operand")
11651 (match_operand:SI 1 "register_operand")
11652 (match_operand:SI 2 "const_int_operand")]
11655 HOST_WIDE_INT val = INTVAL (operands[2]);
;; Reject divisors that are not a power of two greater than 1.
11658 || exact_log2 (val) <= 0)
11661 rtx mask = GEN_INT (val - 1);
11663 /* In the special case of x0 % 2 we can do the even shorter:
11666 rsblt r0, r0, #0. */
11670 rtx cc_reg = arm_gen_compare_reg (LT,
11671 operands[1], const0_rtx, NULL_RTX);
11672 rtx cond = gen_rtx_LT (SImode, cc_reg, const0_rtx);
11673 rtx masked = gen_reg_rtx (SImode);
11675 emit_insn (gen_andsi3 (masked, operands[1], mask));
11676 emit_move_insn (operands[0],
11677 gen_rtx_IF_THEN_ELSE (SImode, cond,
11678 gen_rtx_NEG (SImode,
;; General case: compute 0 - op1 with flags, mask both signs' results.
11684 rtx neg_op = gen_reg_rtx (SImode);
11685 rtx_insn *insn = emit_insn (gen_subsi3_compare0 (neg_op, const0_rtx,
11688 /* Extract the condition register and mode. */
11689 rtx cmp = XVECEXP (PATTERN (insn), 0, 0);
11690 rtx cc_reg = SET_DEST (cmp);
11691 rtx cond = gen_rtx_GE (SImode, cc_reg, const0_rtx);
11693 emit_insn (gen_andsi3 (operands[0], operands[1], mask));
11695 rtx masked_neg = gen_reg_rtx (SImode);
11696 emit_insn (gen_andsi3 (masked_neg, neg_op, mask));
11698 /* We want a conditional negate here, but emitting COND_EXEC rtxes
11699 during expand does not always work. Do an IF_THEN_ELSE instead. */
11700 emit_move_insn (operands[0],
11701 gen_rtx_IF_THEN_ELSE (SImode, cond,
11702 gen_rtx_NEG (SImode, masked_neg),
;; bswapsi2: use REV when available (arm_arch6); otherwise open-code via
;; the legacy_rev expanders above (suppressed at -Os since the open-coded
;; sequence is long).  NOTE(review): the arch test / DONE lines are
;; missing from this extraction.
11710 (define_expand "bswapsi2"
11711 [(set (match_operand:SI 0 "s_register_operand")
11712 (bswap:SI (match_operand:SI 1 "s_register_operand")))]
11713 "TARGET_EITHER && (arm_arch6 || !optimize_size)"
11717 rtx op2 = gen_reg_rtx (SImode);
11718 rtx op3 = gen_reg_rtx (SImode);
;; Thumb-1 path needs two extra scratch registers.
11722 rtx op4 = gen_reg_rtx (SImode);
11723 rtx op5 = gen_reg_rtx (SImode);
11725 emit_insn (gen_thumb_legacy_rev (operands[0], operands[1],
11726 op2, op3, op4, op5));
11730 emit_insn (gen_arm_legacy_rev (operands[0], operands[1],
11739 ;; bswap16 patterns: use revsh and rev16 instructions for the signed
11740 ;; and unsigned variants, respectively. For rev16, expose
11741 ;; byte-swapping in the lower 16 bits only.
;; *arm_revsh: REVSH — byte-swap the low halfword and sign-extend.
11742 (define_insn "*arm_revsh"
11743 [(set (match_operand:SI 0 "s_register_operand" "=l,l,r")
11744 (sign_extend:SI (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r"))))]
11750 [(set_attr "arch" "t1,t2,32")
11751 (set_attr "length" "2,2,4")
11752 (set_attr "type" "rev")]
;; *arm_rev16 on HImode: plain 16-bit byte swap.
11755 (define_insn "*arm_rev16"
11756 [(set (match_operand:HI 0 "s_register_operand" "=l,l,r")
11757 (bswap:HI (match_operand:HI 1 "s_register_operand" "l,l,r")))]
11763 [(set_attr "arch" "t1,t2,32")
11764 (set_attr "length" "2,2,4")
11765 (set_attr "type" "rev")]
11768 ;; There are no canonicalisation rules for the position of the lshiftrt, ashift
11769 ;; operations within an IOR/AND RTX, therefore we have two patterns matching
11770 ;; each valid permutation.
;; arm_rev16si2: SImode REV16 recognised from its shift/mask expansion;
;; the mask immediates are validated by the aarch_rev16_* predicates.
11772 (define_insn "arm_rev16si2"
11773 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11774 (ior:SI (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "l,l,r")
11776 (match_operand:SI 3 "const_int_operand" "n,n,n"))
11777 (and:SI (lshiftrt:SI (match_dup 1)
11779 (match_operand:SI 2 "const_int_operand" "n,n,n"))))]
11781 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11782 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11784 [(set_attr "arch" "t1,t2,32")
11785 (set_attr "length" "2,2,4")
11786 (set_attr "type" "rev")]
;; Same pattern with the IOR operands in the other order.
11789 (define_insn "arm_rev16si2_alt"
11790 [(set (match_operand:SI 0 "register_operand" "=l,l,r")
11791 (ior:SI (and:SI (lshiftrt:SI (match_operand:SI 1 "register_operand" "l,l,r")
11793 (match_operand:SI 2 "const_int_operand" "n,n,n"))
11794 (and:SI (ashift:SI (match_dup 1)
11796 (match_operand:SI 3 "const_int_operand" "n,n,n"))))]
11798 && aarch_rev16_shleft_mask_imm_p (operands[3], SImode)
11799 && aarch_rev16_shright_mask_imm_p (operands[2], SImode)"
11801 [(set_attr "arch" "t1,t2,32")
11802 (set_attr "length" "2,2,4")
11803 (set_attr "type" "rev")]
;; bswaphi2: HImode byte swap (condition string not visible here).
11806 (define_expand "bswaphi2"
11807 [(set (match_operand:HI 0 "s_register_operand")
11808 (bswap:HI (match_operand:HI 1 "s_register_operand")))]
11813 ;; Patterns for LDRD/STRD in Thumb2 mode
;; Each pattern matches a pair of adjacent SImode loads/stores (offsets
;; differing by 4) and fuses them into one LDRD/STRD after reload.
;; operands_ok_ldrd_strd checks register/offset constraints (e.g. the
;; even/odd register pairing rules).
;; *thumb2_ldrd: loads at [base+imm] and [base+imm+4].
11815 (define_insn "*thumb2_ldrd"
11816 [(set (match_operand:SI 0 "s_register_operand" "=r")
11817 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11818 (match_operand:SI 2 "ldrd_strd_offset_operand" "Do"))))
11819 (set (match_operand:SI 3 "s_register_operand" "=r")
11820 (mem:SI (plus:SI (match_dup 1)
11821 (match_operand:SI 4 "const_int_operand" ""))))]
11822 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11823 && ((INTVAL (operands[2]) + 4) == INTVAL (operands[4]))
11824 && (operands_ok_ldrd_strd (operands[0], operands[3],
11825 operands[1], INTVAL (operands[2]),
11827 "ldrd%?\t%0, %3, [%1, %2]"
11828 [(set_attr "type" "load_8")
11829 (set_attr "predicable" "yes")])
;; *thumb2_ldrd_base: loads at [base] and [base+4].
11831 (define_insn "*thumb2_ldrd_base"
11832 [(set (match_operand:SI 0 "s_register_operand" "=r")
11833 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11834 (set (match_operand:SI 2 "s_register_operand" "=r")
11835 (mem:SI (plus:SI (match_dup 1)
11837 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11838 && (operands_ok_ldrd_strd (operands[0], operands[2],
11839 operands[1], 0, false, true))"
11840 "ldrd%?\t%0, %2, [%1]"
11841 [(set_attr "type" "load_8")
11842 (set_attr "predicable" "yes")])
;; *thumb2_ldrd_base_neg: loads at [base-4] and [base].
11844 (define_insn "*thumb2_ldrd_base_neg"
11845 [(set (match_operand:SI 0 "s_register_operand" "=r")
11846 (mem:SI (plus:SI (match_operand:SI 1 "s_register_operand" "rk")
11848 (set (match_operand:SI 2 "s_register_operand" "=r")
11849 (mem:SI (match_dup 1)))]
11850 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11851 && (operands_ok_ldrd_strd (operands[0], operands[2],
11852 operands[1], -4, false, true))"
11853 "ldrd%?\t%0, %2, [%1, #-4]"
11854 [(set_attr "type" "load_8")
11855 (set_attr "predicable" "yes")])
;; *thumb2_strd: stores at [base+imm] and [base+imm+4].
11857 (define_insn "*thumb2_strd"
11858 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11859 (match_operand:SI 1 "ldrd_strd_offset_operand" "Do")))
11860 (match_operand:SI 2 "s_register_operand" "r"))
11861 (set (mem:SI (plus:SI (match_dup 0)
11862 (match_operand:SI 3 "const_int_operand" "")))
11863 (match_operand:SI 4 "s_register_operand" "r"))]
11864 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11865 && ((INTVAL (operands[1]) + 4) == INTVAL (operands[3]))
11866 && (operands_ok_ldrd_strd (operands[2], operands[4],
11867 operands[0], INTVAL (operands[1]),
11869 "strd%?\t%2, %4, [%0, %1]"
11870 [(set_attr "type" "store_8")
11871 (set_attr "predicable" "yes")])
;; *thumb2_strd_base: stores at [base] and [base+4].
11873 (define_insn "*thumb2_strd_base"
11874 [(set (mem:SI (match_operand:SI 0 "s_register_operand" "rk"))
11875 (match_operand:SI 1 "s_register_operand" "r"))
11876 (set (mem:SI (plus:SI (match_dup 0)
11878 (match_operand:SI 2 "s_register_operand" "r"))]
11879 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11880 && (operands_ok_ldrd_strd (operands[1], operands[2],
11881 operands[0], 0, false, false))"
11882 "strd%?\t%1, %2, [%0]"
11883 [(set_attr "type" "store_8")
11884 (set_attr "predicable" "yes")])
;; *thumb2_strd_base_neg: stores at [base-4] and [base].
11886 (define_insn "*thumb2_strd_base_neg"
11887 [(set (mem:SI (plus:SI (match_operand:SI 0 "s_register_operand" "rk")
11889 (match_operand:SI 1 "s_register_operand" "r"))
11890 (set (mem:SI (match_dup 0))
11891 (match_operand:SI 2 "s_register_operand" "r"))]
11892 "TARGET_LDRD && TARGET_THUMB2 && reload_completed
11893 && (operands_ok_ldrd_strd (operands[1], operands[2],
11894 operands[0], -4, false, false))"
11895 "strd%?\t%1, %2, [%0, #-4]"
11896 [(set_attr "type" "store_8")
11897 (set_attr "predicable" "yes")])
11899 ;; ARMv8 CRC32 instructions.
;; Iterator-generated pattern covering crc32b/h/w and crc32cb/ch/cw;
;; <crc_variant>/<crc_mode> come from mode/code attribute definitions
;; elsewhere in the backend.  CRC instructions are unconditional.
11900 (define_insn "arm_<crc_variant>"
11901 [(set (match_operand:SI 0 "s_register_operand" "=r")
11902 (unspec:SI [(match_operand:SI 1 "s_register_operand" "r")
11903 (match_operand:<crc_mode> 2 "s_register_operand" "r")]
11906 "<crc_variant>\\t%0, %1, %2"
11907 [(set_attr "type" "crc")
11908 (set_attr "conds" "unconditional")]
11911 ;; Load the load/store double peephole optimizations.
11912 (include "ldrdstrd.md")
11914 ;; Load the load/store multiple patterns
11915 (include "ldmstm.md")
11917 ;; Patterns in ldmstm.md don't cover more than 4 registers. This pattern covers
11918 ;; large lists without explicit writeback generated for APCS_FRAME epilogue.
11919 ;; The operands are validated through the load_multiple_operation
11920 ;; match_parallel predicate rather than through constraints so enable it only
;; *load_multiple: no-writeback LDM for >4 registers; output shares
;; arm_output_multireg_pop with the pop patterns above (return_pc=false).
11922 (define_insn "*load_multiple"
11923 [(match_parallel 0 "load_multiple_operation"
11924 [(set (match_operand:SI 2 "s_register_operand" "=rk")
11925 (mem:SI (match_operand:SI 1 "s_register_operand" "rk")))
11927 "TARGET_32BIT && reload_completed"
11930 arm_output_multireg_pop (operands, /*return_pc=*/false,
11931 /*cond=*/const_true_rtx,
11937 [(set_attr "predicable" "yes")]
;; copysignsf3 (soft-float, Thumb-2): copy operand 2 into the result,
;; then use BFI (insv_t2) to overwrite bits 0..30 with the value bits of
;; operand 1, leaving operand 2's sign bit in place.
11940 (define_expand "copysignsf3"
11941 [(match_operand:SF 0 "register_operand")
11942 (match_operand:SF 1 "register_operand")
11943 (match_operand:SF 2 "register_operand")]
11944 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11946 emit_move_insn (operands[0], operands[2]);
11947 emit_insn (gen_insv_t2 (simplify_gen_subreg (SImode, operands[0], SFmode, 0),
11948 GEN_INT (31), GEN_INT (0),
11949 simplify_gen_subreg (SImode, operands[1], SFmode, 0)));
;; copysigndf3: same idea for DFmode — only the high word carries the
;; sign.  Shift operand 2's sign bit down to bit 0, BFI it into bit 31 of
;; operand 1's high word, and copy the low word through unchanged.
11954 (define_expand "copysigndf3"
11955 [(match_operand:DF 0 "register_operand")
11956 (match_operand:DF 1 "register_operand")
11957 (match_operand:DF 2 "register_operand")]
11958 "TARGET_SOFT_FLOAT && arm_arch_thumb2"
11960 rtx op0_low = gen_lowpart (SImode, operands[0]);
11961 rtx op0_high = gen_highpart (SImode, operands[0]);
11962 rtx op1_low = gen_lowpart (SImode, operands[1]);
11963 rtx op1_high = gen_highpart (SImode, operands[1]);
11964 rtx op2_high = gen_highpart (SImode, operands[2]);
11966 rtx scratch1 = gen_reg_rtx (SImode);
11967 rtx scratch2 = gen_reg_rtx (SImode);
11968 emit_move_insn (scratch1, op2_high);
11969 emit_move_insn (scratch2, op1_high);
11971 emit_insn(gen_rtx_SET(scratch1,
11972 gen_rtx_LSHIFTRT (SImode, op2_high, GEN_INT(31))))
;; (review note: the line above appears truncated in this extraction)
11973 emit_insn(gen_insv_t2(scratch2, GEN_INT(1), GEN_INT(31), scratch1));
11974 emit_move_insn (op0_low, op1_low);
11975 emit_move_insn (op0_high, scratch2);
11981 ;; movmisalign patterns for HImode and SImode.
;; Unaligned move: loads go through an unaligned-load insn (HImode loads
;; zero-extend into an SImode temporary, then the low part is moved out);
;; stores use unaligned_store<mode>.  The expander must not FAIL, so a
;; mem := constant case forces operand 1 into a register first.
11982 (define_expand "movmisalign<mode>"
11983 [(match_operand:HSI 0 "general_operand")
11984 (match_operand:HSI 1 "general_operand")]
11987 /* This pattern is not permitted to fail during expansion: if both arguments
11988 are non-registers (e.g. memory := constant), force operand 1 into a
11990 rtx (* gen_unaligned_load)(rtx, rtx);
11991 rtx tmp_dest = operands[0];
11992 if (!s_register_operand (operands[0], <MODE>mode)
11993 && !s_register_operand (operands[1], <MODE>mode))
11994 operands[1] = force_reg (<MODE>mode, operands[1]);
11996 if (<MODE>mode == HImode)
11998 gen_unaligned_load = gen_unaligned_loadhiu;
11999 tmp_dest = gen_reg_rtx (SImode);
12002 gen_unaligned_load = gen_unaligned_loadsi;
12004 if (MEM_P (operands[1]))
12006 emit_insn (gen_unaligned_load (tmp_dest, operands[1]));
12007 if (<MODE>mode == HImode)
12008 emit_move_insn (operands[0], gen_lowpart (HImode, tmp_dest));
12011 emit_insn (gen_unaligned_store<mode> (operands[0], operands[1]));
;; Coprocessor-access builtins (ACLE __arm_cdp/ldc/stc/mcr/mrc/mcrr/mrrc
;; and their "2" forms, via iterators).  arm_const_bounds range-checks
;; each immediate at output time: coprocessor number < 16, opcode fields
;; < 8 or < 32, coprocessor register numbers < 32.
12016 (define_insn "arm_<cdp>"
12017 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12018 (match_operand:SI 1 "immediate_operand" "n")
12019 (match_operand:SI 2 "immediate_operand" "n")
12020 (match_operand:SI 3 "immediate_operand" "n")
12021 (match_operand:SI 4 "immediate_operand" "n")
12022 (match_operand:SI 5 "immediate_operand" "n")] CDPI)]
12023 "arm_coproc_builtin_available (VUNSPEC_<CDP>)"
12025 arm_const_bounds (operands[0], 0, 16);
12026 arm_const_bounds (operands[1], 0, 16);
12027 arm_const_bounds (operands[2], 0, (1 << 5));
12028 arm_const_bounds (operands[3], 0, (1 << 5));
12029 arm_const_bounds (operands[4], 0, (1 << 5));
12030 arm_const_bounds (operands[5], 0, 8);
12031 return "<cdp>\\tp%c0, %1, CR%c2, CR%c3, CR%c4, %5";
12033 [(set_attr "length" "4")
12034 (set_attr "type" "coproc")])
;; LDC: load coprocessor register from memory (Uz constrains the address
;; to LDC-compatible forms).
12036 (define_insn "*ldc"
12037 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12038 (match_operand:SI 1 "immediate_operand" "n")
12039 (match_operand:SI 2 "memory_operand" "Uz")] LDCI)]
12040 "arm_coproc_builtin_available (VUNSPEC_<LDC>)"
12042 arm_const_bounds (operands[0], 0, 16);
12043 arm_const_bounds (operands[1], 0, (1 << 5));
12044 return "<ldc>\\tp%c0, CR%c1, %2";
12046 [(set_attr "length" "4")
12047 (set_attr "type" "coproc")])
;; STC: store coprocessor register to memory.
12049 (define_insn "*stc"
12050 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12051 (match_operand:SI 1 "immediate_operand" "n")
12052 (match_operand:SI 2 "memory_operand" "=Uz")] STCI)]
12053 "arm_coproc_builtin_available (VUNSPEC_<STC>)"
12055 arm_const_bounds (operands[0], 0, 16);
12056 arm_const_bounds (operands[1], 0, (1 << 5));
12057 return "<stc>\\tp%c0, CR%c1, %2";
12059 [(set_attr "length" "4")
12060 (set_attr "type" "coproc")])
;; Expanders wrapping the *ldc/*stc insns: the builtin supplies a plain
;; register address which legitimization turns into a Uz memory operand.
12062 (define_expand "arm_<ldc>"
12063 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12064 (match_operand:SI 1 "immediate_operand")
12065 (mem:SI (match_operand:SI 2 "s_register_operand"))] LDCI)]
12066 "arm_coproc_builtin_available (VUNSPEC_<LDC>)")
12068 (define_expand "arm_<stc>"
12069 [(unspec_volatile [(match_operand:SI 0 "immediate_operand")
12070 (match_operand:SI 1 "immediate_operand")
12071 (mem:SI (match_operand:SI 2 "s_register_operand"))] STCI)]
12072 "arm_coproc_builtin_available (VUNSPEC_<STC>)")
;; MCR: move core register (operand 2) to coprocessor; the (use) keeps
;; the source register live alongside the volatile unspec.
12074 (define_insn "arm_<mcr>"
12075 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12076 (match_operand:SI 1 "immediate_operand" "n")
12077 (match_operand:SI 2 "s_register_operand" "r")
12078 (match_operand:SI 3 "immediate_operand" "n")
12079 (match_operand:SI 4 "immediate_operand" "n")
12080 (match_operand:SI 5 "immediate_operand" "n")] MCRI)
12081 (use (match_dup 2))]
12082 "arm_coproc_builtin_available (VUNSPEC_<MCR>)"
12084 arm_const_bounds (operands[0], 0, 16);
12085 arm_const_bounds (operands[1], 0, 8);
12086 arm_const_bounds (operands[3], 0, (1 << 5));
12087 arm_const_bounds (operands[4], 0, (1 << 5));
12088 arm_const_bounds (operands[5], 0, 8);
12089 return "<mcr>\\tp%c0, %1, %2, CR%c3, CR%c4, %5";
12091 [(set_attr "length" "4")
12092 (set_attr "type" "coproc")])
;; MRC: move coprocessor register to core register (result in operand 0).
12094 (define_insn "arm_<mrc>"
12095 [(set (match_operand:SI 0 "s_register_operand" "=r")
12096 (unspec_volatile:SI [(match_operand:SI 1 "immediate_operand" "n")
12097 (match_operand:SI 2 "immediate_operand" "n")
12098 (match_operand:SI 3 "immediate_operand" "n")
12099 (match_operand:SI 4 "immediate_operand" "n")
12100 (match_operand:SI 5 "immediate_operand" "n")] MRCI))]
12101 "arm_coproc_builtin_available (VUNSPEC_<MRC>)"
12103 arm_const_bounds (operands[1], 0, 16);
12104 arm_const_bounds (operands[2], 0, 8);
12105 arm_const_bounds (operands[3], 0, (1 << 5));
12106 arm_const_bounds (operands[4], 0, (1 << 5));
12107 arm_const_bounds (operands[5], 0, 8);
12108 return "<mrc>\\tp%c1, %2, %0, CR%c3, CR%c4, %5";
12110 [(set_attr "length" "4")
12111 (set_attr "type" "coproc")])
;; MCRR: move a DImode register pair to the coprocessor (%Q/%R print the
;; low/high words of operand 2).
12113 (define_insn "arm_<mcrr>"
12114 [(unspec_volatile [(match_operand:SI 0 "immediate_operand" "n")
12115 (match_operand:SI 1 "immediate_operand" "n")
12116 (match_operand:DI 2 "s_register_operand" "r")
12117 (match_operand:SI 3 "immediate_operand" "n")] MCRRI)
12118 (use (match_dup 2))]
12119 "arm_coproc_builtin_available (VUNSPEC_<MCRR>)"
12121 arm_const_bounds (operands[0], 0, 16);
12122 arm_const_bounds (operands[1], 0, 8);
12123 arm_const_bounds (operands[3], 0, (1 << 5));
12124 return "<mcrr>\\tp%c0, %1, %Q2, %R2, CR%c3";
12126 [(set_attr "length" "4")
12127 (set_attr "type" "coproc")])
;; MRRC: move from coprocessor into a DImode register pair.
12129 (define_insn "arm_<mrrc>"
12130 [(set (match_operand:DI 0 "s_register_operand" "=r")
12131 (unspec_volatile:DI [(match_operand:SI 1 "immediate_operand" "n")
12132 (match_operand:SI 2 "immediate_operand" "n")
12133 (match_operand:SI 3 "immediate_operand" "n")] MRRCI))]
12134 "arm_coproc_builtin_available (VUNSPEC_<MRRC>)"
12136 arm_const_bounds (operands[1], 0, 16);
12137 arm_const_bounds (operands[2], 0, 8);
12138 arm_const_bounds (operands[3], 0, (1 << 5));
12139 return "<mrrc>\\tp%c1, %2, %Q0, %R0, CR%c3";
12141 [(set_attr "length" "4")
12142 (set_attr "type" "coproc")])
;; speculation_barrier: named pattern used by __builtin_speculation_safe_value.
;; On pre-ARMv7 (and most Thumb-1) targets there is no usable barrier
;; instruction, so fall back to a libgcc helper that emits whatever the
;; running architecture needs.
12144 (define_expand "speculation_barrier"
12145 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12148 /* For thumb1 (except Armv8 derivatives), and for pre-Armv7 we don't
12149 have a usable barrier (and probably don't need one in practice).
12150 But to be safe if such code is run on later architectures, call a
12151 helper function in libgcc that will do the thing for the active
12153 if (!(arm_arch7 || arm_arch8))
12155 arm_emit_speculation_barrier_function ();
12161 ;; Generate a hard speculation barrier when we have not enabled speculation
;; Inline barrier for ARMv7/ARMv8: two instructions (length 8); the
;; output template (DSB/ISB sequence) is not visible in this extraction.
12163 (define_insn "*speculation_barrier_insn"
12164 [(unspec_volatile [(const_int 0)] VUNSPEC_SPECULATION_BARRIER)]
12165 "arm_arch7 || arm_arch8"
12167 [(set_attr "type" "block")
12168 (set_attr "length" "8")]
12171 ;; Vector bits common to IWMMXT and Neon
12172 (include "vec-common.md")
12173 ;; Load the Intel Wireless Multimedia Extension patterns
12174 (include "iwmmxt.md")
12175 ;; Load the VFP co-processor patterns
12177 ;; Thumb-1 patterns
12178 (include "thumb1.md")
12179 ;; Thumb-2 patterns
12180 (include "thumb2.md")
12182 (include "neon.md")
12184 (include "crypto.md")
12185 ;; Synchronization Primitives
12186 (include "sync.md")
12187 ;; Fixed-point patterns
12188 (include "arm-fixed.md")