1 ;;- Machine description for SPARC chip for GNU C compiler
2 ;; Copyright (C) 1987, 88, 89, 92, 93, 94, 1995 Free Software Foundation, Inc.
3 ;; Contributed by Michael Tiemann (tiemann@cygnus.com)
4 ;; 64 bit SPARC V9 support by Michael Tiemann, Jim Wilson, and Doug Evans,
5 ;; at Cygnus Support.
6
7 ;; This file is part of GNU CC.
8
9 ;; GNU CC is free software; you can redistribute it and/or modify
10 ;; it under the terms of the GNU General Public License as published by
11 ;; the Free Software Foundation; either version 2, or (at your option)
12 ;; any later version.
13
14 ;; GNU CC is distributed in the hope that it will be useful,
15 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
16 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
17 ;; GNU General Public License for more details.
18
19 ;; You should have received a copy of the GNU General Public License
20 ;; along with GNU CC; see the file COPYING. If not, write to
21 ;; the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
22
23 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
24
25 ;; The upper 32 fp regs on the v9 can't hold SFmode values. To deal with this
26 ;; a second register class, EXTRA_FP_REGS, exists for the v9 chip. The name
27 ;; is a bit of a misnomer as it covers all 64 fp regs. The corresponding
28 ;; constraint letter is 'e'. To avoid any confusion, 'e' is used instead of
29 ;; 'f' for all DF/TFmode values, including those that are specific to the v8.
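30 ;; (For example, the DFmode and TFmode compare patterns further down use the
;; 'e' constraint where the corresponding SFmode patterns use 'f'.)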
30
31 ;; Architecture type. Arch32bit includes v7, sparclite, v8.
32
33 (define_attr "arch" "arch32bit,arch64bit"
34 (const (symbol_ref "sparc_arch_type")))
35
36 ;; CPU type. This is only used for instruction scheduling
37 (define_attr "cpu" "cypress,supersparc"
38 (const
39 (cond [(symbol_ref "TARGET_SUPERSPARC") (const_string "supersparc")]
40 (const_string "cypress"))))
41
42 ;; Insn type. Used to default other attribute values.
43
44 ;; type "unary" insns have one input operand (1) and one output operand (0)
45 ;; type "binary" insns have two input operands (1,2) and one output (0)
46 ;; type "compare" insns have one or two input operands (0,1) and no output
47 ;; type "call_no_delay_slot" is a call followed by an unimp instruction.
48
49 (define_attr "type"
50 "move,unary,binary,compare,load,store,ialu,shift,uncond_branch,branch,call,call_no_delay_slot,address,imul,fpload,fpstore,fp,fpcmp,fpmul,fpdivs,fpdivd,fpsqrt,cmove,multi,misc"
51 (const_string "binary"))
52
53 ;; Set true if insn uses call-clobbered intermediate register.
54 (define_attr "use_clobbered" "false,true"
55 (if_then_else (and (eq_attr "type" "address")
56 (match_operand 0 "clobbered_register" ""))
57 (const_string "true")
58 (const_string "false")))
59
60 ;; Length (in # of insns).
61 (define_attr "length" ""
62 (cond [(eq_attr "type" "load,fpload")
63 (if_then_else (match_operand 1 "symbolic_memory_operand" "")
64 (const_int 2) (const_int 1))
65
66 (eq_attr "type" "store,fpstore")
67 (if_then_else (match_operand 0 "symbolic_memory_operand" "")
68 (const_int 2) (const_int 1))
69
70 (eq_attr "type" "address") (const_int 2)
71
72 (eq_attr "type" "binary")
73 (if_then_else (ior (match_operand 2 "arith_operand" "")
74 (match_operand 2 "arith_double_operand" ""))
75 (const_int 1) (const_int 3))
76
77 (eq_attr "type" "multi") (const_int 2)
78
79 (eq_attr "type" "move,unary")
80 (if_then_else (ior (match_operand 1 "arith_operand" "")
81 (match_operand 1 "arith_double_operand" ""))
82 (const_int 1) (const_int 2))]
83
84 (const_int 1)))
85
86 (define_asm_attributes
87 [(set_attr "length" "1")
88 (set_attr "type" "multi")])
89
90 ;; Attributes for instruction and branch scheduling
91
92 (define_attr "in_call_delay" "false,true"
93 (cond [(eq_attr "type" "uncond_branch,branch,call,call_no_delay_slot,multi")
94 (const_string "false")
95 (eq_attr "type" "load,fpload,store,fpstore")
96 (if_then_else (eq_attr "length" "1")
97 (const_string "true")
98 (const_string "false"))
99 (eq_attr "type" "address")
100 (if_then_else (eq_attr "use_clobbered" "false")
101 (const_string "true")
102 (const_string "false"))]
103 (if_then_else (eq_attr "length" "1")
104 (const_string "true")
105 (const_string "false"))))
106
107 (define_delay (eq_attr "type" "call")
108 [(eq_attr "in_call_delay" "true") (nil) (nil)])
109
110 ;; ??? Should implement the notion of predelay slots for floating point
111 ;; branches. This would allow us to remove the nop always inserted before
112 ;; a floating point branch.
113
114 ;; ??? It is OK for fill_simple_delay_slots to put load/store instructions
115 ;; in a delay slot, but it is not OK for fill_eager_delay_slots to do so.
116 ;; This is because doing so will add several pipeline stalls to the path
117 ;; that the load/store did not come from. Unfortunately, there is no way
118 ;; to prevent fill_eager_delay_slots from using load/store without completely
119 ;; disabling them.  For the SPEC benchmark set, this is a serious loss,
120 ;; because it prevents us from moving back the final store of inner loops.
121
122 (define_attr "in_branch_delay" "false,true"
123 (if_then_else (and (eq_attr "type" "!uncond_branch,branch,call,call_no_delay_slot,multi")
124 (eq_attr "length" "1"))
125 (const_string "true")
126 (const_string "false")))
127
128 (define_attr "in_uncond_branch_delay" "false,true"
129 (if_then_else (and (eq_attr "type" "!uncond_branch,branch,call,call_no_delay_slot,multi")
130 (eq_attr "length" "1"))
131 (const_string "true")
132 (const_string "false")))
133
134 (define_attr "in_annul_branch_delay" "false,true"
135 (if_then_else (and (eq_attr "type" "!uncond_branch,branch,call,call_no_delay_slot,multi")
136 (eq_attr "length" "1"))
137 (const_string "true")
138 (const_string "false")))
139
140 (define_delay (eq_attr "type" "branch")
141 [(eq_attr "in_branch_delay" "true")
142 (nil) (eq_attr "in_annul_branch_delay" "true")])
143
144 (define_delay (eq_attr "type" "uncond_branch")
145 [(eq_attr "in_uncond_branch_delay" "true")
146 (nil) (nil)])
147
148 ;; Function units of the SPARC
149
150 ;; (define_function_unit {name} {num-units} {n-users} {test}
151 ;; {ready-delay} {issue-delay} [{conflict-list}])
152
153 ;; The integer ALU.
154 ;; (Noted only for documentation; units that take one cycle do not need to
155 ;; be specified.)
156
157 ;; On the sparclite, integer multiply takes 1, 3, or 5 cycles depending on
158 ;; the inputs.
159
160 ;; (define_function_unit "alu" 1 0
161 ;; (eq_attr "type" "unary,binary,move,address") 1 0)
162
163 ;; ---- cypress CY7C602 scheduling:
164 ;; Memory with load-delay of 1 (i.e., 2 cycle load).
165 (define_function_unit "memory" 1 0
166 (and (eq_attr "type" "load,fpload") (eq_attr "cpu" "cypress")) 2 2)
167
168 ;; SPARC has two floating-point units: the FP ALU,
169 ;; and the FP MUL/DIV/SQRT unit.
170 ;; Instruction timings on the CY7C602 are as follows
171 ;; FABSs 4
172 ;; FADDs/d 5/5
173 ;; FCMPs/d 4/4
174 ;; FDIVs/d 23/37
175 ;; FMOVs 4
176 ;; FMULs/d 5/7
177 ;; FNEGs 4
178 ;; FSQRTs/d 34/63
179 ;; FSUBs/d 5/5
180 ;; FdTOi/s 5/5
181 ;; FsTOi/d 5/5
182 ;; FiTOs/d 9/5
183
184 ;; The CY7C602 can only support 2 fp insns simultaneously.
185 ;; More insns cause the chip to stall.
186
187 (define_function_unit "fp_alu" 1 0
188 (and (eq_attr "type" "fp") (eq_attr "cpu" "cypress")) 5 5)
189 (define_function_unit "fp_mds" 1 0
190 (and (eq_attr "type" "fpmul") (eq_attr "cpu" "cypress")) 7 7)
191 (define_function_unit "fp_mds" 1 0
192 (and (eq_attr "type" "fpdivs,fpdivd") (eq_attr "cpu" "cypress")) 37 37)
193 (define_function_unit "fp_mds" 1 0
194 (and (eq_attr "type" "fpsqrt") (eq_attr "cpu" "cypress")) 63 63)
195
196 ;; ----- The TMS390Z55 scheduling
197 ;; The Supersparc can issue 1 - 3 insns per cycle; here we assume
198 ;; three insns/cycle, and hence multiply all costs by three.
199 ;; Combinations up to two integer, one ld/st, one fp.
200 ;; Memory delivers its result in one cycle to IU, zero cycles to FP
201 (define_function_unit "memory" 1 0
202 (and (eq_attr "type" "load") (eq_attr "cpu" "supersparc")) 3 3)
203 (define_function_unit "memory" 1 0
204 (and (eq_attr "type" "fpload") (eq_attr "cpu" "supersparc")) 1 3)
205 ;; at least one in three instructions can be a mem op.
206 (define_function_unit "memory" 1 0
207 (and (eq_attr "type" "store,fpstore") (eq_attr "cpu" "supersparc")) 1 3)
208 ;; at least one in three instructions can be a shift op.
209 (define_function_unit "shift" 1 0
210 (and (eq_attr "type" "shift") (eq_attr "cpu" "supersparc")) 1 3)
211
212 ;; There are only two write ports to the integer register file
213 ;; A store also uses a write port
214 (define_function_unit "iwport" 2 0
215 (and (eq_attr "type" "load,store,shift,ialu") (eq_attr "cpu" "supersparc")) 1 3)
216
217 ;; Timings; throughput/latency
218 ;; FADD     1/3    add/sub, format conv, compare, abs, neg
219 ;; FMUL 1/3
220 ;; FDIVs 4/6
221 ;; FDIVd 7/9
222 ;; FSQRTs 6/8
223 ;; FSQRTd 10/12
224 ;; IMUL 4/4
225
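;; For example, FDIVs (throughput 4, latency 6 in the table above) becomes the
;; 18/12 pair in the fpdivs unit below: ready-delay 18 = 3 * 6 and
;; issue-delay 12 = 3 * 4.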
226 (define_function_unit "fp_alu" 1 0
227 (and (eq_attr "type" "fp,fpcmp") (eq_attr "cpu" "supersparc")) 9 3)
228 (define_function_unit "fp_mds" 1 0
229 (and (eq_attr "type" "fpmul") (eq_attr "cpu" "supersparc")) 9 3)
230 (define_function_unit "fp_mds" 1 0
231 (and (eq_attr "type" "fpdivs") (eq_attr "cpu" "supersparc")) 18 12)
232 (define_function_unit "fp_mds" 1 0
233 (and (eq_attr "type" "fpdivd") (eq_attr "cpu" "supersparc")) 27 21)
234 (define_function_unit "fp_mds" 1 0
235 (and (eq_attr "type" "fpsqrt") (eq_attr "cpu" "supersparc")) 36 30)
236 (define_function_unit "fp_mds" 1 0
237 (and (eq_attr "type" "imul") (eq_attr "cpu" "supersparc")) 12 12)
238 \f
239 ;; Compare instructions.
240 ;; This controls RTL generation and register allocation.
241
242 ;; We generate RTL for comparisons and branches by having the cmpxx
243 ;; patterns store away the operands. Then, the scc and bcc patterns
244 ;; emit RTL for both the compare and the branch.
245 ;;
246 ;; We do this because we want to generate different code for an sne and
247 ;; seq insn. In those cases, if the second operand of the compare is not
248 ;; const0_rtx, we want to compute the xor of the two operands and test
249 ;; it against zero.
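;; As an illustrative sketch (register choices hypothetical), "r = (a == b)"
;; with b not known to be zero goes through seqsi_special below, which xors
;; the operands into a temporary and then uses the branchless *seqsi_zero
;; pattern:
;;     xor   %o1,%o2,%g1
;;     subcc %g0,%g1,%g0
;;     subx  %g0,-1,%o0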
250 ;;
251 ;; We start with the DEFINE_EXPANDs, then the DEFINE_INSNs to match
252 ;; the patterns. Finally, we have the DEFINE_SPLITs for some of the scc
253 ;; insns that actually require more than one machine instruction.
254
255 ;; Put cmpsi first among compare insns so it matches two CONST_INT operands.
256
257 (define_expand "cmpsi"
258 [(set (reg:CC 0)
259 (compare:CC (match_operand:SI 0 "register_operand" "")
260 (match_operand:SI 1 "arith_operand" "")))]
261 ""
262 "
263 {
264 sparc_compare_op0 = operands[0];
265 sparc_compare_op1 = operands[1];
266 DONE;
267 }")
268
269 (define_expand "cmpdi"
270 [(set (reg:CCX 0)
271 (compare:CCX (match_operand:DI 0 "register_operand" "")
272 (match_operand:DI 1 "arith_double_operand" "")))]
273 "TARGET_V9"
274 "
275 {
276 sparc_compare_op0 = operands[0];
277 sparc_compare_op1 = operands[1];
278 DONE;
279 }")
280
281 (define_expand "cmpsf"
282 [(set (reg:CCFP 0)
283 (compare:CCFP (match_operand:SF 0 "register_operand" "")
284 (match_operand:SF 1 "register_operand" "")))]
285 "TARGET_FPU"
286 "
287 {
288 sparc_compare_op0 = operands[0];
289 sparc_compare_op1 = operands[1];
290 DONE;
291 }")
292
293 (define_expand "cmpdf"
294 [(set (reg:CCFP 0)
295 (compare:CCFP (match_operand:DF 0 "register_operand" "")
296 (match_operand:DF 1 "register_operand" "")))]
297 "TARGET_FPU"
298 "
299 {
300 sparc_compare_op0 = operands[0];
301 sparc_compare_op1 = operands[1];
302 DONE;
303 }")
304
305 (define_expand "cmptf"
306 [(set (reg:CCFP 0)
307 (compare:CCFP (match_operand:TF 0 "register_operand" "")
308 (match_operand:TF 1 "register_operand" "")))]
309 "TARGET_FPU"
310 "
311 {
312 sparc_compare_op0 = operands[0];
313 sparc_compare_op1 = operands[1];
314 DONE;
315 }")
316
317 ;; Next come the scc insns. For seq, sne, sgeu, and sltu, we can do this
318 ;; without jumps using the addx/subx instructions. For seq/sne on v9 we use
319 ;; the same code as v8 (the addx/subx method has more applications). The
320 ;; exception to this is "reg != 0" which can be done in one instruction on v9
321 ;; (so we do it). For the rest, on v9 we use conditional moves; on v8, we do
322 ;; branches.
323
324 ;; Seq_special[_xxx] and sne_special[_xxx] clobber the CC reg, because they
325 ;; generate addcc/subcc instructions.
326
327 (define_expand "seqsi_special"
328 [(set (match_dup 3)
329 (xor:SI (match_operand:SI 1 "register_operand" "")
330 (match_operand:SI 2 "register_operand" "")))
331 (parallel [(set (match_operand:SI 0 "register_operand" "")
332 (eq:SI (match_dup 3) (const_int 0)))
333 (clobber (reg:CC 0))])]
334 ""
335 "{ operands[3] = gen_reg_rtx (SImode); }")
336
337 (define_expand "seqdi_special"
338 [(set (match_dup 3)
339 (xor:DI (match_operand:DI 1 "register_operand" "")
340 (match_operand:DI 2 "register_operand" "")))
341 (parallel [(set (match_operand:DI 0 "register_operand" "")
342 (eq:DI (match_dup 3) (const_int 0)))
343 (clobber (reg:CCX 0))])]
344 ""
345 "{ operands[3] = gen_reg_rtx (DImode); }")
346
347 (define_expand "snesi_special"
348 [(set (match_dup 3)
349 (xor:SI (match_operand:SI 1 "register_operand" "")
350 (match_operand:SI 2 "register_operand" "")))
351 (parallel [(set (match_operand:SI 0 "register_operand" "")
352 (ne:SI (match_dup 3) (const_int 0)))
353 (clobber (reg:CC 0))])]
354 ""
355 "{ operands[3] = gen_reg_rtx (SImode); }")
356
357 (define_expand "snedi_special"
358 [(set (match_dup 3)
359 (xor:DI (match_operand:DI 1 "register_operand" "")
360 (match_operand:DI 2 "register_operand" "")))
361 (parallel [(set (match_operand:DI 0 "register_operand" "")
362 (ne:DI (match_dup 3) (const_int 0)))
363 (clobber (reg:CCX 0))])]
364 ""
365 "{ operands[3] = gen_reg_rtx (DImode); }")
366
367 (define_expand "seqdi_special_trunc"
368 [(set (match_dup 3)
369 (xor:DI (match_operand:DI 1 "register_operand" "")
370 (match_operand:DI 2 "register_operand" "")))
371 (parallel [(set (match_operand:SI 0 "register_operand" "")
372 (eq:SI (subreg:SI (match_dup 3) 0) (const_int 0)))
373 (clobber (reg:CC 0))])]
374 ""
375 "{ operands[3] = gen_reg_rtx (DImode); }")
376
377 (define_expand "snedi_special_trunc"
378 [(set (match_dup 3)
379 (xor:DI (match_operand:DI 1 "register_operand" "")
380 (match_operand:DI 2 "register_operand" "")))
381 (parallel [(set (match_operand:SI 0 "register_operand" "")
382 (ne:SI (subreg:SI (match_dup 3) 0) (const_int 0)))
383 (clobber (reg:CC 0))])]
384 ""
385 "{ operands[3] = gen_reg_rtx (DImode); }")
386
387 (define_expand "seqsi_special_extend"
388 [(set (subreg:SI (match_dup 3) 0)
389 (xor:SI (match_operand:SI 1 "register_operand" "")
390 (match_operand:SI 2 "register_operand" "")))
391 (parallel [(set (match_operand:DI 0 "register_operand" "")
392 (eq:DI (match_dup 3) (const_int 0)))
393 (clobber (reg:CCX 0))])]
394 ""
395 "{ operands[3] = gen_reg_rtx (DImode); }")
396
397 (define_expand "snesi_special_extend"
398 [(set (subreg:SI (match_dup 3) 0)
399 (xor:SI (match_operand:SI 1 "register_operand" "")
400 (match_operand:SI 2 "register_operand" "")))
401 (parallel [(set (match_operand:DI 0 "register_operand" "")
402 (ne:DI (match_dup 3) (const_int 0)))
403 (clobber (reg:CCX 0))])]
404 ""
405 "{ operands[3] = gen_reg_rtx (DImode); }")
406
407 ;; ??? v9: Operand 0 needs a mode, so SImode was chosen.
408 ;; However, the code handles both SImode and DImode.
409 (define_expand "seq"
410 [(set (match_operand:SI 0 "intreg_operand" "")
411 (eq:SI (match_dup 1) (const_int 0)))]
412 ""
413 "
414 {
415 if (GET_MODE (sparc_compare_op0) == SImode)
416 {
417 rtx pat;
418
419 if (GET_MODE (operands[0]) == SImode)
420 pat = gen_seqsi_special (operands[0], sparc_compare_op0,
421 sparc_compare_op1);
422 else if (! TARGET_V9)
423 FAIL;
424 else
425 pat = gen_seqsi_special_extend (operands[0], sparc_compare_op0,
426 sparc_compare_op1);
427 emit_insn (pat);
428 DONE;
429 }
430 else if (GET_MODE (sparc_compare_op0) == DImode)
431 {
432 rtx pat;
433
434 if (GET_MODE (operands[0]) == SImode)
435 pat = gen_seqdi_special_trunc (operands[0], sparc_compare_op0,
436 sparc_compare_op1);
437 else if (! TARGET_V9)
438 FAIL;
439 else
440 pat = gen_seqdi_special (operands[0], sparc_compare_op0,
441 sparc_compare_op1);
442 emit_insn (pat);
443 DONE;
444 }
445 else if (GET_MODE (sparc_compare_op0) == TFmode && ! TARGET_HARD_QUAD)
446 {
447 emit_float_lib_cmp (sparc_compare_op0, sparc_compare_op1, EQ);
448 emit_insn (gen_sne (operands[0]));
449 DONE;
450 }
451 else if (TARGET_V9)
452 {
453 if (gen_v9_scc (EQ, operands))
454 DONE;
455 /* fall through */
456 }
457 operands[1] = gen_compare_reg (EQ, sparc_compare_op0, sparc_compare_op1);
458 }")
459
460 ;; ??? v9: Operand 0 needs a mode, so SImode was chosen.
461 ;; However, the code handles both SImode and DImode.
462 (define_expand "sne"
463 [(set (match_operand:SI 0 "intreg_operand" "")
464 (ne:SI (match_dup 1) (const_int 0)))]
465 ""
466 "
467 {
468 if (GET_MODE (sparc_compare_op0) == SImode)
469 {
470 rtx pat;
471
472 if (GET_MODE (operands[0]) == SImode)
473 pat = gen_snesi_special (operands[0], sparc_compare_op0,
474 sparc_compare_op1);
475 else if (! TARGET_V9)
476 FAIL;
477 else
478 pat = gen_snesi_special_extend (operands[0], sparc_compare_op0,
479 sparc_compare_op1);
480 emit_insn (pat);
481 DONE;
482 }
483 else if (GET_MODE (sparc_compare_op0) == DImode)
484 {
485 rtx pat;
486
487 if (GET_MODE (operands[0]) == SImode)
488 pat = gen_snedi_special_trunc (operands[0], sparc_compare_op0,
489 sparc_compare_op1);
490 else if (! TARGET_V9)
491 FAIL;
492 else
493 pat = gen_snedi_special (operands[0], sparc_compare_op0,
494 sparc_compare_op1);
495 emit_insn (pat);
496 DONE;
497 }
498 else if (GET_MODE (sparc_compare_op0) == TFmode && ! TARGET_HARD_QUAD)
499 {
500 emit_float_lib_cmp (sparc_compare_op0, sparc_compare_op1, NE);
501 emit_insn (gen_sne (operands[0]));
502 DONE;
503 }
504 else if (TARGET_V9)
505 {
506 if (gen_v9_scc (NE, operands))
507 DONE;
508 /* fall through */
509 }
510 operands[1] = gen_compare_reg (NE, sparc_compare_op0, sparc_compare_op1);
511 }")
512
513 (define_expand "sgt"
514 [(set (match_operand:SI 0 "intreg_operand" "")
515 (gt:SI (match_dup 1) (const_int 0)))]
516 ""
517 "
518 {
519 if (GET_MODE (sparc_compare_op0) == TFmode && ! TARGET_HARD_QUAD)
520 {
521 emit_float_lib_cmp (sparc_compare_op0, sparc_compare_op1, GT);
522 emit_insn (gen_sne (operands[0]));
523 DONE;
524 }
525 else if (TARGET_V9)
526 {
527 if (gen_v9_scc (GT, operands))
528 DONE;
529 /* fall through */
530 }
531 operands[1] = gen_compare_reg (GT, sparc_compare_op0, sparc_compare_op1);
532 }")
533
534 (define_expand "slt"
535 [(set (match_operand:SI 0 "intreg_operand" "")
536 (lt:SI (match_dup 1) (const_int 0)))]
537 ""
538 "
539 {
540 if (GET_MODE (sparc_compare_op0) == TFmode && ! TARGET_HARD_QUAD)
541 {
542 emit_float_lib_cmp (sparc_compare_op0, sparc_compare_op1, LT);
543 emit_insn (gen_sne (operands[0]));
544 DONE;
545 }
546 else if (TARGET_V9)
547 {
548 if (gen_v9_scc (LT, operands))
549 DONE;
550 /* fall through */
551 }
552 operands[1] = gen_compare_reg (LT, sparc_compare_op0, sparc_compare_op1);
553 }")
554
555 (define_expand "sge"
556 [(set (match_operand:SI 0 "intreg_operand" "")
557 (ge:SI (match_dup 1) (const_int 0)))]
558 ""
559 "
560 {
561 if (GET_MODE (sparc_compare_op0) == TFmode && ! TARGET_HARD_QUAD)
562 {
563 emit_float_lib_cmp (sparc_compare_op0, sparc_compare_op1, GE);
564 emit_insn (gen_sne (operands[0]));
565 DONE;
566 }
567 else if (TARGET_V9)
568 {
569 if (gen_v9_scc (GE, operands))
570 DONE;
571 /* fall through */
572 }
573 operands[1] = gen_compare_reg (GE, sparc_compare_op0, sparc_compare_op1);
574 }")
575
576 (define_expand "sle"
577 [(set (match_operand:SI 0 "intreg_operand" "")
578 (le:SI (match_dup 1) (const_int 0)))]
579 ""
580 "
581 {
582 if (GET_MODE (sparc_compare_op0) == TFmode && ! TARGET_HARD_QUAD)
583 {
584 emit_float_lib_cmp (sparc_compare_op0, sparc_compare_op1, LE);
585 emit_insn (gen_sne (operands[0]));
586 DONE;
587 }
588 else if (TARGET_V9)
589 {
590 if (gen_v9_scc (LE, operands))
591 DONE;
592 /* fall through */
593 }
594 operands[1] = gen_compare_reg (LE, sparc_compare_op0, sparc_compare_op1);
595 }")
596
597 (define_expand "sgtu"
598 [(set (match_operand:SI 0 "intreg_operand" "")
599 (gtu:SI (match_dup 1) (const_int 0)))]
600 ""
601 "
602 {
603 if (! TARGET_V9)
604 {
605 rtx tem;
606
607 /* We can do ltu easily, so if both operands are registers, swap them and
608 do a LTU. */
609 if ((GET_CODE (sparc_compare_op0) == REG
610 || GET_CODE (sparc_compare_op0) == SUBREG)
611 && (GET_CODE (sparc_compare_op1) == REG
612 || GET_CODE (sparc_compare_op1) == SUBREG))
613 {
614 tem = sparc_compare_op0;
615 sparc_compare_op0 = sparc_compare_op1;
616 sparc_compare_op1 = tem;
617 emit_insn (gen_sltu (operands[0]));
618 DONE;
619 }
620 }
621 else
622 {
623 if (gen_v9_scc (GTU, operands))
624 DONE;
625 }
626 operands[1] = gen_compare_reg (GTU, sparc_compare_op0, sparc_compare_op1);
627 }")
628
629 (define_expand "sltu"
630 [(set (match_operand:SI 0 "intreg_operand" "")
631 (ltu:SI (match_dup 1) (const_int 0)))]
632 ""
633 "
634 {
635 if (TARGET_V9)
636 {
637 if (gen_v9_scc (LTU, operands))
638 DONE;
639 }
640 operands[1] = gen_compare_reg (LTU, sparc_compare_op0, sparc_compare_op1);
641 }")
642
643 (define_expand "sgeu"
644 [(set (match_operand:SI 0 "intreg_operand" "")
645 (geu:SI (match_dup 1) (const_int 0)))]
646 ""
647 "
648 {
649 if (TARGET_V9)
650 {
651 if (gen_v9_scc (GEU, operands))
652 DONE;
653 }
654 operands[1] = gen_compare_reg (GEU, sparc_compare_op0, sparc_compare_op1);
655 }")
656
657 (define_expand "sleu"
658 [(set (match_operand:SI 0 "intreg_operand" "")
659 (leu:SI (match_dup 1) (const_int 0)))]
660 ""
661 "
662 {
663 if (! TARGET_V9)
664 {
665 rtx tem;
666
667 /* We can do geu easily, so if both operands are registers, swap them and
668 do a GEU. */
669 if ((GET_CODE (sparc_compare_op0) == REG
670 || GET_CODE (sparc_compare_op0) == SUBREG)
671 && (GET_CODE (sparc_compare_op1) == REG
672 || GET_CODE (sparc_compare_op1) == SUBREG))
673 {
674 tem = sparc_compare_op0;
675 sparc_compare_op0 = sparc_compare_op1;
676 sparc_compare_op1 = tem;
677 emit_insn (gen_sgeu (operands[0]));
678 DONE;
679 }
680 }
681 else
682 {
683 if (gen_v9_scc (LEU, operands))
684 DONE;
685 }
686 operands[1] = gen_compare_reg (LEU, sparc_compare_op0, sparc_compare_op1);
687 }")
688
689 ;; Now the DEFINE_INSNs for the compare and scc cases. First the compares.
690
691 (define_insn "*cmpsi_insn"
692 [(set (reg:CC 0)
693 (compare:CC (match_operand:SI 0 "register_operand" "r")
694 (match_operand:SI 1 "arith_operand" "rI")))]
695 ""
696 "cmp %r0,%1"
697 [(set_attr "type" "compare")])
698
699 (define_insn "*cmpsf_fpe_sp32"
700 [(set (reg:CCFPE 0)
701 (compare:CCFPE (match_operand:SF 0 "register_operand" "f")
702 (match_operand:SF 1 "register_operand" "f")))]
703 "! TARGET_V9 && TARGET_FPU"
704 "fcmpes %0,%1"
705 [(set_attr "type" "fpcmp")])
706
707 (define_insn "*cmpdf_fpe_sp32"
708 [(set (reg:CCFPE 0)
709 (compare:CCFPE (match_operand:DF 0 "register_operand" "e")
710 (match_operand:DF 1 "register_operand" "e")))]
711 "! TARGET_V9 && TARGET_FPU"
712 "fcmped %0,%1"
713 [(set_attr "type" "fpcmp")])
714
715 (define_insn "*cmptf_fpe_sp32"
716 [(set (reg:CCFPE 0)
717 (compare:CCFPE (match_operand:TF 0 "register_operand" "e")
718 (match_operand:TF 1 "register_operand" "e")))]
719 "! TARGET_V9 && TARGET_FPU && TARGET_HARD_QUAD"
720 "fcmpeq %0,%1"
721 [(set_attr "type" "fpcmp")])
722
723 (define_insn "*cmpsf_fp_sp32"
724 [(set (reg:CCFP 0)
725 (compare:CCFP (match_operand:SF 0 "register_operand" "f")
726 (match_operand:SF 1 "register_operand" "f")))]
727 "! TARGET_V9 && TARGET_FPU"
728 "fcmps %0,%1"
729 [(set_attr "type" "fpcmp")])
730
731 (define_insn "*cmpdf_fp_sp32"
732 [(set (reg:CCFP 0)
733 (compare:CCFP (match_operand:DF 0 "register_operand" "e")
734 (match_operand:DF 1 "register_operand" "e")))]
735 "! TARGET_V9 && TARGET_FPU"
736 "fcmpd %0,%1"
737 [(set_attr "type" "fpcmp")])
738
739 (define_insn "*cmptf_fp_sp32"
740 [(set (reg:CCFP 0)
741 (compare:CCFP (match_operand:TF 0 "register_operand" "e")
742 (match_operand:TF 1 "register_operand" "e")))]
743 "! TARGET_V9 && TARGET_FPU && TARGET_HARD_QUAD"
744 "fcmpq %0,%1"
745 [(set_attr "type" "fpcmp")])
746
747 (define_insn "*cmpdi_sp64"
748 [(set (reg:CCX 0)
749 (compare:CCX (match_operand:DI 0 "register_operand" "r")
750 (match_operand:DI 1 "arith_double_operand" "rHI")))]
751 "TARGET_V9"
752 "cmp %r0,%1"
753 [(set_attr "type" "compare")])
754
755 (define_insn "*cmpsf_fpe_sp64"
756 [(set (match_operand:CCFPE 0 "ccfp_reg_operand" "=c")
757 (compare:CCFPE (match_operand:SF 1 "register_operand" "f")
758 (match_operand:SF 2 "register_operand" "f")))]
759 "TARGET_V9 && TARGET_FPU"
760 "fcmpes %0,%1,%2"
761 [(set_attr "type" "fpcmp")])
762
763 (define_insn "*cmpdf_fpe_sp64"
764 [(set (match_operand:CCFPE 0 "ccfp_reg_operand" "=c")
765 (compare:CCFPE (match_operand:DF 1 "register_operand" "e")
766 (match_operand:DF 2 "register_operand" "e")))]
767 "TARGET_V9 && TARGET_FPU"
768 "fcmped %0,%1,%2"
769 [(set_attr "type" "fpcmp")])
770
771 (define_insn "*cmptf_fpe_sp64"
772 [(set (match_operand:CCFPE 0 "ccfp_reg_operand" "=c")
773 (compare:CCFPE (match_operand:TF 1 "register_operand" "e")
774 (match_operand:TF 2 "register_operand" "e")))]
775 "TARGET_V9 && TARGET_FPU && TARGET_HARD_QUAD"
776 "fcmpeq %0,%1,%2"
777 [(set_attr "type" "fpcmp")])
778
779 (define_insn "*cmpsf_fp_sp64"
780 [(set (match_operand:CCFP 0 "ccfp_reg_operand" "=c")
781 (compare:CCFP (match_operand:SF 1 "register_operand" "f")
782 (match_operand:SF 2 "register_operand" "f")))]
783 "TARGET_V9 && TARGET_FPU"
784 "fcmps %0,%1,%2"
785 [(set_attr "type" "fpcmp")])
786
787 (define_insn "*cmpdf_fp_sp64"
788 [(set (match_operand:CCFP 0 "ccfp_reg_operand" "=c")
789 (compare:CCFP (match_operand:DF 1 "register_operand" "e")
790 (match_operand:DF 2 "register_operand" "e")))]
791 "TARGET_V9 && TARGET_FPU"
792 "fcmpd %0,%1,%2"
793 [(set_attr "type" "fpcmp")])
794
795 (define_insn "*cmptf_fp_sp64"
796 [(set (match_operand:CCFP 0 "ccfp_reg_operand" "=c")
797 (compare:CCFP (match_operand:TF 1 "register_operand" "e")
798 (match_operand:TF 2 "register_operand" "e")))]
799 "TARGET_V9 && TARGET_FPU && TARGET_HARD_QUAD"
800 "fcmpq %0,%1,%2"
801 [(set_attr "type" "fpcmp")])
802
803 ;; The SEQ and SNE patterns are special because they can be done
804 ;; without any branching and do not involve a COMPARE.
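;; The trick: "subcc %g0,%1,%g0" computes 0 - %1 and sets the carry flag
;; exactly when %1 is nonzero, so "addx %g0,0,%0" produces (%1 != 0) and
;; "subx %g0,-1,%0" produces 1 minus the carry, i.e. (%1 == 0).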
805
806 (define_insn "*snesi_zero"
807 [(set (match_operand:SI 0 "register_operand" "=r")
808 (ne:SI (match_operand:SI 1 "register_operand" "r")
809 (const_int 0)))
810 (clobber (reg:CC 0))]
811 ""
812 "subcc %%g0,%1,%%g0\;addx %%g0,0,%0"
813 [(set_attr "type" "unary")
814 (set_attr "length" "2")])
815
816 (define_insn "*neg_snesi_zero"
817 [(set (match_operand:SI 0 "register_operand" "=r")
818 (neg:SI (ne:SI (match_operand:SI 1 "register_operand" "r")
819 (const_int 0))))
820 (clobber (reg:CC 0))]
821 ""
822 "subcc %%g0,%1,%%g0\;subx %%g0,0,%0"
823 [(set_attr "type" "unary")
824 (set_attr "length" "2")])
825
826 (define_insn "*snedi_zero"
827 [(set (match_operand:DI 0 "register_operand" "=r")
828 (ne:DI (match_operand:DI 1 "register_operand" "r")
829 (const_int 0)))
830 (clobber (reg:CCX 0))]
831 "TARGET_V9"
832 "mov 0,%0\;movrnz %1,1,%0"
833 [(set_attr "type" "unary")
834 (set_attr "length" "2")])
835
836 (define_insn "*neg_snedi_zero"
837 [(set (match_operand:DI 0 "register_operand" "=r")
838 (neg:DI (ne:DI (match_operand:DI 1 "register_operand" "r")
839 (const_int 0))))
840 (clobber (reg:CCX 0))]
841 "TARGET_V9"
842 "mov 0,%0\;movrnz %1,-1,%0"
843 [(set_attr "type" "unary")
844 (set_attr "length" "2")])
845
846 (define_insn "*seqsi_zero"
847 [(set (match_operand:SI 0 "register_operand" "=r")
848 (eq:SI (match_operand:SI 1 "register_operand" "r")
849 (const_int 0)))
850 (clobber (reg:CC 0))]
851 ""
852 "subcc %%g0,%1,%%g0\;subx %%g0,-1,%0"
853 [(set_attr "type" "unary")
854 (set_attr "length" "2")])
855
856 (define_insn "*neg_seqsi_zero"
857 [(set (match_operand:SI 0 "register_operand" "=r")
858 (neg:SI (eq:SI (match_operand:SI 1 "register_operand" "r")
859 (const_int 0))))
860 (clobber (reg:CC 0))]
861 ""
862 "subcc %%g0,%1,%%g0\;addx %%g0,-1,%0"
863 [(set_attr "type" "unary")
864 (set_attr "length" "2")])
865
866 (define_insn "*seqdi_zero"
867 [(set (match_operand:DI 0 "register_operand" "=r")
868 (eq:DI (match_operand:DI 1 "register_operand" "r")
869 (const_int 0)))
870 (clobber (reg:CCX 0))]
871 "TARGET_V9"
872 "mov 0,%0\;movrz %1,1,%0"
873 [(set_attr "type" "unary")
874 (set_attr "length" "2")])
875
876 (define_insn "*neg_seqdi_zero"
877 [(set (match_operand:DI 0 "register_operand" "=r")
878 (neg:DI (eq:DI (match_operand:DI 1 "register_operand" "r")
879 (const_int 0))))
880 (clobber (reg:CCX 0))]
881 "TARGET_V9"
882 "mov 0,%0\;movrz %1,-1,%0"
883 [(set_attr "type" "unary")
884 (set_attr "length" "2")])
885
886 ;; We can also do (x + (i == 0)) and related, so put them in.
887 ;; ??? The addx/subx insns use the 32 bit carry flag so there are no DImode
888 ;; versions for v9.
889
890 (define_insn "*x_plus_i_ne_0"
891 [(set (match_operand:SI 0 "register_operand" "=r")
892 (plus:SI (ne:SI (match_operand:SI 1 "register_operand" "r")
893 (const_int 0))
894 (match_operand:SI 2 "register_operand" "r")))
895 (clobber (reg:CC 0))]
896 ""
897 "subcc %%g0,%1,%%g0\;addx %2,0,%0"
898 [(set_attr "length" "2")])
899
900 (define_insn "*x_minus_i_ne_0"
901 [(set (match_operand:SI 0 "register_operand" "=r")
902 (minus:SI (match_operand:SI 2 "register_operand" "r")
903 (ne:SI (match_operand:SI 1 "register_operand" "r")
904 (const_int 0))))
905 (clobber (reg:CC 0))]
906 ""
907 "subcc %%g0,%1,%%g0\;subx %2,0,%0"
908 [(set_attr "length" "2")])
909
910 (define_insn "*x_plus_i_eq_0"
911 [(set (match_operand:SI 0 "register_operand" "=r")
912 (plus:SI (eq:SI (match_operand:SI 1 "register_operand" "r")
913 (const_int 0))
914 (match_operand:SI 2 "register_operand" "r")))
915 (clobber (reg:CC 0))]
916 ""
917 "subcc %%g0,%1,%%g0\;subx %2,-1,%0"
918 [(set_attr "length" "2")])
919
920 (define_insn "*x_minus_i_eq_0"
921 [(set (match_operand:SI 0 "register_operand" "=r")
922 (minus:SI (match_operand:SI 2 "register_operand" "r")
923 (eq:SI (match_operand:SI 1 "register_operand" "r")
924 (const_int 0))))
925 (clobber (reg:CC 0))]
926 ""
927 "subcc %%g0,%1,%%g0\;addx %2,-1,%0"
928 [(set_attr "length" "2")])
929
930 ;; We can also do GEU and LTU directly, but these operate after a compare.
931 ;; ??? The addx/subx insns use the 32 bit carry flag so there are no DImode
932 ;; versions for v9.
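;; After a subcc compare of a against b, the carry flag is set exactly when
;; a <u b, so "addx %g0,0,%0" below materializes LTU and "subx %g0,-1,%0"
;; materializes GEU without a branch.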
933
934 (define_insn "*sltu_insn"
935 [(set (match_operand:SI 0 "register_operand" "=r")
936 (ltu:SI (reg:CC 0) (const_int 0)))]
937 ""
938 "addx %%g0,0,%0"
939 [(set_attr "type" "misc")])
940
941 (define_insn "*neg_sltu_insn"
942 [(set (match_operand:SI 0 "register_operand" "=r")
943 (neg:SI (ltu:SI (reg:CC 0) (const_int 0))))]
944 ""
945 "subx %%g0,0,%0"
946 [(set_attr "type" "misc")])
947
948 ;; ??? Combine should canonicalize these next two to the same pattern.
949 (define_insn "*neg_sltu_minus_x"
950 [(set (match_operand:SI 0 "register_operand" "=r")
951 (minus:SI (neg:SI (ltu:SI (reg:CC 0) (const_int 0)))
952 (match_operand:SI 1 "arith_operand" "rI")))]
953 ""
954 "subx %%g0,%1,%0"
955 [(set_attr "type" "unary")])
956
957 (define_insn "*neg_sltu_plus_x"
958 [(set (match_operand:SI 0 "register_operand" "=r")
959 (neg:SI (plus:SI (ltu:SI (reg:CC 0) (const_int 0))
960 (match_operand:SI 1 "arith_operand" "rI"))))]
961 ""
962 "subx %%g0,%1,%0"
963 [(set_attr "type" "unary")])
964
965 (define_insn "*sgeu_insn"
966 [(set (match_operand:SI 0 "register_operand" "=r")
967 (geu:SI (reg:CC 0) (const_int 0)))]
968 ""
969 "subx %%g0,-1,%0"
970 [(set_attr "type" "misc")])
971
972 (define_insn "*neg_sgeu_insn"
973 [(set (match_operand:SI 0 "register_operand" "=r")
974 (neg:SI (geu:SI (reg:CC 0) (const_int 0))))]
975 ""
976 "addx %%g0,-1,%0"
977 [(set_attr "type" "misc")])
978
979 ;; We can also do (x + ((unsigned) i >= 0)) and related, so put them in.
980 ;; ??? The addx/subx insns use the 32 bit carry flag so there are no DImode
981 ;; versions for v9.
982
983 (define_insn "*sltu_plus_x"
984 [(set (match_operand:SI 0 "register_operand" "=r")
985 (plus:SI (ltu:SI (reg:CC 0) (const_int 0))
986 (match_operand:SI 1 "arith_operand" "rI")))]
987 ""
988 "addx %%g0,%1,%0"
989 [(set_attr "type" "unary")])
990
991 (define_insn "*sltu_plus_x_plus_y"
992 [(set (match_operand:SI 0 "register_operand" "=r")
993 (plus:SI (ltu:SI (reg:CC 0) (const_int 0))
994 (plus:SI (match_operand:SI 1 "arith_operand" "%r")
995 (match_operand:SI 2 "arith_operand" "rI"))))]
996 ""
997 "addx %1,%2,%0")
998
999 (define_insn "*x_minus_sltu"
1000 [(set (match_operand:SI 0 "register_operand" "=r")
1001 (minus:SI (match_operand:SI 1 "register_operand" "r")
1002 (ltu:SI (reg:CC 0) (const_int 0))))]
1003 ""
1004 "subx %1,0,%0"
1005 [(set_attr "type" "unary")])
1006
1007 ;; ??? Combine should canonicalize these next two to the same pattern.
1008 (define_insn "*x_minus_y_minus_sltu"
1009 [(set (match_operand:SI 0 "register_operand" "=r")
1010 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
1011 (match_operand:SI 2 "arith_operand" "rI"))
1012 (ltu:SI (reg:CC 0) (const_int 0))))]
1013 ""
1014 "subx %1,%2,%0")
1015
1016 (define_insn "*x_minus_sltu_plus_y"
1017 [(set (match_operand:SI 0 "register_operand" "=r")
1018 (minus:SI (match_operand:SI 1 "register_operand" "r")
1019 (plus:SI (ltu:SI (reg:CC 0) (const_int 0))
1020 (match_operand:SI 2 "arith_operand" "rI"))))]
1021 ""
1022 "subx %1,%2,%0")
1023
1024 (define_insn "*sgeu_plus_x"
1025 [(set (match_operand:SI 0 "register_operand" "=r")
1026 (plus:SI (geu:SI (reg:CC 0) (const_int 0))
1027 (match_operand:SI 1 "register_operand" "r")))]
1028 ""
1029 "subx %1,-1,%0"
1030 [(set_attr "type" "unary")])
1031
1032 (define_insn "*x_minus_sgeu"
1033 [(set (match_operand:SI 0 "register_operand" "=r")
1034 (minus:SI (match_operand:SI 1 "register_operand" "r")
1035 (geu:SI (reg:CC 0) (const_int 0))))]
1036 ""
1037 "addx %1,-1,%0"
1038 [(set_attr "type" "unary")])
1039
1040 ;; Now we have the generic scc insns.
1041 ;; !v9: These will be done using a jump.
1042 ;; v9: Use conditional moves which are defined elsewhere.
1043 ;; We have to exclude the cases above, since we do not want combine to
1044 ;; turn something that does not require a jump into something that does.
1045
1046 (define_insn "*scc_si"
1047 [(set (match_operand:SI 0 "register_operand" "=r")
1048 (match_operator:SI 1 "noov_compare_op" [(reg 0) (const_int 0)]))]
1049 ""
1050 "* return output_scc_insn (operands, insn); "
1051 [(set_attr "type" "multi")
1052 (set_attr "length" "3")])
1053
1054 (define_insn "*scc_di"
1055 [(set (match_operand:DI 0 "register_operand" "=r")
1056 (match_operator:DI 1 "noov_compare_op" [(reg 0) (const_int 0)]))]
1057 "TARGET_V9"
1058 "* return output_scc_insn (operands, insn); "
1059 [(set_attr "type" "multi")
1060 (set_attr "length" "3")])
1061 \f
1062 ;; These control RTL generation for conditional jump insns
1063
1064 ;; The quad-word fp compare library routines all return nonzero to indicate
1065 ;; true, which is different from the equivalent libgcc routines, so we must
1066 ;; handle them specially here.
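;; Hence each TFmode case below calls emit_float_lib_cmp and then tests the
;; (nonzero means true) library result with a bne.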
1067
1068 (define_expand "beq"
1069 [(set (pc)
1070 (if_then_else (eq (match_dup 1) (const_int 0))
1071 (label_ref (match_operand 0 "" ""))
1072 (pc)))]
1073 ""
1074 "
1075 {
1076 if (TARGET_V9 && sparc_compare_op1 == const0_rtx
1077 && GET_CODE (sparc_compare_op0) == REG
1078 && GET_MODE (sparc_compare_op0) == DImode)
1079 {
1080 emit_v9_brxx_insn (EQ, sparc_compare_op0, operands[0]);
1081 DONE;
1082 }
1083 else if (GET_MODE (sparc_compare_op0) == TFmode && ! TARGET_HARD_QUAD)
1084 {
1085 emit_float_lib_cmp (sparc_compare_op0, sparc_compare_op1, EQ);
1086 emit_jump_insn (gen_bne (operands[0]));
1087 DONE;
1088 }
1089 operands[1] = gen_compare_reg (EQ, sparc_compare_op0, sparc_compare_op1);
1090 }")
1091
1092 (define_expand "bne"
1093 [(set (pc)
1094 (if_then_else (ne (match_dup 1) (const_int 0))
1095 (label_ref (match_operand 0 "" ""))
1096 (pc)))]
1097 ""
1098 "
1099 {
1100 if (TARGET_V9 && sparc_compare_op1 == const0_rtx
1101 && GET_CODE (sparc_compare_op0) == REG
1102 && GET_MODE (sparc_compare_op0) == DImode)
1103 {
1104 emit_v9_brxx_insn (NE, sparc_compare_op0, operands[0]);
1105 DONE;
1106 }
1107 else if (GET_MODE (sparc_compare_op0) == TFmode && ! TARGET_HARD_QUAD)
1108 {
1109 emit_float_lib_cmp (sparc_compare_op0, sparc_compare_op1, NE);
1110 emit_jump_insn (gen_bne (operands[0]));
1111 DONE;
1112 }
1113 operands[1] = gen_compare_reg (NE, sparc_compare_op0, sparc_compare_op1);
1114 }")
1115
1116 (define_expand "bgt"
1117 [(set (pc)
1118 (if_then_else (gt (match_dup 1) (const_int 0))
1119 (label_ref (match_operand 0 "" ""))
1120 (pc)))]
1121 ""
1122 "
1123 {
1124 if (TARGET_V9 && sparc_compare_op1 == const0_rtx
1125 && GET_CODE (sparc_compare_op0) == REG
1126 && GET_MODE (sparc_compare_op0) == DImode)
1127 {
1128 emit_v9_brxx_insn (GT, sparc_compare_op0, operands[0]);
1129 DONE;
1130 }
1131 else if (GET_MODE (sparc_compare_op0) == TFmode && ! TARGET_HARD_QUAD)
1132 {
1133 emit_float_lib_cmp (sparc_compare_op0, sparc_compare_op1, GT);
1134 emit_jump_insn (gen_bne (operands[0]));
1135 DONE;
1136 }
1137 operands[1] = gen_compare_reg (GT, sparc_compare_op0, sparc_compare_op1);
1138 }")
1139
1140 (define_expand "bgtu"
1141 [(set (pc)
1142 (if_then_else (gtu (match_dup 1) (const_int 0))
1143 (label_ref (match_operand 0 "" ""))
1144 (pc)))]
1145 ""
1146 "
1147 { operands[1] = gen_compare_reg (GTU, sparc_compare_op0, sparc_compare_op1);
1148 }")
1149
1150 (define_expand "blt"
1151 [(set (pc)
1152 (if_then_else (lt (match_dup 1) (const_int 0))
1153 (label_ref (match_operand 0 "" ""))
1154 (pc)))]
1155 ""
1156 "
1157 {
1158 if (TARGET_V9 && sparc_compare_op1 == const0_rtx
1159 && GET_CODE (sparc_compare_op0) == REG
1160 && GET_MODE (sparc_compare_op0) == DImode)
1161 {
1162 emit_v9_brxx_insn (LT, sparc_compare_op0, operands[0]);
1163 DONE;
1164 }
1165 else if (GET_MODE (sparc_compare_op0) == TFmode && ! TARGET_HARD_QUAD)
1166 {
1167 emit_float_lib_cmp (sparc_compare_op0, sparc_compare_op1, LT);
1168 emit_jump_insn (gen_bne (operands[0]));
1169 DONE;
1170 }
1171 operands[1] = gen_compare_reg (LT, sparc_compare_op0, sparc_compare_op1);
1172 }")
1173
1174 (define_expand "bltu"
1175 [(set (pc)
1176 (if_then_else (ltu (match_dup 1) (const_int 0))
1177 (label_ref (match_operand 0 "" ""))
1178 (pc)))]
1179 ""
1180 "
1181 { operands[1] = gen_compare_reg (LTU, sparc_compare_op0, sparc_compare_op1);
1182 }")
1183
1184 (define_expand "bge"
1185 [(set (pc)
1186 (if_then_else (ge (match_dup 1) (const_int 0))
1187 (label_ref (match_operand 0 "" ""))
1188 (pc)))]
1189 ""
1190 "
1191 {
1192 if (TARGET_V9 && sparc_compare_op1 == const0_rtx
1193 && GET_CODE (sparc_compare_op0) == REG
1194 && GET_MODE (sparc_compare_op0) == DImode)
1195 {
1196 emit_v9_brxx_insn (GE, sparc_compare_op0, operands[0]);
1197 DONE;
1198 }
1199 else if (GET_MODE (sparc_compare_op0) == TFmode && ! TARGET_HARD_QUAD)
1200 {
1201 emit_float_lib_cmp (sparc_compare_op0, sparc_compare_op1, GE);
1202 emit_jump_insn (gen_bne (operands[0]));
1203 DONE;
1204 }
1205 operands[1] = gen_compare_reg (GE, sparc_compare_op0, sparc_compare_op1);
1206 }")
1207
1208 (define_expand "bgeu"
1209 [(set (pc)
1210 (if_then_else (geu (match_dup 1) (const_int 0))
1211 (label_ref (match_operand 0 "" ""))
1212 (pc)))]
1213 ""
1214 "
1215 { operands[1] = gen_compare_reg (GEU, sparc_compare_op0, sparc_compare_op1);
1216 }")
1217
1218 (define_expand "ble"
1219 [(set (pc)
1220 (if_then_else (le (match_dup 1) (const_int 0))
1221 (label_ref (match_operand 0 "" ""))
1222 (pc)))]
1223 ""
1224 "
1225 {
1226 if (TARGET_V9 && sparc_compare_op1 == const0_rtx
1227 && GET_CODE (sparc_compare_op0) == REG
1228 && GET_MODE (sparc_compare_op0) == DImode)
1229 {
1230 emit_v9_brxx_insn (LE, sparc_compare_op0, operands[0]);
1231 DONE;
1232 }
1233 else if (GET_MODE (sparc_compare_op0) == TFmode && ! TARGET_HARD_QUAD)
1234 {
1235 emit_float_lib_cmp (sparc_compare_op0, sparc_compare_op1, LE);
1236 emit_jump_insn (gen_bne (operands[0]));
1237 DONE;
1238 }
1239 operands[1] = gen_compare_reg (LE, sparc_compare_op0, sparc_compare_op1);
1240 }")
1241
1242 (define_expand "bleu"
1243 [(set (pc)
1244 (if_then_else (leu (match_dup 1) (const_int 0))
1245 (label_ref (match_operand 0 "" ""))
1246 (pc)))]
1247 ""
1248 "
1249 { operands[1] = gen_compare_reg (LEU, sparc_compare_op0, sparc_compare_op1);
1250 }")
1251 \f
1252 ;; Now match both normal and inverted jump.
1253
1254 (define_insn "*normal_branch"
1255 [(set (pc)
1256 (if_then_else (match_operator 0 "noov_compare_op"
1257 [(reg 0) (const_int 0)])
1258 (label_ref (match_operand 1 "" ""))
1259 (pc)))]
1260 ""
1261 "*
1262 {
1263 return output_cbranch (operands[0], 0, 1, 0,
1264 final_sequence && INSN_ANNULLED_BRANCH_P (insn),
1265 ! final_sequence);
1266 }"
1267 [(set_attr "type" "branch")])
1268
1269 (define_insn "*inverted_branch"
1270 [(set (pc)
1271 (if_then_else (match_operator 0 "noov_compare_op"
1272 [(reg 0) (const_int 0)])
1273 (pc)
1274 (label_ref (match_operand 1 "" ""))))]
1275 ""
1276 "*
1277 {
1278 return output_cbranch (operands[0], 0, 1, 1,
1279 final_sequence && INSN_ANNULLED_BRANCH_P (insn),
1280 ! final_sequence);
1281 }"
1282 [(set_attr "type" "branch")])
1283
1284 (define_insn "*normal_fp_branch_sp64"
1285 [(set (pc)
1286 (if_then_else (match_operator 0 "comparison_operator"
1287 [(match_operand:CCFP 1 "ccfp_reg_operand" "c")
1288 (const_int 0)])
1289 (label_ref (match_operand 2 "" ""))
1290 (pc)))]
1291 "TARGET_V9"
1292 "*
1293 {
1294 return output_cbranch (operands[0], operands[1], 2, 0,
1295 final_sequence && INSN_ANNULLED_BRANCH_P (insn),
1296 ! final_sequence);
1297 }"
1298 [(set_attr "type" "branch")])
1299
1300 (define_insn "*inverted_fp_branch_sp64"
1301 [(set (pc)
1302 (if_then_else (match_operator 0 "comparison_operator"
1303 [(match_operand:CCFP 1 "ccfp_reg_operand" "c")
1304 (const_int 0)])
1305 (pc)
1306 (label_ref (match_operand 2 "" ""))))]
1307 "TARGET_V9"
1308 "*
1309 {
1310 return output_cbranch (operands[0], operands[1], 2, 1,
1311 final_sequence && INSN_ANNULLED_BRANCH_P (insn),
1312 ! final_sequence);
1313 }"
1314 [(set_attr "type" "branch")])
1315
1316 (define_insn "*normal_fpe_branch_sp64"
1317 [(set (pc)
1318 (if_then_else (match_operator 0 "comparison_operator"
1319 [(match_operand:CCFPE 1 "ccfp_reg_operand" "c")
1320 (const_int 0)])
1321 (label_ref (match_operand 2 "" ""))
1322 (pc)))]
1323 "TARGET_V9"
1324 "*
1325 {
1326 return output_cbranch (operands[0], operands[1], 2, 0,
1327 final_sequence && INSN_ANNULLED_BRANCH_P (insn),
1328 ! final_sequence);
1329 }"
1330 [(set_attr "type" "branch")])
1331
1332 (define_insn "*inverted_fpe_branch_sp64"
1333 [(set (pc)
1334 (if_then_else (match_operator 0 "comparison_operator"
1335 [(match_operand:CCFPE 1 "ccfp_reg_operand" "c")
1336 (const_int 0)])
1337 (pc)
1338 (label_ref (match_operand 2 "" ""))))]
1339 "TARGET_V9"
1340 "*
1341 {
1342 return output_cbranch (operands[0], operands[1], 2, 1,
1343 final_sequence && INSN_ANNULLED_BRANCH_P (insn),
1344 ! final_sequence);
1345 }"
1346 [(set_attr "type" "branch")])
1347
1348 ;; Sparc V9-specific jump insns. None of these are guaranteed to be
1349 ;; in the architecture.
1350
1351 ;; There are no 32 bit brreg insns.
1352
1353 (define_insn "*normal_int_branch_sp64"
1354 [(set (pc)
1355 (if_then_else (match_operator 0 "v9_regcmp_op"
1356 [(match_operand:DI 1 "register_operand" "r")
1357 (const_int 0)])
1358 (label_ref (match_operand 2 "" ""))
1359 (pc)))]
1360 "TARGET_V9"
1361 "*
1362 {
1363 return output_v9branch (operands[0], 1, 2, 0,
1364 final_sequence && INSN_ANNULLED_BRANCH_P (insn),
1365 ! final_sequence);
1366 }"
1367 [(set_attr "type" "branch")])
1368
1369 (define_insn "*inverted_int_branch_sp64"
1370 [(set (pc)
1371 (if_then_else (match_operator 0 "v9_regcmp_op"
1372 [(match_operand:DI 1 "register_operand" "r")
1373 (const_int 0)])
1374 (pc)
1375 (label_ref (match_operand 2 "" ""))))]
1376 "TARGET_V9"
1377 "*
1378 {
1379 return output_v9branch (operands[0], 1, 2, 1,
1380 final_sequence && INSN_ANNULLED_BRANCH_P (insn),
1381 ! final_sequence);
1382 }"
1383 [(set_attr "type" "branch")])
1384 \f
1385 ;; Esoteric move insns (lo_sum, high, pic).
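;; A symbolic address is built in two pieces: a "high" insn
;; (sethi %hi(sym),reg) supplies the upper bits with the low 10 bits cleared,
;; and a lo_sum insn (or/add reg,%lo(sym),reg) fills in the low 10 bits.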
1386
1387 (define_insn "*lo_sum_si"
1388 [(set (match_operand:SI 0 "register_operand" "=r")
1389 (lo_sum:SI (match_operand:SI 1 "register_operand" "r")
1390 (match_operand:SI 2 "immediate_operand" "in")))]
1391 ""
1392 ;; V9 needs "add" because of the code models. We still use "or" for v8
1393 ;; so we can compare the old compiler with the new.
1394 "* return TARGET_V9 ? \"add %1,%%lo(%a2),%0\" : \"or %1,%%lo(%a2),%0\";"
1395 ;; Need to set length for this arith insn because operand2
1396 ;; is not an "arith_operand".
1397 [(set_attr "length" "1")])
1398
1399 ;; For PIC, symbol_refs are put inside unspec so that the optimizer will not
1400 ;; confuse them with real addresses.
1401 (define_insn "*pic_lo_sum_si"
1402 [(set (match_operand:SI 0 "register_operand" "=r")
1403 (lo_sum:SI (match_operand:SI 1 "register_operand" "r")
1404 (unspec:SI [(match_operand:SI 2 "immediate_operand" "in")] 0)))]
1405 ""
1406 ;; V9 needs "add" because of the code models. We still use "or" for v8
1407 ;; so we can compare the old compiler with the new.
1408 "* return TARGET_V9 ? \"add %1,%%lo(%a2),%0\" : \"or %1,%%lo(%a2),%0\";"
1409 ;; Need to set length for this arith insn because operand2
1410 ;; is not an "arith_operand".
1411 [(set_attr "length" "1")])
1412
1413 ;; For PIC, symbol_refs are put inside unspec so that the optimizer will not
1414 ;; confuse them with real addresses.
1415 (define_insn "*pic_sethi_si"
1416 [(set (match_operand:SI 0 "register_operand" "=r")
1417 (high:SI (unspec:SI [(match_operand 1 "" "")] 0)))]
1418 "check_pic (1)"
1419 "sethi %%hi(%a1),%0"
1420 [(set_attr "type" "move")
1421 (set_attr "length" "1")])
1422
1423 (define_insn "*sethi_si"
1424 [(set (match_operand:SI 0 "register_operand" "=r")
1425 (high:SI (match_operand 1 "" "")))]
1426 "check_pic (1)"
1427 "sethi %%hi(%a1),%0"
1428 [(set_attr "type" "move")
1429 (set_attr "length" "1")])
1430
1431 (define_insn "*sethi_hi"
1432 [(set (match_operand:HI 0 "register_operand" "=r")
1433 (high:HI (match_operand 1 "" "")))]
1434 "check_pic (1)"
1435 "sethi %%hi(%a1),%0"
1436 [(set_attr "type" "move")
1437 (set_attr "length" "1")])
1438
1439 ;; Special pic pattern, for loading the address of a label into a register.
1440 ;; It clobbers o7 because the call puts the return address (i.e. pc value)
1441 ;; there.
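;; The emitted sequence is position independent: "call 2f" leaves the address
;; of the call (label 1) in %o7, the sethi/or pair builds the assemble-time
;; difference %l1-1b, and the final add of %o7 converts that difference into
;; the label's runtime address.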
1442
1443 (define_insn "*move_pic_label_si"
1444 [(set (match_operand:SI 0 "register_operand" "=r")
1445 (match_operand:SI 1 "move_pic_label" "i"))
1446 (set (reg:SI 15) (pc))]
1447 ""
1448 "\\n1:\;call 2f\;sethi %%hi(%l1-1b),%0\\n2:\\tor %0,%%lo(%l1-1b),%0\;add %0,%%o7,%0"
1449 [(set_attr "type" "multi")
1450 (set_attr "length" "4")])
1451
1452 ;; v9 special pic pattern, for loading the address of a label into a register.
1453
1454 (define_insn "*move_pic_label_di"
1455 [(set (match_operand:DI 0 "register_operand" "=r")
1456 (match_operand:DI 1 "move_pic_label" "i"))
1457 (set (reg:DI 15) (pc))]
1458 "TARGET_V9"
1459 "\\n1:\;call 2f\;sethi %%hi(%l1-1b),%0\\n2:\\tor %0,%%lo(%l1-1b),%0\;add %0,%%o7,%0"
1460 [(set_attr "type" "multi")
1461 (set_attr "length" "4")])
1462
1463 (define_insn "*lo_sum_di_sp32"
1464 [(set (match_operand:DI 0 "register_operand" "=r")
1465 (lo_sum:DI (match_operand:DI 1 "register_operand" "0")
1466 (match_operand:DI 2 "immediate_operand" "in")))]
1467 "! TARGET_V9"
1468 "*
1469 {
1470 /* Don't output a 64 bit constant, since we can't trust the assembler to
1471 handle it correctly. */
1472 if (GET_CODE (operands[2]) == CONST_DOUBLE)
1473 operands[2] = gen_rtx (CONST_INT, VOIDmode, CONST_DOUBLE_LOW (operands[2]));
1474 return \"or %R1,%%lo(%a2),%R0\";
1475 }"
1476 ;; Need to set length for this arith insn because operand2
1477 ;; is not an "arith_operand".
1478 [(set_attr "length" "1")])
1479
1480 ;; ??? Gas does not handle %lo(DI), so we use the same code for ! TARGET_V9.
1481 ;; ??? The previous comment is obsolete.
1482 ;; ??? Optimizer does not handle "or %o1,%lo(0),%o1". How about add?
1483
1484 (define_insn "*lo_sum_di_sp64"
1485 [(set (match_operand:DI 0 "register_operand" "=r")
1486 (lo_sum:DI (match_operand:DI 1 "register_operand" "0")
1487 (match_operand:DI 2 "immediate_operand" "in")))]
1488 "TARGET_V9"
1489 "*
1490 {
1491 /* Don't output a 64 bit constant, since we can't trust the assembler to
1492 handle it correctly. */
1493 if (GET_CODE (operands[2]) == CONST_DOUBLE)
1494 operands[2] = gen_rtx (CONST_INT, VOIDmode, CONST_DOUBLE_LOW (operands[2]));
1495 /* Note that we use add here. This is important because Medium/Anywhere
1496 code model support depends on it. */
1497 return \"add %1,%%lo(%a2),%0\";
1498 }"
1499 ;; Need to set length for this arith insn because operand2
1500 ;; is not an "arith_operand".
1501 [(set_attr "length" "1")])
1502
1503 (define_insn "*sethi_di_sp32"
1504 [(set (match_operand:DI 0 "register_operand" "=r")
1505 (high:DI (match_operand 1 "" "")))]
1506 "! TARGET_V9 && check_pic (1)"
1507 "*
1508 {
1509 rtx op0 = operands[0];
1510 rtx op1 = operands[1];
1511
1512 if (GET_CODE (op1) == CONST_INT)
1513 {
1514 operands[0] = operand_subword (op0, 1, 0, DImode);
1515 output_asm_insn (\"sethi %%hi(%a1),%0\", operands);
1516
1517 operands[0] = operand_subword (op0, 0, 0, DImode);
1518 if (INTVAL (op1) < 0)
1519 return \"mov -1,%0\";
1520 else
1521 return \"mov 0,%0\";
1522 }
1523 else if (GET_CODE (op1) == CONST_DOUBLE)
1524 {
1525 operands[0] = operand_subword (op0, 1, 0, DImode);
1526 operands[1] = gen_rtx (CONST_INT, VOIDmode, CONST_DOUBLE_LOW (op1));
1527 output_asm_insn (\"sethi %%hi(%a1),%0\", operands);
1528
1529 operands[0] = operand_subword (op0, 0, 0, DImode);
1530 operands[1] = gen_rtx (CONST_INT, VOIDmode, CONST_DOUBLE_HIGH (op1));
1531 return singlemove_string (operands);
1532 }
1533 else
1534 abort ();
1535 return \"\";
1536 }"
1537 [(set_attr "type" "move")
1538 (set_attr "length" "2")])
1539
1540 ;;; ??? This pattern originally clobbered a scratch register. However, this
1541 ;;; is invalid: the movdi pattern may not use a temp register because it
1542 ;;; may be called from reload to reload a DImode value. In that case, we
1543 ;;; end up with a scratch register that never gets allocated. To avoid this,
1544 ;;; we use global register 1 which is never otherwise used by gcc as a temp.
1545 ;;; The correct solution here might be to force DImode constants to memory,
1546 ;;; e.g. by using a toc like the romp and rs6000 ports do for addresses, reg
1547 ;;; 1 will then no longer need to be considered a fixed reg.
1548
1549 ;;; Gas doesn't have any 64 bit constant support, so don't use %uhi and %ulo
1550 ;;; on constants. Symbols have to be handled by the linker, so we must use
1551 ;;; %uhi and %ulo for them, but gas will handle these correctly.
1552 ;;; ??? This comment is obsolete, gas handles them now.
1553
1554 (define_insn "*sethi_di_sp64"
1555 [(set (match_operand:DI 0 "register_operand" "=r")
1556 (high:DI (match_operand 1 "const_double_operand" "")))
1557 (clobber (reg:DI 1))]
1558 "TARGET_V9 && check_pic (1)"
1559 "*
1560 {
1561 rtx high, low;
1562
1563 split_double (operands[1], &high, &low);
1564
1565 if (high == const0_rtx)
1566 {
1567 operands[1] = low;
1568 output_asm_insn (\"sethi %%hi(%a1),%0\", operands);
1569 }
1570 else
1571 {
1572 operands[1] = high;
1573 output_asm_insn (singlemove_string (operands), operands);
1574
1575 operands[1] = low;
1576 output_asm_insn (\"sllx %0,32,%0\", operands);
1577 if (low != const0_rtx)
1578 output_asm_insn (\"sethi %%hi(%a1),%%g1; or %0,%%g1,%0\", operands);
1579 }
1580 }"
1581 [(set_attr "type" "move")
1582 (set_attr "length" "5")])
1583
1584 ;; Most of the required support for the various code models is here.
1585 ;; We can do this because sparcs need the high insn to load the address. We
1586 ;; just need to get high to do the right thing for each code model. Then each
1587 ;; uses the same "%X+%lo(...)" in the load/store insn.
1588
1589 ;; When TARGET_MEDLOW, assume that the upper 32 bits of symbol addresses are
1590 ;; always 0.
1591 ;; When TARGET_MEDANY, the upper 32 bits of function addresses are 0.
1592 ;; The data segment has a maximum size of 32 bits, but may be located anywhere.
1593 ;; MEDANY_BASE_REG contains the start address, currently %g4.
1594 ;; When TARGET_FULLANY, symbolic addresses are 64 bits.
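;; Concretely, the patterns below expand "high" as:
;;   MEDLOW:        sethi %hi(sym),reg
;;   MEDANY (data): sethi %hi(sym),reg; add reg,%g4,reg
;;   FULLANY:       the five insn sethi/or/sllx/sethi/or sequence that builds
;;                  all 64 bits.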
1595
1596 (define_insn "*sethi_di_medlow"
1597 [(set (match_operand:DI 0 "register_operand" "=r")
1598 (high:DI (match_operand 1 "" "")))
1599 ;; ??? Why the clobber?
1600 (clobber (reg:DI 1))]
1601 "TARGET_MEDLOW && check_pic (1)"
1602 "sethi %%hi(%a1),%0"
1603 [(set_attr "type" "move")
1604 (set_attr "length" "1")])
1605
1606 ;; WARNING: %0 gets %hi(%1)+%g4.
1607 ;; You cannot OR in %lo(%1), it must be added in.
1608
1609 (define_insn "*sethi_di_medany_data"
1610 [(set (match_operand:DI 0 "register_operand" "=r")
1611 (high:DI (match_operand 1 "data_segment_operand" "")))
1612 ;; ??? Why the clobber?
1613 (clobber (reg:DI 1))]
1614 "TARGET_MEDANY && check_pic (1)"
1615 "sethi %%hi(%a1),%0; add %0,%%g4,%0"
1616 [(set_attr "type" "move")
1617 (set_attr "length" "2")])
1618
1619 (define_insn "*sethi_di_medany_text"
1620 [(set (match_operand:DI 0 "register_operand" "=r")
1621 (high:DI (match_operand 1 "text_segment_operand" "")))
1622 ;; ??? Why the clobber?
1623 (clobber (reg:DI 1))]
1624 "TARGET_MEDANY && check_pic (1)"
1625 "sethi %%hi(%a1),%0"
1626 [(set_attr "type" "move")
1627 (set_attr "length" "1")])
1628
1629 (define_insn "*sethi_di_fullany"
1630 [(set (match_operand:DI 0 "register_operand" "=r")
1631 (high:DI (match_operand 1 "" "")))
1632 (clobber (reg:DI 1))]
1633 "TARGET_FULLANY && check_pic (1)"
1634 "sethi %%uhi(%a1),%%g1; or %%g1,%%ulo(%a1),%%g1; sllx %%g1,32,%%g1; sethi %%hi(%a1),%0; or %0,%%g1,%0"
1635 [(set_attr "type" "move")
1636 (set_attr "length" "5")])
1637 \f
1638 ;; Move instructions
1639
1640 (define_expand "movqi"
1641 [(set (match_operand:QI 0 "general_operand" "")
1642 (match_operand:QI 1 "general_operand" ""))]
1643 ""
1644 "
1645 {
1646 if (emit_move_sequence (operands, QImode))
1647 DONE;
1648 }")
1649
1650 (define_insn "*movqi_insn"
1651 [(set (match_operand:QI 0 "reg_or_nonsymb_mem_operand" "=r,r,r,Q")
1652 (match_operand:QI 1 "move_operand" "rI,K,Q,rJ"))]
1653 "register_operand (operands[0], QImode)
1654 || register_operand (operands[1], QImode)
1655 || operands[1] == const0_rtx"
1656 "@
1657 mov %1,%0
1658 sethi %%hi(%a1),%0
1659 ldub %1,%0
1660 stb %r1,%0"
1661 [(set_attr "type" "move,move,load,store")
1662 (set_attr "length" "*,1,*,1")])
1663
1664 (define_insn "*lo_sum_qi"
1665 [(set (match_operand:QI 0 "register_operand" "=r")
1666 (subreg:QI (lo_sum:SI (match_operand:QI 1 "register_operand" "r")
1667 (match_operand 2 "immediate_operand" "in")) 0))]
1668 ""
1669 "or %1,%%lo(%a2),%0"
1670 [(set_attr "length" "1")])
1671
1672 (define_insn "*store_qi"
1673 [(set (mem:QI (match_operand:SI 0 "symbolic_operand" ""))
1674 (match_operand:QI 1 "reg_or_0_operand" "rJ"))
1675 (clobber (match_scratch:SI 2 "=&r"))]
1676 "(reload_completed || reload_in_progress) && ! TARGET_PTR64"
1677 "sethi %%hi(%a0),%2\;stb %r1,[%2+%%lo(%a0)]"
1678 [(set_attr "type" "store")
1679 (set_attr "length" "2")])
1680
1681 (define_expand "movhi"
1682 [(set (match_operand:HI 0 "general_operand" "")
1683 (match_operand:HI 1 "general_operand" ""))]
1684 ""
1685 "
1686 {
1687 if (emit_move_sequence (operands, HImode))
1688 DONE;
1689 }")
1690
1691 (define_insn "*movhi_insn"
1692 [(set (match_operand:HI 0 "reg_or_nonsymb_mem_operand" "=r,r,r,Q")
1693 (match_operand:HI 1 "move_operand" "rI,K,Q,rJ"))]
1694 "register_operand (operands[0], HImode)
1695 || register_operand (operands[1], HImode)
1696 || operands[1] == const0_rtx"
1697 "@
1698 mov %1,%0
1699 sethi %%hi(%a1),%0
1700 lduh %1,%0
1701 sth %r1,%0"
1702 [(set_attr "type" "move,move,load,store")
1703 (set_attr "length" "*,1,*,1")])
1704
1705 (define_insn "*lo_sum_hi"
1706 [(set (match_operand:HI 0 "register_operand" "=r")
1707 (lo_sum:HI (match_operand:HI 1 "register_operand" "r")
1708 (match_operand 2 "immediate_operand" "in")))]
1709 ""
1710 "or %1,%%lo(%a2),%0"
1711 [(set_attr "length" "1")])
1712
1713 (define_insn "*store_hi"
1714 [(set (mem:HI (match_operand:SI 0 "symbolic_operand" ""))
1715 (match_operand:HI 1 "reg_or_0_operand" "rJ"))
1716 (clobber (match_scratch:SI 2 "=&r"))]
1717 "(reload_completed || reload_in_progress) && ! TARGET_PTR64"
1718 "sethi %%hi(%a0),%2\;sth %r1,[%2+%%lo(%a0)]"
1719 [(set_attr "type" "store")
1720 (set_attr "length" "2")])
1721
1722 (define_expand "movsi"
1723 [(set (match_operand:SI 0 "general_operand" "")
1724 (match_operand:SI 1 "general_operand" ""))]
1725 ""
1726 "
1727 {
1728 if (emit_move_sequence (operands, SImode))
1729 DONE;
1730 }")
1731
1732 ;; We must support both 'r' and 'f' registers here, because combine may
1733 ;; convert SFmode hard registers to SImode hard registers when simplifying
1734 ;; subreg sets.
1735
1736 ;; We cannot combine the similar 'r' and 'f' constraints, because it causes
1737 ;; problems with register allocation. Reload might try to put an integer
1738 ;; in an fp register, or an fp number in an integer register.
1739
1740 (define_insn "*movsi_insn"
1741 [(set (match_operand:SI 0 "reg_or_nonsymb_mem_operand" "=r,f,r,r,f,Q,Q")
1742 (match_operand:SI 1 "move_operand" "rI,!f,K,Q,!Q,rJ,!f"))]
1743 "register_operand (operands[0], SImode)
1744 || register_operand (operands[1], SImode)
1745 || operands[1] == const0_rtx"
1746 "@
1747 mov %1,%0
1748 fmovs %1,%0
1749 sethi %%hi(%a1),%0
1750 ld %1,%0
1751 ld %1,%0
1752 st %r1,%0
1753 st %r1,%0"
1754 [(set_attr "type" "move,fp,move,load,load,store,store")
1755 (set_attr "length" "*,*,1,*,*,*,*")])
1756
1757 (define_insn "*store_si"
1758 [(set (mem:SI (match_operand:SI 0 "symbolic_operand" ""))
1759 (match_operand:SI 1 "reg_or_0_operand" "rJ"))
1760 (clobber (match_scratch:SI 2 "=&r"))]
1761 "(reload_completed || reload_in_progress) && ! TARGET_PTR64"
1762 "sethi %%hi(%a0),%2\;st %r1,[%2+%%lo(%a0)]"
1763 [(set_attr "type" "store")
1764 (set_attr "length" "2")])
1765
1766 (define_expand "movdi"
1767 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand" "")
1768 (match_operand:DI 1 "general_operand" ""))]
1769 ""
1770 "
1771 {
1772 if (emit_move_sequence (operands, DImode))
1773 DONE;
1774 }")
1775
1776 (define_insn "*movdi_sp32_insn"
1777 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand" "=r,T,U,Q,r,r,?f,?f,?Q")
1778 (match_operand:DI 1 "general_operand" "r,U,T,r,Q,i,f,Q,f"))]
1779 "! TARGET_V9
1780 && (register_operand (operands[0], DImode)
1781 || register_operand (operands[1], DImode)
1782 || operands[1] == const0_rtx)"
1783 "*
1784 {
1785 if (FP_REG_P (operands[0]) || FP_REG_P (operands[1]))
1786 return output_fp_move_double (operands);
1787 return output_move_double (operands);
1788 }"
1789 [(set_attr "type" "move,store,load,store,load,multi,fp,fpload,fpstore")
1790 (set_attr "length" "2,1,1,3,3,3,2,3,3")])
1791
1792 ;;; ??? The trick used below can be extended to load any negative 32 bit
1793 ;;; constant in two instructions. Currently the compiler will use HIGH/LO_SUM
1794 ;;; for anything not matching the HIK constraints, which results in 5
1795 ;;; instructions. Positive 32 bit constants can be loaded in the obvious way
1796 ;;; with sethi/or. To extend the trick, in the xor instruction, use
1797 ;;; xor %o0, ((op1 & 0x3ff) | -0x400), %o0
1798 ;;; This needs the original value of operands[1], not the inverted value.
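1798+ ;;;
1798+ ;;; As a rough worked example of that extended trick (an illustration only,
1798+ ;;; not something the compiler emits today): to load the negative constant
1798+ ;;; 0xffff1234, first invert it to 0x0000edcb, then
1798+ ;;;   sethi %hi(0x0000edcb),%o0     ! %o0 = 0x0000ec00
1798+ ;;;   xor %o0,((0x234)|-0x400),%o0  ! i.e. xor %o0,-0x1cc,%o0
1798+ ;;; The xor fills in the low 10 bits and inverts everything above them,
1798+ ;;; giving 0xffff1234 sign-extended to 64 bits.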
1799
1800 (define_insn "*movdi_sp64_insn"
1801 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand" "=r,r,r,Q,?f,?f,?Q")
1802 (match_operand:DI 1 "move_operand" "rI,K,Q,rJ,f,Q,f"))]
1803 "TARGET_V9
1804 && (register_operand (operands[0], DImode)
1805 || register_operand (operands[1], DImode)
1806 || operands[1] == const0_rtx)"
1807 "*
1808 {
1809 switch (which_alternative)
1810 {
1811 case 0:
1812 return \"mov %1,%0\";
1813 case 1:
1814 /* Sethi does not sign extend, so we must use a little trickery
1815 to use it for negative numbers. Invert the constant before
1816 loading it in, then use a xor immediate to invert the loaded bits
1817 (along with the upper 32 bits) to the desired constant. This
1818 works because the sethi and immediate fields overlap. */
1819
1820 if ((INTVAL (operands[1]) & 0x80000000) == 0)
1821 return \"sethi %%hi(%a1),%0\";
1822 else
1823 {
1824 operands[1] = gen_rtx (CONST_INT, VOIDmode,
1825 ~ INTVAL (operands[1]));
1826 output_asm_insn (\"sethi %%hi(%a1),%0\", operands);
1827 /* The low 10 bits are already zero, but invert the rest.
1828 Assemblers don't accept 0x1c00, so use -0x400 instead. */
1829 return \"xor %0,-0x400,%0\";
1830 }
1831 case 2:
1832 return \"ldx %1,%0\";
1833 case 3:
1834 return \"stx %r1,%0\";
1835 case 4:
1836 return \"mov %1,%0\";
1837 case 5:
1838 return \"ldd %1,%0\";
1839 case 6:
1840 return \"std %1,%0\";
1841 }
1842 }"
1843 [(set_attr "type" "move,move,load,store,fp,fpload,fpstore")
1844 (set_attr "length" "1,2,1,1,1,1,1")])
1845
1846 ;; ??? There's no symbolic (set (mem:DI ...) ...).
1847 ;; Experimentation with v9 suggested one isn't needed.
1848 \f
1849 ;; Block move insns.
1850
1851 ;; ??? We get better code without it. See output_block_move in sparc.c.
1852
1853 ;; The definition of this insn does not really explain what it does,
1854 ;; but it should suffice to say
1855 ;; that anything generated as this insn will be recognized as one
1856 ;; and that it will not successfully combine with anything.
1857 ;(define_expand "movstrsi"
1858 ; [(parallel [(set (mem:BLK (match_operand:BLK 0 "general_operand" ""))
1859 ; (mem:BLK (match_operand:BLK 1 "general_operand" "")))
1860 ; (use (match_operand:SI 2 "nonmemory_operand" ""))
1861 ; (use (match_operand:SI 3 "immediate_operand" ""))
1862 ; (clobber (match_dup 0))
1863 ; (clobber (match_dup 1))
1864 ; (clobber (match_scratch:SI 4 ""))
1865 ; (clobber (reg:SI 0))
1866 ; (clobber (reg:SI 1))])]
1867 ; ""
1868 ; "
1869 ;{
1870 ; /* If the size isn't known, don't emit inline code. output_block_move
1871 ; would output code that's much slower than the library function.
1872 ; Also don't output code for large blocks. */
1873 ; if (GET_CODE (operands[2]) != CONST_INT
1874 ; || GET_CODE (operands[3]) != CONST_INT
1875 ; || INTVAL (operands[2]) / INTVAL (operands[3]) > 16)
1876 ; FAIL;
1877 ;
1878 ; operands[0] = copy_to_mode_reg (Pmode, XEXP (operands[0], 0));
1879 ; operands[1] = copy_to_mode_reg (Pmode, XEXP (operands[1], 0));
1880 ; operands[2] = force_not_mem (operands[2]);
1881 ;}")
1882
1883 ;(define_insn "*block_move_insn"
1884 ; [(set (mem:BLK (match_operand:SI 0 "register_operand" "+r"))
1885 ; (mem:BLK (match_operand:SI 1 "register_operand" "+r")))
1886 ; (use (match_operand:SI 2 "nonmemory_operand" "rn"))
1887 ; (use (match_operand:SI 3 "immediate_operand" "i"))
1888 ; (clobber (match_dup 0))
1889 ; (clobber (match_dup 1))
1890 ; (clobber (match_scratch:SI 4 "=&r"))
1891 ; (clobber (reg:SI 0))
1892 ; (clobber (reg:SI 1))]
1893 ; ""
1894 ; "* return output_block_move (operands);"
1895 ; [(set_attr "type" "multi")
1896 ; (set_attr "length" "6")])
1897 \f
1898 ;; Floating point move insns
1899
1900 ;; This pattern forces (set (reg:SF ...) (const_double ...))
1901 ;; to be reloaded by putting the constant into memory.
1902 ;; It must come before the more general movsf pattern.
1903 (define_insn "*movsf_const_insn"
1904 [(set (match_operand:SF 0 "general_operand" "=?r,f,m")
1905 (match_operand:SF 1 "" "?F,m,G"))]
1906 "TARGET_FPU && GET_CODE (operands[1]) == CONST_DOUBLE"
1907 "*
1908 {
1909 switch (which_alternative)
1910 {
1911 case 0:
1912 return singlemove_string (operands);
1913 case 1:
1914 return \"ld %1,%0\";
1915 case 2:
1916 return \"st %%g0,%0\";
1917 }
1918 }"
1919 [(set_attr "type" "load,fpload,store")
1920 (set_attr "length" "2,1,1")])
1921
1922 (define_expand "movsf"
1923 [(set (match_operand:SF 0 "general_operand" "")
1924 (match_operand:SF 1 "general_operand" ""))]
1925 ""
1926 "
1927 {
1928 if (emit_move_sequence (operands, SFmode))
1929 DONE;
1930 }")
1931
1932 (define_insn "*movsf_insn"
1933 [(set (match_operand:SF 0 "reg_or_nonsymb_mem_operand" "=f,r,f,r,Q,Q")
1934 (match_operand:SF 1 "reg_or_nonsymb_mem_operand" "f,r,Q,Q,f,r"))]
1935 "TARGET_FPU
1936 && (register_operand (operands[0], SFmode)
1937 || register_operand (operands[1], SFmode))"
1938 "@
1939 fmovs %1,%0
1940 mov %1,%0
1941 ld %1,%0
1942 ld %1,%0
1943 st %r1,%0
1944 st %r1,%0"
1945 [(set_attr "type" "fp,move,fpload,load,fpstore,store")])
1946
1947 ;; Exactly the same as above, except that all `f' cases are deleted.
1948 ;; This is necessary to prevent reload from ever trying to use an `f' reg
1949 ;; when -mno-fpu.
1950
1951 (define_insn "*movsf_no_f_insn"
1952 [(set (match_operand:SF 0 "reg_or_nonsymb_mem_operand" "=r,r,Q")
1953 (match_operand:SF 1 "reg_or_nonsymb_mem_operand" "r,Q,r"))]
1954 "! TARGET_FPU
1955 && (register_operand (operands[0], SFmode)
1956 || register_operand (operands[1], SFmode))"
1957 "@
1958 mov %1,%0
1959 ld %1,%0
1960 st %r1,%0"
1961 [(set_attr "type" "move,load,store")])
1962
1963 (define_insn "*store_sf"
1964 [(set (mem:SF (match_operand:SI 0 "symbolic_operand" "i"))
1965 (match_operand:SF 1 "reg_or_0_operand" "rfG"))
1966 (clobber (match_scratch:SI 2 "=&r"))]
1967 "(reload_completed || reload_in_progress) && ! TARGET_PTR64"
1968 "sethi %%hi(%a0),%2\;st %r1,[%2+%%lo(%a0)]"
1969 [(set_attr "type" "store")
1970 (set_attr "length" "2")])
1971
1972 ;; This pattern forces (set (reg:DF ...) (const_double ...))
1973 ;; to be reloaded by putting the constant into memory.
1974 ;; It must come before the more general movdf pattern.
1975
1976 (define_insn "*movdf_const_insn"
1977 [(set (match_operand:DF 0 "general_operand" "=?r,e,o")
1978 (match_operand:DF 1 "" "?F,m,G"))]
1979 "TARGET_FPU && GET_CODE (operands[1]) == CONST_DOUBLE"
1980 "*
1981 {
1982 switch (which_alternative)
1983 {
1984 case 0:
1985 return output_move_double (operands);
1986 case 1:
1987 return output_fp_move_double (operands);
1988 case 2:
1989 if (TARGET_V9)
1990 {
1991 return \"stx %%g0,%0\";
1992 }
1993 else
1994 {
1995 operands[1] = adj_offsettable_operand (operands[0], 4);
1996 return \"st %%g0,%0\;st %%g0,%1\";
1997 }
1998 }
1999 }"
2000 [(set_attr "type" "load,fpload,store")
2001 (set_attr "length" "3,3,3")])
2002
2003 (define_expand "movdf"
2004 [(set (match_operand:DF 0 "general_operand" "")
2005 (match_operand:DF 1 "general_operand" ""))]
2006 ""
2007 "
2008 {
2009 if (emit_move_sequence (operands, DFmode))
2010 DONE;
2011 }")
2012
2013 (define_insn "*movdf_insn"
2014 [(set (match_operand:DF 0 "reg_or_nonsymb_mem_operand" "=T,U,e,r,Q,Q,e,r")
2015 (match_operand:DF 1 "reg_or_nonsymb_mem_operand" "U,T,e,r,e,r,Q,Q"))]
2016 "TARGET_FPU
2017 && (register_operand (operands[0], DFmode)
2018 || register_operand (operands[1], DFmode))"
2019 "*
2020 {
2021 if (FP_REG_P (operands[0]) || FP_REG_P (operands[1]))
2022 return output_fp_move_double (operands);
2023 return output_move_double (operands);
2024 }"
2025 [(set_attr "type" "fpstore,fpload,fp,move,fpstore,store,fpload,load")
2026 (set_attr "length" "1,1,2,2,3,3,3,3")])
2027
2028 ;; Exactly the same as above, except that all `e' cases are deleted.
2029 ;; This is necessary to prevent reload from ever trying to use an `e' reg
2030 ;; when -mno-fpu.
2031
2032 (define_insn "*movdf_no_e_insn"
2033 [(set (match_operand:DF 0 "reg_or_nonsymb_mem_operand" "=T,U,r,Q,&r")
2034 (match_operand:DF 1 "reg_or_nonsymb_mem_operand" "U,T,r,r,Q"))]
2035 "! TARGET_FPU
2036 && (register_operand (operands[0], DFmode)
2037 || register_operand (operands[1], DFmode))"
2038 "* return output_move_double (operands);"
2039 [(set_attr "type" "store,load,move,store,load")
2040 (set_attr "length" "1,1,2,3,3")])
2041
2042 ;; Must handle overlapping registers here, since parameters can be unaligned
2043 ;; in registers.
2044 ;; ??? Do we need a v9 version of this?
2045 (define_split
2046 [(set (match_operand:DF 0 "register_operand" "")
2047 (match_operand:DF 1 "register_operand" ""))]
2048 "! TARGET_V9 && reload_completed"
2049 [(set (match_dup 2) (match_dup 3))
2050 (set (match_dup 4) (match_dup 5))]
2051 "
2052 {
2053 rtx first_set = operand_subword (operands[0], 0, 0, DFmode);
2054 rtx second_use = operand_subword (operands[1], 1, 0, DFmode);
2055
2056 if (REGNO (first_set) == REGNO (second_use))
2057 {
2058 operands[2] = operand_subword (operands[0], 1, 0, DFmode);
2059 operands[3] = second_use;
2060 operands[4] = first_set;
2061 operands[5] = operand_subword (operands[1], 0, 0, DFmode);
2062 }
2063 else
2064 {
2065 operands[2] = first_set;
2066 operands[3] = operand_subword (operands[1], 0, 0, DFmode);
2067 operands[4] = operand_subword (operands[0], 1, 0, DFmode);
2068 operands[5] = second_use;
2069 }
2070 }")
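;; For example (illustrative registers): copying a DFmode value living in the
;; unaligned pair %o0/%o1 into %o1/%o2 must copy %o1 into %o2 before %o0
;; overwrites %o1, which is why the word that would otherwise be clobbered is
;; moved first in the split above.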
2071
2072 (define_insn "*store_df"
2073 [(set (mem:DF (match_operand:SI 0 "symbolic_operand" "i,i"))
2074 (match_operand:DF 1 "reg_or_0_operand" "re,G"))
2075 (clobber (match_scratch:SI 2 "=&r,&r"))]
2076 "(reload_completed || reload_in_progress) && ! TARGET_PTR64"
2077 "*
2078 {
2079 output_asm_insn (\"sethi %%hi(%a0),%2\", operands);
2080 if (which_alternative == 0)
2081 return \"std %1,[%2+%%lo(%a0)]\";
2082 else
2083 return \"st %%g0,[%2+%%lo(%a0)]\;st %%g0,[%2+%%lo(%a0+4)]\";
2084 }"
2085 [(set_attr "type" "store")
2086 (set_attr "length" "3")])
2087
2088 ;; This pattern forces (set (reg:TF ...) (const_double ...))
2089 ;; to be reloaded by putting the constant into memory.
2090 ;; It must come before the more general movtf pattern.
2091 (define_insn "*movtf_const_insn"
2092 [(set (match_operand:TF 0 "general_operand" "=?r,e,o")
2093 (match_operand:TF 1 "" "?F,m,G"))]
2094 "TARGET_FPU && GET_CODE (operands[1]) == CONST_DOUBLE"
2095 "*
2096 {
2097 switch (which_alternative)
2098 {
2099 case 0:
2100 return output_move_quad (operands);
2101 case 1:
2102 return output_fp_move_quad (operands);
2103 case 2:
2104 if (TARGET_V9)
2105 {
2106 operands[1] = adj_offsettable_operand (operands[0], 8);
2107 return \"stx %%g0,%0\;stx %%g0,%1\";
2108 }
2109 else
2110 {
2111 /* ??? Do we run off the end of the array here? */
2112 operands[1] = adj_offsettable_operand (operands[0], 4);
2113 operands[2] = adj_offsettable_operand (operands[0], 8);
2114 operands[3] = adj_offsettable_operand (operands[0], 12);
2115 return \"st %%g0,%0\;st %%g0,%1\;st %%g0,%2\;st %%g0,%3\";
2116 }
2117 }
2118 }"
2119 [(set_attr "type" "load,fpload,store")
2120 (set_attr "length" "5,5,5")])
2121
2122 (define_expand "movtf"
2123 [(set (match_operand:TF 0 "general_operand" "")
2124 (match_operand:TF 1 "general_operand" ""))]
2125 ""
2126 "
2127 {
2128 if (emit_move_sequence (operands, TFmode))
2129 DONE;
2130 }")
2131
2132 (define_insn "*movtf_insn"
2133 [(set (match_operand:TF 0 "reg_or_nonsymb_mem_operand" "=e,r,Q,Q,e,&r")
2134 (match_operand:TF 1 "reg_or_nonsymb_mem_operand" "e,r,e,r,Q,Q"))]
2135 "TARGET_FPU
2136 && (register_operand (operands[0], TFmode)
2137 || register_operand (operands[1], TFmode))"
2138 "*
2139 {
2140 if (FP_REG_P (operands[0]) || FP_REG_P (operands[1]))
2141 return output_fp_move_quad (operands);
2142 return output_move_quad (operands);
2143 }"
2144 [(set_attr "type" "fp,move,fpstore,store,fpload,load")
2145 (set_attr "length" "4,4,5,5,5,5")])
2146
2147 ;; Exactly the same as above, except that all `e' cases are deleted.
2148 ;; This is necessary to prevent reload from ever trying to use an `e' reg
2149 ;; when -mno-fpu.
2150
2151 (define_insn "*movtf_no_e_insn"
2152 [(set (match_operand:TF 0 "reg_or_nonsymb_mem_operand" "=r,Q,&r")
2153 (match_operand:TF 1 "reg_or_nonsymb_mem_operand" "r,r,Q"))]
2154 "! TARGET_FPU
2155 && (register_operand (operands[0], TFmode)
2156 || register_operand (operands[1], TFmode))"
2157 "*
2158 {
2159 if (FP_REG_P (operands[0]) || FP_REG_P (operands[1]))
2160 return output_fp_move_quad (operands);
2161 return output_move_quad (operands);
2162 }"
2163 [(set_attr "type" "move,store,load")
2164 (set_attr "length" "4,5,5")])
2165
2166 ;; This is disabled because it does not work. Long doubles have only 8
2167 ;; byte alignment. Adding an offset of 8 or 12 to an 8 byte aligned %lo may
2168 ;; cause it to overflow. See also GO_IF_LEGITIMATE_ADDRESS.
2169 (define_insn "*store_tf"
2170 [(set (mem:TF (match_operand:SI 0 "symbolic_operand" "i,i"))
2171 (match_operand:TF 1 "reg_or_0_operand" "re,G"))
2172 (clobber (match_scratch:SI 2 "=&r,&r"))]
2173 "0 && (reload_completed || reload_in_progress) && ! TARGET_PTR64"
2174 "*
2175 {
2176 output_asm_insn (\"sethi %%hi(%a0),%2\", operands);
2177 if (which_alternative == 0)
2178 return \"std %1,[%2+%%lo(%a0)]\;std %S1,[%2+%%lo(%a0+8)]\";
2179 else
2180 return \"st %%g0,[%2+%%lo(%a0)]\;st %%g0,[%2+%%lo(%a0+4)]\; st %%g0,[%2+%%lo(%a0+8)]\;st %%g0,[%2+%%lo(%a0+12)]\";
2181 }"
2182 [(set_attr "type" "store")
2183 (set_attr "length" "5")])
2184 \f
2185 ;; Sparc V9 conditional move instructions.
2186
2187 ;; We can handle larger constants here for some flavors, but for now we play
2188 ;; it safe and only allow those constants supported by all flavors.
2189
2190 (define_expand "movsicc"
2191 [(set (match_operand:SI 0 "register_operand" "")
2192 (if_then_else (match_operand 1 "comparison_operator" "")
2193 (match_operand:SI 2 "arith10_operand" "")
2194 (match_operand:SI 3 "register_operand" "")))]
2195 "TARGET_V9"
2196 "
2197 {
2198 enum rtx_code code = GET_CODE (operands[1]);
2199
2200 if (sparc_compare_op1 == const0_rtx
2201 && GET_CODE (sparc_compare_op0) == REG
2202 && GET_MODE (sparc_compare_op0) == DImode
2203 && v9_regcmp_p (code))
2204 {
2205 operands[1] = gen_rtx (code, DImode,
2206 sparc_compare_op0, sparc_compare_op1);
2207 }
2208 else
2209 {
2210 rtx cc_reg = gen_compare_reg (code,
2211 sparc_compare_op0, sparc_compare_op1);
2212 operands[1] = gen_rtx (code, GET_MODE (cc_reg), cc_reg, const0_rtx);
2213 }
2214 }")
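;; As a rough illustration (register choice and exact mnemonics are only a
;; sketch, not guaranteed compiler output), a statement such as
;;   x = (a == b) ? c : x;
;; can expand on v9 into a compare followed by a conditional move, e.g.
;;   cmp %o0,%o1        ! sets the integer condition codes
;;   move %icc,%o2,%o3  ! matched by *movsi_cc_sp64 below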
2215
2216 (define_expand "movdicc"
2217 [(set (match_operand:DI 0 "register_operand" "")
2218 (if_then_else (match_operand 1 "comparison_operator" "")
2219 (match_operand:DI 2 "arith10_operand" "")
2220 (match_operand:DI 3 "register_operand" "")))]
2221 "TARGET_V9"
2222 "
2223 {
2224 enum rtx_code code = GET_CODE (operands[1]);
2225
2226 if (sparc_compare_op1 == const0_rtx
2227 && GET_CODE (sparc_compare_op0) == REG
2228 && GET_MODE (sparc_compare_op0) == DImode
2229 && v9_regcmp_p (code))
2230 {
2231 operands[1] = gen_rtx (code, DImode,
2232 sparc_compare_op0, sparc_compare_op1);
2233 }
2234 else
2235 {
2236 rtx cc_reg = gen_compare_reg (code,
2237 sparc_compare_op0, sparc_compare_op1);
2238 operands[1] = gen_rtx (code, GET_MODE (cc_reg), cc_reg, const0_rtx);
2239 }
2240 }")
2241
2242 (define_expand "movsfcc"
2243 [(set (match_operand:SF 0 "register_operand" "")
2244 (if_then_else (match_operand 1 "comparison_operator" "")
2245 (match_operand:SF 2 "register_operand" "")
2246 (match_operand:SF 3 "register_operand" "")))]
2247 "TARGET_V9"
2248 "
2249 {
2250 enum rtx_code code = GET_CODE (operands[1]);
2251
2252 if (sparc_compare_op1 == const0_rtx
2253 && GET_CODE (sparc_compare_op0) == REG
2254 && GET_MODE (sparc_compare_op0) == DImode
2255 && v9_regcmp_p (code))
2256 {
2257 operands[1] = gen_rtx (code, DImode,
2258 sparc_compare_op0, sparc_compare_op1);
2259 }
2260 else
2261 {
2262 rtx cc_reg = gen_compare_reg (code,
2263 sparc_compare_op0, sparc_compare_op1);
2264 operands[1] = gen_rtx (code, GET_MODE (cc_reg), cc_reg, const0_rtx);
2265 }
2266 }")
2267
2268 (define_expand "movdfcc"
2269 [(set (match_operand:DF 0 "register_operand" "")
2270 (if_then_else (match_operand 1 "comparison_operator" "")
2271 (match_operand:DF 2 "register_operand" "")
2272 (match_operand:DF 3 "register_operand" "")))]
2273 "TARGET_V9"
2274 "
2275 {
2276 enum rtx_code code = GET_CODE (operands[1]);
2277
2278 if (sparc_compare_op1 == const0_rtx
2279 && GET_CODE (sparc_compare_op0) == REG
2280 && GET_MODE (sparc_compare_op0) == DImode
2281 && v9_regcmp_p (code))
2282 {
2283 operands[1] = gen_rtx (code, DImode,
2284 sparc_compare_op0, sparc_compare_op1);
2285 }
2286 else
2287 {
2288 rtx cc_reg = gen_compare_reg (code,
2289 sparc_compare_op0, sparc_compare_op1);
2290 operands[1] = gen_rtx (code, GET_MODE (cc_reg), cc_reg, const0_rtx);
2291 }
2292 }")
2293
2294 (define_expand "movtfcc"
2295 [(set (match_operand:TF 0 "register_operand" "")
2296 (if_then_else (match_operand 1 "comparison_operator" "")
2297 (match_operand:TF 2 "register_operand" "")
2298 (match_operand:TF 3 "register_operand" "")))]
2299 "TARGET_V9"
2300 "
2301 {
2302 enum rtx_code code = GET_CODE (operands[1]);
2303
2304 if (sparc_compare_op1 == const0_rtx
2305 && GET_CODE (sparc_compare_op0) == REG
2306 && GET_MODE (sparc_compare_op0) == DImode
2307 && v9_regcmp_p (code))
2308 {
2309 operands[1] = gen_rtx (code, DImode,
2310 sparc_compare_op0, sparc_compare_op1);
2311 }
2312 else
2313 {
2314 rtx cc_reg = gen_compare_reg (code,
2315 sparc_compare_op0, sparc_compare_op1);
2316 operands[1] = gen_rtx (code, GET_MODE (cc_reg), cc_reg, const0_rtx);
2317 }
2318 }")
2319
2320 ;; Conditional move define_insns.
2321
2322 (define_insn "*movsi_cc_sp64"
2323 [(set (match_operand:SI 0 "register_operand" "=r")
2324 (if_then_else (match_operator 1 "comparison_operator"
2325 [(reg:CC 0) (const_int 0)])
2326 (match_operand:SI 2 "arith11_operand" "ri")
2327 (match_operand:SI 3 "register_operand" "0")))]
2328 "TARGET_V9"
2329 "mov%C1 %%icc,%2,%0"
2330 [(set_attr "type" "cmove")])
2331
2332 (define_insn "*movdi_cc_sp64"
2333 [(set (match_operand:DI 0 "register_operand" "=r")
2334 (if_then_else (match_operator 1 "comparison_operator"
2335 [(reg:CC 0) (const_int 0)])
2336 (match_operand:DI 2 "arith11_double_operand" "rHI")
2337 (match_operand:DI 3 "register_operand" "0")))]
2338 "TARGET_V9"
2339 "mov%C1 %%icc,%2,%0"
2340 [(set_attr "type" "cmove")])
2341
2342 (define_insn "*movsi_ccx_sp64"
2343 [(set (match_operand:SI 0 "register_operand" "=r")
2344 (if_then_else (match_operator 1 "comparison_operator"
2345 [(reg:CCX 0) (const_int 0)])
2346 (match_operand:SI 2 "arith11_operand" "ri")
2347 (match_operand:SI 3 "register_operand" "0")))]
2348 "TARGET_V9"
2349 "mov%C1 %%xcc,%2,%0"
2350 [(set_attr "type" "cmove")])
2351
2352 (define_insn "*movdi_ccx_sp64"
2353 [(set (match_operand:DI 0 "register_operand" "=r")
2354 (if_then_else (match_operator 1 "comparison_operator"
2355 [(reg:CCX 0) (const_int 0)])
2356 (match_operand:DI 2 "arith11_double_operand" "rHI")
2357 (match_operand:DI 3 "register_operand" "0")))]
2358 "TARGET_V9"
2359 "mov%C1 %%xcc,%2,%0"
2360 [(set_attr "type" "cmove")])
2361
2362 (define_insn "*movsi_ccfp_sp64"
2363 [(set (match_operand:SI 0 "register_operand" "=r")
2364 (if_then_else (match_operator 1 "comparison_operator"
2365 [(match_operand:CCFP 2 "ccfp_reg_operand" "c")
2366 (const_int 0)])
2367 (match_operand:SI 3 "arith11_operand" "ri")
2368 (match_operand:SI 4 "register_operand" "0")))]
2369 "TARGET_V9"
2370 "mov%C1 %2,%3,%0"
2371 [(set_attr "type" "cmove")])
2372
2373 (define_insn "*movsi_ccfpe_sp64"
2374 [(set (match_operand:SI 0 "register_operand" "=r")
2375 (if_then_else (match_operator 1 "comparison_operator"
2376 [(match_operand:CCFPE 2 "ccfp_reg_operand" "c")
2377 (const_int 0)])
2378 (match_operand:SI 3 "arith11_operand" "ri")
2379 (match_operand:SI 4 "register_operand" "0")))]
2380 "TARGET_V9"
2381 "mov%C1 %2,%3,%0"
2382 [(set_attr "type" "cmove")])
2383
2384 (define_insn "*movdi_ccfp_sp64"
2385 [(set (match_operand:DI 0 "register_operand" "=r")
2386 (if_then_else (match_operator 1 "comparison_operator"
2387 [(match_operand:CCFP 2 "ccfp_reg_operand" "c")
2388 (const_int 0)])
2389 (match_operand:DI 3 "arith11_double_operand" "rHI")
2390 (match_operand:DI 4 "register_operand" "0")))]
2391 "TARGET_V9"
2392 "mov%C1 %2,%3,%0"
2393 [(set_attr "type" "cmove")])
2394
2395 (define_insn "*movdi_ccfpe_sp64"
2396 [(set (match_operand:DI 0 "register_operand" "=r")
2397 (if_then_else (match_operator 1 "comparison_operator"
2398 [(match_operand:CCFPE 2 "ccfp_reg_operand" "c")
2399 (const_int 0)])
2400 (match_operand:DI 3 "arith11_double_operand" "rHI")
2401 (match_operand:DI 4 "register_operand" "0")))]
2402 "TARGET_V9"
2403 "mov%C1 %2,%3,%0"
2404 [(set_attr "type" "cmove")])
2405
2406 (define_insn "*movsi_cc_reg_sp64"
2407 [(set (match_operand:SI 0 "register_operand" "=r")
2408 (if_then_else (match_operator 1 "v9_regcmp_op"
2409 [(match_operand:DI 2 "register_operand" "r")
2410 (const_int 0)])
2411 (match_operand:SI 3 "arith10_operand" "ri")
2412 (match_operand:SI 4 "register_operand" "0")))]
2413 "TARGET_V9"
2414 "movr%D1 %2,%r3,%0"
2415 [(set_attr "type" "cmove")])
2416
2417 (define_insn "*movdi_cc_reg_sp64"
2418 [(set (match_operand:DI 0 "register_operand" "=r")
2419 (if_then_else (match_operator 1 "v9_regcmp_op"
2420 [(match_operand:DI 2 "register_operand" "r")
2421 (const_int 0)])
2422 (match_operand:DI 3 "arith10_double_operand" "ri")
2423 (match_operand:DI 4 "register_operand" "0")))]
2424 "TARGET_V9"
2425 "movr%D1 %2,%r3,%0"
2426 [(set_attr "type" "cmove")])
2427
2428 (define_insn "*movsf_cc_reg_sp64"
2429 [(set (match_operand:SF 0 "register_operand" "=f")
2430 (if_then_else (match_operator 1 "v9_regcmp_op"
2431 [(match_operand:DI 2 "register_operand" "r")
2432 (const_int 0)])
2433 (match_operand:SF 3 "register_operand" "f")
2434 (match_operand:SF 4 "register_operand" "0")))]
2435 "TARGET_V9 && TARGET_FPU"
2436 "fmovrs%D1 %2,%r3,%0"
2437 [(set_attr "type" "cmove")])
2438
2439 (define_insn "*movdf_cc_reg_sp64"
2440 [(set (match_operand:DF 0 "register_operand" "=e")
2441 (if_then_else (match_operator 1 "v9_regcmp_op"
2442 [(match_operand:DI 2 "register_operand" "r")
2443 (const_int 0)])
2444 (match_operand:DF 3 "register_operand" "e")
2445 (match_operand:DF 4 "register_operand" "0")))]
2446 "TARGET_V9 && TARGET_FPU"
2447 "fmovrd%D1 %2,%r3,%0"
2448 [(set_attr "type" "cmove")])
2449
2450 (define_insn "*movtf_cc_reg_sp64"
2451 [(set (match_operand:TF 0 "register_operand" "=e")
2452 (if_then_else (match_operator 1 "v9_regcmp_op"
2453 [(match_operand:DI 2 "register_operand" "r")
2454 (const_int 0)])
2455 (match_operand:TF 3 "register_operand" "e")
2456 (match_operand:TF 4 "register_operand" "0")))]
2457 "TARGET_V9 && TARGET_FPU"
2458 "fmovrq%D1 %2,%r3,%0"
2459 [(set_attr "type" "cmove")])
2460
2461 (define_insn "*movsf_ccfp_sp64"
2462 [(set (match_operand:SF 0 "register_operand" "=f")
2463 (if_then_else (match_operator 1 "comparison_operator"
2464 [(match_operand:CCFP 2 "ccfp_reg_operand" "c")
2465 (const_int 0)])
2466 (match_operand:SF 3 "register_operand" "f")
2467 (match_operand:SF 4 "register_operand" "0")))]
2468 "TARGET_V9 && TARGET_FPU"
2469 "fmovs%C1 %2,%3,%0"
2470 [(set_attr "type" "cmove")])
2471
2472 (define_insn "*movsf_ccfpe_sp64"
2473 [(set (match_operand:SF 0 "register_operand" "=f")
2474 (if_then_else (match_operator 1 "comparison_operator"
2475 [(match_operand:CCFPE 2 "ccfp_reg_operand" "c")
2476 (const_int 0)])
2477 (match_operand:SF 3 "register_operand" "f")
2478 (match_operand:SF 4 "register_operand" "0")))]
2479 "TARGET_V9 && TARGET_FPU"
2480 "fmovs%C1 %2,%3,%0"
2481 [(set_attr "type" "cmove")])
2482
2483 (define_insn "*movdf_ccfp_sp64"
2484 [(set (match_operand:DF 0 "register_operand" "=e")
2485 (if_then_else (match_operator 1 "comparison_operator"
2486 [(match_operand:CCFP 2 "ccfp_reg_operand" "c")
2487 (const_int 0)])
2488 (match_operand:DF 3 "register_operand" "e")
2489 (match_operand:DF 4 "register_operand" "0")))]
2490 "TARGET_V9 && TARGET_FPU"
2491 "fmovd%C1 %2,%3,%0"
2492 [(set_attr "type" "cmove")])
2493
2494 (define_insn "*movdf_ccfpe_sp64"
2495 [(set (match_operand:DF 0 "register_operand" "=e")
2496 (if_then_else (match_operator 1 "comparison_operator"
2497 [(match_operand:CCFPE 2 "ccfp_reg_operand" "c")
2498 (const_int 0)])
2499 (match_operand:DF 3 "register_operand" "e")
2500 (match_operand:DF 4 "register_operand" "0")))]
2501 "TARGET_V9 && TARGET_FPU"
2502 "fmovd%C1 %2,%3,%0"
2503 [(set_attr "type" "cmove")])
2504
2505 (define_insn "*movtf_ccfp_sp64"
2506 [(set (match_operand:TF 0 "register_operand" "=e")
2507 (if_then_else (match_operator 1 "comparison_operator"
2508 [(match_operand:CCFP 2 "ccfp_reg_operand" "c")
2509 (const_int 0)])
2510 (match_operand:TF 3 "register_operand" "e")
2511 (match_operand:TF 4 "register_operand" "0")))]
2512 "TARGET_V9 && TARGET_FPU"
2513 "fmovq%C1 %2,%3,%0"
2514 [(set_attr "type" "cmove")])
2515
2516 (define_insn "*movtf_ccfpe_sp64"
2517 [(set (match_operand:TF 0 "register_operand" "=e")
2518 (if_then_else (match_operator 1 "comparison_operator"
2519 [(match_operand:CCFPE 2 "ccfp_reg_operand" "c")
2520 (const_int 0)])
2521 (match_operand:TF 3 "register_operand" "e")
2522 (match_operand:TF 4 "register_operand" "0")))]
2523 "TARGET_V9 && TARGET_FPU"
2524 "fmovq%C1 %2,%3,%0"
2525 [(set_attr "type" "cmove")])
2526
2527 (define_insn "*movsf_cc_sp64"
2528 [(set (match_operand:SF 0 "register_operand" "=f")
2529 (if_then_else (match_operator 1 "comparison_operator"
2530 [(reg:CC 0) (const_int 0)])
2531 (match_operand:SF 2 "register_operand" "f")
2532 (match_operand:SF 3 "register_operand" "0")))]
2533 "TARGET_V9 && TARGET_FPU"
2534 "fmovs%C1 %%icc,%2,%0"
2535 [(set_attr "type" "cmove")])
2536
2537 (define_insn "*movdf_cc_sp64"
2538 [(set (match_operand:DF 0 "register_operand" "=e")
2539 (if_then_else (match_operator 1 "comparison_operator"
2540 [(reg:CC 0) (const_int 0)])
2541 (match_operand:DF 2 "register_operand" "e")
2542 (match_operand:DF 3 "register_operand" "0")))]
2543 "TARGET_V9 && TARGET_FPU"
2544 "fmovd%C1 %%icc,%2,%0"
2545 [(set_attr "type" "cmove")])
2546
2547 (define_insn "*movtf_cc_sp64"
2548 [(set (match_operand:TF 0 "register_operand" "=e")
2549 (if_then_else (match_operator 1 "comparison_operator"
2550 [(reg:CC 0) (const_int 0)])
2551 (match_operand:TF 2 "register_operand" "e")
2552 (match_operand:TF 3 "register_operand" "0")))]
2553 "TARGET_V9 && TARGET_FPU"
2554 "fmovq%C1 %%icc,%2,%0"
2555 [(set_attr "type" "cmove")])
2556
2557 (define_insn "*movsf_ccx_sp64"
2558 [(set (match_operand:SF 0 "register_operand" "=f")
2559 (if_then_else (match_operator 1 "comparison_operator"
2560 [(reg:CCX 0) (const_int 0)])
2561 (match_operand:SF 2 "register_operand" "f")
2562 (match_operand:SF 3 "register_operand" "0")))]
2563 "TARGET_V9 && TARGET_FPU"
2564 "fmovs%C1 %%xcc,%2,%0"
2565 [(set_attr "type" "cmove")])
2566
2567 (define_insn "*movdf_ccx_sp64"
2568 [(set (match_operand:DF 0 "register_operand" "=e")
2569 (if_then_else (match_operator 1 "comparison_operator"
2570 [(reg:CCX 0) (const_int 0)])
2571 (match_operand:DF 2 "register_operand" "e")
2572 (match_operand:DF 3 "register_operand" "0")))]
2573 "TARGET_V9 && TARGET_FPU"
2574 "fmovd%C1 %%xcc,%2,%0"
2575 [(set_attr "type" "cmove")])
2576
2577 (define_insn "*movtf_ccx_sp64"
2578 [(set (match_operand:TF 0 "register_operand" "=e")
2579 (if_then_else (match_operator 1 "comparison_operator"
2580 [(reg:CCX 0) (const_int 0)])
2581 (match_operand:TF 2 "register_operand" "e")
2582 (match_operand:TF 3 "register_operand" "0")))]
2583 "TARGET_V9 && TARGET_FPU"
2584 "fmovq%C1 %%xcc,%2,%0"
2585 [(set_attr "type" "cmove")])
2586 \f
2587 ;;- zero extension instructions
2588
2589 ;; These patterns originally accepted general_operands; however, slightly
2590 ;; better code is generated by only accepting register_operands, and then
2591 ;; letting combine generate the ldu[hb] insns.
2592
2593 (define_expand "zero_extendhisi2"
2594 [(set (match_operand:SI 0 "register_operand" "")
2595 (zero_extend:SI (match_operand:HI 1 "register_operand" "")))]
2596 ""
2597 "
2598 {
2599 rtx temp = gen_reg_rtx (SImode);
2600 rtx shift_16 = gen_rtx (CONST_INT, VOIDmode, 16);
2601 int op1_subword = 0;
2602
2603 if (GET_CODE (operand1) == SUBREG)
2604 {
2605 op1_subword = SUBREG_WORD (operand1);
2606 operand1 = XEXP (operand1, 0);
2607 }
2608
2609 emit_insn (gen_ashlsi3 (temp, gen_rtx (SUBREG, SImode, operand1,
2610 op1_subword),
2611 shift_16));
2612 emit_insn (gen_lshrsi3 (operand0, temp, shift_16));
2613 DONE;
2614 }")
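;; The expansion above turns a register-to-register zero extension into two
;; shifts; with illustrative register names the generated code is roughly
;;   sll %o1,16,%o2
;;   srl %o2,16,%o0
;; Memory operands are instead picked up by the lduh pattern that follows.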
2615
2616 (define_insn "*zero_extendhisi2_insn"
2617 [(set (match_operand:SI 0 "register_operand" "=r")
2618 (zero_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
2619 ""
2620 "lduh %1,%0"
2621 [(set_attr "type" "load")])
2622
2623 (define_expand "zero_extendqihi2"
2624 [(set (match_operand:HI 0 "register_operand" "")
2625 (zero_extend:HI (match_operand:QI 1 "register_operand" "")))]
2626 ""
2627 "")
2628
2629 (define_insn "*zero_extendqihi2_insn"
2630 [(set (match_operand:HI 0 "register_operand" "=r,r")
2631 (zero_extend:HI (match_operand:QI 1 "sparc_operand" "r,Q")))]
2632 "GET_CODE (operands[1]) != CONST_INT"
2633 "@
2634 and %1,0xff,%0
2635 ldub %1,%0"
2636 [(set_attr "type" "unary,load")
2637 (set_attr "length" "1")])
2638
2639 (define_expand "zero_extendqisi2"
2640 [(set (match_operand:SI 0 "register_operand" "")
2641 (zero_extend:SI (match_operand:QI 1 "register_operand" "")))]
2642 ""
2643 "")
2644
2645 (define_insn "*zero_extendqisi2_insn"
2646 [(set (match_operand:SI 0 "register_operand" "=r,r")
2647 (zero_extend:SI (match_operand:QI 1 "sparc_operand" "r,Q")))]
2648 "GET_CODE (operands[1]) != CONST_INT"
2649 "@
2650 and %1,0xff,%0
2651 ldub %1,%0"
2652 [(set_attr "type" "unary,load")
2653 (set_attr "length" "1")])
2654
2655 (define_expand "zero_extendqidi2"
2656 [(set (match_operand:DI 0 "register_operand" "")
2657 (zero_extend:DI (match_operand:QI 1 "register_operand" "")))]
2658 "TARGET_V9"
2659 "")
2660
2661 (define_insn "*zero_extendqidi2_insn"
2662 [(set (match_operand:DI 0 "register_operand" "=r,r")
2663 (zero_extend:DI (match_operand:QI 1 "sparc_operand" "r,Q")))]
2664 "TARGET_V9 && GET_CODE (operands[1]) != CONST_INT"
2665 "@
2666 and %1,0xff,%0
2667 ldub %1,%0"
2668 [(set_attr "type" "unary,load")
2669 (set_attr "length" "1")])
2670
2671 (define_expand "zero_extendhidi2"
2672 [(set (match_operand:DI 0 "register_operand" "")
2673 (zero_extend:DI (match_operand:HI 1 "register_operand" "")))]
2674 "TARGET_V9"
2675 "
2676 {
2677 rtx temp = gen_reg_rtx (DImode);
2678 rtx shift_48 = gen_rtx (CONST_INT, VOIDmode, 48);
2679 int op1_subword = 0;
2680
2681 if (GET_CODE (operand1) == SUBREG)
2682 {
2683 op1_subword = SUBREG_WORD (operand1);
2684 operand1 = XEXP (operand1, 0);
2685 }
2686
2687 emit_insn (gen_ashldi3 (temp, gen_rtx (SUBREG, DImode, operand1,
2688 op1_subword),
2689 shift_48));
2690 emit_insn (gen_lshrdi3 (operand0, temp, shift_48));
2691 DONE;
2692 }")
2693
2694 (define_insn "*zero_extendhidi2_insn"
2695 [(set (match_operand:DI 0 "register_operand" "=r")
2696 (zero_extend:DI (match_operand:HI 1 "memory_operand" "m")))]
2697 "TARGET_V9"
2698 "lduh %1,%0"
2699 [(set_attr "type" "load")])
2700
2701 ;; ??? Write truncdisi pattern using sra?
2702
2703 (define_expand "zero_extendsidi2"
2704 [(set (match_operand:DI 0 "register_operand" "")
2705 (zero_extend:DI (match_operand:SI 1 "register_operand" "")))]
2706 "TARGET_V9"
2707 "")
2708
2709 (define_insn "*zero_extendsidi2_insn"
2710 [(set (match_operand:DI 0 "register_operand" "=r,r")
2711 (zero_extend:DI (match_operand:SI 1 "sparc_operand" "r,Q")))]
2712 "TARGET_V9 && GET_CODE (operands[1]) != CONST_INT"
2713 "@
2714 srl %1,0,%0
2715 lduw %1,%0"
2716 [(set_attr "type" "unary,load")
2717 (set_attr "length" "1")])
2718
2719 ;; Simplify comparisons of extended values.
2720
2721 (define_insn "*cmp_zero_extendqisi2"
2722 [(set (reg:CC 0)
2723 (compare:CC (zero_extend:SI (match_operand:QI 0 "register_operand" "r"))
2724 (const_int 0)))]
2725 ""
2726 "andcc %0,0xff,%%g0"
2727 [(set_attr "type" "compare")])
2728
2729 (define_insn "*cmp_zero_extendqisi2_set"
2730 [(set (reg:CC 0)
2731 (compare:CC (zero_extend:SI (match_operand:QI 1 "register_operand" "r"))
2732 (const_int 0)))
2733 (set (match_operand:SI 0 "register_operand" "=r")
2734 (zero_extend:SI (match_dup 1)))]
2735 ""
2736 "andcc %1,0xff,%0"
2737 [(set_attr "type" "unary")])
2738
2739 ;; Similarly, handle SI->QI mode truncation followed by a compare.
2740
2741 (define_insn "*cmp_siqi_trunc"
2742 [(set (reg:CC 0)
2743 (compare:CC (subreg:QI (match_operand:SI 0 "register_operand" "r") 0)
2744 (const_int 0)))]
2745 ""
2746 "andcc %0,0xff,%%g0"
2747 [(set_attr "type" "compare")])
2748
2749 (define_insn "*cmp_siqi_trunc_set"
2750 [(set (reg:CC 0)
2751 (compare:CC (subreg:QI (match_operand:SI 1 "register_operand" "r") 0)
2752 (const_int 0)))
2753 (set (match_operand:QI 0 "register_operand" "=r")
2754 (match_dup 1))]
2755 ""
2756 "andcc %1,0xff,%0"
2757 [(set_attr "type" "unary")])
2758 \f
2759 ;;- sign extension instructions
2760
2761 ;; These patterns originally accepted general_operands; however, slightly
2762 ;; better code is generated by only accepting register_operands, and then
2763 ;; letting combine generate the lds[hb] insns.
2764
2765 (define_expand "extendhisi2"
2766 [(set (match_operand:SI 0 "register_operand" "")
2767 (sign_extend:SI (match_operand:HI 1 "register_operand" "")))]
2768 ""
2769 "
2770 {
2771 rtx temp = gen_reg_rtx (SImode);
2772 rtx shift_16 = gen_rtx (CONST_INT, VOIDmode, 16);
2773 int op1_subword = 0;
2774
2775 if (GET_CODE (operand1) == SUBREG)
2776 {
2777 op1_subword = SUBREG_WORD (operand1);
2778 operand1 = XEXP (operand1, 0);
2779 }
2780
2781 emit_insn (gen_ashlsi3 (temp, gen_rtx (SUBREG, SImode, operand1,
2782 op1_subword),
2783 shift_16));
2784 emit_insn (gen_ashrsi3 (operand0, temp, shift_16));
2785 DONE;
2786 }")
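;; As with zero extension above, a register-to-register sign extension is
;; expanded into two shifts; with illustrative register names, roughly
;;   sll %o1,16,%o2
;;   sra %o2,16,%o0
;; Memory operands are handled by the ldsh pattern that follows.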
2787
2788 (define_insn "*sign_extendhisi2_insn"
2789 [(set (match_operand:SI 0 "register_operand" "=r")
2790 (sign_extend:SI (match_operand:HI 1 "memory_operand" "m")))]
2791 ""
2792 "ldsh %1,%0"
2793 [(set_attr "type" "load")])
2794
2795 (define_expand "extendqihi2"
2796 [(set (match_operand:HI 0 "register_operand" "")
2797 (sign_extend:HI (match_operand:QI 1 "register_operand" "")))]
2798 ""
2799 "
2800 {
2801 rtx temp = gen_reg_rtx (SImode);
2802 rtx shift_24 = gen_rtx (CONST_INT, VOIDmode, 24);
2803 int op1_subword = 0;
2804 int op0_subword = 0;
2805
2806 if (GET_CODE (operand1) == SUBREG)
2807 {
2808 op1_subword = SUBREG_WORD (operand1);
2809 operand1 = XEXP (operand1, 0);
2810 }
2811 if (GET_CODE (operand0) == SUBREG)
2812 {
2813 op0_subword = SUBREG_WORD (operand0);
2814 operand0 = XEXP (operand0, 0);
2815 }
2816 emit_insn (gen_ashlsi3 (temp, gen_rtx (SUBREG, SImode, operand1,
2817 op1_subword),
2818 shift_24));
2819 if (GET_MODE (operand0) != SImode)
2820 operand0 = gen_rtx (SUBREG, SImode, operand0, op0_subword);
2821 emit_insn (gen_ashrsi3 (operand0, temp, shift_24));
2822 DONE;
2823 }")
2824
2825 (define_insn "*sign_extendqihi2_insn"
2826 [(set (match_operand:HI 0 "register_operand" "=r")
2827 (sign_extend:HI (match_operand:QI 1 "memory_operand" "m")))]
2828 ""
2829 "ldsb %1,%0"
2830 [(set_attr "type" "load")])
2831
2832 (define_expand "extendqisi2"
2833 [(set (match_operand:SI 0 "register_operand" "")
2834 (sign_extend:SI (match_operand:QI 1 "register_operand" "")))]
2835 ""
2836 "
2837 {
2838 rtx temp = gen_reg_rtx (SImode);
2839 rtx shift_24 = gen_rtx (CONST_INT, VOIDmode, 24);
2840 int op1_subword = 0;
2841
2842 if (GET_CODE (operand1) == SUBREG)
2843 {
2844 op1_subword = SUBREG_WORD (operand1);
2845 operand1 = XEXP (operand1, 0);
2846 }
2847
2848 emit_insn (gen_ashlsi3 (temp, gen_rtx (SUBREG, SImode, operand1,
2849 op1_subword),
2850 shift_24));
2851 emit_insn (gen_ashrsi3 (operand0, temp, shift_24));
2852 DONE;
2853 }")
2854
2855 (define_insn "*sign_extendqisi2_insn"
2856 [(set (match_operand:SI 0 "register_operand" "=r")
2857 (sign_extend:SI (match_operand:QI 1 "memory_operand" "m")))]
2858 ""
2859 "ldsb %1,%0"
2860 [(set_attr "type" "load")])
2861
2862 (define_expand "extendqidi2"
2863 [(set (match_operand:DI 0 "register_operand" "")
2864 (sign_extend:DI (match_operand:QI 1 "register_operand" "")))]
2865 "TARGET_V9"
2866 "
2867 {
2868 rtx temp = gen_reg_rtx (DImode);
2869 rtx shift_56 = gen_rtx (CONST_INT, VOIDmode, 56);
2870 int op1_subword = 0;
2871
2872 if (GET_CODE (operand1) == SUBREG)
2873 {
2874 op1_subword = SUBREG_WORD (operand1);
2875 operand1 = XEXP (operand1, 0);
2876 }
2877
2878 emit_insn (gen_ashldi3 (temp, gen_rtx (SUBREG, DImode, operand1,
2879 op1_subword),
2880 shift_56));
2881 emit_insn (gen_ashrdi3 (operand0, temp, shift_56));
2882 DONE;
2883 }")
2884
2885 (define_insn "*sign_extendqidi2_insn"
2886 [(set (match_operand:DI 0 "register_operand" "=r")
2887 (sign_extend:DI (match_operand:QI 1 "memory_operand" "m")))]
2888 "TARGET_V9"
2889 "ldsb %1,%0"
2890 [(set_attr "type" "load")])
2891
2892 (define_expand "extendhidi2"
2893 [(set (match_operand:DI 0 "register_operand" "")
2894 (sign_extend:DI (match_operand:HI 1 "register_operand" "")))]
2895 "TARGET_V9"
2896 "
2897 {
2898 rtx temp = gen_reg_rtx (DImode);
2899 rtx shift_48 = gen_rtx (CONST_INT, VOIDmode, 48);
2900 int op1_subword = 0;
2901
2902 if (GET_CODE (operand1) == SUBREG)
2903 {
2904 op1_subword = SUBREG_WORD (operand1);
2905 operand1 = XEXP (operand1, 0);
2906 }
2907
2908 emit_insn (gen_ashldi3 (temp, gen_rtx (SUBREG, DImode, operand1,
2909 op1_subword),
2910 shift_48));
2911 emit_insn (gen_ashrdi3 (operand0, temp, shift_48));
2912 DONE;
2913 }")
2914
2915 (define_insn "*sign_extendhidi2_insn"
2916 [(set (match_operand:DI 0 "register_operand" "=r")
2917 (sign_extend:DI (match_operand:HI 1 "memory_operand" "m")))]
2918 "TARGET_V9"
2919 "ldsh %1,%0"
2920 [(set_attr "type" "load")])
2921
2922 (define_expand "extendsidi2"
2923 [(set (match_operand:DI 0 "register_operand" "")
2924 (sign_extend:DI (match_operand:SI 1 "register_operand" "")))]
2925 "TARGET_V9"
2926 "")
2927
2928 (define_insn "*sign_extendsidi2_insn"
2929 [(set (match_operand:DI 0 "register_operand" "=r,r")
2930 (sign_extend:DI (match_operand:SI 1 "sparc_operand" "r,Q")))]
2931 "TARGET_V9"
2932 "@
2933 sra %1,0,%0
2934 ldsw %1,%0"
2935 [(set_attr "type" "unary,load")
2936 (set_attr "length" "1")])
2937 \f
2938 ;; Special pattern for optimizing bit-field compares. This is needed
2939 ;; because combine uses this as a canonical form.
2940
2941 (define_insn "*cmp_zero_extract"
2942 [(set (reg:CC 0)
2943 (compare:CC
2944 (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
2945 (match_operand:SI 1 "small_int" "n")
2946 (match_operand:SI 2 "small_int" "n"))
2947 (const_int 0)))]
2948 "INTVAL (operands[2]) > 19"
2949 "*
2950 {
2951 int len = INTVAL (operands[1]);
2952 int pos = 32 - INTVAL (operands[2]) - len;
2953 unsigned mask = ((1 << len) - 1) << pos;
2954
2955 operands[1] = gen_rtx (CONST_INT, VOIDmode, mask);
2956 return \"andcc %0,%1,%%g0\";
2957 }")
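;; A worked example of the mask computation above (hypothetical operand
;; values): for a zero_extract of length 5 starting at bit 24 (so
;; operands[2] = 24 > 19), pos = 32 - 24 - 5 = 3 and
;; mask = ((1 << 5) - 1) << 3 = 0xf8, so the insn is output as
;;   andcc %0,0xf8,%g0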
2958
2959 (define_insn "*cmp_zero_extract_sp64"
2960 [(set (reg:CCX 0)
2961 (compare:CCX
2962 (zero_extract:DI (match_operand:DI 0 "register_operand" "r")
2963 (match_operand:SI 1 "small_int" "n")
2964 (match_operand:SI 2 "small_int" "n"))
2965 (const_int 0)))]
2966 "TARGET_V9 && INTVAL (operands[2]) > 51"
2967 "*
2968 {
2969 int len = INTVAL (operands[1]);
2970 int pos = 64 - INTVAL (operands[2]) - len;
2971 unsigned mask = ((1 << len) - 1) << pos;
2972
2973 operands[1] = gen_rtx (CONST_INT, VOIDmode, mask);
2974 return \"andcc %0,%1,%%g0\";
2975 }")
2976 \f
2977 ;; Conversions between float, double and long double.
2978
2979 (define_insn "extendsfdf2"
2980 [(set (match_operand:DF 0 "register_operand" "=e")
2981 (float_extend:DF
2982 (match_operand:SF 1 "register_operand" "f")))]
2983 "TARGET_FPU"
2984 "fstod %1,%0"
2985 [(set_attr "type" "fp")])
2986
2987 (define_insn "extendsftf2"
2988 [(set (match_operand:TF 0 "register_operand" "=e")
2989 (float_extend:TF
2990 (match_operand:SF 1 "register_operand" "f")))]
2991 "TARGET_FPU && TARGET_HARD_QUAD"
2992 "fstoq %1,%0"
2993 [(set_attr "type" "fp")])
2994
2995 (define_insn "extenddftf2"
2996 [(set (match_operand:TF 0 "register_operand" "=e")
2997 (float_extend:TF
2998 (match_operand:DF 1 "register_operand" "e")))]
2999 "TARGET_FPU && TARGET_HARD_QUAD"
3000 "fdtoq %1,%0"
3001 [(set_attr "type" "fp")])
3002
3003 (define_insn "truncdfsf2"
3004 [(set (match_operand:SF 0 "register_operand" "=f")
3005 (float_truncate:SF
3006 (match_operand:DF 1 "register_operand" "e")))]
3007 "TARGET_FPU"
3008 "fdtos %1,%0"
3009 [(set_attr "type" "fp")])
3010
3011 (define_insn "trunctfsf2"
3012 [(set (match_operand:SF 0 "register_operand" "=f")
3013 (float_truncate:SF
3014 (match_operand:TF 1 "register_operand" "e")))]
3015 "TARGET_FPU && TARGET_HARD_QUAD"
3016 "fqtos %1,%0"
3017 [(set_attr "type" "fp")])
3018
3019 (define_insn "trunctfdf2"
3020 [(set (match_operand:DF 0 "register_operand" "=e")
3021 (float_truncate:DF
3022 (match_operand:TF 1 "register_operand" "e")))]
3023 "TARGET_FPU && TARGET_HARD_QUAD"
3024 "fqtod %1,%0"
3025 [(set_attr "type" "fp")])
3026 \f
3027 ;; Conversion between fixed point and floating point.
3028
3029 (define_insn "floatsisf2"
3030 [(set (match_operand:SF 0 "register_operand" "=f")
3031 (float:SF (match_operand:SI 1 "register_operand" "f")))]
3032 "TARGET_FPU"
3033 "fitos %1,%0"
3034 [(set_attr "type" "fp")])
3035
3036 (define_insn "floatsidf2"
3037 [(set (match_operand:DF 0 "register_operand" "=e")
3038 (float:DF (match_operand:SI 1 "register_operand" "f")))]
3039 "TARGET_FPU"
3040 "fitod %1,%0"
3041 [(set_attr "type" "fp")])
3042
3043 (define_insn "floatsitf2"
3044 [(set (match_operand:TF 0 "register_operand" "=e")
3045 (float:TF (match_operand:SI 1 "register_operand" "f")))]
3046 "TARGET_FPU && TARGET_HARD_QUAD"
3047 "fitoq %1,%0"
3048 [(set_attr "type" "fp")])
3049
3050 ;; Now the same for 64 bit sources.
3051 ;; ??? We cannot put DImode values in fp regs (see below near fix_truncdfsi2).
3052
3053 (define_expand "floatdisf2"
3054 [(parallel [(set (match_operand:SF 0 "register_operand" "")
3055 (float:SF (match_operand:DI 1 "general_operand" "")))
3056 (clobber (match_dup 2))
3057 (clobber (match_dup 3))])]
3058 "TARGET_V9 && TARGET_FPU"
3059 "
3060 {
3061 operands[2] = gen_reg_rtx (DFmode);
3062 operands[3] = sparc64_fpconv_stack_temp ();
3063 }")
3064
3065 (define_expand "floatdidf2"
3066 [(parallel [(set (match_operand:DF 0 "register_operand" "")
3067 (float:DF (match_operand:DI 1 "general_operand" "")))
3068 (clobber (match_dup 2))
3069 (clobber (match_dup 3))])]
3070 "TARGET_V9 && TARGET_FPU"
3071 "
3072 {
3073 operands[2] = gen_reg_rtx (DFmode);
3074 operands[3] = sparc64_fpconv_stack_temp ();
3075 }")
3076
3077 (define_expand "floatditf2"
3078 [(parallel [(set (match_operand:TF 0 "register_operand" "")
3079 (float:TF (match_operand:DI 1 "general_operand" "")))
3080 (clobber (match_dup 2))
3081 (clobber (match_dup 3))])]
3082 "TARGET_V9 && TARGET_FPU && TARGET_HARD_QUAD"
3083 "
3084 {
3085 operands[2] = gen_reg_rtx (DFmode);
3086 operands[3] = sparc64_fpconv_stack_temp ();
3087 }")
3088
3089 (define_insn "*floatdisf2_insn"
3090 [(parallel [(set (match_operand:SF 0 "register_operand" "=f")
3091 (float:SF (match_operand:DI 1 "general_operand" "rm")))
3092 (clobber (match_operand:DF 2 "register_operand" "=&e"))
3093 (clobber (match_operand:DI 3 "memory_operand" "m"))])]
3094 "TARGET_V9 && TARGET_FPU"
3095 "*
3096 {
3097 if (GET_CODE (operands[1]) == MEM)
3098 output_asm_insn (\"ldd %1,%2\", operands);
3099 else
3100 output_asm_insn (\"stx %1,%3\;ldd %3,%2\", operands);
3101 return \"fxtos %2,%0\";
3102 }"
3103 [(set_attr "type" "fp")
3104 (set_attr "length" "3")])
3105
3106 (define_insn "*floatdidf2_insn"
3107 [(parallel [(set (match_operand:DF 0 "register_operand" "=e")
3108 (float:DF (match_operand:DI 1 "general_operand" "rm")))
3109 (clobber (match_operand:DF 2 "register_operand" "=&e"))
3110 (clobber (match_operand:DI 3 "memory_operand" "m"))])]
3111 "TARGET_V9 && TARGET_FPU"
3112 "*
3113 {
3114 if (GET_CODE (operands[1]) == MEM)
3115 output_asm_insn (\"ldd %1,%2\", operands);
3116 else
3117 output_asm_insn (\"stx %1,%3\;ldd %3,%2\", operands);
3118 return \"fxtod %2,%0\";
3119 }"
3120 [(set_attr "type" "fp")
3121 (set_attr "length" "3")])
3122
3123 (define_insn "*floatditf2_insn"
3124 [(parallel [(set (match_operand:TF 0 "register_operand" "=e")
3125 (float:TF (match_operand:DI 1 "general_operand" "rm")))
3126 (clobber (match_operand:DF 2 "register_operand" "=&e"))
3127 (clobber (match_operand:DI 3 "memory_operand" "m"))])]
3128 "TARGET_V9 && TARGET_FPU && TARGET_HARD_QUAD"
3129 "*
3130 {
3131 if (GET_CODE (operands[1]) == MEM)
3132 output_asm_insn (\"ldd %1,%2\", operands);
3133 else
3134 output_asm_insn (\"stx %1,%3\;ldd %3,%2\", operands);
3135 return \"fxtoq %2,%0\";
3136 }"
3137 [(set_attr "type" "fp")
3138 (set_attr "length" "3")])
3139
3140 ;; ??? Ideally, these are what we would like to use.
3141
3142 (define_insn "floatdisf2_sp64"
3143 [(set (match_operand:SF 0 "register_operand" "=f")
3144 (float:SF (match_operand:DI 1 "register_operand" "e")))]
3145 "0 && TARGET_V9 && TARGET_FPU"
3146 "fxtos %1,%0"
3147 [(set_attr "type" "fp")])
3148
3149 (define_insn "floatdidf2_sp64"
3150 [(set (match_operand:DF 0 "register_operand" "=e")
3151 (float:DF (match_operand:DI 1 "register_operand" "e")))]
3152 "0 && TARGET_V9 && TARGET_FPU"
3153 "fxtod %1,%0"
3154 [(set_attr "type" "fp")])
3155
3156 (define_insn "floatditf2_sp64"
3157 [(set (match_operand:TF 0 "register_operand" "=e")
3158 (float:TF (match_operand:DI 1 "register_operand" "e")))]
3159 "0 && TARGET_V9 && TARGET_FPU && TARGET_HARD_QUAD"
3160 "fxtoq %1,%0"
3161 [(set_attr "type" "fp")])
3162
3163 ;; Convert a float to an actual integer.
3164 ;; Truncation is performed as part of the conversion.
3165
3166 (define_insn "fix_truncsfsi2"
3167 [(set (match_operand:SI 0 "register_operand" "=f")
3168 (fix:SI (fix:SF (match_operand:SF 1 "register_operand" "f"))))]
3169 "TARGET_FPU"
3170 "fstoi %1,%0"
3171 [(set_attr "type" "fp")])
3172
3173 (define_insn "fix_truncdfsi2"
3174 [(set (match_operand:SI 0 "register_operand" "=f")
3175 (fix:SI (fix:DF (match_operand:DF 1 "register_operand" "e"))))]
3176 "TARGET_FPU"
3177 "fdtoi %1,%0"
3178 [(set_attr "type" "fp")])
3179
3180 (define_insn "fix_trunctfsi2"
3181 [(set (match_operand:SI 0 "register_operand" "=f")
3182 (fix:SI (fix:TF (match_operand:TF 1 "register_operand" "e"))))]
3183 "TARGET_FPU && TARGET_HARD_QUAD"
3184 "fqtoi %1,%0"
3185 [(set_attr "type" "fp")])
3186
3187 ;; Now the same, for 64-bit targets.
3188 ;; ??? We try to work around an interesting problem.
3189 ;; If gcc tries to do a subreg on the result it will get the wrong answer:
3190 ;; "(subreg:SI (reg:DI M int-reg) 0)" is the same as
3191 ;; "(subreg:SI (reg:DI N float-reg) 1)", but gcc does not know how to change
3192 ;; the "0" to a "1". One could enhance alter_subreg but it is not clear how to
3193 ;; do this cleanly.
3194
3195 (define_expand "fix_truncsfdi2"
3196 [(parallel [(set (match_operand:DI 0 "general_operand" "")
3197 (fix:DI (fix:SF (match_operand:SF 1 "register_operand" ""))))
3198 (clobber (match_dup 2))
3199 (clobber (match_dup 3))])]
3200 "TARGET_V9 && TARGET_FPU"
3201 "
3202 {
3203 operands[2] = gen_reg_rtx (DFmode);
3204 operands[3] = sparc64_fpconv_stack_temp ();
3205 }")
3206
3207 (define_expand "fix_truncdfdi2"
3208 [(parallel [(set (match_operand:DI 0 "general_operand" "")
3209 (fix:DI (fix:DF (match_operand:DF 1 "register_operand" ""))))
3210 (clobber (match_dup 2))
3211 (clobber (match_dup 3))])]
3212 "TARGET_V9 && TARGET_FPU"
3213 "
3214 {
3215 operands[2] = gen_reg_rtx (DFmode);
3216 operands[3] = sparc64_fpconv_stack_temp ();
3217 }")
3218
3219 (define_expand "fix_trunctfdi2"
3220 [(parallel [(set (match_operand:DI 0 "general_operand" "")
3221 (fix:DI (fix:TF (match_operand:TF 1 "register_operand" ""))))
3222 (clobber (match_dup 2))
3223 (clobber (match_dup 3))])]
3224 "TARGET_V9 && TARGET_FPU && TARGET_HARD_QUAD"
3225 "
3226 {
3227 operands[2] = gen_reg_rtx (DFmode);
3228 operands[3] = sparc64_fpconv_stack_temp ();
3229 }")
3230
3231 (define_insn "*fix_truncsfdi2_insn"
3232 [(parallel [(set (match_operand:DI 0 "general_operand" "=rm")
3233 (fix:DI (fix:SF (match_operand:SF 1 "register_operand" "f"))))
3234 (clobber (match_operand:DF 2 "register_operand" "=&e"))
3235 (clobber (match_operand:DI 3 "memory_operand" "m"))])]
3236 "TARGET_V9 && TARGET_FPU"
3237 "*
3238 {
3239 output_asm_insn (\"fstox %1,%2\", operands);
3240 if (GET_CODE (operands[0]) == MEM)
3241 return \"std %2,%0\";
3242 else
3243 return \"std %2,%3\;ldx %3,%0\";
3244 }"
3245 [(set_attr "type" "fp")
3246 (set_attr "length" "3")])
3247
3248 (define_insn "*fix_truncdfdi2_insn"
3249 [(parallel [(set (match_operand:DI 0 "general_operand" "=rm")
3250 (fix:DI (fix:DF (match_operand:DF 1 "register_operand" "e"))))
3251 (clobber (match_operand:DF 2 "register_operand" "=&e"))
3252 (clobber (match_operand:DI 3 "memory_operand" "m"))])]
3253 "TARGET_V9 && TARGET_FPU"
3254 "*
3255 {
3256 output_asm_insn (\"fdtox %1,%2\", operands);
3257 if (GET_CODE (operands[0]) == MEM)
3258 return \"std %2,%0\";
3259 else
3260 return \"std %2,%3\;ldx %3,%0\";
3261 }"
3262 [(set_attr "type" "fp")
3263 (set_attr "length" "3")])
3264
3265 (define_insn "*fix_trunctfdi2_insn"
3266 [(parallel [(set (match_operand:DI 0 "general_operand" "=rm")
3267 (fix:DI (fix:TF (match_operand:TF 1 "register_operand" "e"))))
3268 (clobber (match_operand:DF 2 "register_operand" "=&e"))
3269 (clobber (match_operand:DI 3 "memory_operand" "m"))])]
3270 "TARGET_V9 && TARGET_FPU && TARGET_HARD_QUAD"
3271 "*
3272 {
3273 output_asm_insn (\"fqtox %1,%2\", operands);
3274 if (GET_CODE (operands[0]) == MEM)
3275 return \"std %2,%0\";
3276 else
3277 return \"std %2,%3\;ldx %3,%0\";
3278 }"
3279 [(set_attr "type" "fp")
3280 (set_attr "length" "3")])
3281
3282 ;; ??? Ideally, these are what we would like to use.
3283
3284 (define_insn "fix_truncsfdi2_sp64"
3285 [(set (match_operand:DI 0 "register_operand" "=e")
3286 (fix:DI (fix:SF (match_operand:SF 1 "register_operand" "f"))))]
3287 "0 && TARGET_V9 && TARGET_FPU"
3288 "fstox %1,%0"
3289 [(set_attr "type" "fp")])
3290
3291 (define_insn "fix_truncdfdi2_sp64"
3292 [(set (match_operand:DI 0 "register_operand" "=e")
3293 (fix:DI (fix:DF (match_operand:DF 1 "register_operand" "e"))))]
3294 "0 && TARGET_V9 && TARGET_FPU"
3295 "fdtox %1,%0"
3296 [(set_attr "type" "fp")])
3297
3298 (define_insn "fix_trunctfdi2_sp64"
3299 [(set (match_operand:DI 0 "register_operand" "=e")
3300 (fix:DI (fix:TF (match_operand:TF 1 "register_operand" "e"))))]
3301 "0 && TARGET_V9 && TARGET_FPU && TARGET_HARD_QUAD"
3302 "fqtox %1,%0"
3303 [(set_attr "type" "fp")])
3304 \f
3305 ;;- arithmetic instructions
3306
3307 (define_expand "adddi3"
3308 [(set (match_operand:DI 0 "register_operand" "=r")
3309 (plus:DI (match_operand:DI 1 "arith_double_operand" "%r")
3310 (match_operand:DI 2 "arith_double_operand" "rHI")))]
3311 ""
3312 "
3313 {
3314 if (! TARGET_V9)
3315 {
3316 emit_insn (gen_rtx (PARALLEL, VOIDmode, gen_rtvec (2,
3317 gen_rtx (SET, VOIDmode, operands[0],
3318 gen_rtx (PLUS, DImode, operands[1],
3319 operands[2])),
3320 gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, SImode, 0)))));
3321 DONE;
3322 }
3323 }")
3324
3325 (define_insn "*adddi3_sp32"
3326 [(set (match_operand:DI 0 "register_operand" "=r")
3327 (plus:DI (match_operand:DI 1 "arith_double_operand" "%r")
3328 (match_operand:DI 2 "arith_double_operand" "rHI")))
3329 (clobber (reg:SI 0))]
3330 "! TARGET_V9"
3331 "*
3332 {
3333 rtx op2 = operands[2];
3334
3335 /* If constant is positive, upper bits zeroed, otherwise unchanged.
3336 Give the assembler a chance to pick the move instruction. */
3337 if (GET_CODE (op2) == CONST_INT)
3338 {
3339 int sign = INTVAL (op2);
3340 if (sign < 0)
3341 return \"addcc %R1,%2,%R0\;addx %1,-1,%0\";
3342 return \"addcc %R1,%2,%R0\;addx %1,0,%0\";
3343 }
3344 else if (GET_CODE (op2) == CONST_DOUBLE)
3345 {
3346 rtx xoperands[4];
3347 xoperands[0] = operands[0];
3348 xoperands[1] = operands[1];
3349 xoperands[2] = GEN_INT (CONST_DOUBLE_LOW (op2));
3350 xoperands[3] = GEN_INT (CONST_DOUBLE_HIGH (op2));
3351 if (xoperands[2] == const0_rtx && xoperands[0] == xoperands[1])
3352 output_asm_insn (\"add %1,%3,%0\", xoperands);
3353 else
3354 output_asm_insn (\"addcc %R1,%2,%R0\;addx %1,%3,%0\", xoperands);
3355 return \"\";
3356 }
3357 return \"addcc %R1,%R2,%R0\;addx %1,%2,%0\";
3358 }"
3359 [(set_attr "length" "2")])
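;; For example (illustrative): adding the constant -5 to a DImode register
;; pair gives "addcc %R1,-5,%R0 ; addx %1,-1,%0", because the implicit upper
;; word of a negative constant is all ones; for a positive constant the upper
;; word is zero and "addx %1,0,%0" is used instead.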
3360
3361 (define_insn "*adddi3_sp64"
3362 [(set (match_operand:DI 0 "register_operand" "=r")
3363 (plus:DI (match_operand:DI 1 "arith_double_operand" "%r")
3364 (match_operand:DI 2 "arith_double_operand" "rHI")))]
3365 "TARGET_V9"
3366 "add %1,%2,%0")
3367
3368 (define_insn "addsi3"
3369 [(set (match_operand:SI 0 "register_operand" "=r")
3370 (plus:SI (match_operand:SI 1 "arith_operand" "%r")
3371 (match_operand:SI 2 "arith_operand" "rI")))]
3372 ""
3373 "add %1,%2,%0"
3374 [(set_attr "type" "ialu")])
3375
3376 (define_insn "*cmp_cc_plus"
3377 [(set (reg:CC_NOOV 0)
3378 (compare:CC_NOOV (plus:SI (match_operand:SI 0 "arith_operand" "%r")
3379 (match_operand:SI 1 "arith_operand" "rI"))
3380 (const_int 0)))]
3381 ""
3382 "addcc %0,%1,%%g0"
3383 [(set_attr "type" "compare")])
3384
3385 (define_insn "*cmp_ccx_plus"
3386 [(set (reg:CCX_NOOV 0)
3387 (compare:CCX_NOOV (plus:DI (match_operand:DI 0 "arith_double_operand" "%r")
3388 (match_operand:DI 1 "arith_double_operand" "rHI"))
3389 (const_int 0)))]
3390 "TARGET_V9"
3391 "addcc %0,%1,%%g0"
3392 [(set_attr "type" "compare")])
3393
3394 (define_insn "*cmp_cc_plus_set"
3395 [(set (reg:CC_NOOV 0)
3396 (compare:CC_NOOV (plus:SI (match_operand:SI 1 "arith_operand" "%r")
3397 (match_operand:SI 2 "arith_operand" "rI"))
3398 (const_int 0)))
3399 (set (match_operand:SI 0 "register_operand" "=r")
3400 (plus:SI (match_dup 1) (match_dup 2)))]
3401 ""
3402 "addcc %1,%2,%0")
3403
3404 (define_insn "*cmp_ccx_plus_set"
3405 [(set (reg:CCX_NOOV 0)
3406 (compare:CCX_NOOV (plus:DI (match_operand:DI 1 "arith_double_operand" "%r")
3407 (match_operand:DI 2 "arith_double_operand" "rHI"))
3408 (const_int 0)))
3409 (set (match_operand:DI 0 "register_operand" "=r")
3410 (plus:DI (match_dup 1) (match_dup 2)))]
3411 "TARGET_V9"
3412 "addcc %1,%2,%0")
3413
3414 (define_expand "subdi3"
3415 [(set (match_operand:DI 0 "register_operand" "=r")
3416 (minus:DI (match_operand:DI 1 "register_operand" "r")
3417 (match_operand:DI 2 "arith_double_operand" "rHI")))]
3418 ""
3419 "
3420 {
3421 if (! TARGET_V9)
3422 {
3423 emit_insn (gen_rtx (PARALLEL, VOIDmode, gen_rtvec (2,
3424 gen_rtx (SET, VOIDmode, operands[0],
3425 gen_rtx (MINUS, DImode, operands[1],
3426 operands[2])),
3427 gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, SImode, 0)))));
3428 DONE;
3429 }
3430 }")
3431
3432 (define_insn "*subdi3_sp32"
3433 [(set (match_operand:DI 0 "register_operand" "=r")
3434 (minus:DI (match_operand:DI 1 "register_operand" "r")
3435 (match_operand:DI 2 "arith_double_operand" "rHI")))
3436 (clobber (reg:SI 0))]
3437 "! TARGET_V9"
3438 "*
3439 {
3440 rtx op2 = operands[2];
3441
3442 /* If constant is positive, upper bits zeroed, otherwise unchanged.
3443 Give the assembler a chance to pick the move instruction. */
3444 if (GET_CODE (op2) == CONST_INT)
3445 {
3446 int sign = INTVAL (op2);
3447 if (sign < 0)
3448 return \"subcc %R1,%2,%R0\;subx %1,-1,%0\";
3449 return \"subcc %R1,%2,%R0\;subx %1,0,%0\";
3450 }
3451 else if (GET_CODE (op2) == CONST_DOUBLE)
3452 {
3453 rtx xoperands[4];
3454 xoperands[0] = operands[0];
3455 xoperands[1] = operands[1];
3456 xoperands[2] = GEN_INT (CONST_DOUBLE_LOW (op2));
3457 xoperands[3] = GEN_INT (CONST_DOUBLE_HIGH (op2));
3458 if (xoperands[2] == const0_rtx && xoperands[0] == xoperands[1])
3459 output_asm_insn (\"sub %1,%3,%0\", xoperands);
3460 else
3461 output_asm_insn (\"subcc %R1,%2,%R0\;subx %1,%3,%0\", xoperands);
3462 return \"\";
3463 }
3464 return \"subcc %R1,%R2,%R0\;subx %1,%2,%0\";
3465 }"
3466 [(set_attr "length" "2")])
3467
3468 (define_insn "*subdi3_sp64"
3469 [(set (match_operand:DI 0 "register_operand" "=r")
3470 (minus:DI (match_operand:DI 1 "register_operand" "r")
3471 (match_operand:DI 2 "arith_double_operand" "rHI")))]
3472 "TARGET_V9"
3473 "sub %1,%2,%0")
3474
3475 (define_insn "subsi3"
3476 [(set (match_operand:SI 0 "register_operand" "=r")
3477 (minus:SI (match_operand:SI 1 "register_operand" "r")
3478 (match_operand:SI 2 "arith_operand" "rI")))]
3479 ""
3480 "sub %1,%2,%0"
3481 [(set_attr "type" "ialu")])
3482
3483 (define_insn "*cmp_minus_cc"
3484 [(set (reg:CC_NOOV 0)
3485 (compare:CC_NOOV (minus:SI (match_operand:SI 0 "register_operand" "r")
3486 (match_operand:SI 1 "arith_operand" "rI"))
3487 (const_int 0)))]
3488 ""
3489 "subcc %0,%1,%%g0"
3490 [(set_attr "type" "compare")])
3491
3492 (define_insn "*cmp_minus_ccx"
3493 [(set (reg:CCX_NOOV 0)
3494 (compare:CCX_NOOV (minus:DI (match_operand:DI 0 "register_operand" "r")
3495 (match_operand:DI 1 "arith_double_operand" "rHI"))
3496 (const_int 0)))]
3497 "TARGET_V9"
3498 "subcc %0,%1,%%g0"
3499 [(set_attr "type" "compare")])
3500
3501 (define_insn "*cmp_minus_cc_set"
3502 [(set (reg:CC_NOOV 0)
3503 (compare:CC_NOOV (minus:SI (match_operand:SI 1 "register_operand" "r")
3504 (match_operand:SI 2 "arith_operand" "rI"))
3505 (const_int 0)))
3506 (set (match_operand:SI 0 "register_operand" "=r")
3507 (minus:SI (match_dup 1) (match_dup 2)))]
3508 ""
3509 "subcc %1,%2,%0")
3510
3511 (define_insn "*cmp_minus_ccx_set"
3512 [(set (reg:CCX_NOOV 0)
3513 (compare:CCX_NOOV (minus:DI (match_operand:DI 1 "register_operand" "r")
3514 (match_operand:DI 2 "arith_double_operand" "rHI"))
3515 (const_int 0)))
3516 (set (match_operand:DI 0 "register_operand" "=r")
3517 (minus:DI (match_dup 1) (match_dup 2)))]
3518 "TARGET_V9"
3519 "subcc %1,%2,%0")
3520
3521 ;; This is anachronistic, and should not be used in v9 software.
3522 ;; The v9 compiler will widen the args and use muldi3.
3523
3524 (define_insn "mulsi3"
3525 [(set (match_operand:SI 0 "register_operand" "=r")
3526 (mult:SI (match_operand:SI 1 "arith_operand" "%r")
3527 (match_operand:SI 2 "arith_operand" "rI")))]
3528 "TARGET_V8 || TARGET_SPARCLITE"
3529 "smul %1,%2,%0"
3530 [(set_attr "type" "imul")])
3531
3532 (define_insn "muldi3"
3533 [(set (match_operand:DI 0 "register_operand" "=r")
3534 (mult:DI (match_operand:DI 1 "arith_double_operand" "%r")
3535 (match_operand:DI 2 "arith_double_operand" "rHI")))]
3536 "TARGET_V9"
3537 "mulx %1,%2,%0")
3538
3539 ;; It is not known whether this will match.
3540
3541 (define_insn "*cmp_mul_set"
3542 [(set (match_operand:SI 0 "register_operand" "=r")
3543 (mult:SI (match_operand:SI 1 "arith_operand" "%r")
3544 (match_operand:SI 2 "arith_operand" "rI")))
3545 (set (reg:CC_NOOV 0)
3546 (compare:CC_NOOV (mult:SI (match_dup 1) (match_dup 2))
3547 (const_int 0)))]
3548 "TARGET_V8 || TARGET_SPARCLITE"
3549 "smulcc %1,%2,%0"
3550 [(set_attr "type" "imul")])
3551
3552 (define_expand "mulsidi3"
3553 [(set (match_operand:DI 0 "register_operand" "")
3554 (mult:DI (sign_extend:DI (match_operand:SI 1 "register_operand" ""))
3555 (sign_extend:DI (match_operand:SI 2 "arith_operand" ""))))]
3556 "TARGET_V8 || TARGET_SPARCLITE"
3557 "
3558 {
3559 if (CONSTANT_P (operands[2]))
3560 {
3561 emit_insn (gen_const_mulsidi3 (operands[0], operands[1], operands[2]));
3562 DONE;
3563 }
3564 }")
3565
3566 (define_insn "*mulsidi3_sp32"
3567 [(set (match_operand:DI 0 "register_operand" "=r")
3568 (mult:DI (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
3569 (sign_extend:DI (match_operand:SI 2 "register_operand" "r"))))]
3570 "TARGET_V8 || TARGET_SPARCLITE"
3571 "smul %1,%2,%R0\;rd %%y,%0"
3572 [(set_attr "length" "2")])
3573
3574 ;; Extra pattern, because sign_extend of a constant isn't valid.
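;; A hypothetical example: a call such as gen_mulsidi3 (dst, x, GEN_INT (5))
;; cannot be matched by the pattern above, since (sign_extend:DI (const_int 5))
;; is not valid RTL. The mulsidi3 expander therefore routes constant
;; multipliers to const_mulsidi3 below, which takes the bare constant as
;; operand 2.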
3575
3576 (define_insn "const_mulsidi3"
3577 [(set (match_operand:DI 0 "register_operand" "=r")
3578 (mult:DI (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
3579 (match_operand:SI 2 "small_int" "I")))]
3580 "TARGET_V8 || TARGET_SPARCLITE"
3581 "smul %1,%2,%R0\;rd %%y,%0"
3582 [(set_attr "length" "2")])
3583
3584 (define_expand "smulsi3_highpart"
3585 [(set (match_operand:SI 0 "register_operand" "")
3586 (truncate:SI
3587 (lshiftrt:DI (mult:DI (sign_extend:DI (match_operand:SI 1 "register_operand" ""))
3588 (sign_extend:DI (match_operand:SI 2 "arith_operand" "")))
3589 (const_int 32))))]
3590 "TARGET_V8 || TARGET_SPARCLITE"
3591 "
3592 {
3593 if (CONSTANT_P (operands[2]))
3594 {
3595 emit_insn (gen_const_smulsi3_highpart (operands[0], operands[1], operands[2]));
3596 DONE;
3597 }
3598 }")
3599
3600 (define_insn "*smulsidi3_highpart_sp32"
3601 [(set (match_operand:SI 0 "register_operand" "=r")
3602 (truncate:SI
3603 (lshiftrt:DI (mult:DI (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
3604 (sign_extend:DI (match_operand:SI 2 "register_operand" "r")))
3605 (const_int 32))))]
3606 "TARGET_V8 || TARGET_SPARCLITE"
3607 "smul %1,%2,%%g0\;rd %%y,%0"
3608 [(set_attr "length" "2")])
3609
3610 (define_insn "const_smulsi3_highpart"
3611 [(set (match_operand:SI 0 "register_operand" "=r")
3612 (truncate:SI
3613 (lshiftrt:DI (mult:DI (sign_extend:DI (match_operand:SI 1 "register_operand" "r"))
3614 (match_operand:SI 2 "register_operand" "r"))
3615 (const_int 32))))]
3616 "TARGET_V8 || TARGET_SPARCLITE"
3617 "smul %1,%2,%%g0\;rd %%y,%0"
3618 [(set_attr "length" "2")])
3619
3620 (define_expand "umulsidi3"
3621 [(set (match_operand:DI 0 "register_operand" "")
3622 (mult:DI (zero_extend:DI (match_operand:SI 1 "register_operand" ""))
3623 (zero_extend:DI (match_operand:SI 2 "uns_arith_operand" ""))))]
3624 "TARGET_V8 || TARGET_SPARCLITE"
3625 "
3626 {
3627 if (CONSTANT_P (operands[2]))
3628 {
3629 emit_insn (gen_const_umulsidi3 (operands[0], operands[1], operands[2]));
3630 DONE;
3631 }
3632 }")
3633
3634 (define_insn "*umulsidi3_sp32"
3635 [(set (match_operand:DI 0 "register_operand" "=r")
3636 (mult:DI (zero_extend:DI (match_operand:SI 1 "register_operand" "r"))
3637 (zero_extend:DI (match_operand:SI 2 "register_operand" "r"))))]
3638 "TARGET_V8 || TARGET_SPARCLITE"
3639 "umul %1,%2,%R0\;rd %%y,%0"
3640 [(set_attr "length" "2")])
3641
3642 ;; Extra pattern, because zero_extend of a constant isn't valid.
3643
3644 (define_insn "const_umulsidi3"
3645 [(set (match_operand:DI 0 "register_operand" "=r")
3646 (mult:DI (zero_extend:DI (match_operand:SI 1 "register_operand" "r"))
3647 (match_operand:SI 2 "uns_small_int" "")))]
3648 "TARGET_V8 || TARGET_SPARCLITE"
3649 "umul %1,%2,%R0\;rd %%y,%0"
3650 [(set_attr "length" "2")])
3651
3652 (define_expand "umulsi3_highpart"
3653 [(set (match_operand:SI 0 "register_operand" "")
3654 (truncate:SI
3655 (lshiftrt:DI (mult:DI (zero_extend:DI (match_operand:SI 1 "register_operand" ""))
3656 (zero_extend:DI (match_operand:SI 2 "uns_arith_operand" "")))
3657 (const_int 32))))]
3658 "TARGET_V8 || TARGET_SPARCLITE"
3659 "
3660 {
3661 if (CONSTANT_P (operands[2]))
3662 {
3663 emit_insn (gen_const_umulsi3_highpart (operands[0], operands[1], operands[2]));
3664 DONE;
3665 }
3666 }")
3667
3668 (define_insn "*umulsidi3_highpart_sp32"
3669 [(set (match_operand:SI 0 "register_operand" "=r")
3670 (truncate:SI
3671 (lshiftrt:DI (mult:DI (zero_extend:DI (match_operand:SI 1 "register_operand" "r"))
3672 (zero_extend:DI (match_operand:SI 2 "register_operand" "r")))
3673 (const_int 32))))]
3674 "TARGET_V8 || TARGET_SPARCLITE"
3675 "umul %1,%2,%%g0\;rd %%y,%0"
3676 [(set_attr "length" "2")])
3677
3678 (define_insn "const_umulsi3_highpart"
3679 [(set (match_operand:SI 0 "register_operand" "=r")
3680 (truncate:SI
3681 (lshiftrt:DI (mult:DI (zero_extend:DI (match_operand:SI 1 "register_operand" "r"))
3682 (match_operand:SI 2 "uns_small_int" ""))
3683 (const_int 32))))]
3684 "TARGET_V8 || TARGET_SPARCLITE"
3685 "umul %1,%2,%%g0\;rd %%y,%0"
3686 [(set_attr "length" "2")])
3687
3688 ;; The architecture specifies that there must be 3 instructions between
3689 ;; a Y register write and a use of it for correct results.
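;; As a sketch (register names illustrative only), the divsi3 template below
;; therefore emits a sequence like
;;     sra  %o0,31,%g1     ! sign of the dividend into the scratch
;;     wr   %g0,%g1,%y     ! write the Y register
;;     nop
;;     nop
;;     nop                 ! 3 insns before Y may safely be read
;;     sdiv %o0,%o1,%o0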
3690
3691 (define_insn "divsi3"
3692 [(set (match_operand:SI 0 "register_operand" "=r")
3693 (div:SI (match_operand:SI 1 "register_operand" "r")
3694 (match_operand:SI 2 "arith_operand" "rI")))
3695 (clobber (match_scratch:SI 3 "=&r"))]
3696 "TARGET_V8"
3697 "sra %1,31,%3\;wr %%g0,%3,%%y\;nop\;nop\;nop\;sdiv %1,%2,%0"
3698 [(set_attr "length" "6")])
3699
3700 (define_insn "divdi3"
3701 [(set (match_operand:DI 0 "register_operand" "=r")
3702 (div:DI (match_operand:DI 1 "register_operand" "r")
3703 (match_operand:DI 2 "arith_double_operand" "rHI")))]
3704 "TARGET_V9"
3705 "sdivx %1,%2,%0")
3706
3707 ;; It is not known whether this will match.
3708
3709 (define_insn "*cmp_sdiv_cc_set"
3710 [(set (match_operand:SI 0 "register_operand" "=r")
3711 (div:SI (match_operand:SI 1 "register_operand" "r")
3712 (match_operand:SI 2 "arith_operand" "rI")))
3713 (set (reg:CC 0)
3714 (compare:CC (div:SI (match_dup 1) (match_dup 2))
3715 (const_int 0)))
3716 (clobber (match_scratch:SI 3 "=&r"))]
3717 "TARGET_V8"
3718 "sra %1,31,%3\;wr %%g0,%3,%%y\;nop\;nop\;nop\;sdivcc %1,%2,%0"
3719 [(set_attr "length" "6")])
3720
3721 (define_insn "udivsi3"
3722 [(set (match_operand:SI 0 "register_operand" "=r")
3723 (udiv:SI (match_operand:SI 1 "register_operand" "r")
3724 (match_operand:SI 2 "arith_operand" "rI")))]
3725 "TARGET_V8"
3726 "wr %%g0,%%g0,%%y\;nop\;nop\;nop\;udiv %1,%2,%0"
3727 [(set_attr "length" "5")])
3728
3729 (define_insn "udivdi3"
3730 [(set (match_operand:DI 0 "register_operand" "=r")
3731 (udiv:DI (match_operand:DI 1 "register_operand" "r")
3732 (match_operand:DI 2 "arith_double_operand" "rHI")))]
3733 "TARGET_V9"
3734 "udivx %1,%2,%0")
3735
3736 ;; It is not known whether this will match.
3737
3738 (define_insn "*cmp_udiv_cc_set"
3739 [(set (match_operand:SI 0 "register_operand" "=r")
3740 (udiv:SI (match_operand:SI 1 "register_operand" "r")
3741 (match_operand:SI 2 "arith_operand" "rI")))
3742 (set (reg:CC 0)
3743 (compare:CC (udiv:SI (match_dup 1) (match_dup 2))
3744 (const_int 0)))]
3745 "TARGET_V8"
3746 "wr %%g0,%%g0,%%y\;nop\;nop\;nop\;udivcc %1,%2,%0"
3747 [(set_attr "length" "5")])
3748 \f
3749 ;;- Boolean instructions
3750 ;; We define DImode `and` so that with DImode `not` we can get
3751 ;; DImode `andn`. Other combinations are possible.
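;; A sketch with illustrative operand names: a source expression x & ~y
;; reaches combine as (and:DI (not:DI y) x), which the `*and_not_di_sp64`
;; pattern below matches on v9 and emits as the single instruction
;;     andn %x,%y,%dst
;; (on 32-bit targets the corresponding pattern emits two andn insns,
;; one per word).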
3752
3753 (define_expand "anddi3"
3754 [(set (match_operand:DI 0 "register_operand" "")
3755 (and:DI (match_operand:DI 1 "arith_double_operand" "")
3756 (match_operand:DI 2 "arith_double_operand" "")))]
3757 ""
3758 "")
3759
3760 (define_insn "*anddi3_sp32"
3761 [(set (match_operand:DI 0 "register_operand" "=r")
3762 (and:DI (match_operand:DI 1 "arith_double_operand" "%r")
3763 (match_operand:DI 2 "arith_double_operand" "rHI")))]
3764 "! TARGET_V9"
3765 "*
3766 {
3767 rtx op2 = operands[2];
3768
3769 /* If constant is positive, upper bits zeroed, otherwise unchanged.
3770 Give the assembler a chance to pick the move instruction. */
3771 if (GET_CODE (op2) == CONST_INT)
3772 {
3773 int sign = INTVAL (op2);
3774 if (sign < 0)
3775 return \"mov %1,%0\;and %R1,%2,%R0\";
3776 return \"mov 0,%0\;and %R1,%2,%R0\";
3777 }
3778 else if (GET_CODE (op2) == CONST_DOUBLE)
3779 {
3780 rtx xoperands[4];
3781 xoperands[0] = operands[0];
3782 xoperands[1] = operands[1];
3783 xoperands[2] = GEN_INT (CONST_DOUBLE_LOW (op2));
3784 xoperands[3] = GEN_INT (CONST_DOUBLE_HIGH (op2));
3785 /* We could optimize when operands[1] == operands[0]
3786 and either half of the constant is -1. */
3787 output_asm_insn (\"and %R1,%2,%R0\;and %1,%3,%0\", xoperands);
3788 return \"\";
3789 }
3790 return \"and %1,%2,%0\;and %R1,%R2,%R0\";
3791 }"
3792 [(set_attr "length" "2")])
3793
3794 (define_insn "*anddi3_sp64"
3795 [(set (match_operand:DI 0 "register_operand" "=r")
3796 (and:DI (match_operand:DI 1 "arith_double_operand" "%r")
3797 (match_operand:DI 2 "arith_double_operand" "rHI")))]
3798 "TARGET_V9"
3799 "and %1,%2,%0")
3800
3801 (define_insn "andsi3"
3802 [(set (match_operand:SI 0 "register_operand" "=r")
3803 (and:SI (match_operand:SI 1 "arith_operand" "%r")
3804 (match_operand:SI 2 "arith_operand" "rI")))]
3805 ""
3806 "and %1,%2,%0"
3807 [(set_attr "type" "ialu")])
3808
3809 (define_split
3810 [(set (match_operand:SI 0 "register_operand" "")
3811 (and:SI (match_operand:SI 1 "register_operand" "")
3812 (match_operand:SI 2 "" "")))
3813 (clobber (match_operand:SI 3 "register_operand" ""))]
3814 "GET_CODE (operands[2]) == CONST_INT
3815 && !SMALL_INT (operands[2])
3816 && (INTVAL (operands[2]) & 0x3ff) == 0x3ff"
3817 [(set (match_dup 3) (match_dup 4))
3818 (set (match_dup 0) (and:SI (not:SI (match_dup 3)) (match_dup 1)))]
3819 "
3820 {
3821 operands[4] = gen_rtx (CONST_INT, VOIDmode, ~INTVAL (operands[2]));
3822 }")
3823
3824 (define_insn "*and_not_di_sp32"
3825 [(set (match_operand:DI 0 "register_operand" "=r")
3826 (and:DI (not:DI (match_operand:DI 1 "register_operand" "r"))
3827 (match_operand:DI 2 "register_operand" "r")))]
3828 "! TARGET_V9"
3829 "andn %2,%1,%0\;andn %R2,%R1,%R0"
3830 [(set_attr "length" "2")])
3831
3832 (define_insn "*and_not_di_sp64"
3833 [(set (match_operand:DI 0 "register_operand" "=r")
3834 (and:DI (not:DI (match_operand:DI 1 "register_operand" "r"))
3835 (match_operand:DI 2 "register_operand" "r")))]
3836 "TARGET_V9"
3837 "andn %2,%1,%0")
3838
3839 (define_insn "*and_not_si"
3840 [(set (match_operand:SI 0 "register_operand" "=r")
3841 (and:SI (not:SI (match_operand:SI 1 "register_operand" "r"))
3842 (match_operand:SI 2 "register_operand" "r")))]
3843 ""
3844 "andn %2,%1,%0"
3845 [(set_attr "type" "ialu")])
3846
3847 (define_expand "iordi3"
3848 [(set (match_operand:DI 0 "register_operand" "")
3849 (ior:DI (match_operand:DI 1 "arith_double_operand" "")
3850 (match_operand:DI 2 "arith_double_operand" "")))]
3851 ""
3852 "")
3853
3854 (define_insn "*iordi3_sp32"
3855 [(set (match_operand:DI 0 "register_operand" "=r")
3856 (ior:DI (match_operand:DI 1 "arith_double_operand" "%r")
3857 (match_operand:DI 2 "arith_double_operand" "rHI")))]
3858 "! TARGET_V9"
3859 "*
3860 {
3861 rtx op2 = operands[2];
3862
3863 /* If constant is positive, upper bits zeroed, otherwise unchanged.
3864 Give the assembler a chance to pick the move instruction. */
3865 if (GET_CODE (op2) == CONST_INT)
3866 {
3867 int sign = INTVAL (op2);
3868 if (sign < 0)
3869 return \"mov -1,%0\;or %R1,%2,%R0\";
3870 return \"mov %1,%0\;or %R1,%2,%R0\";
3871 }
3872 else if (GET_CODE (op2) == CONST_DOUBLE)
3873 {
3874 rtx xoperands[4];
3875 xoperands[0] = operands[0];
3876 xoperands[1] = operands[1];
3877 xoperands[2] = GEN_INT (CONST_DOUBLE_LOW (op2));
3878 xoperands[3] = GEN_INT (CONST_DOUBLE_HIGH (op2));
3879 /* We could optimize when operands[1] == operands[0]
3880 and either half of the constant is 0. */
3881 output_asm_insn (\"or %R1,%2,%R0\;or %1,%3,%0\", xoperands);
3882 return \"\";
3883 }
3884 return \"or %1,%2,%0\;or %R1,%R2,%R0\";
3885 }"
3886 [(set_attr "length" "2")])
3887
3888 (define_insn "*iordi3_sp64"
3889 [(set (match_operand:DI 0 "register_operand" "=r")
3890 (ior:DI (match_operand:DI 1 "arith_double_operand" "%r")
3891 (match_operand:DI 2 "arith_double_operand" "rHI")))]
3892 "TARGET_V9"
3893 "or %1,%2,%0")
3894
3895 (define_insn "iorsi3"
3896 [(set (match_operand:SI 0 "register_operand" "=r")
3897 (ior:SI (match_operand:SI 1 "arith_operand" "%r")
3898 (match_operand:SI 2 "arith_operand" "rI")))]
3899 ""
3900 "or %1,%2,%0"
3901 [(set_attr "type" "ialu")])
3902
3903 (define_split
3904 [(set (match_operand:SI 0 "register_operand" "")
3905 (ior:SI (match_operand:SI 1 "register_operand" "")
3906 (match_operand:SI 2 "" "")))
3907 (clobber (match_operand:SI 3 "register_operand" ""))]
3908 "GET_CODE (operands[2]) == CONST_INT
3909 && !SMALL_INT (operands[2])
3910 && (INTVAL (operands[2]) & 0x3ff) == 0x3ff"
3911 [(set (match_dup 3) (match_dup 4))
3912 (set (match_dup 0) (ior:SI (not:SI (match_dup 3)) (match_dup 1)))]
3913 "
3914 {
3915 operands[4] = gen_rtx (CONST_INT, VOIDmode, ~INTVAL (operands[2]));
3916 }")
3917
3918 (define_insn "*or_not_di_sp32"
3919 [(set (match_operand:DI 0 "register_operand" "=r")
3920 (ior:DI (not:DI (match_operand:DI 1 "register_operand" "r"))
3921 (match_operand:DI 2 "register_operand" "r")))]
3922 "! TARGET_V9"
3923 "orn %2,%1,%0\;orn %R2,%R1,%R0"
3924 [(set_attr "length" "2")])
3925
3926 (define_insn "*or_not_di_sp64"
3927 [(set (match_operand:DI 0 "register_operand" "=r")
3928 (ior:DI (not:DI (match_operand:DI 1 "register_operand" "r"))
3929 (match_operand:DI 2 "register_operand" "r")))]
3930 "TARGET_V9"
3931 "orn %2,%1,%0")
3932
3933 (define_insn "*or_not_si"
3934 [(set (match_operand:SI 0 "register_operand" "=r")
3935 (ior:SI (not:SI (match_operand:SI 1 "register_operand" "r"))
3936 (match_operand:SI 2 "register_operand" "r")))]
3937 ""
3938 "orn %2,%1,%0"
3939 [(set_attr "type" "ialu")])
3940
3941 (define_expand "xordi3"
3942 [(set (match_operand:DI 0 "register_operand" "")
3943 (xor:DI (match_operand:DI 1 "arith_double_operand" "")
3944 (match_operand:DI 2 "arith_double_operand" "")))]
3945 ""
3946 "")
3947
3948 (define_insn "*xordi3_sp32"
3949 [(set (match_operand:DI 0 "register_operand" "=r")
3950 (xor:DI (match_operand:DI 1 "arith_double_operand" "%r")
3951 (match_operand:DI 2 "arith_double_operand" "rHI")))]
3952 "! TARGET_V9"
3953 "*
3954 {
3955 rtx op2 = operands[2];
3956
3957 /* If constant is positive, upper bits zeroed, otherwise unchanged.
3958 Give the assembler a chance to pick the move instruction. */
3959 if (GET_CODE (op2) == CONST_INT)
3960 {
3961 int sign = INTVAL (op2);
3962 if (sign < 0)
3963 return \"xor %1,-1,%0\;xor %R1,%2,%R0\";
3964 return \"mov %1,%0\;xor %R1,%2,%R0\";
3965 }
3966 else if (GET_CODE (op2) == CONST_DOUBLE)
3967 {
3968 rtx xoperands[4];
3969 xoperands[0] = operands[0];
3970 xoperands[1] = operands[1];
3971 xoperands[2] = GEN_INT (CONST_DOUBLE_LOW (op2));
3972 xoperands[3] = GEN_INT (CONST_DOUBLE_HIGH (op2));
3973 /* We could optimize when operands[1] == operands[0]
3974 and either half of the constant is 0. */
3975 output_asm_insn (\"xor %R1,%2,%R0\;xor %1,%3,%0\", xoperands);
3976 return \"\";
3977 }
3978 return \"xor %1,%2,%0\;xor %R1,%R2,%R0\";
3979 }"
3980 [(set_attr "length" "2")])
3981
3982 (define_insn "*xordi3_sp64"
3983 [(set (match_operand:DI 0 "register_operand" "=r")
3984 (xor:DI (match_operand:DI 1 "arith_double_operand" "%rJ")
3985 (match_operand:DI 2 "arith_double_operand" "rHI")))]
3986 "TARGET_V9"
3987 "xor %r1,%2,%0")
3988
3989 (define_insn "xorsi3"
3990 [(set (match_operand:SI 0 "register_operand" "=r")
3991 (xor:SI (match_operand:SI 1 "arith_operand" "%rJ")
3992 (match_operand:SI 2 "arith_operand" "rI")))]
3993 ""
3994 "xor %r1,%2,%0"
3995 [(set_attr "type" "ialu")])
3996
3997 (define_split
3998 [(set (match_operand:SI 0 "register_operand" "")
3999 (xor:SI (match_operand:SI 1 "register_operand" "")
4000 (match_operand:SI 2 "" "")))
4001 (clobber (match_operand:SI 3 "register_operand" ""))]
4002 "GET_CODE (operands[2]) == CONST_INT
4003 && !SMALL_INT (operands[2])
4004 && (INTVAL (operands[2]) & 0x3ff) == 0x3ff"
4005 [(set (match_dup 3) (match_dup 4))
4006 (set (match_dup 0) (not:SI (xor:SI (match_dup 3) (match_dup 1))))]
4007 "
4008 {
4009 operands[4] = gen_rtx (CONST_INT, VOIDmode, ~INTVAL (operands[2]));
4010 }")
4011
4012 (define_split
4013 [(set (match_operand:SI 0 "register_operand" "")
4014 (not:SI (xor:SI (match_operand:SI 1 "register_operand" "")
4015 (match_operand:SI 2 "" ""))))
4016 (clobber (match_operand:SI 3 "register_operand" ""))]
4017 "GET_CODE (operands[2]) == CONST_INT
4018 && !SMALL_INT (operands[2])
4019 && (INTVAL (operands[2]) & 0x3ff) == 0x3ff"
4020 [(set (match_dup 3) (match_dup 4))
4021 (set (match_dup 0) (xor:SI (match_dup 3) (match_dup 1)))]
4022 "
4023 {
4024 operands[4] = gen_rtx (CONST_INT, VOIDmode, ~INTVAL (operands[2]));
4025 }")
4026
4027 ;; xnor patterns. Note that (a ^ ~b) == (~a ^ b) == ~(a ^ b).
4028 ;; Combine now canonicalizes to the rightmost expression.
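;; A sketch with illustrative operands: x ^ ~y is therefore canonicalized
;; by combine to (not:DI (xor:DI x y)) and matched below, so on v9 it
;; becomes the single instruction
;;     xnor %x,%y,%dst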
4029 (define_insn "*xor_not_di_sp32"
4030 [(set (match_operand:DI 0 "register_operand" "=r")
4031 (not:DI (xor:DI (match_operand:DI 1 "register_operand" "r")
4032 (match_operand:DI 2 "register_operand" "r"))))]
4033 "! TARGET_V9"
4034 "xnor %1,%2,%0\;xnor %R1,%R2,%R0"
4035 [(set_attr "length" "2")])
4036
4037 (define_insn "*xor_not_di_sp64"
4038 [(set (match_operand:DI 0 "register_operand" "=r")
4039 (not:DI (xor:DI (match_operand:DI 1 "reg_or_0_operand" "rJ")
4040 (match_operand:DI 2 "arith_double_operand" "rHI"))))]
4041 "TARGET_V9"
4042 "xnor %r1,%2,%0")
4043
4044 (define_insn "*xor_not_si"
4045 [(set (match_operand:SI 0 "register_operand" "=r")
4046 (not:SI (xor:SI (match_operand:SI 1 "reg_or_0_operand" "rJ")
4047 (match_operand:SI 2 "arith_operand" "rI"))))]
4048 ""
4049 "xnor %r1,%2,%0"
4050 [(set_attr "type" "ialu")])
4051
4052 ;; These correspond to the above in the case where we also (or only)
4053 ;; want to set the condition code.
4054
4055 (define_insn "*cmp_cc_arith_op"
4056 [(set (reg:CC 0)
4057 (compare:CC
4058 (match_operator:SI 2 "cc_arithop"
4059 [(match_operand:SI 0 "arith_operand" "%r")
4060 (match_operand:SI 1 "arith_operand" "rI")])
4061 (const_int 0)))]
4062 ""
4063 "%A2cc %0,%1,%%g0"
4064 [(set_attr "type" "compare")])
4065
4066 (define_insn "*cmp_ccx_arith_op"
4067 [(set (reg:CCX 0)
4068 (compare:CCX
4069 (match_operator:DI 2 "cc_arithop"
4070 [(match_operand:DI 0 "arith_double_operand" "%r")
4071 (match_operand:DI 1 "arith_double_operand" "rHI")])
4072 (const_int 0)))]
4073 "TARGET_V9"
4074 "%A2cc %0,%1,%%g0"
4075 [(set_attr "type" "compare")])
4076
4077 (define_insn "*cmp_cc_arith_op_set"
4078 [(set (reg:CC 0)
4079 (compare:CC
4080 (match_operator:SI 3 "cc_arithop"
4081 [(match_operand:SI 1 "arith_operand" "%r")
4082 (match_operand:SI 2 "arith_operand" "rI")])
4083 (const_int 0)))
4084 (set (match_operand:SI 0 "register_operand" "=r")
4085 (match_dup 3))]
4086 ""
4087 "%A3cc %1,%2,%0")
4088
4089 (define_insn "*cmp_ccx_arith_op_set"
4090 [(set (reg:CCX 0)
4091 (compare:CCX
4092 (match_operator:DI 3 "cc_arithop"
4093 [(match_operand:DI 1 "arith_double_operand" "%r")
4094 (match_operand:DI 2 "arith_double_operand" "rHI")])
4095 (const_int 0)))
4096 (set (match_operand:DI 0 "register_operand" "=r")
4097 (match_dup 3))]
4098 "TARGET_V9"
4099 "%A3cc %1,%2,%0")
4100
4101 (define_insn "*cmp_cc_xor_not"
4102 [(set (reg:CC 0)
4103 (compare:CC
4104 (not:SI (xor:SI (match_operand:SI 0 "reg_or_0_operand" "%rJ")
4105 (match_operand:SI 1 "arith_operand" "rI")))
4106 (const_int 0)))]
4107 ""
4108 "xnorcc %r0,%1,%%g0"
4109 [(set_attr "type" "compare")])
4110
4111 (define_insn "*cmp_ccx_xor_not"
4112 [(set (reg:CCX 0)
4113 (compare:CCX
4114 (not:DI (xor:DI (match_operand:DI 0 "reg_or_0_operand" "%rJ")
4115 (match_operand:DI 1 "arith_double_operand" "rHI")))
4116 (const_int 0)))]
4117 "TARGET_V9"
4118 "xnorcc %r0,%1,%%g0"
4119 [(set_attr "type" "compare")])
4120
4121 (define_insn "*cmp_cc_xor_not_set"
4122 [(set (reg:CC 0)
4123 (compare:CC
4124 (not:SI (xor:SI (match_operand:SI 1 "reg_or_0_operand" "%rJ")
4125 (match_operand:SI 2 "arith_operand" "rI")))
4126 (const_int 0)))
4127 (set (match_operand:SI 0 "register_operand" "=r")
4128 (not:SI (xor:SI (match_dup 1) (match_dup 2))))]
4129 ""
4130 "xnorcc %r1,%2,%0")
4131
4132 (define_insn "*cmp_ccx_xor_not_set"
4133 [(set (reg:CCX 0)
4134 (compare:CCX
4135 (not:DI (xor:DI (match_operand:DI 1 "reg_or_0_operand" "%rJ")
4136 (match_operand:DI 2 "arith_double_operand" "rHI")))
4137 (const_int 0)))
4138 (set (match_operand:DI 0 "register_operand" "=r")
4139 (not:DI (xor:DI (match_dup 1) (match_dup 2))))]
4140 "TARGET_V9"
4141 "xnorcc %r1,%2,%0")
4142
4143 (define_insn "*cmp_cc_arith_op_not"
4144 [(set (reg:CC 0)
4145 (compare:CC
4146 (match_operator:SI 2 "cc_arithopn"
4147 [(not:SI (match_operand:SI 0 "arith_operand" "rI"))
4148 (match_operand:SI 1 "reg_or_0_operand" "rJ")])
4149 (const_int 0)))]
4150 ""
4151 "%B2cc %r1,%0,%%g0"
4152 [(set_attr "type" "compare")])
4153
4154 (define_insn "*cmp_ccx_arith_op_not"
4155 [(set (reg:CCX 0)
4156 (compare:CCX
4157 (match_operator:DI 2 "cc_arithopn"
4158 [(not:DI (match_operand:DI 0 "arith_double_operand" "rHI"))
4159 (match_operand:DI 1 "reg_or_0_operand" "rJ")])
4160 (const_int 0)))]
4161 "TARGET_V9"
4162 "%B2cc %r1,%0,%%g0"
4163 [(set_attr "type" "compare")])
4164
4165 (define_insn "*cmp_cc_arith_op_not_set"
4166 [(set (reg:CC 0)
4167 (compare:CC
4168 (match_operator:SI 3 "cc_arithopn"
4169 [(not:SI (match_operand:SI 1 "arith_operand" "rI"))
4170 (match_operand:SI 2 "reg_or_0_operand" "rJ")])
4171 (const_int 0)))
4172 (set (match_operand:SI 0 "register_operand" "=r")
4173 (match_dup 3))]
4174 ""
4175 "%B3cc %r2,%1,%0")
4176
4177 (define_insn "*cmp_ccx_arith_op_not_set"
4178 [(set (reg:CCX 0)
4179 (compare:CCX
4180 (match_operator:DI 3 "cc_arithopn"
4181 [(not:DI (match_operand:DI 1 "arith_double_operand" "rHI"))
4182 (match_operand:DI 2 "reg_or_0_operand" "rJ")])
4183 (const_int 0)))
4184 (set (match_operand:DI 0 "register_operand" "=r")
4185 (match_dup 3))]
4186 "TARGET_V9"
4187 "%B3cc %r2,%1,%0")
4188
4189 ;; We cannot use the "neg" pseudo insn because the Sun assembler
4190 ;; does not know how to make it work for constants.
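;; (As we understand it, the assembler's synthetic `neg` accepts only a
;; register source, so the patterns below spell out the underlying
;;     sub %g0,<src>,<dst>
;; form, which also assembles when <src> is an immediate.)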
4191
4192 (define_expand "negdi2"
4193 [(set (match_operand:DI 0 "register_operand" "=r")
4194 (neg:DI (match_operand:DI 1 "register_operand" "r")))]
4195 ""
4196 "
4197 {
4198 if (! TARGET_V9)
4199 {
4200 emit_insn (gen_rtx (PARALLEL, VOIDmode, gen_rtvec (2,
4201 gen_rtx (SET, VOIDmode, operand0,
4202 gen_rtx (NEG, DImode, operand1)),
4203 gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, SImode, 0)))));
4204 DONE;
4205 }
4206 }")
4207
4208 (define_insn "*negdi2_sp32"
4209 [(set (match_operand:DI 0 "register_operand" "=r")
4210 (neg:DI (match_operand:DI 1 "register_operand" "r")))
4211 (clobber (reg:SI 0))]
4212 "! TARGET_V9"
4213 "subcc %%g0,%R1,%R0\;subx %%g0,%1,%0"
4214 [(set_attr "type" "unary")
4215 (set_attr "length" "2")])
4216
4217 (define_insn "*negdi2_sp64"
4218 [(set (match_operand:DI 0 "register_operand" "=r")
4219 (neg:DI (match_operand:DI 1 "register_operand" "r")))]
4220 "TARGET_V9"
4221 "sub %%g0,%1,%0"
4222 [(set_attr "type" "unary")
4223 (set_attr "length" "1")])
4224
4225 (define_insn "negsi2"
4226 [(set (match_operand:SI 0 "register_operand" "=r")
4227 (neg:SI (match_operand:SI 1 "arith_operand" "rI")))]
4228 ""
4229 "sub %%g0,%1,%0"
4230 [(set_attr "type" "unary")])
4231
4232 (define_insn "*cmp_cc_neg"
4233 [(set (reg:CC_NOOV 0)
4234 (compare:CC_NOOV (neg:SI (match_operand:SI 0 "arith_operand" "rI"))
4235 (const_int 0)))]
4236 ""
4237 "subcc %%g0,%0,%%g0"
4238 [(set_attr "type" "compare")])
4239
4240 (define_insn "*cmp_ccx_neg"
4241 [(set (reg:CCX_NOOV 0)
4242 (compare:CCX_NOOV (neg:DI (match_operand:DI 0 "arith_double_operand" "rHI"))
4243 (const_int 0)))]
4244 "TARGET_V9"
4245 "subcc %%g0,%0,%%g0"
4246 [(set_attr "type" "compare")])
4247
4248 (define_insn "*cmp_cc_set_neg"
4249 [(set (reg:CC_NOOV 0)
4250 (compare:CC_NOOV (neg:SI (match_operand:SI 1 "arith_operand" "rI"))
4251 (const_int 0)))
4252 (set (match_operand:SI 0 "register_operand" "=r")
4253 (neg:SI (match_dup 1)))]
4254 ""
4255 "subcc %%g0,%1,%0"
4256 [(set_attr "type" "unary")])
4257
4258 (define_insn "*cmp_ccx_set_neg"
4259 [(set (reg:CCX_NOOV 0)
4260 (compare:CCX_NOOV (neg:DI (match_operand:DI 1 "arith_double_operand" "rHI"))
4261 (const_int 0)))
4262 (set (match_operand:DI 0 "register_operand" "=r")
4263 (neg:DI (match_dup 1)))]
4264 "TARGET_V9"
4265 "subcc %%g0,%1,%0"
4266 [(set_attr "type" "unary")])
4267
4268 ;; We cannot use the "not" pseudo insn because the Sun assembler
4269 ;; does not know how to make it work for constants.
4270 (define_expand "one_cmpldi2"
4271 [(set (match_operand:DI 0 "register_operand" "")
4272 (not:DI (match_operand:DI 1 "register_operand" "")))]
4273 ""
4274 "")
4275
4276 (define_insn "*one_cmpldi2_sp32"
4277 [(set (match_operand:DI 0 "register_operand" "=r")
4278 (not:DI (match_operand:DI 1 "register_operand" "r")))]
4279 "! TARGET_V9"
4280 "xnor %%g0,%1,%0\;xnor %%g0,%R1,%R0"
4281 [(set_attr "type" "unary")
4282 (set_attr "length" "2")])
4283
4284 (define_insn "*one_cmpldi2_sp64"
4285 [(set (match_operand:DI 0 "register_operand" "=r")
4286 (not:DI (match_operand:DI 1 "arith_double_operand" "rHI")))]
4287 "TARGET_V9"
4288 "xnor %%g0,%1,%0"
4289 [(set_attr "type" "unary")])
4290
4291 (define_insn "one_cmplsi2"
4292 [(set (match_operand:SI 0 "register_operand" "=r")
4293 (not:SI (match_operand:SI 1 "arith_operand" "rI")))]
4294 ""
4295 "xnor %%g0,%1,%0"
4296 [(set_attr "type" "unary")])
4297
4298 (define_insn "*cmp_cc_not"
4299 [(set (reg:CC 0)
4300 (compare:CC (not:SI (match_operand:SI 0 "arith_operand" "rI"))
4301 (const_int 0)))]
4302 ""
4303 "xnorcc %%g0,%0,%%g0"
4304 [(set_attr "type" "compare")])
4305
4306 (define_insn "*cmp_ccx_not"
4307 [(set (reg:CCX 0)
4308 (compare:CCX (not:DI (match_operand:DI 0 "arith_double_operand" "rHI"))
4309 (const_int 0)))]
4310 "TARGET_V9"
4311 "xnorcc %%g0,%0,%%g0"
4312 [(set_attr "type" "compare")])
4313
4314 (define_insn "*cmp_cc_set_not"
4315 [(set (reg:CC 0)
4316 (compare:CC (not:SI (match_operand:SI 1 "arith_operand" "rI"))
4317 (const_int 0)))
4318 (set (match_operand:SI 0 "register_operand" "=r")
4319 (not:SI (match_dup 1)))]
4320 ""
4321 "xnorcc %%g0,%1,%0"
4322 [(set_attr "type" "unary")])
4323
4324 (define_insn "*cmp_ccx_set_not"
4325 [(set (reg:CCX 0)
4326 (compare:CCX (not:DI (match_operand:DI 1 "arith_double_operand" "rHI"))
4327 (const_int 0)))
4328 (set (match_operand:DI 0 "register_operand" "=r")
4329 (not:DI (match_dup 1)))]
4330 "TARGET_V9"
4331 "xnorcc %%g0,%1,%0"
4332 [(set_attr "type" "unary")])
4333 \f
4334 ;; Floating point arithmetic instructions.
4335
4336 (define_insn "addtf3"
4337 [(set (match_operand:TF 0 "register_operand" "=e")
4338 (plus:TF (match_operand:TF 1 "register_operand" "e")
4339 (match_operand:TF 2 "register_operand" "e")))]
4340 "TARGET_FPU && TARGET_HARD_QUAD"
4341 "faddq %1,%2,%0"
4342 [(set_attr "type" "fp")])
4343
4344 (define_insn "adddf3"
4345 [(set (match_operand:DF 0 "register_operand" "=e")
4346 (plus:DF (match_operand:DF 1 "register_operand" "e")
4347 (match_operand:DF 2 "register_operand" "e")))]
4348 "TARGET_FPU"
4349 "faddd %1,%2,%0"
4350 [(set_attr "type" "fp")])
4351
4352 (define_insn "addsf3"
4353 [(set (match_operand:SF 0 "register_operand" "=f")
4354 (plus:SF (match_operand:SF 1 "register_operand" "f")
4355 (match_operand:SF 2 "register_operand" "f")))]
4356 "TARGET_FPU"
4357 "fadds %1,%2,%0"
4358 [(set_attr "type" "fp")])
4359
4360 (define_insn "subtf3"
4361 [(set (match_operand:TF 0 "register_operand" "=e")
4362 (minus:TF (match_operand:TF 1 "register_operand" "e")
4363 (match_operand:TF 2 "register_operand" "e")))]
4364 "TARGET_FPU && TARGET_HARD_QUAD"
4365 "fsubq %1,%2,%0"
4366 [(set_attr "type" "fp")])
4367
4368 (define_insn "subdf3"
4369 [(set (match_operand:DF 0 "register_operand" "=e")
4370 (minus:DF (match_operand:DF 1 "register_operand" "e")
4371 (match_operand:DF 2 "register_operand" "e")))]
4372 "TARGET_FPU"
4373 "fsubd %1,%2,%0"
4374 [(set_attr "type" "fp")])
4375
4376 (define_insn "subsf3"
4377 [(set (match_operand:SF 0 "register_operand" "=f")
4378 (minus:SF (match_operand:SF 1 "register_operand" "f")
4379 (match_operand:SF 2 "register_operand" "f")))]
4380 "TARGET_FPU"
4381 "fsubs %1,%2,%0"
4382 [(set_attr "type" "fp")])
4383
4384 (define_insn "multf3"
4385 [(set (match_operand:TF 0 "register_operand" "=e")
4386 (mult:TF (match_operand:TF 1 "register_operand" "e")
4387 (match_operand:TF 2 "register_operand" "e")))]
4388 "TARGET_FPU && TARGET_HARD_QUAD"
4389 "fmulq %1,%2,%0"
4390 [(set_attr "type" "fpmul")])
4391
4392 (define_insn "muldf3"
4393 [(set (match_operand:DF 0 "register_operand" "=e")
4394 (mult:DF (match_operand:DF 1 "register_operand" "e")
4395 (match_operand:DF 2 "register_operand" "e")))]
4396 "TARGET_FPU"
4397 "fmuld %1,%2,%0"
4398 [(set_attr "type" "fpmul")])
4399
4400 (define_insn "mulsf3"
4401 [(set (match_operand:SF 0 "register_operand" "=f")
4402 (mult:SF (match_operand:SF 1 "register_operand" "f")
4403 (match_operand:SF 2 "register_operand" "f")))]
4404 "TARGET_FPU"
4405 "fmuls %1,%2,%0"
4406 [(set_attr "type" "fpmul")])
4407
4408 (define_insn "*muldf3_extend"
4409 [(set (match_operand:DF 0 "register_operand" "=e")
4410 (mult:DF (float_extend:DF (match_operand:SF 1 "register_operand" "f"))
4411 (float_extend:DF (match_operand:SF 2 "register_operand" "f"))))]
4412 "(TARGET_V8 || TARGET_V9) && TARGET_FPU"
4413 "fsmuld %1,%2,%0"
4414 [(set_attr "type" "fpmul")])
4415
4416 (define_insn "*multf3_extend"
4417 [(set (match_operand:TF 0 "register_operand" "=e")
4418 (mult:TF (float_extend:TF (match_operand:DF 1 "register_operand" "e"))
4419 (float_extend:TF (match_operand:DF 2 "register_operand" "e"))))]
4420 "(TARGET_V8 || TARGET_V9) && TARGET_FPU"
4421 "fdmulq %1,%2,%0"
4422 [(set_attr "type" "fpmul")])
4423
4424 ;; We don't have timing for the quad-precision divide.
4425 (define_insn "divtf3"
4426 [(set (match_operand:TF 0 "register_operand" "=e")
4427 (div:TF (match_operand:TF 1 "register_operand" "e")
4428 (match_operand:TF 2 "register_operand" "e")))]
4429 "TARGET_FPU && TARGET_HARD_QUAD"
4430 "fdivq %1,%2,%0"
4431 [(set_attr "type" "fpdivd")])
4432
4433 (define_insn "divdf3"
4434 [(set (match_operand:DF 0 "register_operand" "=e")
4435 (div:DF (match_operand:DF 1 "register_operand" "e")
4436 (match_operand:DF 2 "register_operand" "e")))]
4437 "TARGET_FPU"
4438 "fdivd %1,%2,%0"
4439 [(set_attr "type" "fpdivd")])
4440
4441 (define_insn "divsf3"
4442 [(set (match_operand:SF 0 "register_operand" "=f")
4443 (div:SF (match_operand:SF 1 "register_operand" "f")
4444 (match_operand:SF 2 "register_operand" "f")))]
4445 "TARGET_FPU"
4446 "fdivs %1,%2,%0"
4447 [(set_attr "type" "fpdivs")])
4448
4449 (define_insn "negtf2"
4450 [(set (match_operand:TF 0 "register_operand" "=e,e")
4451 (neg:TF (match_operand:TF 1 "register_operand" "0,e")))]
4452 "TARGET_FPU"
4453 "*
4454 {
4455 if (TARGET_V9)
4456 return \"fnegd %1,%0\"; /* Can't use fnegs, won't work with upper regs. */
4457 else if (which_alternative == 0)
4458 return \"fnegs %0,%0\";
4459 else
4460 return \"fnegs %1,%0\;fmovs %R1,%R0\;fmovs %S1,%S0\;fmovs %T1,%T0\";
4461 }"
4462 [(set_attr "type" "fp")
4463 (set_attr_alternative "length"
4464 [(const_int 1)
4465 (if_then_else (eq_attr "arch" "arch32bit") (const_int 4) (const_int 1))])])
4466
4467 (define_insn "negdf2"
4468 [(set (match_operand:DF 0 "register_operand" "=e,e")
4469 (neg:DF (match_operand:DF 1 "register_operand" "0,e")))]
4470 "TARGET_FPU"
4471 "*
4472 {
4473 if (TARGET_V9)
4474 return \"fnegd %1,%0\";
4475 else if (which_alternative == 0)
4476 return \"fnegs %0,%0\";
4477 else
4478 return \"fnegs %1,%0\;fmovs %R1,%R0\";
4479 }"
4480 [(set_attr "type" "fp")
4481 (set_attr_alternative "length"
4482 [(const_int 1)
4483 (if_then_else (eq_attr "arch" "arch32bit") (const_int 2) (const_int 1))])])
4484
4485 (define_insn "negsf2"
4486 [(set (match_operand:SF 0 "register_operand" "=f")
4487 (neg:SF (match_operand:SF 1 "register_operand" "f")))]
4488 "TARGET_FPU"
4489 "fnegs %1,%0"
4490 [(set_attr "type" "fp")])
4491
4492 (define_insn "abstf2"
4493 [(set (match_operand:TF 0 "register_operand" "=e,e")
4494 (abs:TF (match_operand:TF 1 "register_operand" "0,e")))]
4495 "TARGET_FPU"
4496 "*
4497 {
4498 if (TARGET_V9)
4499 return \"fabsd %1,%0\"; /* Can't use fabss, won't work with upper regs. */
4500 else if (which_alternative == 0)
4501 return \"fabss %0,%0\";
4502 else
4503 return \"fabss %1,%0\;fmovs %R1,%R0\;fmovs %S1,%S0\;fmovs %T1,%T0\";
4504 }"
4505 [(set_attr "type" "fp")
4506 (set_attr_alternative "length"
4507 [(const_int 1)
4508 (if_then_else (eq_attr "arch" "arch32bit") (const_int 4) (const_int 1))])])
4509
4510 (define_insn "absdf2"
4511 [(set (match_operand:DF 0 "register_operand" "=e,e")
4512 (abs:DF (match_operand:DF 1 "register_operand" "0,e")))]
4513 "TARGET_FPU"
4514 "*
4515 {
4516 if (TARGET_V9)
4517 return \"fabsd %1,%0\";
4518 else if (which_alternative == 0)
4519 return \"fabss %0,%0\";
4520 else
4521 return \"fabss %1,%0\;fmovs %R1,%R0\";
4522 }"
4523 [(set_attr "type" "fp")
4524 (set_attr_alternative "length"
4525 [(const_int 1)
4526 (if_then_else (eq_attr "arch" "arch32bit") (const_int 2) (const_int 1))])])
4527
4528 (define_insn "abssf2"
4529 [(set (match_operand:SF 0 "register_operand" "=f")
4530 (abs:SF (match_operand:SF 1 "register_operand" "f")))]
4531 "TARGET_FPU"
4532 "fabss %1,%0"
4533 [(set_attr "type" "fp")])
4534
4535 (define_insn "sqrttf2"
4536 [(set (match_operand:TF 0 "register_operand" "=e")
4537 (sqrt:TF (match_operand:TF 1 "register_operand" "e")))]
4538 "TARGET_FPU && TARGET_HARD_QUAD"
4539 "fsqrtq %1,%0"
4540 [(set_attr "type" "fpsqrt")])
4541
4542 (define_insn "sqrtdf2"
4543 [(set (match_operand:DF 0 "register_operand" "=e")
4544 (sqrt:DF (match_operand:DF 1 "register_operand" "e")))]
4545 "TARGET_FPU"
4546 "fsqrtd %1,%0"
4547 [(set_attr "type" "fpsqrt")])
4548
4549 (define_insn "sqrtsf2"
4550 [(set (match_operand:SF 0 "register_operand" "=f")
4551 (sqrt:SF (match_operand:SF 1 "register_operand" "f")))]
4552 "TARGET_FPU"
4553 "fsqrts %1,%0"
4554 [(set_attr "type" "fpsqrt")])
4555 \f
4556 ;;- arithmetic shift instructions
4557
4558 (define_insn "ashlsi3"
4559 [(set (match_operand:SI 0 "register_operand" "=r")
4560 (ashift:SI (match_operand:SI 1 "register_operand" "r")
4561 (match_operand:SI 2 "arith_operand" "rI")))]
4562 ""
4563 "*
4564 {
4565 if (GET_CODE (operands[2]) == CONST_INT
4566 && (unsigned) INTVAL (operands[2]) > 31)
4567 operands[2] = GEN_INT (INTVAL (operands[2]) & 0x1f);
4568
4569 return \"sll %1,%2,%0\";
4570 }"
4571 [(set_attr "type" "shift")])
4572
4573 (define_insn "ashldi3"
4574 [(set (match_operand:DI 0 "register_operand" "=r")
4575 (ashift:DI (match_operand:DI 1 "register_operand" "r")
4576 (match_operand:SI 2 "arith_operand" "rI")))]
4577 "TARGET_V9"
4578 "*
4579 {
4580 if (GET_CODE (operands[2]) == CONST_INT
4581 && (unsigned) INTVAL (operands[2]) > 63)
4582 operands[2] = GEN_INT (INTVAL (operands[2]) & 0x3f);
4583
4584 return \"sllx %1,%2,%0\";
4585 }")
4586
4587 (define_insn "*cmp_cc_ashift_1"
4588 [(set (reg:CC_NOOV 0)
4589 (compare:CC_NOOV (ashift:SI (match_operand:SI 0 "register_operand" "r")
4590 (const_int 1))
4591 (const_int 0)))]
4592 ""
4593 "addcc %0,%0,%%g0"
4594 [(set_attr "type" "compare")])
4595
4596 (define_insn "*cmp_cc_set_ashift_1"
4597 [(set (reg:CC_NOOV 0)
4598 (compare:CC_NOOV (ashift:SI (match_operand:SI 1 "register_operand" "r")
4599 (const_int 1))
4600 (const_int 0)))
4601 (set (match_operand:SI 0 "register_operand" "=r")
4602 (ashift:SI (match_dup 1) (const_int 1)))]
4603 ""
4604 "addcc %1,%1,%0")
4605
4606 (define_insn "ashrsi3"
4607 [(set (match_operand:SI 0 "register_operand" "=r")
4608 (ashiftrt:SI (match_operand:SI 1 "register_operand" "r")
4609 (match_operand:SI 2 "arith_operand" "rI")))]
4610 ""
4611 "*
4612 {
4613 if (GET_CODE (operands[2]) == CONST_INT
4614 && (unsigned) INTVAL (operands[2]) > 31)
4615 operands[2] = GEN_INT (INTVAL (operands[2]) & 0x1f);
4616
4617 return \"sra %1,%2,%0\";
4618 }"
4619 [(set_attr "type" "shift")])
4620
4621 (define_insn "ashrdi3"
4622 [(set (match_operand:DI 0 "register_operand" "=r")
4623 (ashiftrt:DI (match_operand:DI 1 "register_operand" "r")
4624 (match_operand:SI 2 "arith_operand" "rI")))]
4625 "TARGET_V9"
4626 "*
4627 {
4628 if (GET_CODE (operands[2]) == CONST_INT
4629 && (unsigned) INTVAL (operands[2]) > 63)
4630 operands[2] = GEN_INT (INTVAL (operands[2]) & 0x3f);
4631
4632 return \"srax %1,%2,%0\";
4633 }")
4634
4635 (define_insn "lshrsi3"
4636 [(set (match_operand:SI 0 "register_operand" "=r")
4637 (lshiftrt:SI (match_operand:SI 1 "register_operand" "r")
4638 (match_operand:SI 2 "arith_operand" "rI")))]
4639 ""
4640 "*
4641 {
4642 if (GET_CODE (operands[2]) == CONST_INT
4643 && (unsigned) INTVAL (operands[2]) > 31)
4644 operands[2] = GEN_INT (INTVAL (operands[2]) & 0x1f);
4645
4646 return \"srl %1,%2,%0\";
4647 }"
4648 [(set_attr "type" "shift")])
4649
4650 (define_insn "lshrdi3"
4651 [(set (match_operand:DI 0 "register_operand" "=r")
4652 (lshiftrt:DI (match_operand:DI 1 "register_operand" "r")
4653 (match_operand:SI 2 "arith_operand" "rI")))]
4654 "TARGET_V9"
4655 "*
4656 {
4657 if (GET_CODE (operands[2]) == CONST_INT
4658 && (unsigned) INTVAL (operands[2]) > 63)
4659 operands[2] = GEN_INT (INTVAL (operands[2]) & 0x3f);
4660
4661 return \"srlx %1,%2,%0\";
4662 }")
4663 \f
4664 ;; Unconditional and other jump instructions
4665 ;; On the Sparc, by setting the annul bit on an unconditional branch, the
4666 ;; following insn is never executed. This saves us a nop. Dbx does not
4667 ;; handle such branches though, so we only use them when optimizing.
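;; With optimization the jump below can thus come out as, illustratively,
;;     b,a  .LL1           ! annul bit set: delay-slot insn never executes
;; rather than
;;     b    .LL1
;;      nop
;; As we understand them, the `%*` and `%(` codes in the template emit the
;; annul flag and the fill-in nop respectively, so the output adapts to
;; whether the delay slot was filled.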
4668 (define_insn "jump"
4669 [(set (pc) (label_ref (match_operand 0 "" "")))]
4670 ""
4671 "b%* %l0%("
4672 [(set_attr "type" "uncond_branch")])
4673
4674 (define_expand "tablejump"
4675 [(parallel [(set (pc) (match_operand 0 "register_operand" "r"))
4676 (use (label_ref (match_operand 1 "" "")))])]
4677 ""
4678 "
4679 {
4680 if (GET_MODE (operands[0]) != Pmode)
4681 abort ();
4682
4683 /* We need to use the PC value in %o7 that was set up when the address
4684 of the label was loaded into a register, so we need different RTL. */
4685 if (flag_pic)
4686 {
4687 if (!TARGET_PTR64)
4688 emit_jump_insn (gen_pic_tablejump_32 (operands[0], operands[1]));
4689 else
4690 emit_jump_insn (gen_pic_tablejump_64 (operands[0], operands[1]));
4691 DONE;
4692 }
4693 }")
4694
4695 (define_insn "pic_tablejump_32"
4696 [(set (pc) (match_operand:SI 0 "register_operand" "r"))
4697 (use (label_ref (match_operand 1 "" "")))
4698 (use (reg:SI 15))]
4699 "! TARGET_PTR64"
4700 "jmp %%o7+%0%#"
4701 [(set_attr "type" "uncond_branch")])
4702
4703 (define_insn "pic_tablejump_64"
4704 [(set (pc) (match_operand:DI 0 "register_operand" "r"))
4705 (use (label_ref (match_operand 1 "" "")))
4706 (use (reg:DI 15))]
4707 "TARGET_PTR64"
4708 "jmp %%o7+%0%#"
4709 [(set_attr "type" "uncond_branch")])
4710
4711 (define_insn "*tablejump_sp32"
4712 [(set (pc) (match_operand:SI 0 "address_operand" "p"))
4713 (use (label_ref (match_operand 1 "" "")))]
4714 "! TARGET_PTR64"
4715 "jmp %a0%#"
4716 [(set_attr "type" "uncond_branch")])
4717
4718 (define_insn "*tablejump_sp64"
4719 [(set (pc) (match_operand:DI 0 "address_operand" "p"))
4720 (use (label_ref (match_operand 1 "" "")))]
4721 "TARGET_PTR64"
4722 "jmp %a0%#"
4723 [(set_attr "type" "uncond_branch")])
4724
4725 (define_insn "*get_pc_sp32"
4726 [(set (pc) (label_ref (match_operand 0 "" "")))
4727 (set (reg:SI 15) (label_ref (match_dup 0)))]
4728 "! TARGET_PTR64"
4729 "call %l0%#"
4730 [(set_attr "type" "uncond_branch")])
4731
4732 (define_insn "*get_pc_sp64"
4733 [(set (pc) (label_ref (match_operand 0 "" "")))
4734 (set (reg:DI 15) (label_ref (match_dup 0)))]
4735 "TARGET_PTR64"
4736 "call %l0%#"
4737 [(set_attr "type" "uncond_branch")])
4738
4739 ;; This pattern recognizes the "instruction" that appears in
4740 ;; a function call that wants a structure value,
4741 ;; to inform the called function, if it was compiled with Sun CC.
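;; Illustratively, a call returning (say) a 12-byte structure is emitted
;; by the struct-value call patterns further below roughly as
;;     call foo
;;      nop
;;     unimp 12            ! structure size; the callee returns past it
;; (function name and size are examples only).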
4742 ;(define_insn "*unimp_insn"
4743 ; [(match_operand:SI 0 "immediate_operand" "")]
4744 ; "GET_CODE (operands[0]) == CONST_INT && INTVAL (operands[0]) > 0"
4745 ; "unimp %0"
4746 ; [(set_attr "type" "marker")])
4747
4748 ;;- jump to subroutine
4749 (define_expand "call"
4750 ;; Note that this expression is not used for generating RTL.
4751 ;; All the RTL is generated explicitly below.
4752 [(call (match_operand 0 "call_operand" "")
4753 (match_operand 3 "" "i"))]
4754 ;; operands[2] is next_arg_register
4755 ;; operands[3] is struct_value_size_rtx.
4756 ""
4757 "
4758 {
4759 rtx fn_rtx, nregs_rtx;
4760
4761 if (GET_MODE (operands[0]) != FUNCTION_MODE)
4762 abort ();
4763
4764 if (GET_CODE (XEXP (operands[0], 0)) == LABEL_REF)
4765 {
4766 /* This is really a PIC sequence. We want to represent
4767 it as a funny jump so its delay slots can be filled.
4768
4769 ??? But if this really *is* a CALL, won't it clobber the
4770 call-clobbered registers? We lose this if it is a JUMP_INSN.
4771 Why can't we have delay slots filled if it were a CALL? */
4772
4773 if (! TARGET_V9 && INTVAL (operands[3]) != 0)
4774 emit_jump_insn (gen_rtx (PARALLEL, VOIDmode, gen_rtvec (3,
4775 gen_rtx (SET, VOIDmode, pc_rtx,
4776 XEXP (operands[0], 0)),
4777 operands[3],
4778 gen_rtx (CLOBBER, VOIDmode,
4779 gen_rtx (REG, Pmode, 15)))));
4780 else
4781 emit_jump_insn (gen_rtx (PARALLEL, VOIDmode, gen_rtvec (2,
4782 gen_rtx (SET, VOIDmode, pc_rtx,
4783 XEXP (operands[0], 0)),
4784 gen_rtx (CLOBBER, VOIDmode,
4785 gen_rtx (REG, Pmode, 15)))));
4786 goto finish_call;
4787 }
4788
4789 fn_rtx = operands[0];
4790
4791 /* Count the number of parameter registers being used by this call.
4792 If operands[2] is NULL, it means we are using them all, which
4793 means 6 on the SPARC. */
4794 #if 0
4795 if (operands[2])
4796 nregs_rtx = gen_rtx (CONST_INT, VOIDmode, REGNO (operands[2]) - 8);
4797 else
4798 nregs_rtx = gen_rtx (CONST_INT, VOIDmode, 6);
4799 #else
4800 nregs_rtx = const0_rtx;
4801 #endif
4802
4803 if (! TARGET_V9 && INTVAL (operands[3]) != 0)
4804 emit_call_insn (gen_rtx (PARALLEL, VOIDmode, gen_rtvec (3,
4805 gen_rtx (CALL, VOIDmode, fn_rtx, nregs_rtx),
4806 operands[3],
4807 gen_rtx (CLOBBER, VOIDmode,
4808 gen_rtx (REG, Pmode, 15)))));
4809 else
4810 emit_call_insn (gen_rtx (PARALLEL, VOIDmode, gen_rtvec (2,
4811 gen_rtx (CALL, VOIDmode, fn_rtx, nregs_rtx),
4812 gen_rtx (CLOBBER, VOIDmode,
4813 gen_rtx (REG, Pmode, 15)))));
4814
4815 finish_call:
4816 #if 0
4817 /* If this call wants a structure value,
4818 emit an unimp insn to let the called function know about this. */
4819 if (! TARGET_V9 && INTVAL (operands[3]) > 0)
4820 {
4821 rtx insn = emit_insn (operands[3]);
4822 SCHED_GROUP_P (insn) = 1;
4823 }
4824 #endif
4825
4826 DONE;
4827 }")
4828
4829 ;; We can't use the same pattern for these two insns, because then registers
4830 ;; in the address may not be properly reloaded.
4831
4832 (define_insn "*call_address_sp32"
4833 [(call (mem:SI (match_operand:SI 0 "address_operand" "p"))
4834 (match_operand 1 "" ""))
4835 (clobber (reg:SI 15))]
4836 ;;- Do not use operand 1 for most machines.
4837 "! TARGET_PTR64"
4838 "*
4839 {
4840 return \"call %a0,%1%#\";
4841 }"
4842 [(set_attr "type" "call")])
4843
4844 (define_insn "*call_symbolic_sp32"
4845 [(call (mem:SI (match_operand:SI 0 "symbolic_operand" "s"))
4846 (match_operand 1 "" ""))
4847 (clobber (reg:SI 15))]
4848 ;;- Do not use operand 1 for most machines.
4849 "! TARGET_PTR64"
4850 "*
4851 {
4852 return \"call %a0,%1%#\";
4853 }"
4854 [(set_attr "type" "call")])
4855
4856 (define_insn "*call_address_sp64"
4857 [(call (mem:SI (match_operand:DI 0 "address_operand" "p"))
4858 (match_operand 1 "" ""))
4859 (clobber (reg:DI 15))]
4860 ;;- Do not use operand 1 for most machines.
4861 "TARGET_PTR64"
4862 "*
4863 {
4864 return \"call %a0,%1%#\";
4865 }"
4866 [(set_attr "type" "call")])
4867
4868 (define_insn "*call_symbolic_sp64"
4869 [(call (mem:SI (match_operand:DI 0 "symbolic_operand" "s"))
4870 (match_operand 1 "" ""))
4871 (clobber (reg:DI 15))]
4872 ;;- Do not use operand 1 for most machines.
4873 "TARGET_PTR64"
4874 "*
4875 {
4876 return \"call %a0,%1%#\";
4877 }"
4878 [(set_attr "type" "call")])
4879
4880 ;; This is a call that wants a structure value.
4881 ;; There is no such critter for v9 (??? we may need one anyway).
4882 (define_insn "*call_address_struct_value_sp32"
4883 [(call (mem:SI (match_operand:SI 0 "address_operand" "p"))
4884 (match_operand 1 "" ""))
4885 (match_operand 2 "immediate_operand" "")
4886 (clobber (reg:SI 15))]
4887 ;;- Do not use operand 1 for most machines.
4888 "! TARGET_V9 && GET_CODE (operands[2]) == CONST_INT && INTVAL (operands[2]) > 0"
4889 "*
4890 {
4891 return \"call %a0,%1\;nop\;unimp %2\";
4892 }"
4893 [(set_attr "type" "call_no_delay_slot")])
4894
4895 ;; This is a call that wants a structure value.
4896 ;; There is no such critter for v9 (??? we may need one anyway).
4897 (define_insn "*call_symbolic_struct_value_sp32"
4898 [(call (mem:SI (match_operand:SI 0 "symbolic_operand" "s"))
4899 (match_operand 1 "" ""))
4900 (match_operand 2 "immediate_operand" "")
4901 (clobber (reg:SI 15))]
4902 ;;- Do not use operand 1 for most machines.
4903 "! TARGET_V9 && GET_CODE (operands[2]) == CONST_INT && INTVAL (operands[2]) > 0"
4904 "*
4905 {
4906 return \"call %a0,%1\;nop\;unimp %2\";
4907 }"
4908 [(set_attr "type" "call_no_delay_slot")])
4909
4910 ;; This is a call that may want a structure value. This is used for
4911 ;; untyped_calls.
4912 (define_insn "*call_address_untyped_struct_value_sp32"
4913 [(call (mem:SI (match_operand:SI 0 "address_operand" "p"))
4914 (match_operand 1 "" ""))
4915 (match_operand 2 "immediate_operand" "")
4916 (clobber (reg:SI 15))]
4917 ;;- Do not use operand 1 for most machines.
4918 "! TARGET_V9 && GET_CODE (operands[2]) == CONST_INT && INTVAL (operands[2]) < 0"
4919 "*
4920 {
4921 return \"call %a0,%1\;nop\;nop\";
4922 }"
4923 [(set_attr "type" "call_no_delay_slot")])
4924
4925 ;; This is a call that wants a structure value.
4926 (define_insn "*call_symbolic_untyped_struct_value_sp32"
4927 [(call (mem:SI (match_operand:SI 0 "symbolic_operand" "s"))
4928 (match_operand 1 "" ""))
4929 (match_operand 2 "immediate_operand" "")
4930 (clobber (reg:SI 15))]
4931 ;;- Do not use operand 1 for most machines.
4932 "! TARGET_V9 && GET_CODE (operands[2]) == CONST_INT && INTVAL (operands[2]) < 0"
4933 "*
4934 {
4935 return \"call %a0,%1\;nop\;nop\";
4936 }"
4937 [(set_attr "type" "call_no_delay_slot")])
4938
4939 (define_expand "call_value"
4940 ;; Note that this expression is not used for generating RTL.
4941 ;; All the RTL is generated explicitly below.
4942 [(set (match_operand 0 "register_operand" "=rf")
4943 (call (match_operand:SI 1 "" "")
4944 (match_operand 4 "" "")))]
4945 ;; operand 2 is stack_size_rtx
4946 ;; operand 3 is next_arg_register
4947 ""
4948 "
4949 {
4950 rtx fn_rtx, nregs_rtx;
4951 rtvec vec;
4952
4953 if (GET_MODE (operands[1]) != FUNCTION_MODE)
4954 abort ();
4955
4956 fn_rtx = operands[1];
4957
4958 #if 0
4959 if (operands[3])
4960 nregs_rtx = gen_rtx (CONST_INT, VOIDmode, REGNO (operands[3]) - 8);
4961 else
4962 nregs_rtx = gen_rtx (CONST_INT, VOIDmode, 6);
4963 #else
4964 nregs_rtx = const0_rtx;
4965 #endif
4966
4967 vec = gen_rtvec (2,
4968 gen_rtx (SET, VOIDmode, operands[0],
4969 gen_rtx (CALL, VOIDmode, fn_rtx, nregs_rtx)),
4970 gen_rtx (CLOBBER, VOIDmode, gen_rtx (REG, Pmode, 15)));
4971
4972 emit_call_insn (gen_rtx (PARALLEL, VOIDmode, vec));
4973
4974 DONE;
4975 }")
4976
4977 (define_insn "*call_value_address_sp32"
4978 [(set (match_operand 0 "" "=rf")
4979 (call (mem:SI (match_operand:SI 1 "address_operand" "p"))
4980 (match_operand 2 "" "")))
4981 (clobber (reg:SI 15))]
4982 ;;- Do not use operand 2 for most machines.
4983 "! TARGET_PTR64"
4984 "*
4985 {
4986 return \"call %a1,%2%#\";
4987 }"
4988 [(set_attr "type" "call")])
4989
4990 (define_insn "*call_value_symbolic_sp32"
4991 [(set (match_operand 0 "" "=rf")
4992 (call (mem:SI (match_operand:SI 1 "symbolic_operand" "s"))
4993 (match_operand 2 "" "")))
4994 (clobber (reg:SI 15))]
4995 ;;- Do not use operand 2 for most machines.
4996 "! TARGET_PTR64"
4997 "*
4998 {
4999 return \"call %a1,%2%#\";
5000 }"
5001 [(set_attr "type" "call")])
5002
5003 (define_insn "*call_value_address_sp64"
5004 [(set (match_operand 0 "" "=rf")
5005 (call (mem:SI (match_operand:DI 1 "address_operand" "p"))
5006 (match_operand 2 "" "")))
5007 (clobber (reg:DI 15))]
5008 ;;- Do not use operand 2 for most machines.
5009 "TARGET_PTR64"
5010 "*
5011 {
5012 return \"call %a1,%2%#\";
5013 }"
5014 [(set_attr "type" "call")])
5015
5016 (define_insn "*call_value_symbolic_sp64"
5017 [(set (match_operand 0 "" "=rf")
5018 (call (mem:SI (match_operand:DI 1 "symbolic_operand" "s"))
5019 (match_operand 2 "" "")))
5020 (clobber (reg:DI 15))]
5021 ;;- Do not use operand 2 for most machines.
5022 "TARGET_PTR64"
5023 "*
5024 {
5025 return \"call %a1,%2%#\";
5026 }"
5027 [(set_attr "type" "call")])
5028
5029 (define_expand "untyped_call"
5030 [(parallel [(call (match_operand 0 "" "")
5031 (const_int 0))
5032 (match_operand 1 "" "")
5033 (match_operand 2 "" "")])]
5034 ""
5035 "
5036 {
5037 int i;
5038
5039 /* Pass constm1 to indicate that it may expect a structure value, but
5040 we don't know what size it is. */
5041 emit_call_insn (gen_call (operands[0], const0_rtx, NULL, constm1_rtx));
5042
5043 for (i = 0; i < XVECLEN (operands[2], 0); i++)
5044 {
5045 rtx set = XVECEXP (operands[2], 0, i);
5046 emit_move_insn (SET_DEST (set), SET_SRC (set));
5047 }
5048
5049 /* The optimizer does not know that the call sets the function value
5050 registers we stored in the result block. We avoid problems by
5051 claiming that all hard registers are used and clobbered at this
5052 point. */
5053 emit_insn (gen_blockage ());
5054
5055 DONE;
5056 }")
5057
5058 ;; UNSPEC_VOLATILE is considered to use and clobber all hard registers and
5059 ;; all of memory. This blocks insns from being moved across this point.
5060
5061 (define_insn "blockage"
5062 [(unspec_volatile [(const_int 0)] 0)]
5063 ""
5064 "")
5065
5066 ;; Prepare to return any type including a structure value.
5067
5068 (define_expand "untyped_return"
5069 [(match_operand:BLK 0 "memory_operand" "")
5070 (match_operand 1 "" "")]
5071 ""
5072 "
5073 {
5074 rtx valreg1 = gen_rtx (REG, DImode, 24);
5075 rtx valreg2 = gen_rtx (REG, TARGET_V9 ? TFmode : DFmode, 32);
5076 rtx result = operands[0];
5077
5078 if (! TARGET_V9)
5079 {
5080 rtx rtnreg = gen_rtx (REG, SImode, (leaf_function ? 15 : 31));
5081 rtx value = gen_reg_rtx (SImode);
5082
5083 /* Fetch the instruction we will return to and see if it is an unimp
5084 instruction (its most significant 10 bits will be zero). If so,
5085 update the return address to skip the unimp instruction. */
5086 emit_move_insn (value,
5087 gen_rtx (MEM, SImode, plus_constant (rtnreg, 8)));
5088 emit_insn (gen_lshrsi3 (value, value, GEN_INT (22)));
5089 emit_insn (gen_update_return (rtnreg, value));
5090 }
5091
5092 /* Reload the function value registers. */
5093 emit_move_insn (valreg1, change_address (result, DImode, XEXP (result, 0)));
5094 emit_move_insn (valreg2,
5095 change_address (result, TARGET_V9 ? TFmode : DFmode,
5096 plus_constant (XEXP (result, 0), 8)));
5097
5098 /* Put USE insns before the return. */
5099 emit_insn (gen_rtx (USE, VOIDmode, valreg1));
5100 emit_insn (gen_rtx (USE, VOIDmode, valreg2));
5101
5102 /* Construct the return. */
5103 expand_null_return ();
5104
5105 DONE;
5106 }")
5107
5108 ;; This is a bit of a hack. We're incrementing a fixed register (%i7),
5109 ;; and parts of the compiler don't want to believe that the add is needed.
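;; A sketch of the emitted sequence (registers illustrative; operand 1 holds
;; the candidate unimp word shifted right by 22 bits, see untyped_return):
;;     cmp  %g1,0          ! zero means the word is an unimp insn
;;     be,a .+8            ! annulled branch: delay slot runs only if taken
;;      add %i7,4,%i7      ! bump the return address past the unimp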
5110
5111 (define_insn "update_return"
5112 [(unspec:SI [(match_operand:SI 0 "register_operand" "r")
5113 (match_operand:SI 1 "register_operand" "r")] 0)]
5114 "! TARGET_V9"
5115 "cmp %1,0\;be,a .+8\;add %0,4,%0"
5116 [(set_attr "type" "multi")])
5117 \f
5118 (define_insn "return"
5119 [(return)]
5120 "! TARGET_EPILOGUE"
5121 "* return output_return (operands);"
5122 [(set_attr "type" "multi")])
5123
5124 (define_insn "nop"
5125 [(const_int 0)]
5126 ""
5127 "nop")
5128
5129 (define_expand "indirect_jump"
5130 [(set (pc) (match_operand 0 "address_operand" "p"))]
5131 ""
5132 "")
5133
5134 (define_insn "*branch_sp32"
5135 [(set (pc) (match_operand:SI 0 "address_operand" "p"))]
5136 "! TARGET_PTR64"
5137 "jmp %a0%#"
5138 [(set_attr "type" "uncond_branch")])
5139
5140 (define_insn "*branch_sp64"
5141 [(set (pc) (match_operand:DI 0 "address_operand" "p"))]
5142 "TARGET_PTR64"
5143 "jmp %a0%#"
5144 [(set_attr "type" "uncond_branch")])
5145
5146 ;; ??? Doesn't work with -mflat.
5147 (define_expand "nonlocal_goto"
5148 [(match_operand:SI 0 "general_operand" "")
5149 (match_operand:SI 1 "general_operand" "")
5150 (match_operand:SI 2 "general_operand" "")
5151 (match_operand:SI 3 "" "")]
5152 ""
5153 "
5154 {
5155 /* Trap instruction to flush all the register windows. */
5156 emit_insn (gen_flush_register_windows ());
5157 /* Load the fp value for the containing fn into %fp.
5158 This is needed because operands[2] refers to %fp.
5159 Virtual register instantiation fails if the virtual %fp isn't set from a
5160 register. Thus we must copy operands[0] into a register if it isn't
5161 already one. */
5162 if (GET_CODE (operands[0]) != REG)
5163 operands[0] = force_reg (Pmode, operands[0]);
5164 emit_move_insn (virtual_stack_vars_rtx, operands[0]);
5165 /* Find the containing function's current nonlocal goto handler,
5166 which will do any cleanups and then jump to the label. */
5167 emit_move_insn (gen_rtx (REG, Pmode, 8), operands[1]);
5168 /* Restore %fp from stack pointer value for containing function.
5169 The restore insn that follows will move this to %sp,
5170 and reload the appropriate value into %fp. */
5171 emit_move_insn (frame_pointer_rtx, operands[2]);
5172 /* Put in the static chain register the nonlocal label address. */
5173 emit_move_insn (static_chain_rtx, operands[3]);
5174 /* USE of frame_pointer_rtx added for consistency; not clear if
5175 really needed. */
5176 emit_insn (gen_rtx (USE, VOIDmode, frame_pointer_rtx));
5177 emit_insn (gen_rtx (USE, VOIDmode, stack_pointer_rtx));
5178 emit_insn (gen_rtx (USE, VOIDmode, static_chain_rtx));
5179 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, Pmode, 8)));
5180 /* Return, restoring reg window and jumping to goto handler. */
5181 emit_insn (gen_goto_handler_and_restore ());
5182 DONE;
5183 }")
5184
5185 ;; Special trap insn to flush register windows.
5186 (define_insn "flush_register_windows"
5187 [(unspec_volatile [(const_int 0)] 1)]
5188 ""
5189 "* return TARGET_V9 ? \"flushw\" : \"ta 3\";"
5190 [(set_attr "type" "misc")])
5191
5192 (define_insn "goto_handler_and_restore"
5193 [(unspec_volatile [(const_int 0)] 2)]
5194 ""
5195 "jmp %%o0+0\;restore"
5196 [(set_attr "type" "misc")
5197 (set_attr "length" "2")])
5198
5199 ;; Special pattern for the FLUSH instruction.
5200
5201 (define_insn "flush"
5202 [(unspec_volatile [(match_operand 0 "memory_operand" "m")] 3)]
5203 ""
5204 "* return TARGET_V9 ? \"flush %f0\" : \"iflush %f0\";"
5205 [(set_attr "type" "misc")])
5206 \f
5207 ;; Find first set.
5208
5209 ;; The scan instruction searches from the most significant bit while ffs
5210 ;; searches from the least significant bit. The bit index and treatment of
5211 ;; zero also differ. It takes at least 7 instructions to get the proper
5212 ;; result. Here is an obvious 8-instruction sequence.
5213
5214 (define_insn "ffssi2"
5215 [(set (match_operand:SI 0 "register_operand" "=&r")
5216 (ffs:SI (match_operand:SI 1 "register_operand" "r")))
5217 (clobber (match_scratch:SI 2 "=&r"))]
5218 "TARGET_SPARCLITE"
5219 "sub %%g0,%1,%0\;and %0,%1,%0\;scan %0,0,%0\;mov 32,%2\;sub %2,%0,%0\;sra %0,31,%2\;and %2,31,%2\;add %2,%0,%0"
5220 [(set_attr "type" "multi")
5221 (set_attr "length" "8")])
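;; As an illustration only (assuming scan yields 63 when its source has no
;; set bit), the sequence above computes, in C terms:
;;     low = x & -x;            /* isolate the lowest set bit              */
;;     i   = scan_msb (low);    /* bit number counted from the MSB side    */
;;     r   = 32 - i;            /* 1-based index from the LSB, -31 if x==0 */
;;     r  += (r >> 31) & 31;    /* fold the x == 0 case to ffs(0) == 0     */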
5222
5223 ;; ??? This should be a define_expand, so that the extra instructions have
5224 ;; a chance of being optimized away.
5225
5226 (define_insn "ffsdi2"
5227 [(set (match_operand:DI 0 "register_operand" "=&r")
5228 (ffs:DI (match_operand:DI 1 "register_operand" "r")))
5229 (clobber (match_scratch:DI 2 "=&r"))]
5230 "TARGET_V9"
5231 "neg %1,%2\;not %2,%2\;xor %1,%2,%2\;popc %2,%0\;movrz %1,%%g0,%0"
5232 [(set_attr "type" "multi")
5233 (set_attr "length" "5")])
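;; The v9 sequence above relies on the identity that x ^ (x - 1) is a mask of
;; the lowest set bit and all bits below it, so popc of that mask gives the
;; 1-based bit number; the final movrz forces the result to 0 when x is 0.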
5234 \f
5235 ;; Split up troublesome insns for better scheduling.
5236
5237 ;; The following patterns are straightforward. They can be applied
5238 ;; either before or after register allocation.
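;; For example, a non-PIC store to a symbolic address (shown here only as an
;; illustration),
;;     st %o1,[symbol]
;; splits into the usual two-instruction form
;;     sethi %hi(symbol),%tmp
;;     st %o1,[%tmp+%lo(symbol)]
;; with %tmp taken from the clobbered scratch register.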
5239
5240 (define_split
5241 [(set (match_operand 0 "splittable_symbolic_memory_operand" "")
5242 (match_operand 1 "reg_or_0_operand" ""))
5243 (clobber (match_operand:SI 2 "register_operand" ""))]
5244 "! flag_pic"
5245 [(set (match_dup 2) (high:SI (match_dup 3)))
5246 (set (match_dup 4) (match_dup 1))]
5247 "
5248 {
5249 operands[3] = XEXP (operands[0], 0);
5250 operands[4] = gen_rtx (MEM, GET_MODE (operands[0]),
5251 gen_rtx (LO_SUM, SImode, operands[2], operands[3]));
5252 MEM_IN_STRUCT_P (operands[4]) = MEM_IN_STRUCT_P (operands[0]);
5253 MEM_VOLATILE_P (operands[4]) = MEM_VOLATILE_P (operands[0]);
5254 RTX_UNCHANGING_P (operands[4]) = RTX_UNCHANGING_P (operands[0]);
5255 }")
5256
5257 (define_split
5258 [(set (match_operand 0 "splittable_immediate_memory_operand" "")
5259 (match_operand 1 "general_operand" ""))
5260 (clobber (match_operand:SI 2 "register_operand" ""))]
5261 "flag_pic"
5262 [(set (match_dup 3) (match_dup 1))]
5263 "
5264 {
5265 rtx addr = legitimize_pic_address (XEXP (operands[0], 0),
5266 GET_MODE (operands[0]),
5267 operands[2]);
5268 operands[3] = gen_rtx (MEM, GET_MODE (operands[0]), addr);
5269 MEM_IN_STRUCT_P (operands[3]) = MEM_IN_STRUCT_P (operands[0]);
5270 MEM_VOLATILE_P (operands[3]) = MEM_VOLATILE_P (operands[0]);
5271 RTX_UNCHANGING_P (operands[3]) = RTX_UNCHANGING_P (operands[0]);
5272 }")
5273
5274 (define_split
5275 [(set (match_operand 0 "register_operand" "")
5276 (match_operand 1 "splittable_immediate_memory_operand" ""))]
5277 "flag_pic"
5278 [(set (match_dup 0) (match_dup 2))]
5279 "
5280 {
5281 rtx addr = legitimize_pic_address (XEXP (operands[1], 0),
5282 GET_MODE (operands[1]),
5283 operands[0]);
5284 operands[2] = gen_rtx (MEM, GET_MODE (operands[1]), addr);
5285 MEM_IN_STRUCT_P (operands[2]) = MEM_IN_STRUCT_P (operands[1]);
5286 MEM_VOLATILE_P (operands[2]) = MEM_VOLATILE_P (operands[1]);
5287 RTX_UNCHANGING_P (operands[2]) = RTX_UNCHANGING_P (operands[1]);
5288 }")
5289
5290 ;; Sign- and zero-extend operations can have symbolic memory operands.
5291
5292 (define_split
5293 [(set (match_operand 0 "register_operand" "")
5294 (match_operator 1 "extend_op" [(match_operand 2 "splittable_immediate_memory_operand" "")]))]
5295 "flag_pic"
5296 [(set (match_dup 0) (match_op_dup 1 [(match_dup 3)]))]
5297 "
5298 {
5299 rtx addr = legitimize_pic_address (XEXP (operands[2], 0),
5300 GET_MODE (operands[2]),
5301 operands[0]);
5302 operands[3] = gen_rtx (MEM, GET_MODE (operands[2]), addr);
5303 MEM_IN_STRUCT_P (operands[3]) = MEM_IN_STRUCT_P (operands[2]);
5304 MEM_VOLATILE_P (operands[3]) = MEM_VOLATILE_P (operands[2]);
5305 RTX_UNCHANGING_P (operands[3]) = RTX_UNCHANGING_P (operands[2]);
5306 }")
5307
5308 (define_split
5309 [(set (match_operand:SI 0 "register_operand" "")
5310 (match_operand:SI 1 "immediate_operand" ""))]
5311 "! flag_pic && (GET_CODE (operands[1]) == SYMBOL_REF
5312 || GET_CODE (operands[1]) == CONST
5313 || GET_CODE (operands[1]) == LABEL_REF)"
5314 [(set (match_dup 0) (high:SI (match_dup 1)))
5315 (set (match_dup 0)
5316 (lo_sum:SI (match_dup 0) (match_dup 1)))]
5317 "")
5318
5319 ;; LABEL_REFs are not modified by `legitimize_pic_address`
5320 ;; so do not recurse infinitely in the PIC case.
5321 (define_split
5322 [(set (match_operand:SI 0 "register_operand" "")
5323 (match_operand:SI 1 "immediate_operand" ""))]
5324 "flag_pic && (GET_CODE (operands[1]) == SYMBOL_REF
5325 || GET_CODE (operands[1]) == CONST)"
5326 [(set (match_dup 0) (match_dup 1))]
5327 "
5328 {
5329 operands[1] = legitimize_pic_address (operands[1], Pmode, operands[0]);
5330 }")
5331 \f
5332 ;; These split sne/seq insns. The forms of the resulting insns are
5333 ;; somewhat bogus, but they avoid extra patterns and show data dependency.
5334 ;; Nothing will look at these in detail after splitting has occurred.
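;; For instance, the sne split below ends up as roughly
;;     subcc %g0,%1,%g0        ! carry set iff operand 1 is nonzero
;;     addx  %g0,0,%0          ! operand 0 = carry
;; while the seq form picks up the complemented carry with subx %g0,-1,%0.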
5335
5336 ;; ??? v9 DImode versions are missing because addc and subc use %icc.
5337
5338 (define_split
5339 [(set (match_operand:SI 0 "register_operand" "")
5340 (ne:SI (match_operand:SI 1 "register_operand" "")
5341 (const_int 0)))
5342 (clobber (reg:CC 0))]
5343 ""
5344 [(set (reg:CC_NOOV 0) (compare:CC_NOOV (neg:SI (match_dup 1))
5345 (const_int 0)))
5346 (set (match_dup 0) (ltu:SI (reg:CC 0) (const_int 0)))]
5347 "")
5348
5349 (define_split
5350 [(set (match_operand:SI 0 "register_operand" "")
5351 (neg:SI (ne:SI (match_operand:SI 1 "register_operand" "")
5352 (const_int 0))))
5353 (clobber (reg:CC 0))]
5354 ""
5355 [(set (reg:CC_NOOV 0) (compare:CC_NOOV (neg:SI (match_dup 1))
5356 (const_int 0)))
5357 (set (match_dup 0) (neg:SI (ltu:SI (reg:CC 0) (const_int 0))))]
5358 "")
5359
5360 (define_split
5361 [(set (match_operand:SI 0 "register_operand" "")
5362 (eq:SI (match_operand:SI 1 "register_operand" "")
5363 (const_int 0)))
5364 (clobber (reg:CC 0))]
5365 ""
5366 [(set (reg:CC_NOOV 0) (compare:CC_NOOV (neg:SI (match_dup 1))
5367 (const_int 0)))
5368 (set (match_dup 0) (geu:SI (reg:CC 0) (const_int 0)))]
5369 "")
5370
5371 (define_split
5372 [(set (match_operand:SI 0 "register_operand" "")
5373 (neg:SI (eq:SI (match_operand:SI 1 "register_operand" "")
5374 (const_int 0))))
5375 (clobber (reg:CC 0))]
5376 ""
5377 [(set (reg:CC_NOOV 0) (compare:CC_NOOV (neg:SI (match_dup 1))
5378 (const_int 0)))
5379 (set (match_dup 0) (neg:SI (geu:SI (reg:CC 0) (const_int 0))))]
5380 "")
5381
5382 (define_split
5383 [(set (match_operand:SI 0 "register_operand" "")
5384 (plus:SI (ne:SI (match_operand:SI 1 "register_operand" "")
5385 (const_int 0))
5386 (match_operand:SI 2 "register_operand" "")))
5387 (clobber (reg:CC 0))]
5388 ""
5389 [(set (reg:CC_NOOV 0) (compare:CC_NOOV (neg:SI (match_dup 1))
5390 (const_int 0)))
5391 (set (match_dup 0) (plus:SI (ltu:SI (reg:CC 0) (const_int 0))
5392 (match_dup 2)))]
5393 "")
5394
5395 (define_split
5396 [(set (match_operand:SI 0 "register_operand" "")
5397 (minus:SI (match_operand:SI 2 "register_operand" "")
5398 (ne:SI (match_operand:SI 1 "register_operand" "")
5399 (const_int 0))))
5400 (clobber (reg:CC 0))]
5401 ""
5402 [(set (reg:CC_NOOV 0) (compare:CC_NOOV (neg:SI (match_dup 1))
5403 (const_int 0)))
5404 (set (match_dup 0) (minus:SI (match_dup 2)
5405 (ltu:SI (reg:CC 0) (const_int 0))))]
5406 "")
5407
5408 (define_split
5409 [(set (match_operand:SI 0 "register_operand" "")
5410 (plus:SI (eq:SI (match_operand:SI 1 "register_operand" "")
5411 (const_int 0))
5412 (match_operand:SI 2 "register_operand" "")))
5413 (clobber (reg:CC 0))]
5414 ""
5415 [(set (reg:CC_NOOV 0) (compare:CC_NOOV (neg:SI (match_dup 1))
5416 (const_int 0)))
5417 (set (match_dup 0) (plus:SI (geu:SI (reg:CC 0) (const_int 0))
5418 (match_dup 2)))]
5419 "")
5420
5421 (define_split
5422 [(set (match_operand:SI 0 "register_operand" "")
5423 (minus:SI (match_operand:SI 2 "register_operand" "")
5424 (eq:SI (match_operand:SI 1 "register_operand" "")
5425 (const_int 0))))
5426 (clobber (reg:CC 0))]
5427 ""
5428 [(set (reg:CC_NOOV 0) (compare:CC_NOOV (neg:SI (match_dup 1))
5429 (const_int 0)))
5430 (set (match_dup 0) (minus:SI (match_dup 2)
5431 (geu:SI (reg:CC 0) (const_int 0))))]
5432 "")
5433 \f
5434 ;; Peepholes go at the end.
5435
5436 ;; Optimize consecutive loads or stores into ldd and std when possible.
5437 ;; The conditions in which we do this are very restricted and are
5438 ;; explained in the registers_ok_for_ldd_peep and addrs_ok_for_ldd_peep functions.
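;; For example, two adjacent word loads into an even/odd register pair such as
;;     ld [%fp-8],%o2
;;     ld [%fp-4],%o3
;; can become the single doubleword load
;;     ldd [%fp-8],%o2
;; provided the first destination register is even, the lower address comes
;; first, and it is doubleword aligned (this example is only an illustration).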
5439
5440 (define_peephole
5441 [(set (match_operand:SI 0 "register_operand" "=rf")
5442 (match_operand:SI 1 "memory_operand" ""))
5443 (set (match_operand:SI 2 "register_operand" "=rf")
5444 (match_operand:SI 3 "memory_operand" ""))]
5445 "! TARGET_V9
5446 && registers_ok_for_ldd_peep (operands[0], operands[2])
5447 && ! MEM_VOLATILE_P (operands[1]) && ! MEM_VOLATILE_P (operands[3])
5448 && addrs_ok_for_ldd_peep (XEXP (operands[1], 0), XEXP (operands[3], 0))"
5449 "ldd %1,%0")
5450
5451 (define_peephole
5452 [(set (match_operand:SI 0 "memory_operand" "")
5453 (match_operand:SI 1 "register_operand" "rf"))
5454 (set (match_operand:SI 2 "memory_operand" "")
5455 (match_operand:SI 3 "register_operand" "rf"))]
5456 "! TARGET_V9
5457 && registers_ok_for_ldd_peep (operands[1], operands[3])
5458 && ! MEM_VOLATILE_P (operands[0]) && ! MEM_VOLATILE_P (operands[2])
5459 && addrs_ok_for_ldd_peep (XEXP (operands[0], 0), XEXP (operands[2], 0))"
5460 "std %1,%0")
5461
5462 (define_peephole
5463 [(set (match_operand:SF 0 "register_operand" "=fr")
5464 (match_operand:SF 1 "memory_operand" ""))
5465 (set (match_operand:SF 2 "register_operand" "=fr")
5466 (match_operand:SF 3 "memory_operand" ""))]
5467 "! TARGET_V9
5468 && registers_ok_for_ldd_peep (operands[0], operands[2])
5469 && ! MEM_VOLATILE_P (operands[1]) && ! MEM_VOLATILE_P (operands[3])
5470 && addrs_ok_for_ldd_peep (XEXP (operands[1], 0), XEXP (operands[3], 0))"
5471 "ldd %1,%0")
5472
5473 (define_peephole
5474 [(set (match_operand:SF 0 "memory_operand" "")
5475 (match_operand:SF 1 "register_operand" "fr"))
5476 (set (match_operand:SF 2 "memory_operand" "")
5477 (match_operand:SF 3 "register_operand" "fr"))]
5478 "! TARGET_V9
5479 && registers_ok_for_ldd_peep (operands[1], operands[3])
5480 && ! MEM_VOLATILE_P (operands[0]) && ! MEM_VOLATILE_P (operands[2])
5481 && addrs_ok_for_ldd_peep (XEXP (operands[0], 0), XEXP (operands[2], 0))"
5482 "std %1,%0")
5483
5484 (define_peephole
5485 [(set (match_operand:SI 0 "register_operand" "=rf")
5486 (match_operand:SI 1 "memory_operand" ""))
5487 (set (match_operand:SI 2 "register_operand" "=rf")
5488 (match_operand:SI 3 "memory_operand" ""))]
5489 "! TARGET_V9
5490 && registers_ok_for_ldd_peep (operands[2], operands[0])
5491 && ! MEM_VOLATILE_P (operands[3]) && ! MEM_VOLATILE_P (operands[1])
5492 && addrs_ok_for_ldd_peep (XEXP (operands[3], 0), XEXP (operands[1], 0))"
5493 "ldd %3,%2")
5494
5495 (define_peephole
5496 [(set (match_operand:SI 0 "memory_operand" "")
5497 (match_operand:SI 1 "register_operand" "rf"))
5498 (set (match_operand:SI 2 "memory_operand" "")
5499 (match_operand:SI 3 "register_operand" "rf"))]
5500 "! TARGET_V9
5501 && registers_ok_for_ldd_peep (operands[3], operands[1])
5502 && ! MEM_VOLATILE_P (operands[2]) && ! MEM_VOLATILE_P (operands[0])
5503 && addrs_ok_for_ldd_peep (XEXP (operands[2], 0), XEXP (operands[0], 0))"
5504 "std %3,%2")
5505
5506 (define_peephole
5507 [(set (match_operand:SF 0 "register_operand" "=fr")
5508 (match_operand:SF 1 "memory_operand" ""))
5509 (set (match_operand:SF 2 "register_operand" "=fr")
5510 (match_operand:SF 3 "memory_operand" ""))]
5511 "! TARGET_V9
5512 && registers_ok_for_ldd_peep (operands[2], operands[0])
5513 && ! MEM_VOLATILE_P (operands[3]) && ! MEM_VOLATILE_P (operands[1])
5514 && addrs_ok_for_ldd_peep (XEXP (operands[3], 0), XEXP (operands[1], 0))"
5515 "ldd %3,%2")
5516
5517 (define_peephole
5518 [(set (match_operand:SF 0 "memory_operand" "")
5519 (match_operand:SF 1 "register_operand" "fr"))
5520 (set (match_operand:SF 2 "memory_operand" "")
5521 (match_operand:SF 3 "register_operand" "fr"))]
5522 "! TARGET_V9
5523 && registers_ok_for_ldd_peep (operands[3], operands[1])
5524 && ! MEM_VOLATILE_P (operands[2]) && ! MEM_VOLATILE_P (operands[0])
5525 && addrs_ok_for_ldd_peep (XEXP (operands[2], 0), XEXP (operands[0], 0))"
5526 "std %3,%2")
5527
5528 ;; Optimize the case of following a reg-reg move with a test
5529 ;; of the register just moved. Don't allow floating point regs for operand 0 or 1.
5530 ;; This can result from a float to fix conversion.
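;; For example,
;;     mov %i0,%o1
;;     tst %o1
;; collapses into the single instruction "orcc %i0,%g0,%o1", which performs
;; the copy and sets the integer condition codes in one step.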
5531
5532 (define_peephole
5533 [(set (match_operand:SI 0 "register_operand" "=r")
5534 (match_operand:SI 1 "register_operand" "r"))
5535 (set (reg:CC 0)
5536 (compare:CC (match_operand:SI 2 "register_operand" "r")
5537 (const_int 0)))]
5538 "(rtx_equal_p (operands[2], operands[0])
5539 || rtx_equal_p (operands[2], operands[1]))
5540 && ! FP_REG_P (operands[0]) && ! FP_REG_P (operands[1])"
5541 "orcc %1,%%g0,%0")
5542
5543 (define_peephole
5544 [(set (match_operand:DI 0 "register_operand" "=r")
5545 (match_operand:DI 1 "register_operand" "r"))
5546 (set (reg:CCX 0)
5547 (compare:CCX (match_operand:DI 2 "register_operand" "r")
5548 (const_int 0)))]
5549 "TARGET_V9
5550 && (rtx_equal_p (operands[2], operands[0])
5551 || rtx_equal_p (operands[2], operands[1]))
5552 && ! FP_REG_P (operands[0]) && ! FP_REG_P (operands[1])"
5553 "orcc %1,%%g0,%0")
5554
5555 ;; Do {sign,zero}-extended compares somewhat more efficiently.
5556 ;; ??? Is this now the Right Way to do this? Or will SCRATCH
5557 ;; eventually have some impact here?
5558
5559 (define_peephole
5560 [(set (match_operand:HI 0 "register_operand" "")
5561 (match_operand:HI 1 "memory_operand" ""))
5562 (set (match_operand:SI 2 "register_operand" "")
5563 (sign_extend:SI (match_dup 0)))
5564 (set (reg:CC 0)
5565 (compare:CC (match_dup 2)
5566 (const_int 0)))]
5567 ""
5568 "ldsh %1,%0\;orcc %0,%%g0,%2")
5569
5570 (define_peephole
5571 [(set (match_operand:HI 0 "register_operand" "")
5572 (match_operand:HI 1 "memory_operand" ""))
5573 (set (match_operand:DI 2 "register_operand" "")
5574 (sign_extend:DI (match_dup 0)))
5575 (set (reg:CCX 0)
5576 (compare:CCX (match_dup 2)
5577 (const_int 0)))]
5578 "TARGET_V9"
5579 "ldsh %1,%0\;orcc %0,%%g0,%2")
5580
5581 (define_peephole
5582 [(set (match_operand:QI 0 "register_operand" "")
5583 (match_operand:QI 1 "memory_operand" ""))
5584 (set (match_operand:SI 2 "register_operand" "")
5585 (sign_extend:SI (match_dup 0)))
5586 (set (reg:CC 0)
5587 (compare:CC (match_dup 2)
5588 (const_int 0)))]
5589 ""
5590 "ldsb %1,%0\;orcc %0,%%g0,%2")
5591
5592 (define_peephole
5593 [(set (match_operand:QI 0 "register_operand" "")
5594 (match_operand:QI 1 "memory_operand" ""))
5595 (set (match_operand:DI 2 "register_operand" "")
5596 (sign_extend:DI (match_dup 0)))
5597 (set (reg:CCX 0)
5598 (compare:CCX (match_dup 2)
5599 (const_int 0)))]
5600 "TARGET_V9"
5601 "ldsb %1,%0\;orcc %0,%%g0,%2")
5602
5603 ;; Floating-point move peepholes
5604 ;; ??? v9: Do we want similar ones?
5605
5606 (define_peephole
5607 [(set (match_operand:SI 0 "register_operand" "=r")
5608 (lo_sum:SI (match_dup 0)
5609 (match_operand:SI 1 "immediate_operand" "i")))
5610 (set (match_operand:DF 2 "register_operand" "=er")
5611 (mem:DF (match_dup 0)))]
5612 "RTX_UNCHANGING_P (operands[1]) && reg_unused_after (operands[0], insn)"
5613 "*
5614 {
5615 /* Go by way of output_move_double in case the register in operand 2
5616 is not properly aligned for ldd. */
5617 operands[1] = gen_rtx (MEM, DFmode,
5618 gen_rtx (LO_SUM, SImode, operands[0], operands[1]));
5619 operands[0] = operands[2];
5620 return output_move_double (operands);
5621 }")
5622
5623 (define_peephole
5624 [(set (match_operand:SI 0 "register_operand" "=r")
5625 (lo_sum:SI (match_dup 0)
5626 (match_operand:SI 1 "immediate_operand" "i")))
5627 (set (match_operand:SF 2 "register_operand" "=fr")
5628 (mem:SF (match_dup 0)))]
5629 "RTX_UNCHANGING_P (operands[1]) && reg_unused_after (operands[0], insn)"
5630 "ld [%0+%%lo(%a1)],%2")
5631
5632 ;; Return peepholes. First the "normal" ones.
5633
5634 ;; ??? There are QImode, HImode, and SImode versions of this pattern.
5635 ;; It might be possible to write one more general pattern instead of three.
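;; On v8, a caller that expects an aggregate return places an unimp word
;; after the call's delay slot, so these patterns return with "jmp %i7+12"
;; to skip it; otherwise the ordinary "ret" (jmp %i7+8) is used.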
5636
5637 (define_insn "*return_qi"
5638 [(set (match_operand:QI 0 "restore_operand" "")
5639 (match_operand:QI 1 "arith_operand" "rI"))
5640 (return)]
5641 "! TARGET_EPILOGUE"
5642 "*
5643 {
5644 if (! TARGET_V9 && current_function_returns_struct)
5645 return \"jmp %%i7+12\;restore %%g0,%1,%Y0\";
5646 else
5647 return \"ret\;restore %%g0,%1,%Y0\";
5648 }"
5649 [(set_attr "type" "multi")])
5650
5651 (define_insn "*return_hi"
5652 [(set (match_operand:HI 0 "restore_operand" "")
5653 (match_operand:HI 1 "arith_operand" "rI"))
5654 (return)]
5655 "! TARGET_EPILOGUE"
5656 "*
5657 {
5658 if (! TARGET_V9 && current_function_returns_struct)
5659 return \"jmp %%i7+12\;restore %%g0,%1,%Y0\";
5660 else
5661 return \"ret\;restore %%g0,%1,%Y0\";
5662 }"
5663 [(set_attr "type" "multi")])
5664
5665 (define_insn "*return_si"
5666 [(set (match_operand:SI 0 "restore_operand" "")
5667 (match_operand:SI 1 "arith_operand" "rI"))
5668 (return)]
5669 "! TARGET_EPILOGUE"
5670 "*
5671 {
5672 if (! TARGET_V9 && current_function_returns_struct)
5673 return \"jmp %%i7+12\;restore %%g0,%1,%Y0\";
5674 else
5675 return \"ret\;restore %%g0,%1,%Y0\";
5676 }"
5677 [(set_attr "type" "multi")])
5678
5679 ;; The following pattern is only generated by delayed-branch scheduling,
5680 ;; when the insn winds up in the epilogue. This can only happen when
5681 ;; ! TARGET_FPU because otherwise fp return values are in %f0.
5682 (define_insn "*return_sf_no_fpu"
5683 [(set (match_operand:SF 0 "restore_operand" "r")
5684 (match_operand:SF 1 "register_operand" "r"))
5685 (return)]
5686 "! TARGET_FPU && ! TARGET_EPILOGUE"
5687 "*
5688 {
5689 if (! TARGET_V9 && current_function_returns_struct)
5690 return \"jmp %%i7+12\;restore %%g0,%1,%Y0\";
5691 else
5692 return \"ret\;restore %%g0,%1,%Y0\";
5693 }"
5694 [(set_attr "type" "multi")])
5695
5696 (define_insn "*return_addsi"
5697 [(set (match_operand:SI 0 "restore_operand" "")
5698 (plus:SI (match_operand:SI 1 "arith_operand" "%r")
5699 (match_operand:SI 2 "arith_operand" "rI")))
5700 (return)]
5701 "! TARGET_EPILOGUE"
5702 "*
5703 {
5704 if (! TARGET_V9 && current_function_returns_struct)
5705 return \"jmp %%i7+12\;restore %r1,%2,%Y0\";
5706 else
5707 return \"ret\;restore %r1,%2,%Y0\";
5708 }"
5709 [(set_attr "type" "multi")])
5710
5711 (define_insn "*return_di"
5712 [(set (match_operand:DI 0 "restore_operand" "")
5713 (match_operand:DI 1 "arith_double_operand" "rHI"))
5714 (return)]
5715 "TARGET_V9 && ! TARGET_EPILOGUE"
5716 "ret\;restore %%g0,%1,%Y0"
5717 [(set_attr "type" "multi")])
5718
5719 (define_insn "*return_adddi"
5720 [(set (match_operand:DI 0 "restore_operand" "")
5721 (plus:DI (match_operand:DI 1 "arith_operand" "%r")
5722 (match_operand:DI 2 "arith_double_operand" "rHI")))
5723 (return)]
5724 "TARGET_V9 && ! TARGET_EPILOGUE"
5725 "ret\;restore %r1,%2,%Y0"
5726 [(set_attr "type" "multi")])
5727
5728 ;; Turned off because it should never match (subtracting a constant
5729 ;; is turned into addition) and because it would do the wrong thing
5730 ;; when operand 2 is -4096 (--4096 == 4096 is not a valid immediate).
5731 ;;(define_insn "*minus_const"
5732 ;; [(set (match_operand:SI 0 "restore_operand" "")
5733 ;; (minus:SI (match_operand:SI 1 "register_operand" "r")
5734 ;; (match_operand:SI 2 "small_int" "I")))
5735 ;; (return)]
5736 ;; "! TARGET_EPILOGUE"
5737 ;; "ret\;restore %1,-(%2),%Y0"
5738 ;; [(set_attr "type" "multi")])
5739
5740 ;; The following pattern is only generated by delayed-branch scheduling,
5741 ;; when the insn winds up in the epilogue.
5742 (define_insn "*return_sf"
5743 [(set (reg:SF 32)
5744 (match_operand:SF 0 "register_operand" "f"))
5745 (return)]
5746 "! TARGET_EPILOGUE"
5747 "ret\;fmovs %0,%%f0"
5748 [(set_attr "type" "multi")])
5749
5750 ;; Now peepholes to do a call followed by a jump.
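;; The add in the delay slot below leaves %o7 equal to the jump label minus 8
;; ("." is the address of the add, so ".-4" is the call address already in
;; %o7), so the callee's normal return to %o7+8 lands directly on the label
;; and the separate branch is no longer needed.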
5751
5752 (define_peephole
5753 [(parallel [(set (match_operand 0 "" "")
5754 (call (mem:SI (match_operand:SI 1 "call_operand_address" "ps"))
5755 (match_operand 2 "" "")))
5756 (clobber (reg:SI 15))])
5757 (set (pc) (label_ref (match_operand 3 "" "")))]
5758 "short_branch (INSN_UID (insn), INSN_UID (operands[3]))"
5759 "*
5760 {
5761 return \"call %a1,%2\;add %%o7,(%l3-.-4),%%o7\";
5762 }")
5763
5764 (define_peephole
5765 [(parallel [(call (mem:SI (match_operand:SI 0 "call_operand_address" "ps"))
5766 (match_operand 1 "" ""))
5767 (clobber (reg:SI 15))])
5768 (set (pc) (label_ref (match_operand 2 "" "")))]
5769 "short_branch (INSN_UID (insn), INSN_UID (operands[2]))"
5770 "*
5771 {
5772 return \"call %a0,%1\;add %%o7,(%l2-.-4),%%o7\";
5773 }")
5774
5775 (define_peephole
5776 [(parallel [(set (match_operand 0 "" "")
5777 (call (mem:SI (match_operand:DI 1 "call_operand_address" "ps"))
5778 (match_operand 2 "" "")))
5779 (clobber (reg:DI 15))])
5780 (set (pc) (label_ref (match_operand 3 "" "")))]
5781 "TARGET_V9 && short_branch (INSN_UID (insn), INSN_UID (operands[3]))"
5782 "*
5783 {
5784 return \"call %a1,%2\;add %%o7,(%l3-.-4),%%o7\";
5785 }")
5786
5787 (define_peephole
5788 [(parallel [(call (mem:SI (match_operand:DI 0 "call_operand_address" "ps"))
5789 (match_operand 1 "" ""))
5790 (clobber (reg:DI 15))])
5791 (set (pc) (label_ref (match_operand 2 "" "")))]
5792 "TARGET_V9 && short_branch (INSN_UID (insn), INSN_UID (operands[2]))"
5793 "*
5794 {
5795 return \"call %a0,%1\;add %%o7,(%l2-.-4),%%o7\";
5796 }")
5797
5798 ;; Other miscellaneous peepholes.
5799
5800 (define_peephole
5801 [(parallel [(set (match_operand:SI 0 "register_operand" "=r")
5802 (minus:SI (match_operand:SI 1 "reg_or_0_operand" "rJ")
5803 (reg:SI 0)))
5804 (clobber (reg:CC 0))])
5805 (set (reg:CC 0) (compare (match_dup 0) (const_int 0)))]
5806 ""
5807 "subxcc %r1,0,%0")