1 ;;- Machine description for HP PA-RISC architecture for GNU C compiler
2 ;; Copyright (C) 1992, 93-98, 1999 Free Software Foundation, Inc.
3 ;; Contributed by the Center for Software Science at the University
4 ;; of Utah.
5
6 ;; This file is part of GNU CC.
7
8 ;; GNU CC is free software; you can redistribute it and/or modify
9 ;; it under the terms of the GNU General Public License as published by
10 ;; the Free Software Foundation; either version 2, or (at your option)
11 ;; any later version.
12
13 ;; GNU CC is distributed in the hope that it will be useful,
14 ;; but WITHOUT ANY WARRANTY; without even the implied warranty of
15 ;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 ;; GNU General Public License for more details.
17
18 ;; You should have received a copy of the GNU General Public License
19 ;; along with GNU CC; see the file COPYING. If not, write to
20 ;; the Free Software Foundation, 59 Temple Place - Suite 330,
21 ;; Boston, MA 02111-1307, USA.
22
23 ;; This gcc Version 2 machine description is inspired by sparc.md and
24 ;; mips.md.
25
26 ;;- See file "rtl.def" for documentation on define_insn, match_*, et al.
27
28 ;; Insn type. Used to default other attribute values.
29
30 ;; type "unary" insns have one input operand (1) and one output operand (0)
31 ;; type "binary" insns have two input operands (1,2) and one output (0)
32
33 (define_attr "type"
34 "move,unary,binary,shift,nullshift,compare,load,store,uncond_branch,branch,cbranch,fbranch,call,dyncall,fpload,fpstore,fpalu,fpcc,fpmulsgl,fpmuldbl,fpdivsgl,fpdivdbl,fpsqrtsgl,fpsqrtdbl,multi,milli,parallel_branch"
35 (const_string "binary"))
36
37 (define_attr "pa_combine_type"
38 "fmpy,faddsub,uncond_branch,addmove,none"
39 (const_string "none"))
40
41 ;; Processor type (for scheduling, not code generation) -- this attribute
42 ;; must exactly match the processor_type enumeration in pa.h.
43 ;;
44 ;; FIXME: Add 800 scheduling for completeness?
45
46 (define_attr "cpu" "700,7100,7100LC,7200,8000" (const (symbol_ref "pa_cpu_attr")))
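
;; For reference, a rough sketch (an assumption about pa.h, not a copy of it)
;; of the enumeration the attribute above must stay in sync with:
;;
;;   enum processor_type
;;   {
;;     PROCESSOR_700,
;;     PROCESSOR_7100,
;;     PROCESSOR_7100LC,
;;     PROCESSOR_7200,
;;     PROCESSOR_8000
;;   };
;;
;; pa_cpu_attr is then expected to map the selected schedule onto one of the
;; attribute values above.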
47
48 ;; Length (in bytes).
49 (define_attr "length" ""
50 (cond [(eq_attr "type" "load,fpload")
51 (if_then_else (match_operand 1 "symbolic_memory_operand" "")
52 (const_int 8) (const_int 4))
53
54 (eq_attr "type" "store,fpstore")
55 (if_then_else (match_operand 0 "symbolic_memory_operand" "")
56 (const_int 8) (const_int 4))
57
58 (eq_attr "type" "binary,shift,nullshift")
59 (if_then_else (match_operand 2 "arith_operand" "")
60 (const_int 4) (const_int 12))
61
62 (eq_attr "type" "move,unary,shift,nullshift")
63 (if_then_else (match_operand 1 "arith_operand" "")
64 (const_int 4) (const_int 8))]
65
66 (const_int 4)))
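
;; As an illustration of the 8-byte case above: a load from a symbolic
;; (DP-relative) address really takes two instructions, roughly
;;     addil LR'sym,%r27
;;     ldw   RR'sym(%r1),%reg
;; (a schematic sketch with made-up operand names; the exact sequences come
;; from the move patterns and the output routines in pa.c).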
67
68 (define_asm_attributes
69 [(set_attr "length" "4")
70 (set_attr "type" "multi")])
71
72 ;; Attributes for instruction and branch scheduling
73
74 ;; For conditional branches.
75 (define_attr "in_branch_delay" "false,true"
76 (if_then_else (and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,parallel_branch")
77 (eq_attr "length" "4"))
78 (const_string "true")
79 (const_string "false")))
80
81 ;; Disallow instructions which use the FPU since they will tie up the FPU
82 ;; even if the instruction is nullified.
83 (define_attr "in_nullified_branch_delay" "false,true"
84 (if_then_else (and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpdivdbl,fpsqrtsgl,fpsqrtdbl,parallel_branch")
85 (eq_attr "length" "4"))
86 (const_string "true")
87 (const_string "false")))
88
89 ;; For calls and millicode calls. Allow unconditional branches in the
90 ;; delay slot.
91 (define_attr "in_call_delay" "false,true"
92 (cond [(and (eq_attr "type" "!uncond_branch,branch,cbranch,fbranch,call,dyncall,multi,milli,parallel_branch")
93 (eq_attr "length" "4"))
94 (const_string "true")
95 (eq_attr "type" "uncond_branch")
96 (if_then_else (ne (symbol_ref "TARGET_JUMP_IN_DELAY")
97 (const_int 0))
98 (const_string "true")
99 (const_string "false"))]
100 (const_string "false")))
101
102
103 ;; Call delay slot description.
104 (define_delay (eq_attr "type" "call")
105 [(eq_attr "in_call_delay" "true") (nil) (nil)])
106
107 ;; Millicode call delay slot description.  Note that it disallows a delay slot
108 ;; when TARGET_PORTABLE_RUNTIME is true.
109 (define_delay (eq_attr "type" "milli")
110 [(and (eq_attr "in_call_delay" "true")
111 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME") (const_int 0)))
112 (nil) (nil)])
113
114 ;; Return and other similar instructions.
115 (define_delay (eq_attr "type" "branch,parallel_branch")
116 [(eq_attr "in_branch_delay" "true") (nil) (nil)])
117
118 ;; Floating point conditional branch delay slot description.
119 (define_delay (eq_attr "type" "fbranch")
120 [(eq_attr "in_branch_delay" "true")
121 (eq_attr "in_nullified_branch_delay" "true")
122 (nil)])
123
124 ;; Integer conditional branch delay slot description.
125 ;; Nullification of conditional branches on the PA is dependent on the
126 ;; direction of the branch. Forward branches nullify true and
127 ;; backward branches nullify false. If the direction is unknown
128 ;; then nullification is not allowed.
129 (define_delay (eq_attr "type" "cbranch")
130 [(eq_attr "in_branch_delay" "true")
131 (and (eq_attr "in_nullified_branch_delay" "true")
132 (attr_flag "forward"))
133 (and (eq_attr "in_nullified_branch_delay" "true")
134 (attr_flag "backward"))])
135
136 (define_delay (and (eq_attr "type" "uncond_branch")
137 (eq (symbol_ref "following_call (insn)")
138 (const_int 0)))
139 [(eq_attr "in_branch_delay" "true") (nil) (nil)])
140
141 ;; Function units of the HPPA. The following data is for the 700 CPUs
142 ;; (Mustang CPU + Timex FPU aka PA-89) because that's what I have the docs for.
143 ;; Scheduling instructions for PA-83 machines according to the Snake
144 ;; constraints shouldn't hurt.
145
146 ;; (define_function_unit {name} {num-units} {n-users} {test}
147 ;; {ready-delay} {issue-delay} [{conflict-list}])
148
149 ;; The integer ALU.
150 ;; (Noted only for documentation; units that take one cycle do not need to
151 ;; be specified.)
152
153 ;; (define_function_unit "alu" 1 0
154 ;; (and (eq_attr "type" "unary,shift,nullshift,binary,move,address")
155 ;; (eq_attr "cpu" "700"))
156 ;; 1 0)
157
158
159 ;; Memory. Disregarding cache misses, the Mustang memory times are:
160 ;; load: 2, fpload: 3
161 ;; store, fpstore: 3 (during which no D-cache operations should be scheduled).
162
163 (define_function_unit "pa700memory" 1 0
164 (and (eq_attr "type" "load,fpload")
165 (eq_attr "cpu" "700")) 2 0)
166 (define_function_unit "pa700memory" 1 0
167 (and (eq_attr "type" "store,fpstore")
168 (eq_attr "cpu" "700")) 3 3)
169
170 ;; The Timex (aka 700) has two floating-point units: ALU and MUL/DIV/SQRT.
171 ;; Timings:
172 ;; Instruction Time Unit Minimum Distance (unit contention)
173 ;; fcpy 3 ALU 2
174 ;; fabs 3 ALU 2
175 ;; fadd 3 ALU 2
176 ;; fsub 3 ALU 2
177 ;; fcmp 3 ALU 2
178 ;; fcnv 3 ALU 2
179 ;; fmpyadd 3 ALU,MPY 2
180 ;; fmpysub 3 ALU,MPY 2
181 ;; fmpycfxt 3 ALU,MPY 2
182 ;; fmpy 3 MPY 2
183 ;; fmpyi 3 MPY 2
184 ;; fdiv,sgl 10 MPY 10
185 ;; fdiv,dbl 12 MPY 12
186 ;; fsqrt,sgl 14 MPY 14
187 ;; fsqrt,dbl 18 MPY 18
188
189 (define_function_unit "pa700fp_alu" 1 0
190 (and (eq_attr "type" "fpcc")
191 (eq_attr "cpu" "700")) 4 2)
192 (define_function_unit "pa700fp_alu" 1 0
193 (and (eq_attr "type" "fpalu")
194 (eq_attr "cpu" "700")) 3 2)
195 (define_function_unit "pa700fp_mpy" 1 0
196 (and (eq_attr "type" "fpmulsgl,fpmuldbl")
197 (eq_attr "cpu" "700")) 3 2)
198 (define_function_unit "pa700fp_mpy" 1 0
199 (and (eq_attr "type" "fpdivsgl")
200 (eq_attr "cpu" "700")) 10 10)
201 (define_function_unit "pa700fp_mpy" 1 0
202 (and (eq_attr "type" "fpdivdbl")
203 (eq_attr "cpu" "700")) 12 12)
204 (define_function_unit "pa700fp_mpy" 1 0
205 (and (eq_attr "type" "fpsqrtsgl")
206 (eq_attr "cpu" "700")) 14 14)
207 (define_function_unit "pa700fp_mpy" 1 0
208 (and (eq_attr "type" "fpsqrtdbl")
209 (eq_attr "cpu" "700")) 18 18)
210
211 ;; Function units for the 7100 and 7150. The 7100/7150 can dual-issue
212 ;; floating point computations with non-floating point computations (fp loads
213 ;; and stores are not fp computations).
214 ;;
215
216 ;; Memory. Disregarding cache misses, memory loads take two cycles; stores also
217 ;; take two cycles, during which no D-cache operations should be scheduled.
218 ;; Any special cases are handled in pa_adjust_cost. The 7100, 7150 and 7100LC
219 ;; all have the same memory characteristics if one disregards cache misses.
220 (define_function_unit "pa7100memory" 1 0
221 (and (eq_attr "type" "load,fpload")
222 (eq_attr "cpu" "7100,7100LC")) 2 0)
223 (define_function_unit "pa7100memory" 1 0
224 (and (eq_attr "type" "store,fpstore")
225 (eq_attr "cpu" "7100,7100LC")) 2 2)
226
227 ;; The 7100/7150 has three floating-point units: ALU, MUL, and DIV.
228 ;; Timings:
229 ;; Instruction Time Unit Minimum Distance (unit contention)
230 ;; fcpy 2 ALU 1
231 ;; fabs 2 ALU 1
232 ;; fadd 2 ALU 1
233 ;; fsub 2 ALU 1
234 ;; fcmp 2 ALU 1
235 ;; fcnv 2 ALU 1
236 ;; fmpyadd 2 ALU,MPY 1
237 ;; fmpysub 2 ALU,MPY 1
238 ;; fmpycfxt 2 ALU,MPY 1
239 ;; fmpy 2 MPY 1
240 ;; fmpyi 2 MPY 1
241 ;; fdiv,sgl 8 DIV 8
242 ;; fdiv,dbl 15 DIV 15
243 ;; fsqrt,sgl 8 DIV 8
244 ;; fsqrt,dbl 15 DIV 15
245
246 (define_function_unit "pa7100fp_alu" 1 0
247 (and (eq_attr "type" "fpcc,fpalu")
248 (eq_attr "cpu" "7100")) 2 1)
249 (define_function_unit "pa7100fp_mpy" 1 0
250 (and (eq_attr "type" "fpmulsgl,fpmuldbl")
251 (eq_attr "cpu" "7100")) 2 1)
252 (define_function_unit "pa7100fp_div" 1 0
253 (and (eq_attr "type" "fpdivsgl,fpsqrtsgl")
254 (eq_attr "cpu" "7100")) 8 8)
255 (define_function_unit "pa7100fp_div" 1 0
256 (and (eq_attr "type" "fpdivdbl,fpsqrtdbl")
257 (eq_attr "cpu" "7100")) 15 15)
258
259 ;; To encourage dual issue we define function units corresponding to
260 ;; the instructions which can be dual issued. This is a rather crude
261 ;; approximation; the "pa7100nonflop" test in particular could be refined.
262 (define_function_unit "pa7100flop" 1 1
263 (and
264 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
265 (eq_attr "cpu" "7100")) 1 1)
266
267 (define_function_unit "pa7100nonflop" 1 1
268 (and
269 (eq_attr "type" "!fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
270 (eq_attr "cpu" "7100")) 1 1)
271
272
273 ;; The memory subsystem works just like the 7100/7150 (except for cache miss times which
274 ;; we don't model here).
275
276 ;; The 7100LC has three floating-point units: ALU, MUL, and DIV.
277 ;; Note divides and sqrt flops lock the cpu until the flop is
278 ;; finished. fmpy and xmpyu (fmpyi) lock the cpu for one cycle.
279 ;; There's no way to avoid the penalty.
280 ;; Timings:
281 ;; Instruction Time Unit Minimum Distance (unit contention)
282 ;; fcpy 2 ALU 1
283 ;; fabs 2 ALU 1
284 ;; fadd 2 ALU 1
285 ;; fsub 2 ALU 1
286 ;; fcmp 2 ALU 1
287 ;; fcnv 2 ALU 1
288 ;; fmpyadd,sgl 2 ALU,MPY 1
289 ;; fmpyadd,dbl 3 ALU,MPY 2
290 ;; fmpysub,sgl 2 ALU,MPY 1
291 ;; fmpysub,dbl 3 ALU,MPY 2
292 ;; fmpycfxt,sgl 2 ALU,MPY 1
293 ;; fmpycfxt,dbl 3 ALU,MPY 2
294 ;; fmpy,sgl 2 MPY 1
295 ;; fmpy,dbl 3 MPY 2
296 ;; fmpyi 3 MPY 2
297 ;; fdiv,sgl 8 DIV 8
298 ;; fdiv,dbl 15 DIV 15
299 ;; fsqrt,sgl 8 DIV 8
300 ;; fsqrt,dbl 15 DIV 15
301
302 (define_function_unit "pa7100LCfp_alu" 1 0
303 (and (eq_attr "type" "fpcc,fpalu")
304 (eq_attr "cpu" "7100LC,7200")) 2 1)
305 (define_function_unit "pa7100LCfp_mpy" 1 0
306 (and (eq_attr "type" "fpmulsgl")
307 (eq_attr "cpu" "7100LC,7200")) 2 1)
308 (define_function_unit "pa7100LCfp_mpy" 1 0
309 (and (eq_attr "type" "fpmuldbl")
310 (eq_attr "cpu" "7100LC,7200")) 3 2)
311 (define_function_unit "pa7100LCfp_div" 1 0
312 (and (eq_attr "type" "fpdivsgl,fpsqrtsgl")
313 (eq_attr "cpu" "7100LC,7200")) 8 8)
314 (define_function_unit "pa7100LCfp_div" 1 0
315 (and (eq_attr "type" "fpdivdbl,fpsqrtdbl")
316 (eq_attr "cpu" "7100LC,7200")) 15 15)
317
318 ;; Define the various functional units for dual-issue.
319
320 ;; There's only one floating point unit.
321 (define_function_unit "pa7100LCflop" 1 1
322 (and
323 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
324 (eq_attr "cpu" "7100LC,7200")) 1 1)
325
326 ;; Shifts and memory ops execute in only one of the integer ALUs.
327 (define_function_unit "pa7100LCshiftmem" 1 1
328 (and
329 (eq_attr "type" "shift,nullshift,load,fpload,store,fpstore")
330 (eq_attr "cpu" "7100LC,7200")) 1 1)
331
332 ;; We have two basic ALUs.
333 (define_function_unit "pa7100LCalu" 2 1
334 (and
335 (eq_attr "type" "!fpcc,fpalu,fpmulsgl,fpmuldbl,fpdivsgl,fpsqrtsgl,fpdivdbl,fpsqrtdbl")
336 (eq_attr "cpu" "7100LC,7200")) 1 1)
337
338 ;; I don't have complete information on the PA7200; however, most of
339 ;; what I've heard makes it look like a 7100LC without the store-store
340 ;; penalty. So that's how we'll model it.
341
342 ;; Memory. Disregarding cache misses, memory loads and stores take
343 ;; two cycles. Any special cases are handled in pa_adjust_cost.
344 (define_function_unit "pa7200memory" 1 0
345 (and (eq_attr "type" "load,fpload,store,fpstore")
346 (eq_attr "cpu" "7200")) 2 0)
347
348 ;; I don't have detailed information on the PA7200 FP pipeline, so I
349 ;; treat it just like the 7100LC pipeline.
350 ;; Similarly for the multi-issue fake units.
351
352 ;;
353 ;; Scheduling for the PA8000 is somewhat different from scheduling for a
354 ;; traditional architecture.
355 ;;
356 ;; The PA8000 has a large (56 entry) reorder buffer that is split between
357 ;; memory and non-memory operations.
358 ;;
359 ;; The PA8000 can issue two memory and two non-memory operations per cycle to
360 ;; the function units. Similarly, the PA8000 can retire two memory and two
361 ;; non-memory operations per cycle.
362 ;;
363 ;; Given the large reorder buffer, the processor can hide most latencies.
364 ;; According to HP, they've got the best results by scheduling for retirement
365 ;; bandwidth with limited latency scheduling for floating point operations.
366 ;; Latency for integer operations and memory references is ignored.
367 ;;
368 ;; We claim floating point operations have a 2 cycle latency and are
369 ;; fully pipelined, except for div and sqrt which are not pipelined.
370 ;;
371 ;; It is not necessary to define the shifter and integer alu units.
372 ;;
373 ;; These first two define_function_unit descriptions model retirement from
374 ;; the reorder buffer.
375 (define_function_unit "pa8000lsu" 2 1
376 (and
377 (eq_attr "type" "load,fpload,store,fpstore")
378 (eq_attr "cpu" "8000")) 1 1)
379
380 (define_function_unit "pa8000alu" 2 1
381 (and
382 (eq_attr "type" "!load,fpload,store,fpstore")
383 (eq_attr "cpu" "8000")) 1 1)
384
385 ;; Claim floating point ops have a 2 cycle latency, excluding div and
386 ;; sqrt, which are not pipelined and issue to different units.
387 (define_function_unit "pa8000fmac" 2 0
388 (and
389 (eq_attr "type" "fpcc,fpalu,fpmulsgl,fpmuldbl")
390 (eq_attr "cpu" "8000")) 2 1)
391
392 (define_function_unit "pa8000fdiv" 2 1
393 (and
394 (eq_attr "type" "fpdivsgl,fpsqrtsgl")
395 (eq_attr "cpu" "8000")) 17 17)
396
397 (define_function_unit "pa8000fdiv" 2 1
398 (and
399 (eq_attr "type" "fpdivdbl,fpsqrtdbl")
400 (eq_attr "cpu" "8000")) 31 31)
401
402 \f
403 ;; Compare instructions.
404 ;; This controls RTL generation and register allocation.
405
406 ;; We generate RTL for comparisons and branches by having the cmpxx
407 ;; patterns store away the operands. Then, the scc and bcc patterns
408 ;; emit RTL for both the compare and the branch.
409 ;;
410
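;; A minimal sketch (assumed declarations, not copied from pa.c/pa.h) of the
;; state the cmpXX expanders share with the scc and bcc expanders below:
;;
;;   /* Operands saved by the most recent cmpsi/cmpsf/cmpdf expansion.  */
;;   rtx hppa_compare_op0, hppa_compare_op1;
;;   /* Which kind of compare it was: CMP_SI, CMP_SF or CMP_DF.  */
;;   enum cmp_type hppa_branch_type;
;;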
411 (define_expand "cmpsi"
412 [(set (reg:CC 0)
413 (compare:CC (match_operand:SI 0 "reg_or_0_operand" "")
414 (match_operand:SI 1 "arith5_operand" "")))]
415 ""
416 "
417 {
418 hppa_compare_op0 = operands[0];
419 hppa_compare_op1 = operands[1];
420 hppa_branch_type = CMP_SI;
421 DONE;
422 }")
423
424 (define_expand "cmpsf"
425 [(set (reg:CCFP 0)
426 (compare:CCFP (match_operand:SF 0 "reg_or_0_operand" "")
427 (match_operand:SF 1 "reg_or_0_operand" "")))]
428 "! TARGET_SOFT_FLOAT"
429 "
430 {
431 hppa_compare_op0 = operands[0];
432 hppa_compare_op1 = operands[1];
433 hppa_branch_type = CMP_SF;
434 DONE;
435 }")
436
437 (define_expand "cmpdf"
438 [(set (reg:CCFP 0)
439 (compare:CCFP (match_operand:DF 0 "reg_or_0_operand" "")
440 (match_operand:DF 1 "reg_or_0_operand" "")))]
441 "! TARGET_SOFT_FLOAT"
442 "
443 {
444 hppa_compare_op0 = operands[0];
445 hppa_compare_op1 = operands[1];
446 hppa_branch_type = CMP_DF;
447 DONE;
448 }")
449
450 (define_insn ""
451 [(set (reg:CCFP 0)
452 (match_operator:CCFP 2 "comparison_operator"
453 [(match_operand:SF 0 "reg_or_0_operand" "fG")
454 (match_operand:SF 1 "reg_or_0_operand" "fG")]))]
455 "! TARGET_SOFT_FLOAT"
456 "fcmp,sgl,%Y2 %f0,%f1"
457 [(set_attr "length" "4")
458 (set_attr "type" "fpcc")])
459
460 (define_insn ""
461 [(set (reg:CCFP 0)
462 (match_operator:CCFP 2 "comparison_operator"
463 [(match_operand:DF 0 "reg_or_0_operand" "fG")
464 (match_operand:DF 1 "reg_or_0_operand" "fG")]))]
465 "! TARGET_SOFT_FLOAT"
466 "fcmp,dbl,%Y2 %f0,%f1"
467 [(set_attr "length" "4")
468 (set_attr "type" "fpcc")])
469
470 ;; scc insns.
471
472 (define_expand "seq"
473 [(set (match_operand:SI 0 "register_operand" "")
474 (eq:SI (match_dup 1)
475 (match_dup 2)))]
476 ""
477 "
478 {
479 /* fp scc patterns rarely match, and are not a win on the PA. */
480 if (hppa_branch_type != CMP_SI)
481 FAIL;
482 /* set up operands from compare. */
483 operands[1] = hppa_compare_op0;
484 operands[2] = hppa_compare_op1;
485 /* fall through and generate default code */
486 }")
487
488 (define_expand "sne"
489 [(set (match_operand:SI 0 "register_operand" "")
490 (ne:SI (match_dup 1)
491 (match_dup 2)))]
492 ""
493 "
494 {
495 /* fp scc patterns rarely match, and are not a win on the PA. */
496 if (hppa_branch_type != CMP_SI)
497 FAIL;
498 operands[1] = hppa_compare_op0;
499 operands[2] = hppa_compare_op1;
500 }")
501
502 (define_expand "slt"
503 [(set (match_operand:SI 0 "register_operand" "")
504 (lt:SI (match_dup 1)
505 (match_dup 2)))]
506 ""
507 "
508 {
509 /* fp scc patterns rarely match, and are not a win on the PA. */
510 if (hppa_branch_type != CMP_SI)
511 FAIL;
512 operands[1] = hppa_compare_op0;
513 operands[2] = hppa_compare_op1;
514 }")
515
516 (define_expand "sgt"
517 [(set (match_operand:SI 0 "register_operand" "")
518 (gt:SI (match_dup 1)
519 (match_dup 2)))]
520 ""
521 "
522 {
523 /* fp scc patterns rarely match, and are not a win on the PA. */
524 if (hppa_branch_type != CMP_SI)
525 FAIL;
526 operands[1] = hppa_compare_op0;
527 operands[2] = hppa_compare_op1;
528 }")
529
530 (define_expand "sle"
531 [(set (match_operand:SI 0 "register_operand" "")
532 (le:SI (match_dup 1)
533 (match_dup 2)))]
534 ""
535 "
536 {
537 /* fp scc patterns rarely match, and are not a win on the PA. */
538 if (hppa_branch_type != CMP_SI)
539 FAIL;
540 operands[1] = hppa_compare_op0;
541 operands[2] = hppa_compare_op1;
542 }")
543
544 (define_expand "sge"
545 [(set (match_operand:SI 0 "register_operand" "")
546 (ge:SI (match_dup 1)
547 (match_dup 2)))]
548 ""
549 "
550 {
551 /* fp scc patterns rarely match, and are not a win on the PA. */
552 if (hppa_branch_type != CMP_SI)
553 FAIL;
554 operands[1] = hppa_compare_op0;
555 operands[2] = hppa_compare_op1;
556 }")
557
558 (define_expand "sltu"
559 [(set (match_operand:SI 0 "register_operand" "")
560 (ltu:SI (match_dup 1)
561 (match_dup 2)))]
562 ""
563 "
564 {
565 if (hppa_branch_type != CMP_SI)
566 FAIL;
567 operands[1] = hppa_compare_op0;
568 operands[2] = hppa_compare_op1;
569 }")
570
571 (define_expand "sgtu"
572 [(set (match_operand:SI 0 "register_operand" "")
573 (gtu:SI (match_dup 1)
574 (match_dup 2)))]
575 ""
576 "
577 {
578 if (hppa_branch_type != CMP_SI)
579 FAIL;
580 operands[1] = hppa_compare_op0;
581 operands[2] = hppa_compare_op1;
582 }")
583
584 (define_expand "sleu"
585 [(set (match_operand:SI 0 "register_operand" "")
586 (leu:SI (match_dup 1)
587 (match_dup 2)))]
588 ""
589 "
590 {
591 if (hppa_branch_type != CMP_SI)
592 FAIL;
593 operands[1] = hppa_compare_op0;
594 operands[2] = hppa_compare_op1;
595 }")
596
597 (define_expand "sgeu"
598 [(set (match_operand:SI 0 "register_operand" "")
599 (geu:SI (match_dup 1)
600 (match_dup 2)))]
601 ""
602 "
603 {
604 if (hppa_branch_type != CMP_SI)
605 FAIL;
606 operands[1] = hppa_compare_op0;
607 operands[2] = hppa_compare_op1;
608 }")
609
610 ;; Instruction canonicalization puts immediate operands second, which
611 ;; is the reverse of what we want.
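;; For example, the canonical RTL for "x > 5" is
;;   (gt:SI (reg:SI x) (const_int 5))
;; while comiclr encodes its immediate in the first source field, so the
;; templates below print operand 2 ahead of operand 1 and compensate with the
;; %B/%S condition codes (an illustrative note; see print_operand in pa.c for
;; the authoritative letter meanings).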
612
613 (define_insn "scc"
614 [(set (match_operand:SI 0 "register_operand" "=r")
615 (match_operator:SI 3 "comparison_operator"
616 [(match_operand:SI 1 "register_operand" "r")
617 (match_operand:SI 2 "arith11_operand" "rI")]))]
618 ""
619 "com%I2clr,%B3 %2,%1,%0\;ldi 1,%0"
620 [(set_attr "type" "binary")
621 (set_attr "length" "8")])
622
623 (define_insn "iorscc"
624 [(set (match_operand:SI 0 "register_operand" "=r")
625 (ior:SI (match_operator:SI 3 "comparison_operator"
626 [(match_operand:SI 1 "register_operand" "r")
627 (match_operand:SI 2 "arith11_operand" "rI")])
628 (match_operator:SI 6 "comparison_operator"
629 [(match_operand:SI 4 "register_operand" "r")
630 (match_operand:SI 5 "arith11_operand" "rI")])))]
631 ""
632 "com%I2clr,%S3 %2,%1,%%r0\;com%I5clr,%B6 %5,%4,%0\;ldi 1,%0"
633 [(set_attr "type" "binary")
634 (set_attr "length" "12")])
635
636 ;; Combiner patterns for common operations performed with the output
637 ;; from an scc insn (negscc and incscc).
638 (define_insn "negscc"
639 [(set (match_operand:SI 0 "register_operand" "=r")
640 (neg:SI (match_operator:SI 3 "comparison_operator"
641 [(match_operand:SI 1 "register_operand" "r")
642 (match_operand:SI 2 "arith11_operand" "rI")])))]
643 ""
644 "com%I2clr,%B3 %2,%1,%0\;ldi -1,%0"
645 [(set_attr "type" "binary")
646 (set_attr "length" "8")])
647
648 ;; Patterns for adding/subtracting the result of a boolean expression from
649 ;; a register. First we have special patterns that make use of the carry
650 ;; bit, and output only two instructions. For the cases we can't in
651 ;; general do in two instructions, the incscc pattern at the end outputs
652 ;; two or three instructions.
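;;
;; For instance, "x = y + (a <= b)" (unsigned) matches the first pattern below
;; and comes out roughly as
;;     sub   b,a,%r0       ; carry/borrow bit records "a <= b" (no borrow)
;;     addc  %r0,y,x       ; x = y + 0 + carry
;; (illustrative register names; the real text comes from the templates).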
653
654 (define_insn ""
655 [(set (match_operand:SI 0 "register_operand" "=r")
656 (plus:SI (leu:SI (match_operand:SI 2 "register_operand" "r")
657 (match_operand:SI 3 "arith11_operand" "rI"))
658 (match_operand:SI 1 "register_operand" "r")))]
659 ""
660 "sub%I3 %3,%2,%%r0\;addc %%r0,%1,%0"
661 [(set_attr "type" "binary")
662 (set_attr "length" "8")])
663
664 ; This need only accept registers for op3, since canonicalization
665 ; replaces geu with gtu when op3 is an integer.
666 (define_insn ""
667 [(set (match_operand:SI 0 "register_operand" "=r")
668 (plus:SI (geu:SI (match_operand:SI 2 "register_operand" "r")
669 (match_operand:SI 3 "register_operand" "r"))
670 (match_operand:SI 1 "register_operand" "r")))]
671 ""
672 "sub %2,%3,%%r0\;addc %%r0,%1,%0"
673 [(set_attr "type" "binary")
674 (set_attr "length" "8")])
675
676 ; Match only integers for op3 here. This is used as canonical form of the
677 ; geu pattern when op3 is an integer. Don't match registers since we can't
678 ; make better code than the general incscc pattern.
679 (define_insn ""
680 [(set (match_operand:SI 0 "register_operand" "=r")
681 (plus:SI (gtu:SI (match_operand:SI 2 "register_operand" "r")
682 (match_operand:SI 3 "int11_operand" "I"))
683 (match_operand:SI 1 "register_operand" "r")))]
684 ""
685 "addi %k3,%2,%%r0\;addc %%r0,%1,%0"
686 [(set_attr "type" "binary")
687 (set_attr "length" "8")])
688
689 (define_insn "incscc"
690 [(set (match_operand:SI 0 "register_operand" "=r,r")
691 (plus:SI (match_operator:SI 4 "comparison_operator"
692 [(match_operand:SI 2 "register_operand" "r,r")
693 (match_operand:SI 3 "arith11_operand" "rI,rI")])
694 (match_operand:SI 1 "register_operand" "0,?r")))]
695 ""
696 "@
697 com%I3clr,%B4 %3,%2,%%r0\;addi 1,%0,%0
698 com%I3clr,%B4 %3,%2,%%r0\;addi,tr 1,%1,%0\;copy %1,%0"
699 [(set_attr "type" "binary,binary")
700 (set_attr "length" "8,12")])
701
702 (define_insn ""
703 [(set (match_operand:SI 0 "register_operand" "=r")
704 (minus:SI (match_operand:SI 1 "register_operand" "r")
705 (gtu:SI (match_operand:SI 2 "register_operand" "r")
706 (match_operand:SI 3 "arith11_operand" "rI"))))]
707 ""
708 "sub%I3 %3,%2,%%r0\;subb %1,0,%0"
709 [(set_attr "type" "binary")
710 (set_attr "length" "8")])
711
712 (define_insn ""
713 [(set (match_operand:SI 0 "register_operand" "=r")
714 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
715 (gtu:SI (match_operand:SI 2 "register_operand" "r")
716 (match_operand:SI 3 "arith11_operand" "rI")))
717 (match_operand:SI 4 "register_operand" "r")))]
718 ""
719 "sub%I3 %3,%2,%%r0\;subb %1,%4,%0"
720 [(set_attr "type" "binary")
721 (set_attr "length" "8")])
722
723 ; This need only accept registers for op3, since canonicalization
724 ; replaces ltu with leu when op3 is an integer.
725 (define_insn ""
726 [(set (match_operand:SI 0 "register_operand" "=r")
727 (minus:SI (match_operand:SI 1 "register_operand" "r")
728 (ltu:SI (match_operand:SI 2 "register_operand" "r")
729 (match_operand:SI 3 "register_operand" "r"))))]
730 ""
731 "sub %2,%3,%%r0\;subb %1,0,%0"
732 [(set_attr "type" "binary")
733 (set_attr "length" "8")])
734
735 (define_insn ""
736 [(set (match_operand:SI 0 "register_operand" "=r")
737 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
738 (ltu:SI (match_operand:SI 2 "register_operand" "r")
739 (match_operand:SI 3 "register_operand" "r")))
740 (match_operand:SI 4 "register_operand" "r")))]
741 ""
742 "sub %2,%3,%%r0\;subb %1,%4,%0"
743 [(set_attr "type" "binary")
744 (set_attr "length" "8")])
745
746 ; Match only integers for op3 here. This is used as canonical form of the
747 ; ltu pattern when op3 is an integer. Don't match registers since we can't
748 ; make better code than the general incscc pattern.
749 (define_insn ""
750 [(set (match_operand:SI 0 "register_operand" "=r")
751 (minus:SI (match_operand:SI 1 "register_operand" "r")
752 (leu:SI (match_operand:SI 2 "register_operand" "r")
753 (match_operand:SI 3 "int11_operand" "I"))))]
754 ""
755 "addi %k3,%2,%%r0\;subb %1,0,%0"
756 [(set_attr "type" "binary")
757 (set_attr "length" "8")])
758
759 (define_insn ""
760 [(set (match_operand:SI 0 "register_operand" "=r")
761 (minus:SI (minus:SI (match_operand:SI 1 "register_operand" "r")
762 (leu:SI (match_operand:SI 2 "register_operand" "r")
763 (match_operand:SI 3 "int11_operand" "I")))
764 (match_operand:SI 4 "register_operand" "r")))]
765 ""
766 "addi %k3,%2,%%r0\;subb %1,%4,%0"
767 [(set_attr "type" "binary")
768 (set_attr "length" "8")])
769
770 (define_insn "decscc"
771 [(set (match_operand:SI 0 "register_operand" "=r,r")
772 (minus:SI (match_operand:SI 1 "register_operand" "0,?r")
773 (match_operator:SI 4 "comparison_operator"
774 [(match_operand:SI 2 "register_operand" "r,r")
775 (match_operand:SI 3 "arith11_operand" "rI,rI")])))]
776 ""
777 "@
778 com%I3clr,%B4 %3,%2,%%r0\;addi -1,%0,%0
779 com%I3clr,%B4 %3,%2,%%r0\;addi,tr -1,%1,%0\;copy %1,%0"
780 [(set_attr "type" "binary,binary")
781 (set_attr "length" "8,12")])
782
783 ; Patterns for max and min. (There is no need for an earlyclobber in the
784 ; last alternative since the middle alternative will match if op0 == op1.)
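;
; For example, with operand 1 tied to operand 0, sminsi3 comes out as
;     comclr,>  %2,%0,%r0     ; nullify the copy when %2 > %0
;     copy      %2,%0         ; otherwise %0 = %2, the smaller value
; (a readability restatement of the first alternative below, nothing more).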
785
786 (define_insn "sminsi3"
787 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
788 (smin:SI (match_operand:SI 1 "register_operand" "%0,0,r")
789 (match_operand:SI 2 "arith11_operand" "r,I,M")))]
790 ""
791 "@
792 comclr,> %2,%0,%%r0\;copy %2,%0
793 comiclr,> %2,%0,%%r0\;ldi %2,%0
794 comclr,> %1,%r2,%0\;copy %1,%0"
795 [(set_attr "type" "multi,multi,multi")
796 (set_attr "length" "8,8,8")])
797
798 (define_insn "uminsi3"
799 [(set (match_operand:SI 0 "register_operand" "=r,r")
800 (umin:SI (match_operand:SI 1 "register_operand" "%0,0")
801 (match_operand:SI 2 "arith11_operand" "r,I")))]
802 ""
803 "@
804 comclr,>> %2,%0,%%r0\;copy %2,%0
805 comiclr,>> %2,%0,%%r0\;ldi %2,%0"
806 [(set_attr "type" "multi,multi")
807 (set_attr "length" "8,8")])
808
809 (define_insn "smaxsi3"
810 [(set (match_operand:SI 0 "register_operand" "=r,r,r")
811 (smax:SI (match_operand:SI 1 "register_operand" "%0,0,r")
812 (match_operand:SI 2 "arith11_operand" "r,I,M")))]
813 ""
814 "@
815 comclr,< %2,%0,%%r0\;copy %2,%0
816 comiclr,< %2,%0,%%r0\;ldi %2,%0
817 comclr,< %1,%r2,%0\;copy %1,%0"
818 [(set_attr "type" "multi,multi,multi")
819 (set_attr "length" "8,8,8")])
820
821 (define_insn "umaxsi3"
822 [(set (match_operand:SI 0 "register_operand" "=r,r")
823 (umax:SI (match_operand:SI 1 "register_operand" "%0,0")
824 (match_operand:SI 2 "arith11_operand" "r,I")))]
825 ""
826 "@
827 comclr,<< %2,%0,%%r0\;copy %2,%0
828 comiclr,<< %2,%0,%%r0\;ldi %2,%0"
829 [(set_attr "type" "multi,multi")
830 (set_attr "length" "8,8")])
831
832 (define_insn "abssi2"
833 [(set (match_operand:SI 0 "register_operand" "=r")
834 (abs:SI (match_operand:SI 1 "register_operand" "r")))]
835 ""
836 "or,>= %%r0,%1,%0\;subi 0,%0,%0"
837 [(set_attr "type" "multi")
838 (set_attr "length" "8")])
839
840 ;;; Experimental conditional move patterns
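;;;
;;; These let a source-level conditional assignment such as
;;;   if (a < b) x = y; else x = 0;
;;; be expanded as a compare-and-nullify sequence instead of a branch
;;; (integer comparisons only; the expander FAILs for FP compares).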
841
842 (define_expand "movsicc"
843 [(set (match_operand:SI 0 "register_operand" "")
844 (if_then_else:SI
845 (match_operator 1 "comparison_operator"
846 [(match_dup 4)
847 (match_dup 5)])
848 (match_operand:SI 2 "reg_or_cint_move_operand" "")
849 (match_operand:SI 3 "reg_or_cint_move_operand" "")))]
850 ""
851 "
852 {
853 enum rtx_code code = GET_CODE (operands[1]);
854
855 if (hppa_branch_type != CMP_SI)
856 FAIL;
857
858 /* operands[1] is currently the result of compare_from_rtx. We want to
859 emit a compare of the original operands. */
860 operands[1] = gen_rtx_fmt_ee (code, SImode, hppa_compare_op0, hppa_compare_op1);
861 operands[4] = hppa_compare_op0;
862 operands[5] = hppa_compare_op1;
863 }")
864
865 ; We need the first constraint alternative in order to avoid
866 ; earlyclobbers on all other alternatives.
867 (define_insn ""
868 [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r")
869 (if_then_else:SI
870 (match_operator 5 "comparison_operator"
871 [(match_operand:SI 3 "register_operand" "r,r,r,r,r")
872 (match_operand:SI 4 "arith11_operand" "rI,rI,rI,rI,rI")])
873 (match_operand:SI 1 "reg_or_cint_move_operand" "0,r,J,N,K")
874 (const_int 0)))]
875 ""
876 "@
877 com%I4clr,%S5 %4,%3,%%r0\;ldi 0,%0
878 com%I4clr,%B5 %4,%3,%0\;copy %1,%0
879 com%I4clr,%B5 %4,%3,%0\;ldi %1,%0
880 com%I4clr,%B5 %4,%3,%0\;ldil L'%1,%0
881 com%I4clr,%B5 %4,%3,%0\;zdepi %Z1,%0"
882 [(set_attr "type" "multi,multi,multi,multi,nullshift")
883 (set_attr "length" "8,8,8,8,8")])
884
885 (define_insn ""
886 [(set (match_operand:SI 0 "register_operand" "=r,r,r,r,r,r,r,r")
887 (if_then_else:SI
888 (match_operator 5 "comparison_operator"
889 [(match_operand:SI 3 "register_operand" "r,r,r,r,r,r,r,r")
890 (match_operand:SI 4 "arith11_operand" "rI,rI,rI,rI,rI,rI,rI,rI")])
891 (match_operand:SI 1 "reg_or_cint_move_operand" "0,0,0,0,r,J,N,K")
892 (match_operand:SI 2 "reg_or_cint_move_operand" "r,J,N,K,0,0,0,0")))]
893 ""
894 "@
895 com%I4clr,%S5 %4,%3,%%r0\;copy %2,%0
896 com%I4clr,%S5 %4,%3,%%r0\;ldi %2,%0
897 com%I4clr,%S5 %4,%3,%%r0\;ldil L'%2,%0
898 com%I4clr,%S5 %4,%3,%%r0\;zdepi %Z2,%0
899 com%I4clr,%B5 %4,%3,%%r0\;copy %1,%0
900 com%I4clr,%B5 %4,%3,%%r0\;ldi %1,%0
901 com%I4clr,%B5 %4,%3,%%r0\;ldil L'%1,%0
902 com%I4clr,%B5 %4,%3,%%r0\;zdepi %Z1,%0"
903 [(set_attr "type" "multi,multi,multi,nullshift,multi,multi,multi,nullshift")
904 (set_attr "length" "8,8,8,8,8,8,8,8")])
905
906 ;; Conditional Branches
907
908 (define_expand "beq"
909 [(set (pc)
910 (if_then_else (eq (match_dup 1) (match_dup 2))
911 (label_ref (match_operand 0 "" ""))
912 (pc)))]
913 ""
914 "
915 {
916 if (hppa_branch_type != CMP_SI)
917 {
918 emit_insn (gen_cmp_fp (EQ, hppa_compare_op0, hppa_compare_op1));
919 emit_bcond_fp (NE, operands[0]);
920 DONE;
921 }
922 /* set up operands from compare. */
923 operands[1] = hppa_compare_op0;
924 operands[2] = hppa_compare_op1;
925 /* fall through and generate default code */
926 }")
927
928 (define_expand "bne"
929 [(set (pc)
930 (if_then_else (ne (match_dup 1) (match_dup 2))
931 (label_ref (match_operand 0 "" ""))
932 (pc)))]
933 ""
934 "
935 {
936 if (hppa_branch_type != CMP_SI)
937 {
938 emit_insn (gen_cmp_fp (NE, hppa_compare_op0, hppa_compare_op1));
939 emit_bcond_fp (NE, operands[0]);
940 DONE;
941 }
942 operands[1] = hppa_compare_op0;
943 operands[2] = hppa_compare_op1;
944 }")
945
946 (define_expand "bgt"
947 [(set (pc)
948 (if_then_else (gt (match_dup 1) (match_dup 2))
949 (label_ref (match_operand 0 "" ""))
950 (pc)))]
951 ""
952 "
953 {
954 if (hppa_branch_type != CMP_SI)
955 {
956 emit_insn (gen_cmp_fp (GT, hppa_compare_op0, hppa_compare_op1));
957 emit_bcond_fp (NE, operands[0]);
958 DONE;
959 }
960 operands[1] = hppa_compare_op0;
961 operands[2] = hppa_compare_op1;
962 }")
963
964 (define_expand "blt"
965 [(set (pc)
966 (if_then_else (lt (match_dup 1) (match_dup 2))
967 (label_ref (match_operand 0 "" ""))
968 (pc)))]
969 ""
970 "
971 {
972 if (hppa_branch_type != CMP_SI)
973 {
974 emit_insn (gen_cmp_fp (LT, hppa_compare_op0, hppa_compare_op1));
975 emit_bcond_fp (NE, operands[0]);
976 DONE;
977 }
978 operands[1] = hppa_compare_op0;
979 operands[2] = hppa_compare_op1;
980 }")
981
982 (define_expand "bge"
983 [(set (pc)
984 (if_then_else (ge (match_dup 1) (match_dup 2))
985 (label_ref (match_operand 0 "" ""))
986 (pc)))]
987 ""
988 "
989 {
990 if (hppa_branch_type != CMP_SI)
991 {
992 emit_insn (gen_cmp_fp (GE, hppa_compare_op0, hppa_compare_op1));
993 emit_bcond_fp (NE, operands[0]);
994 DONE;
995 }
996 operands[1] = hppa_compare_op0;
997 operands[2] = hppa_compare_op1;
998 }")
999
1000 (define_expand "ble"
1001 [(set (pc)
1002 (if_then_else (le (match_dup 1) (match_dup 2))
1003 (label_ref (match_operand 0 "" ""))
1004 (pc)))]
1005 ""
1006 "
1007 {
1008 if (hppa_branch_type != CMP_SI)
1009 {
1010 emit_insn (gen_cmp_fp (LE, hppa_compare_op0, hppa_compare_op1));
1011 emit_bcond_fp (NE, operands[0]);
1012 DONE;
1013 }
1014 operands[1] = hppa_compare_op0;
1015 operands[2] = hppa_compare_op1;
1016 }")
1017
1018 (define_expand "bgtu"
1019 [(set (pc)
1020 (if_then_else (gtu (match_dup 1) (match_dup 2))
1021 (label_ref (match_operand 0 "" ""))
1022 (pc)))]
1023 ""
1024 "
1025 {
1026 if (hppa_branch_type != CMP_SI)
1027 FAIL;
1028 operands[1] = hppa_compare_op0;
1029 operands[2] = hppa_compare_op1;
1030 }")
1031
1032 (define_expand "bltu"
1033 [(set (pc)
1034 (if_then_else (ltu (match_dup 1) (match_dup 2))
1035 (label_ref (match_operand 0 "" ""))
1036 (pc)))]
1037 ""
1038 "
1039 {
1040 if (hppa_branch_type != CMP_SI)
1041 FAIL;
1042 operands[1] = hppa_compare_op0;
1043 operands[2] = hppa_compare_op1;
1044 }")
1045
1046 (define_expand "bgeu"
1047 [(set (pc)
1048 (if_then_else (geu (match_dup 1) (match_dup 2))
1049 (label_ref (match_operand 0 "" ""))
1050 (pc)))]
1051 ""
1052 "
1053 {
1054 if (hppa_branch_type != CMP_SI)
1055 FAIL;
1056 operands[1] = hppa_compare_op0;
1057 operands[2] = hppa_compare_op1;
1058 }")
1059
1060 (define_expand "bleu"
1061 [(set (pc)
1062 (if_then_else (leu (match_dup 1) (match_dup 2))
1063 (label_ref (match_operand 0 "" ""))
1064 (pc)))]
1065 ""
1066 "
1067 {
1068 if (hppa_branch_type != CMP_SI)
1069 FAIL;
1070 operands[1] = hppa_compare_op0;
1071 operands[2] = hppa_compare_op1;
1072 }")
1073
1074 ;; Match the branch patterns.
1075
1076
1077 ;; Note a long backward conditional branch with an annulled delay slot
1078 ;; has a length of 12.
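;; For readability, the length attribute below chooses among:
;;   4  - short conditional branch (target within roughly 8k bytes),
;;   8  - two instructions, the conditional part plus an unconditional branch
;;        (target within roughly 256k bytes),
;;   20 - long-distance sequence when not generating PIC,
;;   28 - long-distance PIC sequence.
;; (This is only a summary of the cond below, not an independent spec.)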
1079 (define_insn ""
1080 [(set (pc)
1081 (if_then_else
1082 (match_operator 3 "comparison_operator"
1083 [(match_operand:SI 1 "reg_or_0_operand" "rM")
1084 (match_operand:SI 2 "arith5_operand" "rL")])
1085 (label_ref (match_operand 0 "" ""))
1086 (pc)))]
1087 ""
1088 "*
1089 {
1090 return output_cbranch (operands, INSN_ANNULLED_BRANCH_P (insn),
1091 get_attr_length (insn), 0, insn);
1092 }"
1093 [(set_attr "type" "cbranch")
1094 (set (attr "length")
1095 (cond [(lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1096 (const_int 8184))
1097 (const_int 4)
1098 (lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1099 (const_int 262100))
1100 (const_int 8)
1101 (eq (symbol_ref "flag_pic") (const_int 0))
1102 (const_int 20)]
1103 (const_int 28)))])
1104
1105 ;; Match the negated branch.
1106
1107 (define_insn ""
1108 [(set (pc)
1109 (if_then_else
1110 (match_operator 3 "comparison_operator"
1111 [(match_operand:SI 1 "reg_or_0_operand" "rM")
1112 (match_operand:SI 2 "arith5_operand" "rL")])
1113 (pc)
1114 (label_ref (match_operand 0 "" ""))))]
1115 ""
1116 "*
1117 {
1118 return output_cbranch (operands, INSN_ANNULLED_BRANCH_P (insn),
1119 get_attr_length (insn), 1, insn);
1120 }"
1121 [(set_attr "type" "cbranch")
1122 (set (attr "length")
1123 (cond [(lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1124 (const_int 8184))
1125 (const_int 4)
1126 (lt (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
1127 (const_int 262100))
1128 (const_int 8)
1129 (eq (symbol_ref "flag_pic") (const_int 0))
1130 (const_int 20)]
1131 (const_int 28)))])
1132
1133 ;; Branch on Bit patterns.
1134 (define_insn ""
1135 [(set (pc)
1136 (if_then_else
1137 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1138 (const_int 1)
1139 (match_operand:SI 1 "uint5_operand" ""))
1140 (const_int 0))
1141 (label_ref (match_operand 2 "" ""))
1142 (pc)))]
1143 ""
1144 "*
1145 {
1146 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1147 get_attr_length (insn), 0, insn, 0);
1148 }"
1149 [(set_attr "type" "cbranch")
1150 (set (attr "length")
1151 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1152 (const_int 8184))
1153 (const_int 4)
1154 (const_int 8)))])
1155
1156 (define_insn ""
1157 [(set (pc)
1158 (if_then_else
1159 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1160 (const_int 1)
1161 (match_operand:SI 1 "uint5_operand" ""))
1162 (const_int 0))
1163 (pc)
1164 (label_ref (match_operand 2 "" ""))))]
1165 ""
1166 "*
1167 {
1168 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1169 get_attr_length (insn), 1, insn, 0);
1170 }"
1171 [(set_attr "type" "cbranch")
1172 (set (attr "length")
1173 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1174 (const_int 8184))
1175 (const_int 4)
1176 (const_int 8)))])
1177
1178 (define_insn ""
1179 [(set (pc)
1180 (if_then_else
1181 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1182 (const_int 1)
1183 (match_operand:SI 1 "uint5_operand" ""))
1184 (const_int 0))
1185 (label_ref (match_operand 2 "" ""))
1186 (pc)))]
1187 ""
1188 "*
1189 {
1190 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1191 get_attr_length (insn), 0, insn, 1);
1192 }"
1193 [(set_attr "type" "cbranch")
1194 (set (attr "length")
1195 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1196 (const_int 8184))
1197 (const_int 4)
1198 (const_int 8)))])
1199
1200 (define_insn ""
1201 [(set (pc)
1202 (if_then_else
1203 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1204 (const_int 1)
1205 (match_operand:SI 1 "uint5_operand" ""))
1206 (const_int 0))
1207 (pc)
1208 (label_ref (match_operand 2 "" ""))))]
1209 ""
1210 "*
1211 {
1212 return output_bb (operands, INSN_ANNULLED_BRANCH_P (insn),
1213 get_attr_length (insn), 1, insn, 1);
1214 }"
1215 [(set_attr "type" "cbranch")
1216 (set (attr "length")
1217 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1218 (const_int 8184))
1219 (const_int 4)
1220 (const_int 8)))])
1221
1222 ;; Branch on Variable Bit patterns.
1223 (define_insn ""
1224 [(set (pc)
1225 (if_then_else
1226 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1227 (const_int 1)
1228 (match_operand:SI 1 "register_operand" "q"))
1229 (const_int 0))
1230 (label_ref (match_operand 2 "" ""))
1231 (pc)))]
1232 ""
1233 "*
1234 {
1235 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1236 get_attr_length (insn), 0, insn, 0);
1237 }"
1238 [(set_attr "type" "cbranch")
1239 (set (attr "length")
1240 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1241 (const_int 8184))
1242 (const_int 4)
1243 (const_int 8)))])
1244
1245 (define_insn ""
1246 [(set (pc)
1247 (if_then_else
1248 (ne (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1249 (const_int 1)
1250 (match_operand:SI 1 "register_operand" "q"))
1251 (const_int 0))
1252 (pc)
1253 (label_ref (match_operand 2 "" ""))))]
1254 ""
1255 "*
1256 {
1257 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1258 get_attr_length (insn), 1, insn, 0);
1259 }"
1260 [(set_attr "type" "cbranch")
1261 (set (attr "length")
1262 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1263 (const_int 8184))
1264 (const_int 4)
1265 (const_int 8)))])
1266
1267 (define_insn ""
1268 [(set (pc)
1269 (if_then_else
1270 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1271 (const_int 1)
1272 (match_operand:SI 1 "register_operand" "q"))
1273 (const_int 0))
1274 (label_ref (match_operand 2 "" ""))
1275 (pc)))]
1276 ""
1277 "*
1278 {
1279 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1280 get_attr_length (insn), 0, insn, 1);
1281 }"
1282 [(set_attr "type" "cbranch")
1283 (set (attr "length")
1284 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1285 (const_int 8184))
1286 (const_int 4)
1287 (const_int 8)))])
1288
1289 (define_insn ""
1290 [(set (pc)
1291 (if_then_else
1292 (eq (zero_extract:SI (match_operand:SI 0 "register_operand" "r")
1293 (const_int 1)
1294 (match_operand:SI 1 "register_operand" "q"))
1295 (const_int 0))
1296 (pc)
1297 (label_ref (match_operand 2 "" ""))))]
1298 ""
1299 "*
1300 {
1301 return output_bvb (operands, INSN_ANNULLED_BRANCH_P (insn),
1302 get_attr_length (insn), 1, insn, 1);
1303 }"
1304 [(set_attr "type" "cbranch")
1305 (set (attr "length")
1306 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
1307 (const_int 8184))
1308 (const_int 4)
1309 (const_int 8)))])
1310
1311 ;; Floating point branches
1312 (define_insn ""
1313 [(set (pc) (if_then_else (ne (reg:CCFP 0) (const_int 0))
1314 (label_ref (match_operand 0 "" ""))
1315 (pc)))]
1316 "! TARGET_SOFT_FLOAT"
1317 "*
1318 {
1319 if (INSN_ANNULLED_BRANCH_P (insn))
1320 return \"ftest\;b,n %0\";
1321 else
1322 return \"ftest\;b%* %0\";
1323 }"
1324 [(set_attr "type" "fbranch")
1325 (set_attr "length" "8")])
1326
1327 (define_insn ""
1328 [(set (pc) (if_then_else (ne (reg:CCFP 0) (const_int 0))
1329 (pc)
1330 (label_ref (match_operand 0 "" ""))))]
1331 "! TARGET_SOFT_FLOAT"
1332 "*
1333 {
1334 if (INSN_ANNULLED_BRANCH_P (insn))
1335 return \"ftest\;add,tr %%r0,%%r0,%%r0\;b,n %0\";
1336 else
1337 return \"ftest\;add,tr %%r0,%%r0,%%r0\;b%* %0\";
1338 }"
1339 [(set_attr "type" "fbranch")
1340 (set_attr "length" "12")])
1341
1342 ;; Move instructions
1343
1344 (define_expand "movsi"
1345 [(set (match_operand:SI 0 "general_operand" "")
1346 (match_operand:SI 1 "general_operand" ""))]
1347 ""
1348 "
1349 {
1350 if (emit_move_sequence (operands, SImode, 0))
1351 DONE;
1352 }")
1353
1354 ;; Reloading an SImode or DImode value requires a scratch register if
1355 ;; going into or out of floating point registers.
1356
1357 (define_expand "reload_insi"
1358 [(set (match_operand:SI 0 "register_operand" "=Z")
1359 (match_operand:SI 1 "non_hard_reg_operand" ""))
1360 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
1361 ""
1362 "
1363 {
1364 if (emit_move_sequence (operands, SImode, operands[2]))
1365 DONE;
1366
1367 /* We don't want the clobber emitted, so handle this ourselves. */
1368 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
1369 DONE;
1370 }")
1371
1372 (define_expand "reload_outsi"
1373 [(set (match_operand:SI 0 "non_hard_reg_operand" "")
1374 (match_operand:SI 1 "register_operand" "Z"))
1375 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
1376 ""
1377 "
1378 {
1379 if (emit_move_sequence (operands, SImode, operands[2]))
1380 DONE;
1381
1382 /* We don't want the clobber emitted, so handle this ourselves. */
1383 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
1384 DONE;
1385 }")
1386
1387 ;;; pic symbol references
1388
1389 (define_insn ""
1390 [(set (match_operand:SI 0 "register_operand" "=r")
1391 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
1392 (match_operand:SI 2 "symbolic_operand" ""))))]
1393 "flag_pic && operands[1] == pic_offset_table_rtx"
1394 "ldw T'%2(%1),%0"
1395 [(set_attr "type" "load")
1396 (set_attr "length" "4")])
1397
1398 (define_insn ""
1399 [(set (match_operand:SI 0 "reg_or_nonsymb_mem_operand"
1400 "=r,r,r,r,r,Q,*q,!f,f,*TR")
1401 (match_operand:SI 1 "move_operand"
1402 "r,J,N,K,RQ,rM,rM,!fM,*RT,f"))]
1403 "(register_operand (operands[0], SImode)
1404 || reg_or_0_operand (operands[1], SImode))
1405 && ! TARGET_SOFT_FLOAT"
1406 "@
1407 copy %1,%0
1408 ldi %1,%0
1409 ldil L'%1,%0
1410 zdepi %Z1,%0
1411 ldw%M1 %1,%0
1412 stw%M0 %r1,%0
1413 mtsar %r1
1414 fcpy,sgl %f1,%0
1415 fldw%F1 %1,%0
1416 fstw%F0 %1,%0"
1417 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu,fpload,fpstore")
1418 (set_attr "pa_combine_type" "addmove")
1419 (set_attr "length" "4,4,4,4,4,4,4,4,4,4")])
1420
1421 (define_insn ""
1422 [(set (match_operand:SI 0 "reg_or_nonsymb_mem_operand"
1423 "=r,r,r,r,r,Q,*q")
1424 (match_operand:SI 1 "move_operand"
1425 "r,J,N,K,RQ,rM,rM"))]
1426 "(register_operand (operands[0], SImode)
1427 || reg_or_0_operand (operands[1], SImode))
1428 && TARGET_SOFT_FLOAT"
1429 "@
1430 copy %1,%0
1431 ldi %1,%0
1432 ldil L'%1,%0
1433 zdepi %Z1,%0
1434 ldw%M1 %1,%0
1435 stw%M0 %r1,%0
1436 mtsar %r1"
1437 [(set_attr "type" "move,move,move,move,load,store,move")
1438 (set_attr "pa_combine_type" "addmove")
1439 (set_attr "length" "4,4,4,4,4,4,4")])
1440
1441 (define_insn ""
1442 [(set (match_operand:SI 0 "register_operand" "=r")
1443 (mem:SI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1444 (match_operand:SI 2 "register_operand" "r"))))]
1445 "! TARGET_DISABLE_INDEXING"
1446 "*
1447 {
1448 /* Reload can create backwards (relative to cse) unscaled index
1449 address modes when eliminating registers and possibly for
1450 pseudos that don't get hard registers. Deal with it. */
1451 if (operands[2] == hard_frame_pointer_rtx
1452 || operands[2] == stack_pointer_rtx)
1453 return \"ldwx %1(%2),%0\";
1454 else
1455 return \"ldwx %2(%1),%0\";
1456 }"
1457 [(set_attr "type" "load")
1458 (set_attr "length" "4")])
1459
1460 (define_insn ""
1461 [(set (match_operand:SI 0 "register_operand" "=r")
1462 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "r")
1463 (match_operand:SI 2 "basereg_operand" "r"))))]
1464 "! TARGET_DISABLE_INDEXING"
1465 "*
1466 {
1467 /* Reload can create backwards (relative to cse) unscaled index
1468 address modes when eliminating registers and possibly for
1469 pseudos that don't get hard registers. Deal with it. */
1470 if (operands[1] == hard_frame_pointer_rtx
1471 || operands[1] == stack_pointer_rtx)
1472 return \"ldwx %2(%1),%0\";
1473 else
1474 return \"ldwx %1(%2),%0\";
1475 }"
1476 [(set_attr "type" "load")
1477 (set_attr "length" "4")])
1478
1479 ;; Load or store with base-register modification.
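;;
;; For example, a word-sized pre-decrement store such as "*--p = x" matches
;; pre_stwm below and can be emitted as a single
;;     stwm  %rX,-4(%rP)
;; while the pre-increment case has to use the "stws,mb %rX,4(%rP)" form
;; instead (illustrative register names; see the output code in each pattern).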
1480
1481 (define_insn "pre_ldwm"
1482 [(set (match_operand:SI 0 "register_operand" "=r")
1483 (mem:SI (plus:SI (match_operand:SI 1 "register_operand" "+r")
1484 (match_operand:SI 2 "pre_cint_operand" ""))))
1485 (set (match_dup 1)
1486 (plus:SI (match_dup 1) (match_dup 2)))]
1487 ""
1488 "*
1489 {
1490 if (INTVAL (operands[2]) < 0)
1491 return \"ldwm %2(%1),%0\";
1492 return \"ldws,mb %2(%1),%0\";
1493 }"
1494 [(set_attr "type" "load")
1495 (set_attr "length" "4")])
1496
1497 (define_insn "pre_stwm"
1498 [(set (mem:SI (plus:SI (match_operand:SI 0 "register_operand" "+r")
1499 (match_operand:SI 1 "pre_cint_operand" "")))
1500 (match_operand:SI 2 "reg_or_0_operand" "rM"))
1501 (set (match_dup 0)
1502 (plus:SI (match_dup 0) (match_dup 1)))]
1503 ""
1504 "*
1505 {
1506 if (INTVAL (operands[1]) < 0)
1507 return \"stwm %r2,%1(%0)\";
1508 return \"stws,mb %r2,%1(%0)\";
1509 }"
1510 [(set_attr "type" "store")
1511 (set_attr "length" "4")])
1512
1513 (define_insn "post_ldwm"
1514 [(set (match_operand:SI 0 "register_operand" "=r")
1515 (mem:SI (match_operand:SI 1 "register_operand" "+r")))
1516 (set (match_dup 1)
1517 (plus:SI (match_dup 1)
1518 (match_operand:SI 2 "post_cint_operand" "")))]
1519 ""
1520 "*
1521 {
1522 if (INTVAL (operands[2]) > 0)
1523 return \"ldwm %2(%1),%0\";
1524 return \"ldws,ma %2(%1),%0\";
1525 }"
1526 [(set_attr "type" "load")
1527 (set_attr "length" "4")])
1528
1529 (define_insn "post_stwm"
1530 [(set (mem:SI (match_operand:SI 0 "register_operand" "+r"))
1531 (match_operand:SI 1 "reg_or_0_operand" "rM"))
1532 (set (match_dup 0)
1533 (plus:SI (match_dup 0)
1534 (match_operand:SI 2 "post_cint_operand" "")))]
1535 ""
1536 "*
1537 {
1538 if (INTVAL (operands[2]) > 0)
1539 return \"stwm %r1,%2(%0)\";
1540 return \"stws,ma %r1,%2(%0)\";
1541 }"
1542 [(set_attr "type" "store")
1543 (set_attr "length" "4")])
1544
1545 ;; For PIC.
1546 ;; Note that since this pattern can be created at reload time (via movsi), all
1547 ;; the same rules for movsi apply here (no new pseudos, no temporaries).
1548 (define_insn "pic_load_label"
1549 [(set (match_operand:SI 0 "register_operand" "=a")
1550 (match_operand:SI 1 "pic_label_operand" ""))]
1551 ""
1552 "*
1553 {
1554 rtx label_rtx = gen_label_rtx ();
1555 rtx xoperands[3];
1556 extern FILE *asm_out_file;
1557
1558 xoperands[0] = operands[0];
1559 xoperands[1] = operands[1];
1560 xoperands[2] = label_rtx;
1561 output_asm_insn (\"bl .+8,%0\", xoperands);
1562 output_asm_insn (\"depi 0,31,2,%0\", xoperands);
1563 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
1564 CODE_LABEL_NUMBER (label_rtx));
1565
1566 /* If we're trying to load the address of a label that happens to be
1567 close, then we can use a shorter sequence. */
1568 if (GET_CODE (operands[1]) == LABEL_REF
1569 && insn_addresses
1570 && abs (insn_addresses[INSN_UID (XEXP (operands[1], 0))]
1571 - insn_addresses[INSN_UID (insn)]) < 8100)
1572 {
1573 /* Prefixing with R% here is wrong; it extracts just 11 bits and is
1574 always non-negative. */
1575 output_asm_insn (\"ldo %1-%2(%0),%0\", xoperands);
1576 }
1577 else
1578 {
1579 output_asm_insn (\"addil L%%%1-%2,%0\", xoperands);
1580 output_asm_insn (\"ldo R%%%1-%2(%0),%0\", xoperands);
1581 }
1582 return \"\";
1583 }"
1584 [(set_attr "type" "multi")
1585 (set_attr "length" "16")]) ; 12 or 16
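
;; For reference, the long form emitted by pic_load_label above is,
;; schematically,
;;     bl    .+8,%reg          ; %reg = address of L$n (link past the slot)
;;     depi  0,31,2,%reg       ; delay slot: clear the privilege-level bits
;; L$n:
;;     addil L%lab-L$n,%reg
;;     ldo   R%lab-L$n(%reg),%reg
;; and the short form replaces the last two instructions with a single
;; "ldo lab-L$n(%reg),%reg" (register and label names are illustrative).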
1586
1587 (define_insn "pic2_highpart"
1588 [(set (match_operand:SI 0 "register_operand" "=a")
1589 (plus:SI (match_operand:SI 1 "register_operand" "r")
1590 (high:SI (match_operand 2 "" ""))))]
1591 "symbolic_operand (operands[2], Pmode)
1592 && ! function_label_operand (operands[2])
1593 && flag_pic == 2"
1594 "addil LT'%G2,%1"
1595 [(set_attr "type" "binary")
1596 (set_attr "length" "4")])
1597
1598 ; We need this to make sure CSE doesn't simplify a memory load with a
1599 ; symbolic address, whose contents it thinks it knows.  For PIC, what CSE
1600 ; thinks is the real value will be the address of that value.
1601 (define_insn "pic2_lo_sum"
1602 [(set (match_operand:SI 0 "register_operand" "=r")
1603 (mem:SI (lo_sum:SI (match_operand:SI 1 "register_operand" "r")
1604 (unspec:SI [(match_operand:SI 2 "symbolic_operand" "")] 0))))]
1605 ""
1606 "*
1607 {
1608 if (flag_pic != 2)
1609 abort ();
1610 return \"ldw RT'%G2(%1),%0\";
1611 }"
1612 [(set_attr "type" "load")
1613 (set_attr "length" "4")])
1614
1615
1616 ;; Always use addil rather than ldil;add sequences. This allows the
1617 ;; HP linker to eliminate the dp relocation if the symbolic operand
1618 ;; lives in the TEXT space.
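;; That is, prefer the single
;;     addil LR'sym,%r27          ; high part; result lands in %r1
;; over the two-instruction
;;     ldil  L'sym,%reg
;;     addl  %reg,%r27,%reg
;; (a schematic sketch; the TARGET_LONG_LOAD_STORE variant below is longer
;; still).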
1619 (define_insn ""
1620 [(set (match_operand:SI 0 "register_operand" "=a")
1621 (high:SI (match_operand 1 "" "")))]
1622 "symbolic_operand (operands[1], Pmode)
1623 && ! function_label_operand (operands[1])
1624 && ! read_only_operand (operands[1])
1625 && ! flag_pic"
1626 "*
1627 {
1628 if (TARGET_LONG_LOAD_STORE)
1629 return \"addil NLR'%H1,%%r27\;ldo N'%H1(%%r1),%%r1\";
1630 else
1631 return \"addil LR'%H1,%%r27\";
1632 }"
1633 [(set_attr "type" "binary")
1634 (set (attr "length")
1635 (if_then_else (eq (symbol_ref "TARGET_LONG_LOAD_STORE") (const_int 0))
1636 (const_int 4)
1637 (const_int 8)))])
1638
1639
1640 ;; This is for use in the prologue/epilogue code. We need it
1641 ;; to add large constants to a stack pointer or frame pointer.
1642 ;; Because of the additional %r1 pressure, we probably do not
1643 ;; want to use this in general code, so make it available
1644 ;; only after reload.
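;; For example, growing the stack by a large frame size F looks roughly like
;;     addil L'F,%r30          ; %r1 = %r30 + high part of F
;;     ldo   R'F(%r1),%r30     ; %r30 = %r30 + F
;; where this pattern supplies the addil/ldil half (a sketch only; the
;; low-order ldo is emitted separately).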
1645 (define_insn "add_high_const"
1646 [(set (match_operand:SI 0 "register_operand" "=!a,*r")
1647 (plus:SI (match_operand:SI 1 "register_operand" "r,r")
1648 (high:SI (match_operand 2 "const_int_operand" ""))))]
1649 "reload_completed"
1650 "@
1651 addil L'%G2,%1
1652 ldil L'%G2,%0\;addl %0,%1,%0"
1653 [(set_attr "type" "binary,binary")
1654 (set_attr "length" "4,8")])
1655
1656 (define_insn ""
1657 [(set (match_operand:SI 0 "register_operand" "=r")
1658 (high:SI (match_operand 1 "" "")))]
1659 "(!flag_pic || !symbolic_operand (operands[1]), Pmode)
1660 && !is_function_label_plus_const (operands[1])"
1661 "*
1662 {
1663 if (symbolic_operand (operands[1], Pmode))
1664 return \"ldil LR'%H1,%0\";
1665 else
1666 return \"ldil L'%G1,%0\";
1667 }"
1668 [(set_attr "type" "move")
1669 (set_attr "length" "4")])
1670
1671 (define_insn ""
1672 [(set (match_operand:SI 0 "register_operand" "=r")
1673 (lo_sum:SI (match_operand:SI 1 "register_operand" "r")
1674 (match_operand:SI 2 "immediate_operand" "i")))]
1675 "!is_function_label_plus_const (operands[2])"
1676 "*
1677 {
1678 if (flag_pic && symbolic_operand (operands[2], Pmode))
1679 abort ();
1680 else if (symbolic_operand (operands[2], Pmode))
1681 return \"ldo RR'%G2(%1),%0\";
1682 else
1683 return \"ldo R'%G2(%1),%0\";
1684 }"
1685 [(set_attr "type" "move")
1686 (set_attr "length" "4")])
1687
1688 ;; Now that a symbolic_address plus a constant is broken up early
1689 ;; in the compilation phase (for better CSE) we need a special
1690 ;; combiner pattern to load the symbolic address plus the constant
1691 ;; in only 2 instructions. (For cases where the symbolic address
1692 ;; was not a common subexpression.)
1693 (define_split
1694 [(set (match_operand:SI 0 "register_operand" "")
1695 (match_operand:SI 1 "symbolic_operand" ""))
1696 (clobber (match_operand:SI 2 "register_operand" ""))]
1697 "! (flag_pic && pic_label_operand (operands[1], SImode))"
1698 [(set (match_dup 2) (high:SI (match_dup 1)))
1699 (set (match_dup 0) (lo_sum:SI (match_dup 2) (match_dup 1)))]
1700 "")
1701
1702 ;; hppa_legitimize_address goes to a great deal of trouble to
1703 ;; create addresses which use indexing. In some cases, this
1704 ;; is a loss because there are no store instructions which
1705 ;; allow indexed addresses (with an integer register source).
1706 ;;
1707 ;; These define_splits try to turn a 3 insn store into
1708 ;; a 2 insn store with some creative RTL rewriting.
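;;
;; For example, storing "val" into base[c + 4*i] can become, roughly,
;;     sh2addl  %ri,%rbase,%rtmp      ; %rtmp = %rbase + 4*%ri
;;     stw      %rval,c(%rtmp)        ; displacement store
;; instead of materializing the whole address first (made-up names; the
;; splits below only rearrange the RTL, the assembly comes from the shadd
;; and store patterns).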
1709 (define_split
1710 [(set (mem:SI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1711 (match_operand:SI 1 "shadd_operand" ""))
1712 (plus:SI (match_operand:SI 2 "register_operand" "")
1713 (match_operand:SI 3 "const_int_operand" ""))))
1714 (match_operand:SI 4 "register_operand" ""))
1715 (clobber (match_operand:SI 5 "register_operand" ""))]
1716 ""
1717 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1718 (match_dup 2)))
1719 (set (mem:SI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
1720 "")
1721
1722 (define_split
1723 [(set (mem:HI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1724 (match_operand:SI 1 "shadd_operand" ""))
1725 (plus:SI (match_operand:SI 2 "register_operand" "")
1726 (match_operand:SI 3 "const_int_operand" ""))))
1727 (match_operand:HI 4 "register_operand" ""))
1728 (clobber (match_operand:SI 5 "register_operand" ""))]
1729 ""
1730 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1731 (match_dup 2)))
1732 (set (mem:HI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
1733 "")
1734
1735 (define_split
1736 [(set (mem:QI (plus:SI (mult:SI (match_operand:SI 0 "register_operand" "")
1737 (match_operand:SI 1 "shadd_operand" ""))
1738 (plus:SI (match_operand:SI 2 "register_operand" "")
1739 (match_operand:SI 3 "const_int_operand" ""))))
1740 (match_operand:QI 4 "register_operand" ""))
1741 (clobber (match_operand:SI 5 "register_operand" ""))]
1742 ""
1743 [(set (match_dup 5) (plus:SI (mult:SI (match_dup 0) (match_dup 1))
1744 (match_dup 2)))
1745 (set (mem:QI (plus:SI (match_dup 5) (match_dup 3))) (match_dup 4))]
1746 "")
1747
1748 (define_expand "movhi"
1749 [(set (match_operand:HI 0 "general_operand" "")
1750 (match_operand:HI 1 "general_operand" ""))]
1751 ""
1752 "
1753 {
1754 if (emit_move_sequence (operands, HImode, 0))
1755 DONE;
1756 }")
1757
1758 (define_insn ""
1759 [(set (match_operand:HI 0 "reg_or_nonsymb_mem_operand" "=r,r,r,r,r,Q,*q,!f")
1760 (match_operand:HI 1 "move_operand" "r,J,N,K,RQ,rM,rM,!fM"))]
1761 "register_operand (operands[0], HImode)
1762 || reg_or_0_operand (operands[1], HImode)"
1763 "@
1764 copy %1,%0
1765 ldi %1,%0
1766 ldil L'%1,%0
1767 zdepi %Z1,%0
1768 ldh%M1 %1,%0
1769 sth%M0 %r1,%0
1770 mtsar %r1
1771 fcpy,sgl %f1,%0"
1772 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu")
1773 (set_attr "pa_combine_type" "addmove")
1774 (set_attr "length" "4,4,4,4,4,4,4,4")])
1775
1776 (define_insn ""
1777 [(set (match_operand:HI 0 "register_operand" "=r")
1778 (mem:HI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1779 (match_operand:SI 2 "register_operand" "r"))))]
1780 "! TARGET_DISABLE_INDEXING"
1781 "*
1782 {
1783 /* Reload can create backwards (relative to cse) unscaled index
1784 address modes when eliminating registers and possibly for
1785 pseudos that don't get hard registers. Deal with it. */
1786 if (operands[2] == hard_frame_pointer_rtx
1787 || operands[2] == stack_pointer_rtx)
1788 return \"ldhx %1(%2),%0\";
1789 else
1790 return \"ldhx %2(%1),%0\";
1791 }"
1792 [(set_attr "type" "load")
1793 (set_attr "length" "4")])
1794
1795 (define_insn ""
1796 [(set (match_operand:HI 0 "register_operand" "=r")
1797 (mem:HI (plus:SI (match_operand:SI 1 "register_operand" "r")
1798 (match_operand:SI 2 "basereg_operand" "r"))))]
1799 "! TARGET_DISABLE_INDEXING"
1800 "*
1801 {
1802 /* Reload can create backwards (relative to cse) unscaled index
1803 address modes when eliminating registers and possibly for
1804 pseudos that don't get hard registers. Deal with it. */
1805 if (operands[1] == hard_frame_pointer_rtx
1806 || operands[1] == stack_pointer_rtx)
1807 return \"ldhx %2(%1),%0\";
1808 else
1809 return \"ldhx %1(%2),%0\";
1810 }"
1811 [(set_attr "type" "load")
1812 (set_attr "length" "4")])
1813
1814 ; Now zero extended variants.
1815 (define_insn ""
1816 [(set (match_operand:SI 0 "register_operand" "=r")
1817 (zero_extend:SI (mem:HI
1818 (plus:SI
1819 (match_operand:SI 1 "basereg_operand" "r")
1820 (match_operand:SI 2 "register_operand" "r")))))]
1821 "! TARGET_DISABLE_INDEXING"
1822 "*
1823 {
1824 /* Reload can create backwards (relative to cse) unscaled index
1825 address modes when eliminating registers and possibly for
1826 pseudos that don't get hard registers. Deal with it. */
1827 if (operands[2] == hard_frame_pointer_rtx
1828 || operands[2] == stack_pointer_rtx)
1829 return \"ldhx %1(%2),%0\";
1830 else
1831 return \"ldhx %2(%1),%0\";
1832 }"
1833 [(set_attr "type" "load")
1834 (set_attr "length" "4")])
1835
1836 (define_insn ""
1837 [(set (match_operand:SI 0 "register_operand" "=r")
1838 (zero_extend:SI (mem:HI
1839 (plus:SI
1840 (match_operand:SI 1 "register_operand" "r")
1841 (match_operand:SI 2 "basereg_operand" "r")))))]
1842 "! TARGET_DISABLE_INDEXING"
1843 "*
1844 {
1845 /* Reload can create backwards (relative to cse) unscaled index
1846 address modes when eliminating registers and possibly for
1847 pseudos that don't get hard registers. Deal with it. */
1848 if (operands[1] == hard_frame_pointer_rtx
1849 || operands[1] == stack_pointer_rtx)
1850 return \"ldhx %2(%1),%0\";
1851 else
1852 return \"ldhx %1(%2),%0\";
1853 }"
1854 [(set_attr "type" "load")
1855 (set_attr "length" "4")])
1856
1857 (define_insn ""
1858 [(set (match_operand:HI 0 "register_operand" "=r")
1859 (mem:HI (plus:SI (match_operand:SI 1 "register_operand" "+r")
1860 (match_operand:SI 2 "int5_operand" "L"))))
1861 (set (match_dup 1)
1862 (plus:SI (match_dup 1) (match_dup 2)))]
1863 ""
1864 "ldhs,mb %2(%1),%0"
1865 [(set_attr "type" "load")
1866 (set_attr "length" "4")])
1867
1868 ; And a zero extended variant.
1869 (define_insn ""
1870 [(set (match_operand:SI 0 "register_operand" "=r")
1871 (zero_extend:SI (mem:HI
1872 (plus:SI
1873 (match_operand:SI 1 "register_operand" "+r")
1874 (match_operand:SI 2 "int5_operand" "L")))))
1875 (set (match_dup 1)
1876 (plus:SI (match_dup 1) (match_dup 2)))]
1877 ""
1878 "ldhs,mb %2(%1),%0"
1879 [(set_attr "type" "load")
1880 (set_attr "length" "4")])
1881
1882 (define_insn ""
1883 [(set (mem:HI (plus:SI (match_operand:SI 0 "register_operand" "+r")
1884 (match_operand:SI 1 "int5_operand" "L")))
1885 (match_operand:HI 2 "reg_or_0_operand" "rM"))
1886 (set (match_dup 0)
1887 (plus:SI (match_dup 0) (match_dup 1)))]
1888 ""
1889 "sths,mb %r2,%1(%0)"
1890 [(set_attr "type" "store")
1891 (set_attr "length" "4")])
1892
1893 (define_insn ""
1894 [(set (match_operand:HI 0 "register_operand" "=r")
1895 (high:HI (match_operand 1 "const_int_operand" "")))]
1896 ""
1897 "ldil L'%G1,%0"
1898 [(set_attr "type" "move")
1899 (set_attr "length" "4")])
1900
1901 (define_insn ""
1902 [(set (match_operand:HI 0 "register_operand" "=r")
1903 (lo_sum:HI (match_operand:HI 1 "register_operand" "r")
1904 (match_operand 2 "const_int_operand" "")))]
1905 ""
1906 "ldo R'%G2(%1),%0"
1907 [(set_attr "type" "move")
1908 (set_attr "length" "4")])
1909
1910 (define_expand "movqi"
1911 [(set (match_operand:QI 0 "general_operand" "")
1912 (match_operand:QI 1 "general_operand" ""))]
1913 ""
1914 "
1915 {
1916 if (emit_move_sequence (operands, QImode, 0))
1917 DONE;
1918 }")
1919
1920 (define_insn ""
1921 [(set (match_operand:QI 0 "reg_or_nonsymb_mem_operand" "=r,r,r,r,r,Q,*q,!f")
1922 (match_operand:QI 1 "move_operand" "r,J,N,K,RQ,rM,rM,!fM"))]
1923 "register_operand (operands[0], QImode)
1924 || reg_or_0_operand (operands[1], QImode)"
1925 "@
1926 copy %1,%0
1927 ldi %1,%0
1928 ldil L'%1,%0
1929 zdepi %Z1,%0
1930 ldb%M1 %1,%0
1931 stb%M0 %r1,%0
1932 mtsar %r1
1933 fcpy,sgl %f1,%0"
1934 [(set_attr "type" "move,move,move,shift,load,store,move,fpalu")
1935 (set_attr "pa_combine_type" "addmove")
1936 (set_attr "length" "4,4,4,4,4,4,4,4")])
1937
1938 (define_insn ""
1939 [(set (match_operand:QI 0 "register_operand" "=r")
1940 (mem:QI (plus:SI (match_operand:SI 1 "basereg_operand" "r")
1941 (match_operand:SI 2 "register_operand" "r"))))]
1942 "! TARGET_DISABLE_INDEXING"
1943 "*
1944 {
1945 /* Reload can create backwards (relative to cse) unscaled index
1946 address modes when eliminating registers and possibly for
1947 pseudos that don't get hard registers. Deal with it. */
1948 if (operands[2] == hard_frame_pointer_rtx
1949 || operands[2] == stack_pointer_rtx)
1950 return \"ldbx %1(%2),%0\";
1951 else
1952 return \"ldbx %2(%1),%0\";
1953 }"
1954 [(set_attr "type" "load")
1955 (set_attr "length" "4")])
1956
1957 (define_insn ""
1958 [(set (match_operand:QI 0 "register_operand" "=r")
1959 (mem:QI (plus:SI (match_operand:SI 1 "register_operand" "r")
1960 (match_operand:SI 2 "basereg_operand" "r"))))]
1961 "! TARGET_DISABLE_INDEXING"
1962 "*
1963 {
1964 /* Reload can create backwards (relative to cse) unscaled index
1965 address modes when eliminating registers and possibly for
1966 pseudos that don't get hard registers. Deal with it. */
1967 if (operands[1] == hard_frame_pointer_rtx
1968 || operands[1] == stack_pointer_rtx)
1969 return \"ldbx %2(%1),%0\";
1970 else
1971 return \"ldbx %1(%2),%0\";
1972 }"
1973 [(set_attr "type" "load")
1974 (set_attr "length" "4")])
1975
1976 ; Indexed byte load with zero extension to SImode or HImode.
1977 (define_insn ""
1978 [(set (match_operand:SI 0 "register_operand" "=r")
1979 (zero_extend:SI (mem:QI
1980 (plus:SI
1981 (match_operand:SI 1 "basereg_operand" "r")
1982 (match_operand:SI 2 "register_operand" "r")))))]
1983 "! TARGET_DISABLE_INDEXING"
1984 "*
1985 {
1986 /* Reload can create backwards (relative to cse) unscaled index
1987 address modes when eliminating registers and possibly for
1988 pseudos that don't get hard registers. Deal with it. */
1989 if (operands[2] == hard_frame_pointer_rtx
1990 || operands[2] == stack_pointer_rtx)
1991 return \"ldbx %1(%2),%0\";
1992 else
1993 return \"ldbx %2(%1),%0\";
1994 }"
1995 [(set_attr "type" "load")
1996 (set_attr "length" "4")])
1997
1998 (define_insn ""
1999 [(set (match_operand:SI 0 "register_operand" "=r")
2000 (zero_extend:SI (mem:QI
2001 (plus:SI
2002 (match_operand:SI 1 "register_operand" "r")
2003 (match_operand:SI 2 "basereg_operand" "r")))))]
2004 "! TARGET_DISABLE_INDEXING"
2005 "*
2006 {
2007 /* Reload can create backwards (relative to cse) unscaled index
2008 address modes when eliminating registers and possibly for
2009 pseudos that don't get hard registers. Deal with it. */
2010 if (operands[1] == hard_frame_pointer_rtx
2011 || operands[1] == stack_pointer_rtx)
2012 return \"ldbx %2(%1),%0\";
2013 else
2014 return \"ldbx %1(%2),%0\";
2015 }"
2016 [(set_attr "type" "load")
2017 (set_attr "length" "4")])
2018
2019 (define_insn ""
2020 [(set (match_operand:HI 0 "register_operand" "=r")
2021 (zero_extend:HI (mem:QI
2022 (plus:SI
2023 (match_operand:SI 1 "basereg_operand" "r")
2024 (match_operand:SI 2 "register_operand" "r")))))]
2025 "! TARGET_DISABLE_INDEXING"
2026 "*
2027 {
2028 /* Reload can create backwards (relative to cse) unscaled index
2029 address modes when eliminating registers and possibly for
2030 pseudos that don't get hard registers. Deal with it. */
2031 if (operands[2] == hard_frame_pointer_rtx
2032 || operands[2] == stack_pointer_rtx)
2033 return \"ldbx %1(%2),%0\";
2034 else
2035 return \"ldbx %2(%1),%0\";
2036 }"
2037 [(set_attr "type" "load")
2038 (set_attr "length" "4")])
2039
2040 (define_insn ""
2041 [(set (match_operand:HI 0 "register_operand" "=r")
2042 (zero_extend:HI (mem:QI
2043 (plus:SI
2044 (match_operand:SI 1 "register_operand" "r")
2045 (match_operand:SI 2 "basereg_operand" "r")))))]
2046 "! TARGET_DISABLE_INDEXING"
2047 "*
2048 {
2049 /* Reload can create backwards (relative to cse) unscaled index
2050 address modes when eliminating registers and possibly for
2051 pseudos that don't get hard registers. Deal with it. */
2052 if (operands[1] == hard_frame_pointer_rtx
2053 || operands[1] == stack_pointer_rtx)
2054 return \"ldbx %2(%1),%0\";
2055 else
2056 return \"ldbx %1(%2),%0\";
2057 }"
2058 [(set_attr "type" "load")
2059 (set_attr "length" "4")])
2060
2061 (define_insn ""
2062 [(set (match_operand:QI 0 "register_operand" "=r")
2063 (mem:QI (plus:SI (match_operand:SI 1 "register_operand" "+r")
2064 (match_operand:SI 2 "int5_operand" "L"))))
2065 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2066 ""
2067 "ldbs,mb %2(%1),%0"
2068 [(set_attr "type" "load")
2069 (set_attr "length" "4")])
2070
2071 ; Now the same thing with zero extensions.
2072 (define_insn ""
2073 [(set (match_operand:SI 0 "register_operand" "=r")
2074 (zero_extend:SI (mem:QI (plus:SI
2075 (match_operand:SI 1 "register_operand" "+r")
2076 (match_operand:SI 2 "int5_operand" "L")))))
2077 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2078 ""
2079 "ldbs,mb %2(%1),%0"
2080 [(set_attr "type" "load")
2081 (set_attr "length" "4")])
2082
2083 (define_insn ""
2084 [(set (match_operand:HI 0 "register_operand" "=r")
2085 (zero_extend:HI (mem:QI (plus:SI
2086 (match_operand:SI 1 "register_operand" "+r")
2087 (match_operand:SI 2 "int5_operand" "L")))))
2088 (set (match_dup 1) (plus:SI (match_dup 1) (match_dup 2)))]
2089 ""
2090 "ldbs,mb %2(%1),%0"
2091 [(set_attr "type" "load")
2092 (set_attr "length" "4")])
2093
2094 (define_insn ""
2095 [(set (mem:QI (plus:SI (match_operand:SI 0 "register_operand" "+r")
2096 (match_operand:SI 1 "int5_operand" "L")))
2097 (match_operand:QI 2 "reg_or_0_operand" "rM"))
2098 (set (match_dup 0)
2099 (plus:SI (match_dup 0) (match_dup 1)))]
2100 ""
2101 "stbs,mb %r2,%1(%0)"
2102 [(set_attr "type" "store")
2103 (set_attr "length" "4")])
2104
2105 ;; The definition of this insn does not really explain what it does,
2106 ;; but it should suffice that anything generated as this insn
2107 ;; will be recognized as one and that it will not
2108 ;; successfully combine with anything.
2109 (define_expand "movstrsi"
2110 [(parallel [(set (match_operand:BLK 0 "" "")
2111 (match_operand:BLK 1 "" ""))
2112 (clobber (match_dup 7))
2113 (clobber (match_dup 8))
2114 (clobber (match_dup 4))
2115 (clobber (match_dup 5))
2116 (clobber (match_dup 6))
2117 (use (match_operand:SI 2 "arith_operand" ""))
2118 (use (match_operand:SI 3 "const_int_operand" ""))])]
2119 ""
2120 "
2121 {
2122 int size, align;
2123
2124 /* HP provides a very fast block move library routine for the PA;
2125 this routine includes:
2126
2127 4x4 byte at a time block moves,
2128 1x4 byte at a time with alignment checked at runtime with
2129 attempts to align the source and destination as needed,
2130 1x1 byte loop.
2131
2132 With that in mind, here are the heuristics used to guess when
2133 the inlined block move will be better than the library block
2134 move:
2135
2136 If the size isn't constant, then always use the library routines.
2137
2138 If the size is large with respect to the known alignment, then use
2139 the library routines.
2140
2141 If the size is small with respect to the known alignment, then open
2142 code the copy (since that will lead to better scheduling).
2143
2144 Else use the block move pattern. */
2145
2146 /* Undetermined size, use the library routine. */
2147 if (GET_CODE (operands[2]) != CONST_INT)
2148 FAIL;
2149
2150 size = INTVAL (operands[2]);
2151 align = INTVAL (operands[3]);
2152 align = align > 4 ? 4 : align;
2153
2154 /* If size/alignment > 16 (i.e., the size is large with respect to the
2155 alignment), then use the library routines. */
2156 if (size / align > 16)
2157 FAIL;
2158
2159 /* This does happen, but not often enough to worry much about. */
2160 if (size / align < MOVE_RATIO)
2161 FAIL;
2162
2163 /* Fall through means we're going to use our block move pattern. */
2164 operands[0]
2165 = change_address (operands[0], VOIDmode,
2166 copy_to_mode_reg (SImode, XEXP (operands[0], 0)));
2167 operands[1]
2168 = change_address (operands[1], VOIDmode,
2169 copy_to_mode_reg (SImode, XEXP (operands[1], 0)));
2170 operands[4] = gen_reg_rtx (SImode);
2171 operands[5] = gen_reg_rtx (SImode);
2172 operands[6] = gen_reg_rtx (SImode);
2173 operands[7] = XEXP (operands[0], 0);
2174 operands[8] = XEXP (operands[1], 0);
2175 }")
2176
2177 ;; The operand constraints are written like this to support both compile-time
2178 ;; and run-time determined byte count. If the count is run-time determined,
2179 ;; the register with the byte count is clobbered by the copying code, and
2180 ;; therefore it is forced to operand 2. If the count is compile-time
2181 ;; determined, we need two scratch registers for the unrolled code.
2182 (define_insn "movstrsi_internal"
2183 [(set (mem:BLK (match_operand:SI 0 "register_operand" "+r,r"))
2184 (mem:BLK (match_operand:SI 1 "register_operand" "+r,r")))
2185 (clobber (match_dup 0))
2186 (clobber (match_dup 1))
2187 (clobber (match_operand:SI 2 "register_operand" "=r,r")) ;loop cnt/tmp
2188 (clobber (match_operand:SI 3 "register_operand" "=&r,&r")) ;item tmp
2189 (clobber (match_operand:SI 6 "register_operand" "=&r,&r")) ;item tmp2
2190 (use (match_operand:SI 4 "arith_operand" "J,2")) ;byte count
2191 (use (match_operand:SI 5 "const_int_operand" "n,n"))] ;alignment
2192 ""
2193 "* return output_block_move (operands, !which_alternative);"
2194 [(set_attr "type" "multi,multi")])
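
;; To spell out the constraint trick above with a hypothetical use: for a
;; compile-time count, alternative 0 applies, operand 4 is the literal
;; byte count ("J") and operands 3 and 6 are the two scratch registers for
;; the unrolled copy.  For a run-time count, alternative 1 applies and the
;; "2" constraint ties operand 4 to operand 2, so the register holding the
;; count is the one the copy loop clobbers.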
2195 \f
2196 ;; Floating point move insns
2197
2198 ;; This pattern forces (set (reg:DF ...) (const_double ...))
2199 ;; to be reloaded by putting the constant into memory when
2200 ;; reg is a floating point register.
2201 ;;
2202 ;; For integer registers we use ldil;ldo to set the appropriate
2203 ;; value.
2204 ;;
2205 ;; This must come before the movdf pattern, and it must be present
2206 ;; to handle obscure reloading cases.
2207 (define_insn ""
2208 [(set (match_operand:DF 0 "register_operand" "=?r,f")
2209 (match_operand:DF 1 "" "?F,m"))]
2210 "GET_CODE (operands[1]) == CONST_DOUBLE
2211 && operands[1] != CONST0_RTX (DFmode)
2212 && ! TARGET_SOFT_FLOAT"
2213 "* return (which_alternative == 0 ? output_move_double (operands)
2214 : \"fldd%F1 %1,%0\");"
2215 [(set_attr "type" "move,fpload")
2216 (set_attr "length" "16,4")])
2217
2218 (define_expand "movdf"
2219 [(set (match_operand:DF 0 "general_operand" "")
2220 (match_operand:DF 1 "general_operand" ""))]
2221 ""
2222 "
2223 {
2224 if (emit_move_sequence (operands, DFmode, 0))
2225 DONE;
2226 }")
2227
2228 ;; Reloading a DFmode value requires a scratch register if it is
2229 ;; going into or out of the floating point registers.
2230
2231 (define_expand "reload_indf"
2232 [(set (match_operand:DF 0 "register_operand" "=Z")
2233 (match_operand:DF 1 "non_hard_reg_operand" ""))
2234 (clobber (match_operand:DF 2 "register_operand" "=&r"))]
2235 ""
2236 "
2237 {
2238 if (emit_move_sequence (operands, DFmode, operands[2]))
2239 DONE;
2240
2241 /* We don't want the clobber emitted, so handle this ourselves. */
2242 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2243 DONE;
2244 }")
2245
2246 (define_expand "reload_outdf"
2247 [(set (match_operand:DF 0 "non_hard_reg_operand" "")
2248 (match_operand:DF 1 "register_operand" "Z"))
2249 (clobber (match_operand:DF 2 "register_operand" "=&r"))]
2250 ""
2251 "
2252 {
2253 if (emit_move_sequence (operands, DFmode, operands[2]))
2254 DONE;
2255
2256 /* We don't want the clobber emitted, so handle this ourselves. */
2257 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2258 DONE;
2259 }")
2260
2261 (define_insn ""
2262 [(set (match_operand:DF 0 "reg_or_nonsymb_mem_operand"
2263 "=f,*r,RQ,?o,?Q,f,*r,*r")
2264 (match_operand:DF 1 "reg_or_0_or_nonsymb_mem_operand"
2265 "fG,*rG,f,*r,*r,RQ,o,RQ"))]
2266 "(register_operand (operands[0], DFmode)
2267 || reg_or_0_operand (operands[1], DFmode))
2268 && ! (GET_CODE (operands[1]) == CONST_DOUBLE
2269 && GET_CODE (operands[0]) == MEM)
2270 && ! TARGET_SOFT_FLOAT"
2271 "*
2272 {
2273 if (FP_REG_P (operands[0]) || FP_REG_P (operands[1])
2274 || operands[1] == CONST0_RTX (DFmode))
2275 return output_fp_move_double (operands);
2276 return output_move_double (operands);
2277 }"
2278 [(set_attr "type" "fpalu,move,fpstore,store,store,fpload,load,load")
2279 (set_attr "length" "4,8,4,8,16,4,8,16")])
2280
2281 (define_insn ""
2282 [(set (match_operand:DF 0 "reg_or_nonsymb_mem_operand"
2283 "=r,?o,?Q,r,r")
2284 (match_operand:DF 1 "reg_or_0_or_nonsymb_mem_operand"
2285 "rG,r,r,o,Q"))]
2286 "(register_operand (operands[0], DFmode)
2287 || reg_or_0_operand (operands[1], DFmode))
2288 && TARGET_SOFT_FLOAT"
2289 "*
2290 {
2291 return output_move_double (operands);
2292 }"
2293 [(set_attr "type" "move,store,store,load,load")
2294 (set_attr "length" "8,8,16,8,16")])
2295
2296 (define_insn ""
2297 [(set (match_operand:DF 0 "register_operand" "=fx")
2298 (mem:DF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2299 (match_operand:SI 2 "register_operand" "r"))))]
2300 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2301 "*
2302 {
2303 /* Reload can create backwards (relative to cse) unscaled index
2304 address modes when eliminating registers and possibly for
2305 pseudos that don't get hard registers. Deal with it. */
2306 if (operands[2] == hard_frame_pointer_rtx
2307 || operands[2] == stack_pointer_rtx)
2308 return \"flddx %1(%2),%0\";
2309 else
2310 return \"flddx %2(%1),%0\";
2311 }"
2312 [(set_attr "type" "fpload")
2313 (set_attr "length" "4")])
2314
2315 (define_insn ""
2316 [(set (match_operand:DF 0 "register_operand" "=fx")
2317 (mem:DF (plus:SI (match_operand:SI 1 "register_operand" "r")
2318 (match_operand:SI 2 "basereg_operand" "r"))))]
2319 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2320 "*
2321 {
2322 /* Reload can create backwards (relative to cse) unscaled index
2323 address modes when eliminating registers and possibly for
2324 pseudos that don't get hard registers. Deal with it. */
2325 if (operands[1] == hard_frame_pointer_rtx
2326 || operands[1] == stack_pointer_rtx)
2327 return \"flddx %2(%1),%0\";
2328 else
2329 return \"flddx %1(%2),%0\";
2330 }"
2331 [(set_attr "type" "fpload")
2332 (set_attr "length" "4")])
2333
2334 (define_insn ""
2335 [(set (mem:DF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2336 (match_operand:SI 2 "register_operand" "r")))
2337 (match_operand:DF 0 "register_operand" "fx"))]
2338 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2339 "*
2340 {
2341 /* Reload can create backwards (relative to cse) unscaled index
2342 address modes when eliminating registers and possibly for
2343 pseudos that don't get hard registers. Deal with it. */
2344 if (operands[2] == hard_frame_pointer_rtx
2345 || operands[2] == stack_pointer_rtx)
2346 return \"fstdx %0,%1(%2)\";
2347 else
2348 return \"fstdx %0,%2(%1)\";
2349 }"
2350 [(set_attr "type" "fpstore")
2351 (set_attr "length" "4")])
2352
2353 (define_insn ""
2354 [(set (mem:DF (plus:SI (match_operand:SI 1 "register_operand" "r")
2355 (match_operand:SI 2 "basereg_operand" "r")))
2356 (match_operand:DF 0 "register_operand" "fx"))]
2357 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2358 "*
2359 {
2360 /* Reload can create backwards (relative to cse) unscaled index
2361 address modes when eliminating registers and possibly for
2362 pseudos that don't get hard registers. Deal with it. */
2363 if (operands[1] == hard_frame_pointer_rtx
2364 || operands[1] == stack_pointer_rtx)
2365 return \"fstdx %0,%2(%1)\";
2366 else
2367 return \"fstdx %0,%1(%2)\";
2368 }"
2369 [(set_attr "type" "fpstore")
2370 (set_attr "length" "4")])
2371
2372 (define_expand "movdi"
2373 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand" "")
2374 (match_operand:DI 1 "general_operand" ""))]
2375 ""
2376 "
2377 {
2378 if (emit_move_sequence (operands, DImode, 0))
2379 DONE;
2380 }")
2381
2382 (define_expand "reload_indi"
2383 [(set (match_operand:DI 0 "register_operand" "=f")
2384 (match_operand:DI 1 "non_hard_reg_operand" ""))
2385 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
2386 ""
2387 "
2388 {
2389 if (emit_move_sequence (operands, DImode, operands[2]))
2390 DONE;
2391
2392 /* We don't want the clobber emitted, so handle this ourselves. */
2393 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2394 DONE;
2395 }")
2396
2397 (define_expand "reload_outdi"
2398 [(set (match_operand:DI 0 "general_operand" "")
2399 (match_operand:DI 1 "register_operand" "f"))
2400 (clobber (match_operand:SI 2 "register_operand" "=&r"))]
2401 ""
2402 "
2403 {
2404 if (emit_move_sequence (operands, DImode, operands[2]))
2405 DONE;
2406
2407 /* We don't want the clobber emitted, so handle this ourselves. */
2408 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2409 DONE;
2410 }")
2411
2412 (define_insn ""
2413 [(set (match_operand:DI 0 "register_operand" "=r")
2414 (high:DI (match_operand 1 "" "")))]
2415 ""
2416 "*
2417 {
2418 rtx op0 = operands[0];
2419 rtx op1 = operands[1];
2420
2421 if (GET_CODE (op1) == CONST_INT)
2422 {
2423 operands[0] = operand_subword (op0, 1, 0, DImode);
2424 output_asm_insn (\"ldil L'%1,%0\", operands);
2425
2426 operands[0] = operand_subword (op0, 0, 0, DImode);
2427 if (INTVAL (op1) < 0)
2428 output_asm_insn (\"ldi -1,%0\", operands);
2429 else
2430 output_asm_insn (\"ldi 0,%0\", operands);
2431 return \"\";
2432 }
2433 else if (GET_CODE (op1) == CONST_DOUBLE)
2434 {
2435 operands[0] = operand_subword (op0, 1, 0, DImode);
2436 operands[1] = GEN_INT (CONST_DOUBLE_LOW (op1));
2437 output_asm_insn (\"ldil L'%1,%0\", operands);
2438
2439 operands[0] = operand_subword (op0, 0, 0, DImode);
2440 operands[1] = GEN_INT (CONST_DOUBLE_HIGH (op1));
2441 output_asm_insn (singlemove_string (operands), operands);
2442 return \"\";
2443 }
2444 else
2445 abort ();
2446 }"
2447 [(set_attr "type" "move")
2448 (set_attr "length" "8")])
2449
2450 ;;; Experimental
2451
2452 (define_insn ""
2453 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand"
2454 "=r,o,Q,r,r,r,f,f,*TR")
2455 (match_operand:DI 1 "general_operand"
2456 "rM,r,r,o*R,Q,i,fM,*TR,f"))]
2457 "(register_operand (operands[0], DImode)
2458 || reg_or_0_operand (operands[1], DImode))
2459 && ! TARGET_SOFT_FLOAT"
2460 "*
2461 {
2462 if (FP_REG_P (operands[0]) || FP_REG_P (operands[1])
2463 || (operands[1] == CONST0_RTX (DImode)))
2464 return output_fp_move_double (operands);
2465 return output_move_double (operands);
2466 }"
2467 [(set_attr "type" "move,store,store,load,load,multi,fpalu,fpload,fpstore")
2468 (set_attr "length" "8,8,16,8,16,16,4,4,4")])
2469
2470 (define_insn ""
2471 [(set (match_operand:DI 0 "reg_or_nonsymb_mem_operand"
2472 "=r,o,Q,r,r,r")
2473 (match_operand:DI 1 "general_operand"
2474 "rM,r,r,o,Q,i"))]
2475 "(register_operand (operands[0], DImode)
2476 || reg_or_0_operand (operands[1], DImode))
2477 && TARGET_SOFT_FLOAT"
2478 "*
2479 {
2480 return output_move_double (operands);
2481 }"
2482 [(set_attr "type" "move,store,store,load,load,multi")
2483 (set_attr "length" "8,8,16,8,16,16")])
2484
2485 (define_insn ""
2486 [(set (match_operand:DI 0 "register_operand" "=r,&r")
2487 (lo_sum:DI (match_operand:DI 1 "register_operand" "0,r")
2488 (match_operand:DI 2 "immediate_operand" "i,i")))]
2489 ""
2490 "*
2491 {
2492 /* Don't output a 64 bit constant, since we can't trust the assembler to
2493 handle it correctly. */
2494 if (GET_CODE (operands[2]) == CONST_DOUBLE)
2495 operands[2] = GEN_INT (CONST_DOUBLE_LOW (operands[2]));
2496 if (which_alternative == 1)
2497 output_asm_insn (\"copy %1,%0\", operands);
2498 return \"ldo R'%G2(%R1),%R0\";
2499 }"
2500 [(set_attr "type" "move,move")
2501 (set_attr "length" "4,8")])
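
;; A hypothetical example of how the high/lo_sum DImode pair is used
;; (register numbers invented): moving the 64 bit constant 100000 into the
;; register pair %r4/%r5 first goes through the high pattern, which emits
;; roughly
;;     ldil L'100000,%r5
;;     ldi 0,%r4
;; and then through this lo_sum pattern, which finishes the low word with
;;     ldo R'100000(%r5),%r5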
2502
2503 ;; This pattern forces (set (reg:SF ...) (const_double ...))
2504 ;; to be reloaded by putting the constant into memory when
2505 ;; reg is a floating point register.
2506 ;;
2507 ;; For integer registers we use ldil;ldo to set the appropriate
2508 ;; value.
2509 ;;
2510 ;; This must come before the movsf pattern, and it must be present
2511 ;; to handle obscure reloading cases.
2512 (define_insn ""
2513 [(set (match_operand:SF 0 "register_operand" "=?r,f")
2514 (match_operand:SF 1 "" "?F,m"))]
2515 "GET_CODE (operands[1]) == CONST_DOUBLE
2516 && operands[1] != CONST0_RTX (SFmode)
2517 && ! TARGET_SOFT_FLOAT"
2518 "* return (which_alternative == 0 ? singlemove_string (operands)
2519 : \" fldw%F1 %1,%0\");"
2520 [(set_attr "type" "move,fpload")
2521 (set_attr "length" "8,4")])
2522
2523 (define_expand "movsf"
2524 [(set (match_operand:SF 0 "general_operand" "")
2525 (match_operand:SF 1 "general_operand" ""))]
2526 ""
2527 "
2528 {
2529 if (emit_move_sequence (operands, SFmode, 0))
2530 DONE;
2531 }")
2532
2533 ;; Reloading an SFmode value requires a scratch register if it is
2534 ;; going into or out of the floating point registers.
2535
2536 (define_expand "reload_insf"
2537 [(set (match_operand:SF 0 "register_operand" "=Z")
2538 (match_operand:SF 1 "non_hard_reg_operand" ""))
2539 (clobber (match_operand:SF 2 "register_operand" "=&r"))]
2540 ""
2541 "
2542 {
2543 if (emit_move_sequence (operands, SFmode, operands[2]))
2544 DONE;
2545
2546 /* We don't want the clobber emitted, so handle this ourselves. */
2547 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2548 DONE;
2549 }")
2550
2551 (define_expand "reload_outsf"
2552 [(set (match_operand:SF 0 "non_hard_reg_operand" "")
2553 (match_operand:SF 1 "register_operand" "Z"))
2554 (clobber (match_operand:SF 2 "register_operand" "=&r"))]
2555 ""
2556 "
2557 {
2558 if (emit_move_sequence (operands, SFmode, operands[2]))
2559 DONE;
2560
2561 /* We don't want the clobber emitted, so handle this ourselves. */
2562 emit_insn (gen_rtx_SET (VOIDmode, operands[0], operands[1]));
2563 DONE;
2564 }")
2565
2566 (define_insn ""
2567 [(set (match_operand:SF 0 "reg_or_nonsymb_mem_operand"
2568 "=f,r,f,r,RQ,Q")
2569 (match_operand:SF 1 "reg_or_0_or_nonsymb_mem_operand"
2570 "fG,rG,RQ,RQ,f,rG"))]
2571 "(register_operand (operands[0], SFmode)
2572 || reg_or_0_operand (operands[1], SFmode))
2573 && ! TARGET_SOFT_FLOAT"
2574 "@
2575 fcpy,sgl %f1,%0
2576 copy %r1,%0
2577 fldw%F1 %1,%0
2578 ldw%M1 %1,%0
2579 fstw%F0 %r1,%0
2580 stw%M0 %r1,%0"
2581 [(set_attr "type" "fpalu,move,fpload,load,fpstore,store")
2582 (set_attr "pa_combine_type" "addmove")
2583 (set_attr "length" "4,4,4,4,4,4")])
2584
2585 (define_insn ""
2586 [(set (match_operand:SF 0 "reg_or_nonsymb_mem_operand"
2587 "=r,r,Q")
2588 (match_operand:SF 1 "reg_or_0_or_nonsymb_mem_operand"
2589 "rG,RQ,rG"))]
2590 "(register_operand (operands[0], SFmode)
2591 || reg_or_0_operand (operands[1], SFmode))
2592 && TARGET_SOFT_FLOAT"
2593 "@
2594 copy %r1,%0
2595 ldw%M1 %1,%0
2596 stw%M0 %r1,%0"
2597 [(set_attr "type" "move,load,store")
2598 (set_attr "pa_combine_type" "addmove")
2599 (set_attr "length" "4,4,4")])
2600
2601 (define_insn ""
2602 [(set (match_operand:SF 0 "register_operand" "=fx")
2603 (mem:SF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2604 (match_operand:SI 2 "register_operand" "r"))))]
2605 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2606 "*
2607 {
2608 /* Reload can create backwards (relative to cse) unscaled index
2609 address modes when eliminating registers and possibly for
2610 pseudos that don't get hard registers. Deal with it. */
2611 if (operands[2] == hard_frame_pointer_rtx
2612 || operands[2] == stack_pointer_rtx)
2613 return \"fldwx %1(%2),%0\";
2614 else
2615 return \"fldwx %2(%1),%0\";
2616 }"
2617 [(set_attr "type" "fpload")
2618 (set_attr "length" "4")])
2619
2620 (define_insn ""
2621 [(set (match_operand:SF 0 "register_operand" "=fx")
2622 (mem:SF (plus:SI (match_operand:SI 1 "register_operand" "r")
2623 (match_operand:SI 2 "basereg_operand" "r"))))]
2624 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2625 "*
2626 {
2627 /* Reload can create backwards (relative to cse) unscaled index
2628 address modes when eliminating registers and possibly for
2629 pseudos that don't get hard registers. Deal with it. */
2630 if (operands[1] == hard_frame_pointer_rtx
2631 || operands[1] == stack_pointer_rtx)
2632 return \"fldwx %2(%1),%0\";
2633 else
2634 return \"fldwx %1(%2),%0\";
2635 }"
2636 [(set_attr "type" "fpload")
2637 (set_attr "length" "4")])
2638
2639 (define_insn ""
2640 [(set (mem:SF (plus:SI (match_operand:SI 1 "basereg_operand" "r")
2641 (match_operand:SI 2 "register_operand" "r")))
2642 (match_operand:SF 0 "register_operand" "fx"))]
2643 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2644 "*
2645 {
2646 /* Reload can create backwards (relative to cse) unscaled index
2647 address modes when eliminating registers and possibly for
2648 pseudos that don't get hard registers. Deal with it. */
2649 if (operands[2] == hard_frame_pointer_rtx
2650 || operands[2] == stack_pointer_rtx)
2651 return \"fstwx %0,%1(%2)\";
2652 else
2653 return \"fstwx %0,%2(%1)\";
2654 }"
2655 [(set_attr "type" "fpstore")
2656 (set_attr "length" "4")])
2657 \f
2658 (define_insn ""
2659 [(set (mem:SF (plus:SI (match_operand:SI 1 "register_operand" "r")
2660 (match_operand:SI 2 "basereg_operand" "r")))
2661 (match_operand:SF 0 "register_operand" "fx"))]
2662 "! TARGET_DISABLE_INDEXING && ! TARGET_SOFT_FLOAT"
2663 "*
2664 {
2665 /* Reload can create backwards (relative to cse) unscaled index
2666 address modes when eliminating registers and possibly for
2667 pseudos that don't get hard registers. Deal with it. */
2668 if (operands[1] == hard_frame_pointer_rtx
2669 || operands[1] == stack_pointer_rtx)
2670 return \"fstwx %0,%2(%1)\";
2671 else
2672 return \"fstwx %0,%1(%2)\";
2673 }"
2674 [(set_attr "type" "fpstore")
2675 (set_attr "length" "4")])
2676 \f
2677
2678 ;;- zero extension instructions
2679 ;; We have define_expand for zero extension patterns to make sure the
2680 ;; operands get loaded into registers. The define_insns accept
2681 ;; memory operands. This gives us better overall code than just
2682 ;; having a pattern that does or does not accept memory operands.
2683
2684 (define_expand "zero_extendhisi2"
2685 [(set (match_operand:SI 0 "register_operand" "")
2686 (zero_extend:SI
2687 (match_operand:HI 1 "register_operand" "")))]
2688 ""
2689 "")
2690
2691 (define_insn ""
2692 [(set (match_operand:SI 0 "register_operand" "=r,r")
2693 (zero_extend:SI
2694 (match_operand:HI 1 "move_operand" "r,RQ")))]
2695 "GET_CODE (operands[1]) != CONST_INT"
2696 "@
2697 extru %1,31,16,%0
2698 ldh%M1 %1,%0"
2699 [(set_attr "type" "shift,load")
2700 (set_attr "length" "4,4")])
2701
2702 (define_expand "zero_extendqihi2"
2703 [(set (match_operand:HI 0 "register_operand" "")
2704 (zero_extend:HI
2705 (match_operand:QI 1 "register_operand" "")))]
2706 ""
2707 "")
2708
2709 (define_insn ""
2710 [(set (match_operand:HI 0 "register_operand" "=r,r")
2711 (zero_extend:HI
2712 (match_operand:QI 1 "move_operand" "r,RQ")))]
2713 "GET_CODE (operands[1]) != CONST_INT"
2714 "@
2715 extru %1,31,8,%0
2716 ldb%M1 %1,%0"
2717 [(set_attr "type" "shift,load")
2718 (set_attr "length" "4,4")])
2719
2720 (define_expand "zero_extendqisi2"
2721 [(set (match_operand:SI 0 "register_operand" "")
2722 (zero_extend:SI
2723 (match_operand:QI 1 "register_operand" "")))]
2724 ""
2725 "")
2726
2727 (define_insn ""
2728 [(set (match_operand:SI 0 "register_operand" "=r,r")
2729 (zero_extend:SI
2730 (match_operand:QI 1 "move_operand" "r,RQ")))]
2731 "GET_CODE (operands[1]) != CONST_INT"
2732 "@
2733 extru %1,31,8,%0
2734 ldb%M1 %1,%0"
2735 [(set_attr "type" "shift,load")
2736 (set_attr "length" "4,4")])
2737
2738 ;;- sign extension instructions
2739
2740 (define_insn "extendhisi2"
2741 [(set (match_operand:SI 0 "register_operand" "=r")
2742 (sign_extend:SI (match_operand:HI 1 "register_operand" "r")))]
2743 ""
2744 "extrs %1,31,16,%0"
2745 [(set_attr "type" "shift")
2746 (set_attr "length" "4")])
2747
2748 (define_insn "extendqihi2"
2749 [(set (match_operand:HI 0 "register_operand" "=r")
2750 (sign_extend:HI (match_operand:QI 1 "register_operand" "r")))]
2751 ""
2752 "extrs %1,31,8,%0"
2753 [(set_attr "type" "shift")
2754 (set_attr "length" "4")])
2755
2756 (define_insn "extendqisi2"
2757 [(set (match_operand:SI 0 "register_operand" "=r")
2758 (sign_extend:SI (match_operand:QI 1 "register_operand" "r")))]
2759 ""
2760 "extrs %1,31,8,%0"
2761 [(set_attr "type" "shift")
2762 (set_attr "length" "4")])
2763 \f
2764 ;; Conversions between float and double.
2765
2766 (define_insn "extendsfdf2"
2767 [(set (match_operand:DF 0 "register_operand" "=f")
2768 (float_extend:DF
2769 (match_operand:SF 1 "register_operand" "f")))]
2770 "! TARGET_SOFT_FLOAT"
2771 "fcnvff,sgl,dbl %1,%0"
2772 [(set_attr "type" "fpalu")
2773 (set_attr "length" "4")])
2774
2775 (define_insn "truncdfsf2"
2776 [(set (match_operand:SF 0 "register_operand" "=f")
2777 (float_truncate:SF
2778 (match_operand:DF 1 "register_operand" "f")))]
2779 "! TARGET_SOFT_FLOAT"
2780 "fcnvff,dbl,sgl %1,%0"
2781 [(set_attr "type" "fpalu")
2782 (set_attr "length" "4")])
2783
2784 ;; Conversion between fixed point and floating point.
2785 ;; Note that among the fix-to-float insns
2786 ;; the ones that start with SImode come first.
2787 ;; That is so that an operand that is a CONST_INT
2788 ;; (and therefore lacks a specific machine mode)
2789 ;; will be recognized as SImode (which is always valid)
2790 ;; rather than as QImode or HImode.
2791
2792 ;; This pattern forces (set (reg:SF ...) (float:SF (const_int ...)))
2793 ;; to be reloaded by putting the constant into memory.
2794 ;; It must come before the more general floatsisf2 pattern.
2795 (define_insn ""
2796 [(set (match_operand:SF 0 "register_operand" "=f")
2797 (float:SF (match_operand:SI 1 "const_int_operand" "m")))]
2798 "! TARGET_SOFT_FLOAT"
2799 "fldw%F1 %1,%0\;fcnvxf,sgl,sgl %0,%0"
2800 [(set_attr "type" "fpalu")
2801 (set_attr "length" "8")])
2802
2803 (define_insn "floatsisf2"
2804 [(set (match_operand:SF 0 "register_operand" "=f")
2805 (float:SF (match_operand:SI 1 "register_operand" "f")))]
2806 "! TARGET_SOFT_FLOAT"
2807 "fcnvxf,sgl,sgl %1,%0"
2808 [(set_attr "type" "fpalu")
2809 (set_attr "length" "4")])
2810
2811 ;; This pattern forces (set (reg:DF ...) (float:DF (const_int ...)))
2812 ;; to be reloaded by putting the constant into memory.
2813 ;; It must come before the more general floatsidf2 pattern.
2814 (define_insn ""
2815 [(set (match_operand:DF 0 "register_operand" "=f")
2816 (float:DF (match_operand:SI 1 "const_int_operand" "m")))]
2817 "! TARGET_SOFT_FLOAT"
2818 "fldw%F1 %1,%0\;fcnvxf,sgl,dbl %0,%0"
2819 [(set_attr "type" "fpalu")
2820 (set_attr "length" "8")])
2821
2822 (define_insn "floatsidf2"
2823 [(set (match_operand:DF 0 "register_operand" "=f")
2824 (float:DF (match_operand:SI 1 "register_operand" "f")))]
2825 "! TARGET_SOFT_FLOAT"
2826 "fcnvxf,sgl,dbl %1,%0"
2827 [(set_attr "type" "fpalu")
2828 (set_attr "length" "4")])
2829
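;; There is no unsigned-convert instruction, so the unsigned SImode value
;; is widened to a (necessarily non-negative) DImode temporary by placing
;; it in the low word and zeroing the high word, after which the signed
;; DImode conversions below do the work.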
2830 (define_expand "floatunssisf2"
2831 [(set (subreg:SI (match_dup 2) 1)
2832 (match_operand:SI 1 "register_operand" ""))
2833 (set (subreg:SI (match_dup 2) 0)
2834 (const_int 0))
2835 (set (match_operand:SF 0 "register_operand" "")
2836 (float:SF (match_dup 2)))]
2837 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2838 "operands[2] = gen_reg_rtx (DImode);")
2839
2840 (define_expand "floatunssidf2"
2841 [(set (subreg:SI (match_dup 2) 1)
2842 (match_operand:SI 1 "register_operand" ""))
2843 (set (subreg:SI (match_dup 2) 0)
2844 (const_int 0))
2845 (set (match_operand:DF 0 "register_operand" "")
2846 (float:DF (match_dup 2)))]
2847 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2848 "operands[2] = gen_reg_rtx (DImode);")
2849
2850 (define_insn "floatdisf2"
2851 [(set (match_operand:SF 0 "register_operand" "=f")
2852 (float:SF (match_operand:DI 1 "register_operand" "f")))]
2853 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2854 "fcnvxf,dbl,sgl %1,%0"
2855 [(set_attr "type" "fpalu")
2856 (set_attr "length" "4")])
2857
2858 (define_insn "floatdidf2"
2859 [(set (match_operand:DF 0 "register_operand" "=f")
2860 (float:DF (match_operand:DI 1 "register_operand" "f")))]
2861 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2862 "fcnvxf,dbl,dbl %1,%0"
2863 [(set_attr "type" "fpalu")
2864 (set_attr "length" "4")])
2865
2866 ;; Convert a float to an actual integer.
2867 ;; Truncation is performed as part of the conversion.
2868
2869 (define_insn "fix_truncsfsi2"
2870 [(set (match_operand:SI 0 "register_operand" "=f")
2871 (fix:SI (fix:SF (match_operand:SF 1 "register_operand" "f"))))]
2872 "! TARGET_SOFT_FLOAT"
2873 "fcnvfxt,sgl,sgl %1,%0"
2874 [(set_attr "type" "fpalu")
2875 (set_attr "length" "4")])
2876
2877 (define_insn "fix_truncdfsi2"
2878 [(set (match_operand:SI 0 "register_operand" "=f")
2879 (fix:SI (fix:DF (match_operand:DF 1 "register_operand" "f"))))]
2880 "! TARGET_SOFT_FLOAT"
2881 "fcnvfxt,dbl,sgl %1,%0"
2882 [(set_attr "type" "fpalu")
2883 (set_attr "length" "4")])
2884
2885 (define_insn "fix_truncsfdi2"
2886 [(set (match_operand:DI 0 "register_operand" "=f")
2887 (fix:DI (fix:SF (match_operand:SF 1 "register_operand" "f"))))]
2888 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2889 "fcnvfxt,sgl,dbl %1,%0"
2890 [(set_attr "type" "fpalu")
2891 (set_attr "length" "4")])
2892
2893 (define_insn "fix_truncdfdi2"
2894 [(set (match_operand:DI 0 "register_operand" "=f")
2895 (fix:DI (fix:DF (match_operand:DF 1 "register_operand" "f"))))]
2896 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT"
2897 "fcnvfxt,dbl,dbl %1,%0"
2898 [(set_attr "type" "fpalu")
2899 (set_attr "length" "4")])
2900 \f
2901 ;;- arithmetic instructions
2902
2903 (define_insn "adddi3"
2904 [(set (match_operand:DI 0 "register_operand" "=r")
2905 (plus:DI (match_operand:DI 1 "register_operand" "%r")
2906 (match_operand:DI 2 "arith11_operand" "rI")))]
2907 ""
2908 "*
2909 {
2910 if (GET_CODE (operands[2]) == CONST_INT)
2911 {
2912 if (INTVAL (operands[2]) >= 0)
2913 return \"addi %2,%R1,%R0\;addc %1,0,%0\";
2914 else
2915 return \"addi %2,%R1,%R0\;subb %1,0,%0\";
2916 }
2917 else
2918 return \"add %R2,%R1,%R0\;addc %2,%1,%0\";
2919 }"
2920 [(set_attr "type" "binary")
2921 (set_attr "length" "8")])
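
;; An editorial note on the two forms above (reasoning sketched here, not
;; taken from the original sources): a negative 11 bit immediate is
;; conceptually sign extended, so the high word of the addend is -1.
;; After addi sets the carry C, the high word result should be
;; %1 + (-1) + C, which is what subb %1,0,%0 produces
;; (r1 - r2 + C - 1 with r2 = 0).  For a non-negative immediate the high
;; addend is 0, so addc %1,0,%0 just folds in the carry.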
2922
2923 (define_insn ""
2924 [(set (match_operand:SI 0 "register_operand" "=r")
2925 (plus:SI (not:SI (match_operand:SI 1 "register_operand" "r"))
2926 (match_operand:SI 2 "register_operand" "r")))]
2927 ""
2928 "uaddcm %2,%1,%0"
2929 [(set_attr "type" "binary")
2930 (set_attr "length" "4")])
2931
2932 ;; define_splits to optimize cases of adding a constant integer
2933 ;; to a register when the constant does not fit in 14 bits.
2934 (define_split
2935 [(set (match_operand:SI 0 "register_operand" "")
2936 (plus:SI (match_operand:SI 1 "register_operand" "")
2937 (match_operand:SI 2 "const_int_operand" "")))
2938 (clobber (match_operand:SI 4 "register_operand" ""))]
2939 "! cint_ok_for_move (INTVAL (operands[2]))
2940 && VAL_14_BITS_P (INTVAL (operands[2]) >> 1)"
2941 [(set (match_dup 4) (plus:SI (match_dup 1) (match_dup 2)))
2942 (set (match_dup 0) (plus:SI (match_dup 4) (match_dup 3)))]
2943 "
2944 {
2945 int val = INTVAL (operands[2]);
2946 int low = (val < 0) ? -0x2000 : 0x1fff;
2947 int rest = val - low;
2948
2949 operands[2] = GEN_INT (rest);
2950 operands[3] = GEN_INT (low);
2951 }")
2952
2953 (define_split
2954 [(set (match_operand:SI 0 "register_operand" "")
2955 (plus:SI (match_operand:SI 1 "register_operand" "")
2956 (match_operand:SI 2 "const_int_operand" "")))
2957 (clobber (match_operand:SI 4 "register_operand" ""))]
2958 "! cint_ok_for_move (INTVAL (operands[2]))"
2959 [(set (match_dup 4) (match_dup 2))
2960 (set (match_dup 0) (plus:SI (mult:SI (match_dup 4) (match_dup 3))
2961 (match_dup 1)))]
2962 "
2963 {
2964 HOST_WIDE_INT intval = INTVAL (operands[2]);
2965
2966 /* Try dividing the constant by 2, then 4, and finally 8 to see
2967 if we can get a constant which can be loaded into a register
2968 in a single instruction (cint_ok_for_move).
2969
2970 If that fails, try to negate the constant and subtract it
2971 from our input operand. */
2972 if (intval % 2 == 0 && cint_ok_for_move (intval / 2))
2973 {
2974 operands[2] = GEN_INT (intval / 2);
2975 operands[3] = GEN_INT (2);
2976 }
2977 else if (intval % 4 == 0 && cint_ok_for_move (intval / 4))
2978 {
2979 operands[2] = GEN_INT (intval / 4);
2980 operands[3] = GEN_INT (4);
2981 }
2982 else if (intval % 8 == 0 && cint_ok_for_move (intval / 8))
2983 {
2984 operands[2] = GEN_INT (intval / 8);
2985 operands[3] = GEN_INT (8);
2986 }
2987 else if (cint_ok_for_move (-intval))
2988 {
2989 emit_insn (gen_rtx_SET (VOIDmode, operands[4], GEN_INT (-intval)));
2990 emit_insn (gen_subsi3 (operands[0], operands[1], operands[4]));
2991 DONE;
2992 }
2993 else
2994 FAIL;
2995 }")
2996
2997 (define_insn "addsi3"
2998 [(set (match_operand:SI 0 "register_operand" "=r,r")
2999 (plus:SI (match_operand:SI 1 "register_operand" "%r,r")
3000 (match_operand:SI 2 "arith_operand" "r,J")))]
3001 ""
3002 "@
3003 addl %1,%2,%0
3004 ldo %2(%1),%0"
3005 [(set_attr "type" "binary,binary")
3006 (set_attr "pa_combine_type" "addmove")
3007 (set_attr "length" "4,4")])
3008
3009 ;; Disgusting kludge to work around reload bugs with frame pointer
3010 ;; elimination. Similar to other magic reload patterns in the
3011 ;; indexed memory operations.
3012 (define_insn ""
3013 [(set (match_operand:SI 0 "register_operand" "=&r")
3014 (plus:SI (plus:SI (match_operand:SI 1 "register_operand" "%r")
3015 (match_operand:SI 2 "register_operand" "r"))
3016 (match_operand:SI 3 "const_int_operand" "rL")))]
3017 "reload_in_progress"
3018 "*
3019 {
3020 if (GET_CODE (operands[3]) == CONST_INT)
3021 return \"ldo %3(%2),%0\;addl %1,%0,%0\";
3022 else
3023 return \"addl %3,%2,%0\;addl %1,%0,%0\";
3024 }"
3025 [(set_attr "type" "binary")
3026 (set_attr "length" "8")])
3027
3028 (define_insn "subdi3"
3029 [(set (match_operand:DI 0 "register_operand" "=r")
3030 (minus:DI (match_operand:DI 1 "register_operand" "r")
3031 (match_operand:DI 2 "register_operand" "r")))]
3032 ""
3033 "sub %R1,%R2,%R0\;subb %1,%2,%0"
3034 [(set_attr "type" "binary")
3035 (set_attr "length" "8")])
3036
3037 (define_insn "subsi3"
3038 [(set (match_operand:SI 0 "register_operand" "=r,r")
3039 (minus:SI (match_operand:SI 1 "arith11_operand" "r,I")
3040 (match_operand:SI 2 "register_operand" "r,r")))]
3041 ""
3042 "@
3043 sub %1,%2,%0
3044 subi %1,%2,%0"
3045 [(set_attr "type" "binary,binary")
3046 (set_attr "length" "4,4")])
3047
3048 ;; Clobbering a "register_operand" instead of a match_scratch
3049 ;; in operand3 of millicode calls avoids spilling %r1 and
3050 ;; produces better code.
3051
3052 ;; The mulsi3 insns set up registers for the millicode call.
3053 (define_expand "mulsi3"
3054 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3055 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3056 (parallel [(set (reg:SI 29) (mult:SI (reg:SI 26) (reg:SI 25)))
3057 (clobber (match_dup 3))
3058 (clobber (reg:SI 26))
3059 (clobber (reg:SI 25))
3060 (clobber (reg:SI 31))])
3061 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3062 ""
3063 "
3064 {
3065 if (TARGET_PA_11 && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT)
3066 {
3067 rtx scratch = gen_reg_rtx (DImode);
3068 operands[1] = force_reg (SImode, operands[1]);
3069 operands[2] = force_reg (SImode, operands[2]);
3070 emit_insn (gen_umulsidi3 (scratch, operands[1], operands[2]));
3071 emit_insn (gen_rtx_SET (VOIDmode,
3072 operands[0],
3073 gen_rtx_SUBREG (SImode, scratch, 1)));
3074 DONE;
3075 }
3076 operands[3] = gen_reg_rtx (SImode);
3077 }")
3078
3079 (define_insn "umulsidi3"
3080 [(set (match_operand:DI 0 "nonimmediate_operand" "=f")
3081 (mult:DI (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "f"))
3082 (zero_extend:DI (match_operand:SI 2 "nonimmediate_operand" "f"))))]
3083 "TARGET_PA_11 && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT"
3084 "xmpyu %1,%2,%0"
3085 [(set_attr "type" "fpmuldbl")
3086 (set_attr "length" "4")])
3087
3088 (define_insn ""
3089 [(set (match_operand:DI 0 "nonimmediate_operand" "=f")
3090 (mult:DI (zero_extend:DI (match_operand:SI 1 "nonimmediate_operand" "f"))
3091 (match_operand:DI 2 "uint32_operand" "f")))]
3092 "TARGET_PA_11 && ! TARGET_DISABLE_FPREGS && ! TARGET_SOFT_FLOAT"
3093 "xmpyu %1,%R2,%0"
3094 [(set_attr "type" "fpmuldbl")
3095 (set_attr "length" "4")])
3096
3097 (define_insn ""
3098 [(set (reg:SI 29) (mult:SI (reg:SI 26) (reg:SI 25)))
3099 (clobber (match_operand:SI 0 "register_operand" "=a"))
3100 (clobber (reg:SI 26))
3101 (clobber (reg:SI 25))
3102 (clobber (reg:SI 31))]
3103 ""
3104 "* return output_mul_insn (0, insn);"
3105 [(set_attr "type" "milli")
3106 (set (attr "length")
3107 (cond [
3108 ;; Target (or stub) within reach
3109 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3110 (const_int 240000))
3111 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3112 (const_int 0)))
3113 (const_int 4)
3114
3115 ;; NO_SPACE_REGS
3116 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3117 (const_int 0))
3118 (const_int 8)
3119
3120 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3121 ;; same as NO_SPACE_REGS code
3122 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3123 (const_int 0))
3124 (eq (symbol_ref "flag_pic")
3125 (const_int 0)))
3126 (const_int 8)]
3127
3128 ;; Out of range and either PIC or PORTABLE_RUNTIME
3129 (const_int 24)))])
3130
3131 ;;; Division and mod.
3132 (define_expand "divsi3"
3133 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3134 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3135 (parallel [(set (reg:SI 29) (div:SI (reg:SI 26) (reg:SI 25)))
3136 (clobber (match_dup 3))
3137 (clobber (reg:SI 26))
3138 (clobber (reg:SI 25))
3139 (clobber (reg:SI 31))])
3140 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3141 ""
3142 "
3143 {
3144 operands[3] = gen_reg_rtx (SImode);
3145 if (GET_CODE (operands[2]) == CONST_INT && emit_hpdiv_const (operands, 0))
3146 DONE;
3147 }")
3148
3149 (define_insn ""
3150 [(set (reg:SI 29)
3151 (div:SI (reg:SI 26) (match_operand:SI 0 "div_operand" "")))
3152 (clobber (match_operand:SI 1 "register_operand" "=a"))
3153 (clobber (reg:SI 26))
3154 (clobber (reg:SI 25))
3155 (clobber (reg:SI 31))]
3156 ""
3157 "*
3158 return output_div_insn (operands, 0, insn);"
3159 [(set_attr "type" "milli")
3160 (set (attr "length")
3161 (cond [
3162 ;; Target (or stub) within reach
3163 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3164 (const_int 240000))
3165 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3166 (const_int 0)))
3167 (const_int 4)
3168
3169 ;; NO_SPACE_REGS
3170 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3171 (const_int 0))
3172 (const_int 8)
3173
3174 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3175 ;; same as NO_SPACE_REGS code
3176 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3177 (const_int 0))
3178 (eq (symbol_ref "flag_pic")
3179 (const_int 0)))
3180 (const_int 8)]
3181
3182 ;; Out of range and either PIC or PORTABLE_RUNTIME
3183 (const_int 24)))])
3184
3185 (define_expand "udivsi3"
3186 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3187 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3188 (parallel [(set (reg:SI 29) (udiv:SI (reg:SI 26) (reg:SI 25)))
3189 (clobber (match_dup 3))
3190 (clobber (reg:SI 26))
3191 (clobber (reg:SI 25))
3192 (clobber (reg:SI 31))])
3193 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3194 ""
3195 "
3196 {
3197 operands[3] = gen_reg_rtx (SImode);
3198 if (GET_CODE (operands[2]) == CONST_INT && emit_hpdiv_const (operands, 1))
3199 DONE;
3200 }")
3201
3202 (define_insn ""
3203 [(set (reg:SI 29)
3204 (udiv:SI (reg:SI 26) (match_operand:SI 0 "div_operand" "")))
3205 (clobber (match_operand:SI 1 "register_operand" "=a"))
3206 (clobber (reg:SI 26))
3207 (clobber (reg:SI 25))
3208 (clobber (reg:SI 31))]
3209 ""
3210 "*
3211 return output_div_insn (operands, 1, insn);"
3212 [(set_attr "type" "milli")
3213 (set (attr "length")
3214 (cond [
3215 ;; Target (or stub) within reach
3216 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3217 (const_int 240000))
3218 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3219 (const_int 0)))
3220 (const_int 4)
3221
3222 ;; NO_SPACE_REGS
3223 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3224 (const_int 0))
3225 (const_int 8)
3226
3227 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3228 ;; same as NO_SPACE_REGS code
3229 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3230 (const_int 0))
3231 (eq (symbol_ref "flag_pic")
3232 (const_int 0)))
3233 (const_int 8)]
3234
3235 ;; Out of range and either PIC or PORTABLE_RUNTIME
3236 (const_int 24)))])
3237
3238 (define_expand "modsi3"
3239 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3240 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3241 (parallel [(set (reg:SI 29) (mod:SI (reg:SI 26) (reg:SI 25)))
3242 (clobber (match_dup 3))
3243 (clobber (reg:SI 26))
3244 (clobber (reg:SI 25))
3245 (clobber (reg:SI 31))])
3246 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3247 ""
3248 "
3249 {
3250 operands[3] = gen_reg_rtx (SImode);
3251 }")
3252
3253 (define_insn ""
3254 [(set (reg:SI 29) (mod:SI (reg:SI 26) (reg:SI 25)))
3255 (clobber (match_operand:SI 0 "register_operand" "=a"))
3256 (clobber (reg:SI 26))
3257 (clobber (reg:SI 25))
3258 (clobber (reg:SI 31))]
3259 ""
3260 "*
3261 return output_mod_insn (0, insn);"
3262 [(set_attr "type" "milli")
3263 (set (attr "length")
3264 (cond [
3265 ;; Target (or stub) within reach
3266 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3267 (const_int 240000))
3268 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3269 (const_int 0)))
3270 (const_int 4)
3271
3272 ;; NO_SPACE_REGS
3273 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3274 (const_int 0))
3275 (const_int 8)
3276
3277 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3278 ;; same as NO_SPACE_REGS code
3279 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3280 (const_int 0))
3281 (eq (symbol_ref "flag_pic")
3282 (const_int 0)))
3283 (const_int 8)]
3284
3285 ;; Out of range and either PIC or PORTABLE_RUNTIME
3286 (const_int 24)))])
3287
3288 (define_expand "umodsi3"
3289 [(set (reg:SI 26) (match_operand:SI 1 "move_operand" ""))
3290 (set (reg:SI 25) (match_operand:SI 2 "move_operand" ""))
3291 (parallel [(set (reg:SI 29) (umod:SI (reg:SI 26) (reg:SI 25)))
3292 (clobber (match_dup 3))
3293 (clobber (reg:SI 26))
3294 (clobber (reg:SI 25))
3295 (clobber (reg:SI 31))])
3296 (set (match_operand:SI 0 "general_operand" "") (reg:SI 29))]
3297 ""
3298 "
3299 {
3300 operands[3] = gen_reg_rtx (SImode);
3301 }")
3302
3303 (define_insn ""
3304 [(set (reg:SI 29) (umod:SI (reg:SI 26) (reg:SI 25)))
3305 (clobber (match_operand:SI 0 "register_operand" "=a"))
3306 (clobber (reg:SI 26))
3307 (clobber (reg:SI 25))
3308 (clobber (reg:SI 31))]
3309 ""
3310 "*
3311 return output_mod_insn (1, insn);"
3312 [(set_attr "type" "milli")
3313 (set (attr "length")
3314 (cond [
3315 ;; Target (or stub) within reach
3316 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
3317 (const_int 240000))
3318 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3319 (const_int 0)))
3320 (const_int 4)
3321
3322 ;; NO_SPACE_REGS
3323 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
3324 (const_int 0))
3325 (const_int 8)
3326
3327 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
3328 ;; same as NO_SPACE_REGS code
3329 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
3330 (const_int 0))
3331 (eq (symbol_ref "flag_pic")
3332 (const_int 0)))
3333 (const_int 8)]
3334
3335 ;; Out of range and either PIC or PORTABLE_RUNTIME
3336 (const_int 24)))])
3337
3338 ;;- and instructions
3339 ;; We define DImode `and` so that, combined with DImode `not`, we can
3340 ;; get DImode `andn`.  Other combinations are possible.
3341
3342 (define_expand "anddi3"
3343 [(set (match_operand:DI 0 "register_operand" "")
3344 (and:DI (match_operand:DI 1 "arith_double_operand" "")
3345 (match_operand:DI 2 "arith_double_operand" "")))]
3346 ""
3347 "
3348 {
3349 if (! register_operand (operands[1], DImode)
3350 || ! register_operand (operands[2], DImode))
3351 /* Let GCC break this into word-at-a-time operations. */
3352 FAIL;
3353 }")
3354
3355 (define_insn ""
3356 [(set (match_operand:DI 0 "register_operand" "=r")
3357 (and:DI (match_operand:DI 1 "register_operand" "%r")
3358 (match_operand:DI 2 "register_operand" "r")))]
3359 ""
3360 "and %1,%2,%0\;and %R1,%R2,%R0"
3361 [(set_attr "type" "binary")
3362 (set_attr "length" "8")])
3363
3364 ; The ? for op1 makes reload prefer zdepi instead of loading a huge
3365 ; constant with ldil;ldo.
3366 (define_insn "andsi3"
3367 [(set (match_operand:SI 0 "register_operand" "=r,r")
3368 (and:SI (match_operand:SI 1 "register_operand" "%?r,0")
3369 (match_operand:SI 2 "and_operand" "rO,P")))]
3370 ""
3371 "* return output_and (operands); "
3372 [(set_attr "type" "binary,shift")
3373 (set_attr "length" "4,4")])
3374
3375 (define_insn ""
3376 [(set (match_operand:DI 0 "register_operand" "=r")
3377 (and:DI (not:DI (match_operand:DI 1 "register_operand" "r"))
3378 (match_operand:DI 2 "register_operand" "r")))]
3379 ""
3380 "andcm %2,%1,%0\;andcm %R2,%R1,%R0"
3381 [(set_attr "type" "binary")
3382 (set_attr "length" "8")])
3383
3384 (define_insn ""
3385 [(set (match_operand:SI 0 "register_operand" "=r")
3386 (and:SI (not:SI (match_operand:SI 1 "register_operand" "r"))
3387 (match_operand:SI 2 "register_operand" "r")))]
3388 ""
3389 "andcm %2,%1,%0"
3390 [(set_attr "type" "binary")
3391 (set_attr "length" "4")])
3392
3393 (define_expand "iordi3"
3394 [(set (match_operand:DI 0 "register_operand" "")
3395 (ior:DI (match_operand:DI 1 "arith_double_operand" "")
3396 (match_operand:DI 2 "arith_double_operand" "")))]
3397 ""
3398 "
3399 {
3400 if (! register_operand (operands[1], DImode)
3401 || ! register_operand (operands[2], DImode))
3402 /* Let GCC break this into word-at-a-time operations. */
3403 FAIL;
3404 }")
3405
3406 (define_insn ""
3407 [(set (match_operand:DI 0 "register_operand" "=r")
3408 (ior:DI (match_operand:DI 1 "register_operand" "%r")
3409 (match_operand:DI 2 "register_operand" "r")))]
3410 ""
3411 "or %1,%2,%0\;or %R1,%R2,%R0"
3412 [(set_attr "type" "binary")
3413 (set_attr "length" "8")])
3414
3415 ;; Need a define_expand because we've run out of CONST_OK... characters.
3416 (define_expand "iorsi3"
3417 [(set (match_operand:SI 0 "register_operand" "")
3418 (ior:SI (match_operand:SI 1 "register_operand" "")
3419 (match_operand:SI 2 "arith32_operand" "")))]
3420 ""
3421 "
3422 {
3423 if (! (ior_operand (operands[2], SImode)
3424 || register_operand (operands[2], SImode)))
3425 operands[2] = force_reg (SImode, operands[2]);
3426 }")
3427
3428 (define_insn ""
3429 [(set (match_operand:SI 0 "register_operand" "=r,r")
3430 (ior:SI (match_operand:SI 1 "register_operand" "0,0")
3431 (match_operand:SI 2 "ior_operand" "M,i")))]
3432 ""
3433 "* return output_ior (operands); "
3434 [(set_attr "type" "binary,shift")
3435 (set_attr "length" "4,4")])
3436
3437 (define_insn ""
3438 [(set (match_operand:SI 0 "register_operand" "=r")
3439 (ior:SI (match_operand:SI 1 "register_operand" "%r")
3440 (match_operand:SI 2 "register_operand" "r")))]
3441 ""
3442 "or %1,%2,%0"
3443 [(set_attr "type" "binary")
3444 (set_attr "length" "4")])
3445
3446 (define_expand "xordi3"
3447 [(set (match_operand:DI 0 "register_operand" "")
3448 (xor:DI (match_operand:DI 1 "arith_double_operand" "")
3449 (match_operand:DI 2 "arith_double_operand" "")))]
3450 ""
3451 "
3452 {
3453 if (! register_operand (operands[1], DImode)
3454 || ! register_operand (operands[2], DImode))
3455 /* Let GCC break this into word-at-a-time operations. */
3456 FAIL;
3457 }")
3458
3459 (define_insn ""
3460 [(set (match_operand:DI 0 "register_operand" "=r")
3461 (xor:DI (match_operand:DI 1 "register_operand" "%r")
3462 (match_operand:DI 2 "register_operand" "r")))]
3463 ""
3464 "xor %1,%2,%0\;xor %R1,%R2,%R0"
3465 [(set_attr "type" "binary")
3466 (set_attr "length" "8")])
3467
3468 (define_insn "xorsi3"
3469 [(set (match_operand:SI 0 "register_operand" "=r")
3470 (xor:SI (match_operand:SI 1 "register_operand" "%r")
3471 (match_operand:SI 2 "register_operand" "r")))]
3472 ""
3473 "xor %1,%2,%0"
3474 [(set_attr "type" "binary")
3475 (set_attr "length" "4")])
3476
3477 (define_insn "negdi2"
3478 [(set (match_operand:DI 0 "register_operand" "=r")
3479 (neg:DI (match_operand:DI 1 "register_operand" "r")))]
3480 ""
3481 "sub %%r0,%R1,%R0\;subb %%r0,%1,%0"
3482 [(set_attr "type" "unary")
3483 (set_attr "length" "8")])
3484
3485 (define_insn "negsi2"
3486 [(set (match_operand:SI 0 "register_operand" "=r")
3487 (neg:SI (match_operand:SI 1 "register_operand" "r")))]
3488 ""
3489 "sub %%r0,%1,%0"
3490 [(set_attr "type" "unary")
3491 (set_attr "length" "4")])
3492
3493 (define_expand "one_cmpldi2"
3494 [(set (match_operand:DI 0 "register_operand" "")
3495 (not:DI (match_operand:DI 1 "arith_double_operand" "")))]
3496 ""
3497 "
3498 {
3499 if (! register_operand (operands[1], DImode))
3500 FAIL;
3501 }")
3502
3503 (define_insn ""
3504 [(set (match_operand:DI 0 "register_operand" "=r")
3505 (not:DI (match_operand:DI 1 "register_operand" "r")))]
3506 ""
3507 "uaddcm %%r0,%1,%0\;uaddcm %%r0,%R1,%R0"
3508 [(set_attr "type" "unary")
3509 (set_attr "length" "8")])
3510
3511 (define_insn "one_cmplsi2"
3512 [(set (match_operand:SI 0 "register_operand" "=r")
3513 (not:SI (match_operand:SI 1 "register_operand" "r")))]
3514 ""
3515 "uaddcm %%r0,%1,%0"
3516 [(set_attr "type" "unary")
3517 (set_attr "length" "4")])
3518 \f
3519 ;; Floating point arithmetic instructions.
3520
3521 (define_insn "adddf3"
3522 [(set (match_operand:DF 0 "register_operand" "=f")
3523 (plus:DF (match_operand:DF 1 "register_operand" "f")
3524 (match_operand:DF 2 "register_operand" "f")))]
3525 "! TARGET_SOFT_FLOAT"
3526 "fadd,dbl %1,%2,%0"
3527 [(set_attr "type" "fpalu")
3528 (set_attr "pa_combine_type" "faddsub")
3529 (set_attr "length" "4")])
3530
3531 (define_insn "addsf3"
3532 [(set (match_operand:SF 0 "register_operand" "=f")
3533 (plus:SF (match_operand:SF 1 "register_operand" "f")
3534 (match_operand:SF 2 "register_operand" "f")))]
3535 "! TARGET_SOFT_FLOAT"
3536 "fadd,sgl %1,%2,%0"
3537 [(set_attr "type" "fpalu")
3538 (set_attr "pa_combine_type" "faddsub")
3539 (set_attr "length" "4")])
3540
3541 (define_insn "subdf3"
3542 [(set (match_operand:DF 0 "register_operand" "=f")
3543 (minus:DF (match_operand:DF 1 "register_operand" "f")
3544 (match_operand:DF 2 "register_operand" "f")))]
3545 "! TARGET_SOFT_FLOAT"
3546 "fsub,dbl %1,%2,%0"
3547 [(set_attr "type" "fpalu")
3548 (set_attr "pa_combine_type" "faddsub")
3549 (set_attr "length" "4")])
3550
3551 (define_insn "subsf3"
3552 [(set (match_operand:SF 0 "register_operand" "=f")
3553 (minus:SF (match_operand:SF 1 "register_operand" "f")
3554 (match_operand:SF 2 "register_operand" "f")))]
3555 "! TARGET_SOFT_FLOAT"
3556 "fsub,sgl %1,%2,%0"
3557 [(set_attr "type" "fpalu")
3558 (set_attr "pa_combine_type" "faddsub")
3559 (set_attr "length" "4")])
3560
3561 (define_insn "muldf3"
3562 [(set (match_operand:DF 0 "register_operand" "=f")
3563 (mult:DF (match_operand:DF 1 "register_operand" "f")
3564 (match_operand:DF 2 "register_operand" "f")))]
3565 "! TARGET_SOFT_FLOAT"
3566 "fmpy,dbl %1,%2,%0"
3567 [(set_attr "type" "fpmuldbl")
3568 (set_attr "pa_combine_type" "fmpy")
3569 (set_attr "length" "4")])
3570
3571 (define_insn "mulsf3"
3572 [(set (match_operand:SF 0 "register_operand" "=f")
3573 (mult:SF (match_operand:SF 1 "register_operand" "f")
3574 (match_operand:SF 2 "register_operand" "f")))]
3575 "! TARGET_SOFT_FLOAT"
3576 "fmpy,sgl %1,%2,%0"
3577 [(set_attr "type" "fpmulsgl")
3578 (set_attr "pa_combine_type" "fmpy")
3579 (set_attr "length" "4")])
3580
3581 (define_insn "divdf3"
3582 [(set (match_operand:DF 0 "register_operand" "=f")
3583 (div:DF (match_operand:DF 1 "register_operand" "f")
3584 (match_operand:DF 2 "register_operand" "f")))]
3585 "! TARGET_SOFT_FLOAT"
3586 "fdiv,dbl %1,%2,%0"
3587 [(set_attr "type" "fpdivdbl")
3588 (set_attr "length" "4")])
3589
3590 (define_insn "divsf3"
3591 [(set (match_operand:SF 0 "register_operand" "=f")
3592 (div:SF (match_operand:SF 1 "register_operand" "f")
3593 (match_operand:SF 2 "register_operand" "f")))]
3594 "! TARGET_SOFT_FLOAT"
3595 "fdiv,sgl %1,%2,%0"
3596 [(set_attr "type" "fpdivsgl")
3597 (set_attr "length" "4")])
3598
3599 (define_insn "negdf2"
3600 [(set (match_operand:DF 0 "register_operand" "=f")
3601 (neg:DF (match_operand:DF 1 "register_operand" "f")))]
3602 "! TARGET_SOFT_FLOAT"
3603 "fsub,dbl %%fr0,%1,%0"
3604 [(set_attr "type" "fpalu")
3605 (set_attr "length" "4")])
3606
3607 (define_insn "negsf2"
3608 [(set (match_operand:SF 0 "register_operand" "=f")
3609 (neg:SF (match_operand:SF 1 "register_operand" "f")))]
3610 "! TARGET_SOFT_FLOAT"
3611 "fsub,sgl %%fr0,%1,%0"
3612 [(set_attr "type" "fpalu")
3613 (set_attr "length" "4")])
3614
3615 (define_insn "absdf2"
3616 [(set (match_operand:DF 0 "register_operand" "=f")
3617 (abs:DF (match_operand:DF 1 "register_operand" "f")))]
3618 "! TARGET_SOFT_FLOAT"
3619 "fabs,dbl %1,%0"
3620 [(set_attr "type" "fpalu")
3621 (set_attr "length" "4")])
3622
3623 (define_insn "abssf2"
3624 [(set (match_operand:SF 0 "register_operand" "=f")
3625 (abs:SF (match_operand:SF 1 "register_operand" "f")))]
3626 "! TARGET_SOFT_FLOAT"
3627 "fabs,sgl %1,%0"
3628 [(set_attr "type" "fpalu")
3629 (set_attr "length" "4")])
3630
3631 (define_insn "sqrtdf2"
3632 [(set (match_operand:DF 0 "register_operand" "=f")
3633 (sqrt:DF (match_operand:DF 1 "register_operand" "f")))]
3634 "! TARGET_SOFT_FLOAT"
3635 "fsqrt,dbl %1,%0"
3636 [(set_attr "type" "fpsqrtdbl")
3637 (set_attr "length" "4")])
3638
3639 (define_insn "sqrtsf2"
3640 [(set (match_operand:SF 0 "register_operand" "=f")
3641 (sqrt:SF (match_operand:SF 1 "register_operand" "f")))]
3642 "! TARGET_SOFT_FLOAT"
3643 "fsqrt,sgl %1,%0"
3644 [(set_attr "type" "fpsqrtsgl")
3645 (set_attr "length" "4")])
3646 \f
3647 ;;- Shift instructions
3648
3649 ;; Optimized special case of shifting.
3650
3651 (define_insn ""
3652 [(set (match_operand:SI 0 "register_operand" "=r")
3653 (lshiftrt:SI (match_operand:SI 1 "memory_operand" "m")
3654 (const_int 24)))]
3655 ""
3656 "ldb%M1 %1,%0"
3657 [(set_attr "type" "load")
3658 (set_attr "length" "4")])
3659
3660 (define_insn ""
3661 [(set (match_operand:SI 0 "register_operand" "=r")
3662 (lshiftrt:SI (match_operand:SI 1 "memory_operand" "m")
3663 (const_int 16)))]
3664 ""
3665 "ldh%M1 %1,%0"
3666 [(set_attr "type" "load")
3667 (set_attr "length" "4")])
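
;; A hedged example of what the two special cases above catch (the
;; function is illustrative, not from the sources):
;;
;;   unsigned int top_byte (unsigned int *p) { return *p >> 24; }
;;
;; Because the PA is big-endian, the most significant byte of *p lives at
;; the lowest address, so the (lshiftrt:SI (mem:SI ...) (const_int 24))
;; becomes a single ldb of that byte instead of a word load followed by
;; an extract; the corresponding >> 16 case becomes an ldh.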
3668
3669 (define_insn ""
3670 [(set (match_operand:SI 0 "register_operand" "=r")
3671 (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3672 (match_operand:SI 3 "shadd_operand" ""))
3673 (match_operand:SI 1 "register_operand" "r")))]
3674 ""
3675 "sh%O3addl %2,%1,%0"
3676 [(set_attr "type" "binary")
3677 (set_attr "length" "4")])
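
;; Hedged example for the shift-and-add pattern above (the function is
;; illustrative only):
;;
;;   int *nth (int *base, int i) { return base + i; }   /* base + i*4 */
;;
;; matches (plus:SI (mult:SI i (const_int 4)) base); the %O modifier
;; prints the log2 of the scale factor (2 here), so a single sh2addl
;; does the whole address computation.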
3678
3679 ;; This variant of the above insn can occur if the first operand
3680 ;; is the frame pointer. This is a kludge, but there doesn't
3681 ;; seem to be a way around it. Only recognize it while reloading.
3682 ;; Note how operand 3 uses a predicate of "const_int_operand", but
3683 ;; has constraints allowing a register. I don't know how this works,
3684 ;; but it somehow makes sure that out-of-range constants are placed
3685 ;; in a register which somehow magically is a "const_int_operand".
3686 ;; (This was stolen from alpha.md; I'm not going to try to change it.)
3687
3688 (define_insn ""
3689 [(set (match_operand:SI 0 "register_operand" "=&r,r")
3690 (plus:SI (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r,r")
3691 (match_operand:SI 4 "shadd_operand" ""))
3692 (match_operand:SI 1 "register_operand" "r,r"))
3693 (match_operand:SI 3 "const_int_operand" "r,J")))]
3694 "reload_in_progress"
3695 "@
3696 sh%O4addl %2,%1,%0\;addl %3,%0,%0
3697 sh%O4addl %2,%1,%0\;ldo %3(%0),%0"
3698 [(set_attr "type" "multi")
3699 (set_attr "length" "8")])
3700
3701 ;; This anonymous pattern and splitter wins because it reduces the latency
3702 ;; of the shadd sequence without increasing the latency of the shift.
3703 ;;
3704 ;; We want to make sure and split up the operations for the scheduler since
3705 ;; these instructions can (and should) schedule independently.
3706 ;;
3707 ;; It would be clearer if combine used the same operator for both expressions;
3708 ;; it's somewhat confusing to have a mult in one operation and an ashift
3709 ;; in the other.
3710 ;;
3711 ;; If this pattern is not split before register allocation, then we must expose
3712 ;; the fact that operand 4 is set before operands 1, 2 and 3 have been read.
3713 (define_insn ""
3714 [(set (match_operand:SI 0 "register_operand" "=r")
3715 (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3716 (match_operand:SI 3 "shadd_operand" ""))
3717 (match_operand:SI 1 "register_operand" "r")))
3718 (set (match_operand:SI 4 "register_operand" "=&r")
3719 (ashift:SI (match_dup 2)
3720 (match_operand:SI 5 "const_int_operand" "i")))]
3721 "INTVAL (operands[5]) == exact_log2 (INTVAL (operands[3]))"
3722 "#"
3723 [(set_attr "type" "binary")
3724 (set_attr "length" "8")])
3725
3726 (define_split
3727 [(set (match_operand:SI 0 "register_operand" "=r")
3728 (plus:SI (mult:SI (match_operand:SI 2 "register_operand" "r")
3729 (match_operand:SI 3 "shadd_operand" ""))
3730 (match_operand:SI 1 "register_operand" "r")))
3731 (set (match_operand:SI 4 "register_operand" "=&r")
3732 (ashift:SI (match_dup 2)
3733 (match_operand:SI 5 "const_int_operand" "i")))]
3734 "INTVAL (operands[5]) == exact_log2 (INTVAL (operands[3]))"
3735 [(set (match_dup 4) (ashift:SI (match_dup 2) (match_dup 5)))
3736 (set (match_dup 0) (plus:SI (mult:SI (match_dup 2) (match_dup 3))
3737 (match_dup 1)))]
3738 "")
3739
3740 (define_expand "ashlsi3"
3741 [(set (match_operand:SI 0 "register_operand" "")
3742 (ashift:SI (match_operand:SI 1 "lhs_lshift_operand" "")
3743 (match_operand:SI 2 "arith32_operand" "")))]
3744 ""
3745 "
3746 {
3747 if (GET_CODE (operands[2]) != CONST_INT)
3748 {
3749 rtx temp = gen_reg_rtx (SImode);
3750 emit_insn (gen_subsi3 (temp, GEN_INT (31), operands[2]));
3751 if (GET_CODE (operands[1]) == CONST_INT)
3752 emit_insn (gen_zvdep_imm (operands[0], operands[1], temp));
3753 else
3754 emit_insn (gen_zvdep32 (operands[0], operands[1], temp));
3755 DONE;
3756 }
3757 /* Make sure both inputs are not constants;
3758 there are no patterns for that.  */
3759 operands[1] = force_reg (SImode, operands[1]);
3760 }")
3761
3762 (define_insn ""
3763 [(set (match_operand:SI 0 "register_operand" "=r")
3764 (ashift:SI (match_operand:SI 1 "register_operand" "r")
3765 (match_operand:SI 2 "const_int_operand" "n")))]
3766 ""
3767 "zdep %1,%P2,%L2,%0"
3768 [(set_attr "type" "shift")
3769 (set_attr "length" "4")])
3770
3771 ; Match cases here where op1 is a CONST_INT that zvdep_imm doesn't handle.
3772 ; Doing it like this makes slightly better code since reload can
3773 ; replace a register with a known value in range -16..15 with a
3774 ; constant. Ideally, we would like to merge zvdep32 and zvdep_imm,
3775 ; but since we have no more CONST_OK... characters, that is not
3776 ; possible.
3777 (define_insn "zvdep32"
3778 [(set (match_operand:SI 0 "register_operand" "=r,r")
3779 (ashift:SI (match_operand:SI 1 "arith5_operand" "r,L")
3780 (minus:SI (const_int 31)
3781 (match_operand:SI 2 "register_operand" "q,q"))))]
3782 ""
3783 "@
3784 zvdep %1,32,%0
3785 zvdepi %1,32,%0"
3786 [(set_attr "type" "shift,shift")
3787 (set_attr "length" "4,4")])
3788
3789 (define_insn "zvdep_imm"
3790 [(set (match_operand:SI 0 "register_operand" "=r")
3791 (ashift:SI (match_operand:SI 1 "lhs_lshift_cint_operand" "")
3792 (minus:SI (const_int 31)
3793 (match_operand:SI 2 "register_operand" "q"))))]
3794 ""
3795 "*
3796 {
3797 int x = INTVAL (operands[1]);
3798 operands[2] = GEN_INT (4 + exact_log2 ((x >> 4) + 1));
3799 operands[1] = GEN_INT ((x & 0xf) - 0x10);
3800 return \"zvdepi %1,%2,%0\";
3801 }"
3802 [(set_attr "type" "shift")
3803 (set_attr "length" "4")])
3804
3805 (define_insn "vdepi_ior"
3806 [(set (match_operand:SI 0 "register_operand" "=r")
3807 (ior:SI (ashift:SI (match_operand:SI 1 "const_int_operand" "")
3808 (minus:SI (const_int 31)
3809 (match_operand:SI 2 "register_operand" "q")))
3810 (match_operand:SI 3 "register_operand" "0")))]
3811 ; accept ...0001...1, can this be generalized?
3812 "exact_log2 (INTVAL (operands[1]) + 1) >= 0"
3813 "*
3814 {
3815 int x = INTVAL (operands[1]);
3816 operands[2] = GEN_INT (exact_log2 (x + 1));
3817 return \"vdepi -1,%2,%0\";
3818 }"
3819 [(set_attr "type" "shift")
3820 (set_attr "length" "4")])
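
;; Hedged, worked example for vdepi_ior: with operand 1 equal to 7
;; (binary ...000111) the condition exact_log2 (7 + 1) >= 0 holds,
;; operand 2 becomes 3, and the output is "vdepi -1,3,%0", i.e. three
;; 1-bits are OR-ed into the destination at the variable position
;; selected by the shift-amount register.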
3821
3822 (define_insn "vdepi_and"
3823 [(set (match_operand:SI 0 "register_operand" "=r")
3824 (and:SI (rotate:SI (match_operand:SI 1 "const_int_operand" "")
3825 (minus:SI (const_int 31)
3826 (match_operand:SI 2 "register_operand" "q")))
3827 (match_operand:SI 3 "register_operand" "0")))]
3828 ; this can be generalized...!
3829 "INTVAL (operands[1]) == -2"
3830 "*
3831 {
3832 int x = INTVAL (operands[1]);
3833 operands[2] = GEN_INT (exact_log2 ((~x) + 1));
3834 return \"vdepi 0,%2,%0\";
3835 }"
3836 [(set_attr "type" "shift")
3837 (set_attr "length" "4")])
3838
3839 (define_expand "ashrsi3"
3840 [(set (match_operand:SI 0 "register_operand" "")
3841 (ashiftrt:SI (match_operand:SI 1 "register_operand" "")
3842 (match_operand:SI 2 "arith32_operand" "")))]
3843 ""
3844 "
3845 {
3846 if (GET_CODE (operands[2]) != CONST_INT)
3847 {
3848 rtx temp = gen_reg_rtx (SImode);
3849 emit_insn (gen_subsi3 (temp, GEN_INT (31), operands[2]));
3850 emit_insn (gen_vextrs32 (operands[0], operands[1], temp));
3851 DONE;
3852 }
3853 }")
3854
3855 (define_insn ""
3856 [(set (match_operand:SI 0 "register_operand" "=r")
3857 (ashiftrt:SI (match_operand:SI 1 "register_operand" "r")
3858 (match_operand:SI 2 "const_int_operand" "n")))]
3859 ""
3860 "extrs %1,%P2,%L2,%0"
3861 [(set_attr "type" "shift")
3862 (set_attr "length" "4")])
3863
3864 (define_insn "vextrs32"
3865 [(set (match_operand:SI 0 "register_operand" "=r")
3866 (ashiftrt:SI (match_operand:SI 1 "register_operand" "r")
3867 (minus:SI (const_int 31)
3868 (match_operand:SI 2 "register_operand" "q"))))]
3869 ""
3870 "vextrs %1,32,%0"
3871 [(set_attr "type" "shift")
3872 (set_attr "length" "4")])
3873
3874 (define_insn "lshrsi3"
3875 [(set (match_operand:SI 0 "register_operand" "=r,r")
3876 (lshiftrt:SI (match_operand:SI 1 "register_operand" "r,r")
3877 (match_operand:SI 2 "arith32_operand" "q,n")))]
3878 ""
3879 "@
3880 vshd %%r0,%1,%0
3881 extru %1,%P2,%L2,%0"
3882 [(set_attr "type" "shift")
3883 (set_attr "length" "4")])
3884
3885 (define_insn "rotrsi3"
3886 [(set (match_operand:SI 0 "register_operand" "=r,r")
3887 (rotatert:SI (match_operand:SI 1 "register_operand" "r,r")
3888 (match_operand:SI 2 "arith32_operand" "q,n")))]
3889 ""
3890 "*
3891 {
3892 if (GET_CODE (operands[2]) == CONST_INT)
3893 {
3894 operands[2] = GEN_INT (INTVAL (operands[2]) & 31);
3895 return \"shd %1,%1,%2,%0\";
3896 }
3897 else
3898 return \"vshd %1,%1,%0\";
3899 }"
3900 [(set_attr "type" "shift")
3901 (set_attr "length" "4")])
3902
3903 (define_expand "rotlsi3"
3904 [(set (match_operand:SI 0 "register_operand" "")
3905 (rotate:SI (match_operand:SI 1 "register_operand" "")
3906 (match_operand:SI 2 "arith32_operand" "")))]
3907 ""
3908 "
3909 {
3910 if (GET_CODE (operands[2]) != CONST_INT)
3911 {
3912 rtx temp = gen_reg_rtx (SImode);
3913 emit_insn (gen_subsi3 (temp, GEN_INT (32), operands[2]));
3914 emit_insn (gen_rotrsi3 (operands[0], operands[1], temp));
3915 DONE;
3916 }
3917 /* Else expand normally. */
3918 }")
3919
3920 (define_insn ""
3921 [(set (match_operand:SI 0 "register_operand" "=r")
3922 (rotate:SI (match_operand:SI 1 "register_operand" "r")
3923 (match_operand:SI 2 "const_int_operand" "n")))]
3924 ""
3925 "*
3926 {
3927 operands[2] = GEN_INT ((32 - INTVAL (operands[2])) & 31);
3928 return \"shd %1,%1,%2,%0\";
3929 }"
3930 [(set_attr "type" "shift")
3931 (set_attr "length" "4")])
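
;; Hedged example for the rotate patterns: a rotate left by 8, e.g.
;;
;;   unsigned int rotl8 (unsigned int x) { return (x << 8) | (x >> 24); }
;;
;; can be recognized as (rotate:SI x (const_int 8)).  The insn above
;; rewrites the count as (32 - 8) & 31 = 24 and emits the double shift
;; "shd %r26,%r26,24,%r28" (register numbers illustrative), which
;; rotates x right by 24, i.e. left by 8.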
3932
3933 (define_insn ""
3934 [(set (match_operand:SI 0 "register_operand" "=r")
3935 (match_operator:SI 5 "plus_xor_ior_operator"
3936 [(ashift:SI (match_operand:SI 1 "register_operand" "r")
3937 (match_operand:SI 3 "const_int_operand" "n"))
3938 (lshiftrt:SI (match_operand:SI 2 "register_operand" "r")
3939 (match_operand:SI 4 "const_int_operand" "n"))]))]
3940 "INTVAL (operands[3]) + INTVAL (operands[4]) == 32"
3941 "shd %1,%2,%4,%0"
3942 [(set_attr "type" "shift")
3943 (set_attr "length" "4")])
3944
3945 (define_insn ""
3946 [(set (match_operand:SI 0 "register_operand" "=r")
3947 (match_operator:SI 5 "plus_xor_ior_operator"
3948 [(lshiftrt:SI (match_operand:SI 2 "register_operand" "r")
3949 (match_operand:SI 4 "const_int_operand" "n"))
3950 (ashift:SI (match_operand:SI 1 "register_operand" "r")
3951 (match_operand:SI 3 "const_int_operand" "n"))]))]
3952 "INTVAL (operands[3]) + INTVAL (operands[4]) == 32"
3953 "shd %1,%2,%4,%0"
3954 [(set_attr "type" "shift")
3955 (set_attr "length" "4")])
3956
3957 (define_insn ""
3958 [(set (match_operand:SI 0 "register_operand" "=r")
3959 (and:SI (ashift:SI (match_operand:SI 1 "register_operand" "r")
3960 (match_operand:SI 2 "const_int_operand" ""))
3961 (match_operand:SI 3 "const_int_operand" "")))]
3962 "exact_log2 (1 + (INTVAL (operands[3]) >> (INTVAL (operands[2]) & 31))) >= 0"
3963 "*
3964 {
3965 int cnt = INTVAL (operands[2]) & 31;
3966 operands[3] = GEN_INT (exact_log2 (1 + (INTVAL (operands[3]) >> cnt)));
3967 operands[2] = GEN_INT (31 - cnt);
3968 return \"zdep %1,%2,%3,%0\";
3969 }"
3970 [(set_attr "type" "shift")
3971 (set_attr "length" "4")])
3972 \f
3973 ;; Unconditional and other jump instructions.
3974
3975 (define_insn "return"
3976 [(return)]
3977 "hppa_can_use_return_insn_p ()"
3978 "bv%* %%r0(%%r2)"
3979 [(set_attr "type" "branch")
3980 (set_attr "length" "4")])
3981
3982 ;; Use a different pattern for functions which have non-trivial
3983 ;; epilogues so as not to confuse jump and reorg.
3984 (define_insn "return_internal"
3985 [(use (reg:SI 2))
3986 (return)]
3987 ""
3988 "bv%* %%r0(%%r2)"
3989 [(set_attr "type" "branch")
3990 (set_attr "length" "4")])
3991
3992 (define_expand "prologue"
3993 [(const_int 0)]
3994 ""
3995 "hppa_expand_prologue ();DONE;")
3996
3997 (define_expand "epilogue"
3998 [(return)]
3999 ""
4000 "
4001 {
4002 /* Try to use the trivial return first. Else use the full
4003 epilogue. */
4004 if (hppa_can_use_return_insn_p ())
4005 emit_jump_insn (gen_return ());
4006 else
4007 {
4008 hppa_expand_epilogue ();
4009 emit_jump_insn (gen_return_internal ());
4010 }
4011 DONE;
4012 }")
4013
4014 ;; Special because we use the value placed in %r2 by the bl instruction
4015 ;; from within its delay slot to set the value for the 2nd parameter to
4016 ;; the call.
4017 (define_insn "call_profiler"
4018 [(unspec_volatile [(const_int 0)] 0)
4019 (use (match_operand:SI 0 "const_int_operand" ""))]
4020 ""
4021 "bl _mcount,%%r2\;ldo %0(%%r2),%%r25"
4022 [(set_attr "type" "multi")
4023 (set_attr "length" "8")])
4024
4025 (define_insn "blockage"
4026 [(unspec_volatile [(const_int 2)] 0)]
4027 ""
4028 ""
4029 [(set_attr "length" "0")])
4030
4031 (define_insn "jump"
4032 [(set (pc) (label_ref (match_operand 0 "" "")))]
4033 ""
4034 "*
4035 {
4036 extern int optimize;
4037
4038 if (GET_MODE (insn) == SImode)
4039 return \"b %l0%#\";
4040
4041 /* An unconditional branch which can reach its target. */
4042 if (get_attr_length (insn) != 24
4043 && get_attr_length (insn) != 16)
4044 return \"b%* %l0\";
4045
4046 /* An unconditional branch which cannot reach its target.
4047
4048 We need to be able to use %r1 as a scratch register; however,
4049 we can never be sure whether or not it's got a live value in
4050 it. Therefore, we must restore its original value after the
4051 jump.
4052
4053 To make matters worse, we don't have a stack slot which we
4054 can always clobber. sp-12/sp-16 shouldn't ever have a live
4055 value during a non-optimizing compilation, so we use those
4056 slots for now. We don't support very long branches when
4057 optimizing -- they should be quite rare when optimizing.
4058
4059 Really the way to go long term is a register scavenger; goto
4060 the target of the jump and find a register which we can use
4061 as a scratch to hold the value in %r1. */
4062
4063 /* We don't know how to register scavenge yet. */
4064 if (optimize)
4065 abort ();
4066
4067 /* First store %r1 into the stack. */
4068 output_asm_insn (\"stw %%r1,-16(%%r30)\", operands);
4069
4070 /* Now load the target address into %r1 and do an indirect jump
4071 to the value specified in %r1. Be careful to generate PIC
4072 code as needed. */
4073 if (flag_pic)
4074 {
4075 rtx xoperands[2];
4076 xoperands[0] = operands[0];
4077 xoperands[1] = gen_label_rtx ();
4078
4079 output_asm_insn (\"bl .+8,%%r1\\n\\taddil L'%l0-%l1,%%r1\", xoperands);
4080 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4081 CODE_LABEL_NUMBER (xoperands[1]));
4082 output_asm_insn (\"ldo R'%l0-%l1(%%r1),%%r1\\n\\tbv %%r0(%%r1)\",
4083 xoperands);
4084 }
4085 else
4086 output_asm_insn (\"ldil L'%l0,%%r1\\n\\tbe R'%l0(%%sr4,%%r1)\", operands);
4087
4088 /* And restore the value of %r1 in the delay slot. We're not optimizing,
4089 so we know nothing else can be in the delay slot. */
4090 return \"ldw -16(%%r30),%%r1\";
4091 }"
4092 [(set_attr "type" "uncond_branch")
4093 (set_attr "pa_combine_type" "uncond_branch")
4094 (set (attr "length")
4095 (cond [(eq (symbol_ref "jump_in_call_delay (insn)") (const_int 1))
4096 (if_then_else (lt (abs (minus (match_dup 0)
4097 (plus (pc) (const_int 8))))
4098 (const_int 8184))
4099 (const_int 4)
4100 (const_int 8))
4101 (ge (abs (minus (match_dup 0) (plus (pc) (const_int 8))))
4102 (const_int 262100))
4103 (if_then_else (eq (symbol_ref "flag_pic") (const_int 0))
4104 (const_int 16)
4105 (const_int 24))]
4106 (const_int 4)))])
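
;; For reference, the out-of-range, non-PIC case above boils down to
;; (with "target" standing in for the label):
;;
;;   stw %r1,-16(%r30)        ; spill the scratch register
;;   ldil L'target,%r1
;;   be R'target(%sr4,%r1)    ; long external branch through %r1
;;   ldw -16(%r30),%r1        ; restore %r1 from the delay slot
;;
;; which accounts for the 16-byte length; the PIC variant computes the
;; address with bl/addil/ldo and branches with bv, for 24 bytes.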
4107
4108 ;; Subroutines of "casesi".
4109 ;; operand 0 is index
4110 ;; operand 1 is the minimum bound
4111 ;; operand 2 is the maximum bound - minimum bound + 1
4112 ;; operand 3 is CODE_LABEL for the table;
4113 ;; operand 4 is the CODE_LABEL to go to if index out of range.
4114
4115 (define_expand "casesi"
4116 [(match_operand:SI 0 "general_operand" "")
4117 (match_operand:SI 1 "const_int_operand" "")
4118 (match_operand:SI 2 "const_int_operand" "")
4119 (match_operand 3 "" "")
4120 (match_operand 4 "" "")]
4121 ""
4122 "
4123 {
4124 if (GET_CODE (operands[0]) != REG)
4125 operands[0] = force_reg (SImode, operands[0]);
4126
4127 if (operands[1] != const0_rtx)
4128 {
4129 rtx reg = gen_reg_rtx (SImode);
4130
4131 operands[1] = GEN_INT (-INTVAL (operands[1]));
4132 if (!INT_14_BITS (operands[1]))
4133 operands[1] = force_reg (SImode, operands[1]);
4134 emit_insn (gen_addsi3 (reg, operands[0], operands[1]));
4135
4136 operands[0] = reg;
4137 }
4138
4139 if (!INT_5_BITS (operands[2]))
4140 operands[2] = force_reg (SImode, operands[2]);
4141
4142 emit_insn (gen_cmpsi (operands[0], operands[2]));
4143 emit_jump_insn (gen_bgtu (operands[4]));
4144 if (TARGET_BIG_SWITCH)
4145 {
4146 rtx temp = gen_reg_rtx (SImode);
4147 emit_move_insn (temp, gen_rtx_PLUS (SImode, operands[0], operands[0]));
4148 operands[0] = temp;
4149 }
4150 emit_jump_insn (gen_casesi0 (operands[0], operands[3]));
4151 DONE;
4152 }")
4153
4154 (define_insn "casesi0"
4155 [(set (pc) (plus:SI
4156 (mem:SI (plus:SI (pc)
4157 (match_operand:SI 0 "register_operand" "r")))
4158 (label_ref (match_operand 1 "" ""))))]
4159 ""
4160 "blr %0,%%r0\;nop"
4161 [(set_attr "type" "multi")
4162 (set_attr "length" "8")])
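
;; Hedged sketch of the dispatch: once the expander has biased the index
;; by -min and branched to the default label when it is out of range,
;; casesi0 emits
;;
;;   blr %index,%r0
;;   nop
;;
;; i.e. an indexed branch into the branch table that follows, each entry
;; of which is itself a branch.  With -mbig-switch the expander doubles
;; the index first, presumably to step over larger table entries.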
4163
4164 ;; Need nops for the calls because execution is supposed to continue
4165 ;; past; we don't want to nullify an instruction that we need.
4166 ;;- jump to subroutine
4167
4168 (define_expand "call"
4169 [(parallel [(call (match_operand:SI 0 "" "")
4170 (match_operand 1 "" ""))
4171 (clobber (reg:SI 2))])]
4172 ""
4173 "
4174 {
4175 rtx op;
4176 rtx call_insn;
4177
4178 if (TARGET_PORTABLE_RUNTIME)
4179 op = force_reg (SImode, XEXP (operands[0], 0));
4180 else
4181 op = XEXP (operands[0], 0);
4182
4183 /* Use two different patterns for calls to explicitly named functions
4184 and calls through function pointers. This is necessary as these two
4185 types of calls use different calling conventions, and CSE might try
4186 to change the named call into an indirect call in some cases (using
4187 two patterns keeps CSE from performing this optimization). */
4188 if (GET_CODE (op) == SYMBOL_REF)
4189 call_insn = emit_call_insn (gen_call_internal_symref (op, operands[1]));
4190 else
4191 {
4192 rtx tmpreg = gen_rtx_REG (SImode, 22);
4193 emit_move_insn (tmpreg, force_reg (SImode, op));
4194 call_insn = emit_call_insn (gen_call_internal_reg (operands[1]));
4195 }
4196
4197 if (flag_pic)
4198 {
4199 use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), pic_offset_table_rtx);
4200
4201 /* After each call we must restore the PIC register, even if it
4202 doesn't appear to be used.
4203
4204 This will set regs_ever_live for the callee saved register we
4205 stored the PIC register in. */
4206 emit_move_insn (pic_offset_table_rtx,
4207 gen_rtx_REG (SImode, PIC_OFFSET_TABLE_REGNUM_SAVED));
4208 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
4209
4210 /* Gross. We have to keep the scheduler from moving the restore
4211 of the PIC register away from the call. SCHED_GROUP_P is
4212 supposed to do this, but for some reason the compiler will
4213 go into an infinite loop when we use that.
4214
4215 This method (blockage insn) may make worse code (then again
4216 it may not since calls are nearly blockages anyway), but at
4217 least it should work. */
4218 emit_insn (gen_blockage ());
4219 }
4220 DONE;
4221 }")
4222
4223 (define_insn "call_internal_symref"
4224 [(call (mem:SI (match_operand:SI 0 "call_operand_address" ""))
4225 (match_operand 1 "" "i"))
4226 (clobber (reg:SI 2))
4227 (use (const_int 0))]
4228 "! TARGET_PORTABLE_RUNTIME"
4229 "*
4230 {
4231 output_arg_descriptor (insn);
4232 return output_call (insn, operands[0]);
4233 }"
4234 [(set_attr "type" "call")
4235 (set (attr "length")
4236 ;; If we're sure that we can either reach the target or that the
4237 ;; linker can use a long-branch stub, then the length is 4 bytes.
4238 ;;
4239 ;; Otherwise we have to use a long call;
4240 ;; the length will then be either 52 bytes (non-PIC)
4241 ;; or 68 bytes (PIC).
4242 (if_then_else (lt (plus (symbol_ref "total_code_bytes") (pc))
4243 (const_int 240000))
4244 (const_int 4)
4245 (if_then_else (eq (symbol_ref "flag_pic")
4246 (const_int 0))
4247 (const_int 52)
4248 (const_int 68))))])
4249
4250 (define_insn "call_internal_reg"
4251 [(call (mem:SI (reg:SI 22))
4252 (match_operand 0 "" "i"))
4253 (clobber (reg:SI 2))
4254 (use (const_int 1))]
4255 ""
4256 "*
4257 {
4258 rtx xoperands[2];
4259
4260 /* First the special case for kernels, level 0 systems, etc. */
4261 if (TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS)
4262 return \"ble 0(%%sr4,%%r22)\;copy %%r31,%%r2\";
4263
4264 /* Now the normal case -- we can reach $$dyncall directly or
4265 we're sure that we can get there via a long-branch stub.
4266
4267 No need to check target flags as the length uniquely identifies
4268 the remaining cases. */
4269 if (get_attr_length (insn) == 8)
4270 return \".CALL\\tARGW0=GR\;bl $$dyncall,%%r31\;copy %%r31,%%r2\";
4271
4272 /* Long millicode call, but we are not generating PIC or portable runtime
4273 code. */
4274 if (get_attr_length (insn) == 12)
4275 return \".CALL\\tARGW0=GR\;ldil L%%$$dyncall,%%r2\;ble R%%$$dyncall(%%sr4,%%r2)\;copy %%r31,%%r2\";
4276
4277 /* Long millicode call for portable runtime. */
4278 if (get_attr_length (insn) == 20)
4279 return \"ldil L%%$$dyncall,%%r31\;ldo R%%$$dyncall(%%r31),%%r31\;blr %%r0,%%r2\;bv,n %%r0(%%r31)\;nop\";
4280
4281 /* Otherwise we're generating PIC code.  */
4282 xoperands[0] = operands[0];
4283 xoperands[1] = gen_label_rtx ();
4284 output_asm_insn (\"bl .+8,%%r1\", xoperands);
4285 output_asm_insn (\"addil L%%$$dyncall-%1,%%r1\", xoperands);
4286 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4287 CODE_LABEL_NUMBER (xoperands[1]));
4288 output_asm_insn (\"ldo R%%$$dyncall-%1(%%r1),%%r1\", xoperands);
4289 output_asm_insn (\"blr %%r0,%%r2\", xoperands);
4290 output_asm_insn (\"bv,n %%r0(%%r1)\\n\\tnop\", xoperands);
4291 return \"\";
4292 }"
4293 [(set_attr "type" "dyncall")
4294 (set (attr "length")
4295 (cond [
4296 ;; First NO_SPACE_REGS
4297 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
4298 (const_int 0))
4299 (const_int 8)
4300
4301 ;; Target (or stub) within reach
4302 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
4303 (const_int 240000))
4304 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4305 (const_int 0)))
4306 (const_int 8)
4307
4308 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
4309 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4310 (const_int 0))
4311 (eq (symbol_ref "flag_pic")
4312 (const_int 0)))
4313 (const_int 12)
4314
4315 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
4316 (const_int 0))
4317 (const_int 20)]
4318
4319 ;; Out of range PIC case
4320 (const_int 24)))])
4321
4322 (define_expand "call_value"
4323 [(parallel [(set (match_operand 0 "" "")
4324 (call (match_operand:SI 1 "" "")
4325 (match_operand 2 "" "")))
4326 (clobber (reg:SI 2))])]
4327 ""
4328 "
4329 {
4330 rtx op;
4331 rtx call_insn;
4332
4333 if (TARGET_PORTABLE_RUNTIME)
4334 op = force_reg (SImode, XEXP (operands[1], 0));
4335 else
4336 op = XEXP (operands[1], 0);
4337
4338 /* Use two different patterns for calls to explicitly named functions
4339 and calls through function pointers. This is necessary as these two
4340 types of calls use different calling conventions, and CSE might try
4341 to change the named call into an indirect call in some cases (using
4342 two patterns keeps CSE from performing this optimization). */
4343 if (GET_CODE (op) == SYMBOL_REF)
4344 call_insn = emit_call_insn (gen_call_value_internal_symref (operands[0],
4345 op,
4346 operands[2]));
4347 else
4348 {
4349 rtx tmpreg = gen_rtx_REG (SImode, 22);
4350 emit_move_insn (tmpreg, force_reg (SImode, op));
4351 call_insn = emit_call_insn (gen_call_value_internal_reg (operands[0],
4352 operands[2]));
4353 }
4354 if (flag_pic)
4355 {
4356 use_reg (&CALL_INSN_FUNCTION_USAGE (call_insn), pic_offset_table_rtx);
4357
4358 /* After each call we must restore the PIC register, even if it
4359 doesn't appear to be used.
4360
4361 This will set regs_ever_live for the callee saved register we
4362 stored the PIC register in. */
4363 emit_move_insn (pic_offset_table_rtx,
4364 gen_rtx_REG (SImode, PIC_OFFSET_TABLE_REGNUM_SAVED));
4365 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
4366
4367 /* Gross. We have to keep the scheduler from moving the restore
4368 of the PIC register away from the call. SCHED_GROUP_P is
4369 supposed to do this, but for some reason the compiler will
4370 go into an infinite loop when we use that.
4371
4372 This method (blockage insn) may make worse code (then again
4373 it may not since calls are nearly blockages anyway), but at
4374 least it should work. */
4375 emit_insn (gen_blockage ());
4376 }
4377 DONE;
4378 }")
4379
4380 (define_insn "call_value_internal_symref"
4381 [(set (match_operand 0 "" "=rf")
4382 (call (mem:SI (match_operand:SI 1 "call_operand_address" ""))
4383 (match_operand 2 "" "i")))
4384 (clobber (reg:SI 2))
4385 (use (const_int 0))]
4386 ;;- Don't use operand 1 for most machines.
4387 "! TARGET_PORTABLE_RUNTIME"
4388 "*
4389 {
4390 output_arg_descriptor (insn);
4391 return output_call (insn, operands[1]);
4392 }"
4393 [(set_attr "type" "call")
4394 (set (attr "length")
4395 ;; If we're sure that we can either reach the target or that the
4396 ;; linker can use a long-branch stub, then the length is 4 bytes.
4397 ;;
4398 ;; Otherwise we have to use a long call;
4399 ;; the length will then be either 52 bytes (non-PIC)
4400 ;; or 68 bytes (PIC).
4401 (if_then_else (lt (plus (symbol_ref "total_code_bytes") (pc))
4402 (const_int 240000))
4403 (const_int 4)
4404 (if_then_else (eq (symbol_ref "flag_pic")
4405 (const_int 0))
4406 (const_int 52)
4407 (const_int 68))))])
4408
4409 (define_insn "call_value_internal_reg"
4410 [(set (match_operand 0 "" "=rf")
4411 (call (mem:SI (reg:SI 22))
4412 (match_operand 1 "" "i")))
4413 (clobber (reg:SI 2))
4414 (use (const_int 1))]
4415 ""
4416 "*
4417 {
4418 rtx xoperands[2];
4419
4420 /* First the special case for kernels, level 0 systems, etc. */
4421 if (TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS)
4422 return \"ble 0(%%sr4,%%r22)\;copy %%r31,%%r2\";
4423
4424 /* Now the normal case -- we can reach $$dyncall directly or
4425 we're sure that we can get there via a long-branch stub.
4426
4427 No need to check target flags as the length uniquely identifies
4428 the remaining cases. */
4429 if (get_attr_length (insn) == 8)
4430 return \".CALL\\tARGW0=GR\;bl $$dyncall,%%r31\;copy %%r31,%%r2\";
4431
4432 /* Long millicode call, but we are not generating PIC or portable runtime
4433 code. */
4434 if (get_attr_length (insn) == 12)
4435 return \".CALL\\tARGW0=GR\;ldil L%%$$dyncall,%%r2\;ble R%%$$dyncall(%%sr4,%%r2)\;copy %%r31,%%r2\";
4436
4437 /* Long millicode call for portable runtime. */
4438 if (get_attr_length (insn) == 20)
4439 return \"ldil L%%$$dyncall,%%r31\;ldo R%%$$dyncall(%%r31),%%r31\;blr %%r0,%%r2\;bv,n %%r0(%%r31)\;nop\";
4440
4441 /* Otherwise we're generating PIC code.  */
4442 xoperands[0] = operands[1];
4443 xoperands[1] = gen_label_rtx ();
4444 output_asm_insn (\"bl .+8,%%r1\", xoperands);
4445 output_asm_insn (\"addil L%%$$dyncall-%1,%%r1\", xoperands);
4446 ASM_OUTPUT_INTERNAL_LABEL (asm_out_file, \"L\",
4447 CODE_LABEL_NUMBER (xoperands[1]));
4448 output_asm_insn (\"ldo R%%$$dyncall-%1(%%r1),%%r1\", xoperands);
4449 output_asm_insn (\"blr %%r0,%%r2\", xoperands);
4450 output_asm_insn (\"bv,n %%r0(%%r1)\\n\\tnop\", xoperands);
4451 return \"\";
4452 }"
4453 [(set_attr "type" "dyncall")
4454 (set (attr "length")
4455 (cond [
4456 ;; First NO_SPACE_REGS
4457 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
4458 (const_int 0))
4459 (const_int 8)
4460
4461 ;; Target (or stub) within reach
4462 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
4463 (const_int 240000))
4464 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4465 (const_int 0)))
4466 (const_int 8)
4467
4468 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
4469 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
4470 (const_int 0))
4471 (eq (symbol_ref "flag_pic")
4472 (const_int 0)))
4473 (const_int 12)
4474
4475 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
4476 (const_int 0))
4477 (const_int 20)]
4478
4479 ;; Out of range PIC case
4480 (const_int 24)))])
4481
4482 ;; Call subroutine returning any type.
4483
4484 (define_expand "untyped_call"
4485 [(parallel [(call (match_operand 0 "" "")
4486 (const_int 0))
4487 (match_operand 1 "" "")
4488 (match_operand 2 "" "")])]
4489 ""
4490 "
4491 {
4492 int i;
4493
4494 emit_call_insn (gen_call (operands[0], const0_rtx));
4495
4496 for (i = 0; i < XVECLEN (operands[2], 0); i++)
4497 {
4498 rtx set = XVECEXP (operands[2], 0, i);
4499 emit_move_insn (SET_DEST (set), SET_SRC (set));
4500 }
4501
4502 /* The optimizer does not know that the call sets the function value
4503 registers we stored in the result block. We avoid problems by
4504 claiming that all hard registers are used and clobbered at this
4505 point. */
4506 emit_insn (gen_blockage ());
4507
4508 DONE;
4509 }")
4510 (define_insn "nop"
4511 [(const_int 0)]
4512 ""
4513 "nop"
4514 [(set_attr "type" "move")
4515 (set_attr "length" "4")])
4516
4517 ;; These are just placeholders so we know where branch tables
4518 ;; begin and end.
4519 (define_insn "begin_brtab"
4520 [(const_int 1)]
4521 ""
4522 "*
4523 {
4524 /* Only GAS actually supports this pseudo-op. */
4525 if (TARGET_GAS)
4526 return \".begin_brtab\";
4527 else
4528 return \"\";
4529 }"
4530 [(set_attr "type" "move")
4531 (set_attr "length" "0")])
4532
4533 (define_insn "end_brtab"
4534 [(const_int 2)]
4535 ""
4536 "*
4537 {
4538 /* Only GAS actually supports this pseudo-op. */
4539 if (TARGET_GAS)
4540 return \".end_brtab\";
4541 else
4542 return \"\";
4543 }"
4544 [(set_attr "type" "move")
4545 (set_attr "length" "0")])
4546
4547 ;;; Hope this is only within a function...
4548 (define_insn "indirect_jump"
4549 [(set (pc) (match_operand:SI 0 "register_operand" "r"))]
4550 ""
4551 "bv%* %%r0(%0)"
4552 [(set_attr "type" "branch")
4553 (set_attr "length" "4")])
4554
4555 (define_insn "extzv"
4556 [(set (match_operand:SI 0 "register_operand" "=r")
4557 (zero_extract:SI (match_operand:SI 1 "register_operand" "r")
4558 (match_operand:SI 2 "uint5_operand" "")
4559 (match_operand:SI 3 "uint5_operand" "")))]
4560 ""
4561 "extru %1,%3+%2-1,%2,%0"
4562 [(set_attr "type" "shift")
4563 (set_attr "length" "4")])
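
;; Hedged, worked example for extzv: extracting an 8-bit field whose
;; (constant) position operand is 16 substitutes into the template as
;; "extru %r26,16+8-1,8,%r28", i.e. "extru %r26,23,8,%r28" (register
;; numbers illustrative).  extru names the field by its last bit counted
;; from the most significant end, which is why %3+%2-1 is printed.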
4564
4565 (define_insn ""
4566 [(set (match_operand:SI 0 "register_operand" "=r")
4567 (zero_extract:SI (match_operand:SI 1 "register_operand" "r")
4568 (const_int 1)
4569 (match_operand:SI 3 "register_operand" "q")))]
4570 ""
4571 "vextru %1,1,%0"
4572 [(set_attr "type" "shift")
4573 (set_attr "length" "4")])
4574
4575 (define_insn "extv"
4576 [(set (match_operand:SI 0 "register_operand" "=r")
4577 (sign_extract:SI (match_operand:SI 1 "register_operand" "r")
4578 (match_operand:SI 2 "uint5_operand" "")
4579 (match_operand:SI 3 "uint5_operand" "")))]
4580 ""
4581 "extrs %1,%3+%2-1,%2,%0"
4582 [(set_attr "type" "shift")
4583 (set_attr "length" "4")])
4584
4585 (define_insn ""
4586 [(set (match_operand:SI 0 "register_operand" "=r")
4587 (sign_extract:SI (match_operand:SI 1 "register_operand" "r")
4588 (const_int 1)
4589 (match_operand:SI 3 "register_operand" "q")))]
4590 ""
4591 "vextrs %1,1,%0"
4592 [(set_attr "type" "shift")
4593 (set_attr "length" "4")])
4594
4595 (define_insn "insv"
4596 [(set (zero_extract:SI (match_operand:SI 0 "register_operand" "+r,r")
4597 (match_operand:SI 1 "uint5_operand" "")
4598 (match_operand:SI 2 "uint5_operand" ""))
4599 (match_operand:SI 3 "arith5_operand" "r,L"))]
4600 ""
4601 "@
4602 dep %3,%2+%1-1,%1,%0
4603 depi %3,%2+%1-1,%1,%0"
4604 [(set_attr "type" "shift,shift")
4605 (set_attr "length" "4,4")])
4606
4607 ;; Optimize insertion of const_int values of type 1...1xxxx.
4608 (define_insn ""
4609 [(set (zero_extract:SI (match_operand:SI 0 "register_operand" "+r")
4610 (match_operand:SI 1 "uint5_operand" "")
4611 (match_operand:SI 2 "uint5_operand" ""))
4612 (match_operand:SI 3 "const_int_operand" ""))]
4613 "(INTVAL (operands[3]) & 0x10) != 0 &&
4614 (~INTVAL (operands[3]) & ((1L << INTVAL (operands[1])) - 1) & ~0xf) == 0"
4615 "*
4616 {
4617 operands[3] = GEN_INT ((INTVAL (operands[3]) & 0xf) - 0x10);
4618 return \"depi %3,%2+%1-1,%1,%0\";
4619 }"
4620 [(set_attr "type" "shift")
4621 (set_attr "length" "4")])
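
;; Hedged, worked example for the insn above: inserting the 5-bit
;; constant 0x1f (binary 11111) passes the 1...1xxxx test, the
;; adjustment (0x1f & 0xf) - 0x10 yields -1, and because depi
;; sign-extends its 5-bit immediate the emitted
;; "depi -1,%2+%1-1,%1,%0" deposits exactly those five 1-bits.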
4622
4623 ;; This insn is used for some loop tests, typically loops reversed when
4624 ;; strength reduction is used. It is actually created when the instruction
4625 ;; combination phase combines the special loop test. Since this insn
4626 ;; is both a jump insn and has an output, it must deal with its own
4627 ;; reloads, hence the `m' constraints. The `!' constraints direct reload
4628 ;; to not choose the register alternatives in the event a reload is needed.
4629 (define_insn "decrement_and_branch_until_zero"
4630 [(set (pc)
4631 (if_then_else
4632 (match_operator 2 "comparison_operator"
4633 [(plus:SI (match_operand:SI 0 "register_operand" "+!r,!*f,!*m")
4634 (match_operand:SI 1 "int5_operand" "L,L,L"))
4635 (const_int 0)])
4636 (label_ref (match_operand 3 "" ""))
4637 (pc)))
4638 (set (match_dup 0)
4639 (plus:SI (match_dup 0) (match_dup 1)))
4640 (clobber (match_scratch:SI 4 "=X,r,r"))]
4641 ""
4642 "* return output_dbra (operands, insn, which_alternative); "
4643 ;; Do not expect to understand this the first time through.
4644 [(set_attr "type" "cbranch,multi,multi")
4645 (set (attr "length")
4646 (if_then_else (eq_attr "alternative" "0")
4647 ;; Loop counter in register case
4648 ;; Short branch has length of 4
4649 ;; Long branch has length of 8
4650 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4651 (const_int 8184))
4652 (const_int 4)
4653 (const_int 8))
4654
4655 ;; Loop counter in FP reg case.
4656 ;; Extra goo to deal with additional reload insns.
4657 (if_then_else (eq_attr "alternative" "1")
4658 (if_then_else (lt (match_dup 3) (pc))
4659 (if_then_else
4660 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 24))))
4661 (const_int 8184))
4662 (const_int 24)
4663 (const_int 28))
4664 (if_then_else
4665 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4666 (const_int 8184))
4667 (const_int 24)
4668 (const_int 28)))
4669 ;; Loop counter in memory case.
4670 ;; Extra goo to deal with additional reload insns.
4671 (if_then_else (lt (match_dup 3) (pc))
4672 (if_then_else
4673 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4674 (const_int 8184))
4675 (const_int 12)
4676 (const_int 16))
4677 (if_then_else
4678 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4679 (const_int 8184))
4680 (const_int 12)
4681 (const_int 16))))))])
4682
4683 (define_insn ""
4684 [(set (pc)
4685 (if_then_else
4686 (match_operator 2 "movb_comparison_operator"
4687 [(match_operand:SI 1 "register_operand" "r,r,r,r") (const_int 0)])
4688 (label_ref (match_operand 3 "" ""))
4689 (pc)))
4690 (set (match_operand:SI 0 "register_operand" "=!r,!*f,!*m,!*q")
4691 (match_dup 1))]
4692 ""
4693 "* return output_movb (operands, insn, which_alternative, 0); "
4694 ;; Do not expect to understand this the first time through.
4695 [(set_attr "type" "cbranch,multi,multi,multi")
4696 (set (attr "length")
4697 (if_then_else (eq_attr "alternative" "0")
4698 ;; Loop counter in register case
4699 ;; Short branch has length of 4
4700 ;; Long branch has length of 8
4701 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4702 (const_int 8184))
4703 (const_int 4)
4704 (const_int 8))
4705
4706 ;; Loop counter in FP reg case.
4707 ;; Extra goo to deal with additional reload insns.
4708 (if_then_else (eq_attr "alternative" "1")
4709 (if_then_else (lt (match_dup 3) (pc))
4710 (if_then_else
4711 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4712 (const_int 8184))
4713 (const_int 12)
4714 (const_int 16))
4715 (if_then_else
4716 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4717 (const_int 8184))
4718 (const_int 12)
4719 (const_int 16)))
4720 ;; Loop counter in memory or sar case.
4721 ;; Extra goo to deal with additional reload insns.
4722 (if_then_else
4723 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4724 (const_int 8184))
4725 (const_int 8)
4726 (const_int 12)))))])
4727
4728 ;; Handle negated branch.
4729 (define_insn ""
4730 [(set (pc)
4731 (if_then_else
4732 (match_operator 2 "movb_comparison_operator"
4733 [(match_operand:SI 1 "register_operand" "r,r,r,r") (const_int 0)])
4734 (pc)
4735 (label_ref (match_operand 3 "" ""))))
4736 (set (match_operand:SI 0 "register_operand" "=!r,!*f,!*m,!*q")
4737 (match_dup 1))]
4738 ""
4739 "* return output_movb (operands, insn, which_alternative, 1); "
4740 ;; Do not expect to understand this the first time through.
4741 [(set_attr "type" "cbranch,multi,multi,multi")
4742 (set (attr "length")
4743 (if_then_else (eq_attr "alternative" "0")
4744 ;; Loop counter in register case
4745 ;; Short branch has length of 4
4746 ;; Long branch has length of 8
4747 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4748 (const_int 8184))
4749 (const_int 4)
4750 (const_int 8))
4751
4752 ;; Loop counter in FP reg case.
4753 ;; Extra goo to deal with additional reload insns.
4754 (if_then_else (eq_attr "alternative" "1")
4755 (if_then_else (lt (match_dup 3) (pc))
4756 (if_then_else
4757 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 12))))
4758 (const_int 8184))
4759 (const_int 12)
4760 (const_int 16))
4761 (if_then_else
4762 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4763 (const_int 8184))
4764 (const_int 12)
4765 (const_int 16)))
4766 ;; Loop counter in memory or SAR case.
4767 ;; Extra goo to deal with additional reload insns.
4768 (if_then_else
4769 (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4770 (const_int 8184))
4771 (const_int 8)
4772 (const_int 12)))))])
4773
4774 ;; The next several patterns (parallel_addb, parallel_movb, fmpyadd and
4775 ;; fmpysub) aren't currently used by the FSF sources, but will be soon.
4776 ;;
4777 ;; They're in the FSF tree for documentation and to make Cygnus<->FSF
4778 ;; merging easier.
4779 (define_insn ""
4780 [(set (pc) (label_ref (match_operand 3 "" "" )))
4781 (set (match_operand:SI 0 "register_operand" "=r")
4782 (plus:SI (match_operand:SI 1 "register_operand" "r")
4783 (match_operand:SI 2 "ireg_or_int5_operand" "rL")))]
4784 "(reload_completed && operands[0] == operands[1]) || operands[0] == operands[2]"
4785 "*
4786 {
4787 return output_parallel_addb (operands, get_attr_length (insn));
4788 }"
4789 [(set_attr "type" "parallel_branch")
4790 (set (attr "length")
4791 (if_then_else (lt (abs (minus (match_dup 3) (plus (pc) (const_int 8))))
4792 (const_int 8184))
4793 (const_int 4)
4794 (const_int 8)))])
4795
4796 (define_insn ""
4797 [(set (pc) (label_ref (match_operand 2 "" "" )))
4798 (set (match_operand:SF 0 "register_operand" "=r")
4799 (match_operand:SF 1 "ireg_or_int5_operand" "rL"))]
4800 "reload_completed"
4801 "*
4802 {
4803 return output_parallel_movb (operands, get_attr_length (insn));
4804 }"
4805 [(set_attr "type" "parallel_branch")
4806 (set (attr "length")
4807 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
4808 (const_int 8184))
4809 (const_int 4)
4810 (const_int 8)))])
4811
4812 (define_insn ""
4813 [(set (pc) (label_ref (match_operand 2 "" "" )))
4814 (set (match_operand:SI 0 "register_operand" "=r")
4815 (match_operand:SI 1 "ireg_or_int5_operand" "rL"))]
4816 "reload_completed"
4817 "*
4818 {
4819 return output_parallel_movb (operands, get_attr_length (insn));
4820 }"
4821 [(set_attr "type" "parallel_branch")
4822 (set (attr "length")
4823 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
4824 (const_int 8184))
4825 (const_int 4)
4826 (const_int 8)))])
4827
4828 (define_insn ""
4829 [(set (pc) (label_ref (match_operand 2 "" "" )))
4830 (set (match_operand:HI 0 "register_operand" "=r")
4831 (match_operand:HI 1 "ireg_or_int5_operand" "rL"))]
4832 "reload_completed"
4833 "*
4834 {
4835 return output_parallel_movb (operands, get_attr_length (insn));
4836 }"
4837 [(set_attr "type" "parallel_branch")
4838 (set (attr "length")
4839 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
4840 (const_int 8184))
4841 (const_int 4)
4842 (const_int 8)))])
4843
4844 (define_insn ""
4845 [(set (pc) (label_ref (match_operand 2 "" "" )))
4846 (set (match_operand:QI 0 "register_operand" "=r")
4847 (match_operand:QI 1 "ireg_or_int5_operand" "rL"))]
4848 "reload_completed"
4849 "*
4850 {
4851 return output_parallel_movb (operands, get_attr_length (insn));
4852 }"
4853 [(set_attr "type" "parallel_branch")
4854 (set (attr "length")
4855 (if_then_else (lt (abs (minus (match_dup 2) (plus (pc) (const_int 8))))
4856 (const_int 8184))
4857 (const_int 4)
4858 (const_int 8)))])
4859
4860 (define_insn ""
4861 [(set (match_operand 0 "register_operand" "=f")
4862 (mult (match_operand 1 "register_operand" "f")
4863 (match_operand 2 "register_operand" "f")))
4864 (set (match_operand 3 "register_operand" "+f")
4865 (plus (match_operand 4 "register_operand" "f")
4866 (match_operand 5 "register_operand" "f")))]
4867 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
4868 && reload_completed && fmpyaddoperands (operands)"
4869 "*
4870 {
4871 if (GET_MODE (operands[0]) == DFmode)
4872 {
4873 if (rtx_equal_p (operands[3], operands[5]))
4874 return \"fmpyadd,dbl %1,%2,%0,%4,%3\";
4875 else
4876 return \"fmpyadd,dbl %1,%2,%0,%5,%3\";
4877 }
4878 else
4879 {
4880 if (rtx_equal_p (operands[3], operands[5]))
4881 return \"fmpyadd,sgl %1,%2,%0,%4,%3\";
4882 else
4883 return \"fmpyadd,sgl %1,%2,%0,%5,%3\";
4884 }
4885 }"
4886 [(set_attr "type" "fpalu")
4887 (set_attr "length" "4")])
4888
4889 (define_insn ""
4890 [(set (match_operand 3 "register_operand" "+f")
4891 (plus (match_operand 4 "register_operand" "f")
4892 (match_operand 5 "register_operand" "f")))
4893 (set (match_operand 0 "register_operand" "=f")
4894 (mult (match_operand 1 "register_operand" "f")
4895 (match_operand 2 "register_operand" "f")))]
4896 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
4897 && reload_completed && fmpyaddoperands (operands)"
4898 "*
4899 {
4900 if (GET_MODE (operands[0]) == DFmode)
4901 {
4902 if (rtx_equal_p (operands[3], operands[5]))
4903 return \"fmpyadd,dbl %1,%2,%0,%4,%3\";
4904 else
4905 return \"fmpyadd,dbl %1,%2,%0,%5,%3\";
4906 }
4907 else
4908 {
4909 if (rtx_equal_p (operands[3], operands[5]))
4910 return \"fmpyadd,sgl %1,%2,%0,%4,%3\";
4911 else
4912 return \"fmpyadd,sgl %1,%2,%0,%5,%3\";
4913 }
4914 }"
4915 [(set_attr "type" "fpalu")
4916 (set_attr "length" "4")])
4917
4918 (define_insn ""
4919 [(set (match_operand 0 "register_operand" "=f")
4920 (mult (match_operand 1 "register_operand" "f")
4921 (match_operand 2 "register_operand" "f")))
4922 (set (match_operand 3 "register_operand" "+f")
4923 (minus (match_operand 4 "register_operand" "f")
4924 (match_operand 5 "register_operand" "f")))]
4925 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
4926 && reload_completed && fmpysuboperands (operands)"
4927 "*
4928 {
4929 if (GET_MODE (operands[0]) == DFmode)
4930 return \"fmpysub,dbl %1,%2,%0,%5,%3\";
4931 else
4932 return \"fmpysub,sgl %1,%2,%0,%5,%3\";
4933 }"
4934 [(set_attr "type" "fpalu")
4935 (set_attr "length" "4")])
4936
4937 (define_insn ""
4938 [(set (match_operand 3 "register_operand" "+f")
4939 (minus (match_operand 4 "register_operand" "f")
4940 (match_operand 5 "register_operand" "f")))
4941 (set (match_operand 0 "register_operand" "=f")
4942 (mult (match_operand 1 "register_operand" "f")
4943 (match_operand 2 "register_operand" "f")))]
4944 "TARGET_PA_11 && ! TARGET_SOFT_FLOAT
4945 && reload_completed && fmpysuboperands (operands)"
4946 "*
4947 {
4948 if (GET_MODE (operands[0]) == DFmode)
4949 return \"fmpysub,dbl %1,%2,%0,%5,%3\";
4950 else
4951 return \"fmpysub,sgl %1,%2,%0,%5,%3\";
4952 }"
4953 [(set_attr "type" "fpalu")
4954 (set_attr "length" "4")])
4955
4956 ;; Clean up turds left by reload.
4957 (define_peephole
4958 [(set (match_operand 0 "reg_or_nonsymb_mem_operand" "")
4959 (match_operand 1 "register_operand" "fr"))
4960 (set (match_operand 2 "register_operand" "fr")
4961 (match_dup 0))]
4962 "! TARGET_SOFT_FLOAT
4963 && GET_CODE (operands[0]) == MEM
4964 && ! MEM_VOLATILE_P (operands[0])
4965 && GET_MODE (operands[0]) == GET_MODE (operands[1])
4966 && GET_MODE (operands[0]) == GET_MODE (operands[2])
4967 && GET_MODE (operands[0]) == DFmode
4968 && GET_CODE (operands[1]) == REG
4969 && GET_CODE (operands[2]) == REG
4970 && ! side_effects_p (XEXP (operands[0], 0))
4971 && REGNO_REG_CLASS (REGNO (operands[1]))
4972 == REGNO_REG_CLASS (REGNO (operands[2]))"
4973 "*
4974 {
4975 rtx xoperands[2];
4976
4977 if (FP_REG_P (operands[1]))
4978 output_asm_insn (output_fp_move_double (operands), operands);
4979 else
4980 output_asm_insn (output_move_double (operands), operands);
4981
4982 if (rtx_equal_p (operands[1], operands[2]))
4983 return \"\";
4984
4985 xoperands[0] = operands[2];
4986 xoperands[1] = operands[1];
4987
4988 if (FP_REG_P (xoperands[1]))
4989 output_asm_insn (output_fp_move_double (xoperands), xoperands);
4990 else
4991 output_asm_insn (output_move_double (xoperands), xoperands);
4992
4993 return \"\";
4994 }")
4995
4996 (define_peephole
4997 [(set (match_operand 0 "register_operand" "fr")
4998 (match_operand 1 "reg_or_nonsymb_mem_operand" ""))
4999 (set (match_operand 2 "register_operand" "fr")
5000 (match_dup 1))]
5001 "! TARGET_SOFT_FLOAT
5002 && GET_CODE (operands[1]) == MEM
5003 && ! MEM_VOLATILE_P (operands[1])
5004 && GET_MODE (operands[0]) == GET_MODE (operands[1])
5005 && GET_MODE (operands[0]) == GET_MODE (operands[2])
5006 && GET_MODE (operands[0]) == DFmode
5007 && GET_CODE (operands[0]) == REG
5008 && GET_CODE (operands[2]) == REG
5009 && ! side_effects_p (XEXP (operands[1], 0))
5010 && REGNO_REG_CLASS (REGNO (operands[0]))
5011 == REGNO_REG_CLASS (REGNO (operands[2]))"
5012 "*
5013 {
5014 rtx xoperands[2];
5015
5016 if (FP_REG_P (operands[0]))
5017 output_asm_insn (output_fp_move_double (operands), operands);
5018 else
5019 output_asm_insn (output_move_double (operands), operands);
5020
5021 xoperands[0] = operands[2];
5022 xoperands[1] = operands[0];
5023
5024 if (FP_REG_P (xoperands[1]))
5025 output_asm_insn (output_fp_move_double (xoperands), xoperands);
5026 else
5027 output_asm_insn (output_move_double (xoperands), xoperands);
5028
5029 return \"\";
5030 }")
5031
5032 ;; Flush the I and D cache line found at the address in operand 0.
5033 ;; This is used by the trampoline code for nested functions.
5034 ;; So long as the trampoline itself is less than 32 bytes this
5035 ;; is sufficient.
5036
5037 (define_insn "dcacheflush"
5038 [(unspec_volatile [(const_int 1)] 0)
5039 (use (mem:SI (match_operand:SI 0 "register_operand" "r")))
5040 (use (mem:SI (match_operand:SI 1 "register_operand" "r")))]
5041 ""
5042 "fdc 0(%0)\;fdc 0(%1)\;sync"
5043 [(set_attr "type" "multi")
5044 (set_attr "length" "12")])
5045
5046 (define_insn "icacheflush"
5047 [(unspec_volatile [(const_int 2)] 0)
5048 (use (mem:SI (match_operand:SI 0 "register_operand" "r")))
5049 (use (mem:SI (match_operand:SI 1 "register_operand" "r")))
5050 (use (match_operand:SI 2 "register_operand" "r"))
5051 (clobber (match_operand:SI 3 "register_operand" "=&r"))
5052 (clobber (match_operand:SI 4 "register_operand" "=&r"))]
5053 ""
5054 "mfsp %%sr0,%4\;ldsid (%2),%3\;mtsp %3,%%sr0\;fic 0(%%sr0,%0)\;fic 0(%%sr0,%1)\;sync\;mtsp %4,%%sr0\;nop\;nop\;nop\;nop\;nop\;nop"
5055 [(set_attr "type" "multi")
5056 (set_attr "length" "52")])
5057
5058 ;; An out-of-line prologue.
5059 (define_insn "outline_prologue_call"
5060 [(unspec_volatile [(const_int 0)] 0)
5061 (clobber (reg:SI 31))
5062 (clobber (reg:SI 22))
5063 (clobber (reg:SI 21))
5064 (clobber (reg:SI 20))
5065 (clobber (reg:SI 19))
5066 (clobber (reg:SI 1))]
5067 ""
5068 "*
5069 {
5070 extern int frame_pointer_needed;
5071
5072 /* We need two different versions depending on whether or not we
5073 need a frame pointer. Also note that we return to the instruction
5074 immediately after the branch rather than two instructions after the
5075 branch, as is normally the case.  */
5076 if (frame_pointer_needed)
5077 {
5078 /* Must import the magic millicode routine(s). */
5079 output_asm_insn (\".IMPORT __outline_prologue_fp,MILLICODE\", NULL);
5080
5081 if (TARGET_PORTABLE_RUNTIME)
5082 {
5083 output_asm_insn (\"ldil L'__outline_prologue_fp,%%r31\", NULL);
5084 output_asm_insn (\"ble,n R'__outline_prologue_fp(%%sr0,%%r31)\",
5085 NULL);
5086 }
5087 else
5088 output_asm_insn (\"bl,n __outline_prologue_fp,%%r31\", NULL);
5089 }
5090 else
5091 {
5092 /* Must import the magic millicode routine(s). */
5093 output_asm_insn (\".IMPORT __outline_prologue,MILLICODE\", NULL);
5094
5095 if (TARGET_PORTABLE_RUNTIME)
5096 {
5097 output_asm_insn (\"ldil L'__outline_prologue,%%r31\", NULL);
5098 output_asm_insn (\"ble,n R'__outline_prologue(%%sr0,%%r31)\", NULL);
5099 }
5100 else
5101 output_asm_insn (\"bl,n __outline_prologue,%%r31\", NULL);
5102 }
5103 return \"\";
5104 }"
5105 [(set_attr "type" "multi")
5106 (set_attr "length" "8")])
5107
5108 ;; An out-of-line epilogue.
5109 (define_insn "outline_epilogue_call"
5110 [(unspec_volatile [(const_int 1)] 0)
5111 (use (reg:SI 29))
5112 (use (reg:SI 28))
5113 (clobber (reg:SI 31))
5114 (clobber (reg:SI 22))
5115 (clobber (reg:SI 21))
5116 (clobber (reg:SI 20))
5117 (clobber (reg:SI 19))
5118 (clobber (reg:SI 2))
5119 (clobber (reg:SI 1))]
5120 ""
5121 "*
5122 {
5123 extern int frame_pointer_needed;
5124
5125 /* We need two different versions depending on whether or not we
5126 need a frame pointer. Also note that we return to the instruction
5127 immediately after the branch rather than two instructions after the
5128 branch, as is normally the case. */
5129 if (frame_pointer_needed)
5130 {
5131 /* Must import the magic millicode routine. */
5132 output_asm_insn (\".IMPORT __outline_epilogue_fp,MILLICODE\", NULL);
5133
5134 /* The out-of-line prologue will make sure we return to the right
5135 instruction. */
5136 if (TARGET_PORTABLE_RUNTIME)
5137 {
5138 output_asm_insn (\"ldil L'__outline_epilogue_fp,%%r31\", NULL);
5139 output_asm_insn (\"ble,n R'__outline_epilogue_fp(%%sr0,%%r31)\",
5140 NULL);
5141 }
5142 else
5143 output_asm_insn (\"bl,n __outline_epilogue_fp,%%r31\", NULL);
5144 }
5145 else
5146 {
5147 /* Must import the magic millicode routine. */
5148 output_asm_insn (\".IMPORT __outline_epilogue,MILLICODE\", NULL);
5149
5150 /* The out-of-line prologue will make sure we return to the right
5151 instruction. */
5152 if (TARGET_PORTABLE_RUNTIME)
5153 {
5154 output_asm_insn (\"ldil L'__outline_epilogue,%%r31\", NULL);
5155 output_asm_insn (\"ble,n R'__outline_epilogue(%%sr0,%%r31)\", NULL);
5156 }
5157 else
5158 output_asm_insn (\"bl,n __outline_epilogue,%%r31\", NULL);
5159 }
5160 return \"\";
5161 }"
5162 [(set_attr "type" "multi")
5163 (set_attr "length" "8")])
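;; Likewise, under -mportable-runtime and without a frame pointer, the
;; epilogue template above emits:
;;
;;         .IMPORT __outline_epilogue,MILLICODE
;;         ldil L'__outline_epilogue,%r31
;;         ble,n R'__outline_epilogue(%sr0,%r31)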
5164
5165 ;; Given a function pointer, canonicalize it so it can be
5166 ;; reliably compared to another function pointer.
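;; A hedged illustration (the names below are made up): source-level
;; comparisons such as this one are what require the canonicalization,
;; since on this target a function pointer may really point at a function
;; descriptor ("plabel") rather than at the code itself, so the raw bits
;; of two pointers to the same function need not match.
;;
;;     extern void target_fn (void);
;;     int points_at_target (void (*fp) (void))
;;     {
;;       return fp == target_fn;
;;     }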
5167 (define_expand "canonicalize_funcptr_for_compare"
5168 [(set (reg:SI 26) (match_operand:SI 1 "register_operand" ""))
5169 (parallel [(set (reg:SI 29) (unspec:SI [(reg:SI 26)] 0))
5170 (clobber (match_dup 2))
5171 (clobber (reg:SI 26))
5172 (clobber (reg:SI 22))
5173 (clobber (reg:SI 31))])
5174 (set (match_operand:SI 0 "register_operand" "")
5175 (reg:SI 29))]
5176 "! TARGET_PORTABLE_RUNTIME"
5177 "
5178 {
5179 operands[2] = gen_reg_rtx (SImode);
5180 if (GET_CODE (operands[1]) != REG)
5181 {
5182 rtx tmp = gen_reg_rtx (Pmode);
5183 emit_move_insn (tmp, operands[1]);
5184 operands[1] = tmp;
5185 }
5186 }")
5187
5188 (define_insn ""
5189 [(set (reg:SI 29) (unspec:SI [(reg:SI 26)] 0))
5190 (clobber (match_operand:SI 0 "register_operand" "=a"))
5191 (clobber (reg:SI 26))
5192 (clobber (reg:SI 22))
5193 (clobber (reg:SI 31))]
5194 ""
5195 "*
5196 {
5197 /* Must import the magic millicode routine. */
5198 output_asm_insn (\".IMPORT $$sh_func_adrs,MILLICODE\", NULL);
5199
5200 /* This is absolutely amazing.
5201
5202 First, copy our input parameter into %r29 just in case we don't
5203 need to call $$sh_func_adrs. */
5204 output_asm_insn (\"copy %%r26,%%r29\", NULL);
5205
5206 /* Next, examine the low two bits of %r26; if they aren't 0x2, then
5207 we use %r26 unchanged. */
5208 if (get_attr_length (insn) == 32)
5209 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+24\", NULL);
5210 else if (get_attr_length (insn) == 40)
5211 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+32\", NULL);
5212 else if (get_attr_length (insn) == 44)
5213 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+36\", NULL);
5214 else
5215 output_asm_insn (\"extru %%r26,31,2,%%r31\;comib,<>,n 2,%%r31,.+20\", NULL);
5216
5217 /* Next, compare %r26 with 4096 (unsigned); if %r26 is less than
5218 4096, then we use %r26 unchanged. */
5219 if (get_attr_length (insn) == 32)
5220 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+16\", NULL);
5221 else if (get_attr_length (insn) == 40)
5222 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+24\", NULL);
5223 else if (get_attr_length (insn) == 44)
5224 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+28\", NULL);
5225 else
5226 output_asm_insn (\"ldi 4096,%%r31\;comb,<<,n %%r26,%%r31,.+12\", NULL);
5227
5228 /* Else call $$sh_func_adrs to extract the function's real address. */
5229 return output_millicode_call (insn,
5230 gen_rtx_SYMBOL_REF (SImode, \"$$sh_func_adrs\"));
5231 }"
5232 [(set_attr "type" "multi")
5233 (set (attr "length")
5234 (cond [
5235 ;; Target (or stub) within reach
5236 (and (lt (plus (symbol_ref "total_code_bytes") (pc))
5237 (const_int 240000))
5238 (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
5239 (const_int 0)))
5240 (const_int 28)
5241
5242 ;; NO_SPACE_REGS
5243 (ne (symbol_ref "TARGET_NO_SPACE_REGS || TARGET_FAST_INDIRECT_CALLS")
5244 (const_int 0))
5245 (const_int 32)
5246
5247 ;; Out of reach, but not PIC or PORTABLE_RUNTIME
5248 ;; same as NO_SPACE_REGS code
5249 (and (eq (symbol_ref "TARGET_PORTABLE_RUNTIME")
5250 (const_int 0))
5251 (eq (symbol_ref "flag_pic")
5252 (const_int 0)))
5253 (const_int 32)
5254
5255 ;; PORTABLE_RUNTIME
5256 (ne (symbol_ref "TARGET_PORTABLE_RUNTIME")
5257 (const_int 0))
5258 (const_int 40)]
5259
5260 ;; Out of range and PIC
5261 (const_int 44)))])
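;; A rough C-level sketch (illustrative only) of the sequence the template
;; above emits; SH_FUNC_ADRS stands in for the $$sh_func_adrs millicode
;; call and is not a real identifier:
;;
;;     unsigned int canon = fp;            /* copy %r26,%r29            */
;;     if ((fp & 3) == 2 && fp >= 4096)    /* extru/comib and ldi/comb  */
;;       canon = SH_FUNC_ADRS (fp);        /* millicode call            */
;;
;; The varying branch displacements (.+12 up to .+36) simply skip over the
;; millicode call sequence, whose size is what the length attribute above
;; selects between.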
5262
5263 ;; On the PA, the PIC register is call clobbered, so it must
5264 ;; be saved & restored around calls by the caller. If the call
5265 ;; doesn't return normally (nonlocal goto, or an exception is
5266 ;; thrown), then the code at the exception handler label must
5267 ;; restore the PIC register.
5268 (define_expand "exception_receiver"
5269 [(const_int 4)]
5270 "!TARGET_PORTABLE_RUNTIME && flag_pic"
5271 "
5272 {
5273 /* Load the PIC register from the stack slot (in our caller's
5274 frame). */
5275 emit_move_insn (pic_offset_table_rtx,
5276 gen_rtx_MEM (SImode, plus_constant (stack_pointer_rtx, -32)));
5277 emit_insn (gen_rtx_USE (VOIDmode, pic_offset_table_rtx));
5278 emit_insn (gen_blockage ());
5279 DONE;
5280 }")
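;; For reference, with the 32-bit runtime the expander above amounts to
;; reloading the PIC register (%r19) from the slot the caller keeps at
;; %r30 - 32, roughly:
;;
;;     ldw -32(%r30),%r19
;;
;; followed by a use of %r19 and a blockage insn so the scheduler cannot
;; move code across the restore.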
5281
5282