;; NOTE(review): this file was recovered from a gcc.gnu.org git-blame web
;; view; the HTML navigation header and blame annotations have been removed.
;; GNU C machine description for Pyramid 90x, 9000, MIServer Series
;; Copyright (C) 1989, 1990 Free Software Foundation, Inc.

;; This file is part of GNU CC.

;; GNU CC is free software; you can redistribute it and/or modify
;; it under the terms of the GNU General Public License as published by
;; the Free Software Foundation; either version 2, or (at your option)
;; any later version.

;; GNU CC is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
;; GNU General Public License for more details.

;; You should have received a copy of the GNU General Public License
;; along with GNU CC; see the file COPYING.  If not, write to
;; the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.

;; Instruction patterns.  When multiple patterns apply,
;; the first one in the file is chosen.
;;
;; See file "rtl.def" for documentation on define_insn, match_*, et. al.
;;
;; cpp macro #define NOTICE_UPDATE_CC in file tm.h handles condition code
;; updates for most instructions.
;; * Try using define_insn instead of some peepholes in more places.
;; * Set REG_NOTES:REG_EQUIV for cvt[bh]w loads.  This would make the
;;   backward scan in sign_extend needless.
;; * Match (pc) (label_ref) case in peephole patterns.
;; * Should optimize
;;   "cmpX op1,op2; b{eq,ne} LY; ucmpX op1,op2; b{lt,le,gt,ge} LZ"
;;   to
;;   "ucmpX op1,op2; b{eq,ne} LY; b{lt,le,gt,ge} LZ"
;;   by pre-scanning insn and running notice_update_cc for them.
;; * Is it necessary to do copy_rtx in the test and compare patterns?
;; * Fix true frame pointer omission.
;; * Make the jump tables contain branches, not addresses!  This would
;;   save us one instruction.
;; * Could the complicated scheme for compares be simplified, if we had
;;   no named cmpqi or cmphi patterns, and instead anonymous patterns for
;;   the less-than-word compare cases pyr can handle???
;; * The jump insn seems to accept more than just IR addressing.  Would
;;   we win by telling GCC?  Or can we use movw into the global reg which
;;   is a synonym for pc?
;; * More DImode patterns.
;; * Scan backwards in "zero_extendhisi2", "zero_extendqisi2" to find out
;;   if the extension can be omitted.
;; * "divmodsi" with Pyramid "ediv" insn.  Is it possible in rtl??
;; * Would "rcsp tmpreg; u?cmp[bh] op1_regdispl(tmpreg),op2" win in
;;   comparison with the two extensions and single test generated now?
;;   The rcsp insn could be expanded, and moved out of loops by the
;;   optimizer, making 1 (64 bit) insn of 3 (32 bit) insns in loops.
;;   The rcsp insn could be followed by an add insn, making non-displacement
;;   IR addressing sufficient.

;______________________________________________________________________
;
;	Test and Compare Patterns.
;______________________________________________________________________

; The argument for the rather complicated test and compare expansion
; scheme, is the irregular pyramid instructions for these operations.
; 1) Pyramid has different signed and unsigned compares.  2) HImode
; and QImode integers are memory-memory and immediate-memory only.  3)
; Unsigned HImode compares don't exist.  4) Only certain
; combinations of addresses are allowed for memory-memory compares.
; Whenever necessary, in order to fulfill these addressing
; constraints, the compare operands are swapped.
;; Force the tested value into a register; the anonymous register-test
;; pattern below handles the rest.
(define_expand "tstsi"
  [(set (cc0)
        (match_operand:SI 0 "general_operand" ""))]
  "" "operands[0] = force_reg (SImode, operands[0]);")

;; Memory-memory SImode compare.  Only certain address combinations are
;; encodable; weird_memory_memory checks them (and records in the global
;; swap_operands whether the operands had to be exchanged).
(define_insn ""
  [(set (cc0)
        (compare (match_operand:SI 0 "memory_operand" "m")
                 (match_operand:SI 1 "memory_operand" "m")))]
  "weird_memory_memory (operands[0], operands[1])"
  "*
{
  rtx br_insn = NEXT_INSN (insn);
  RTX_CODE br_code;

  /* Peek at the following branch to choose between the signed and the
     unsigned compare instruction.  */
  if (GET_CODE (br_insn) != JUMP_INSN)
    abort();
  br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn), 1), 0));

  weird_memory_memory (operands[0], operands[1]);

  if (swap_operands)
    {
      cc_status.flags = CC_REVERSED;
      if (TRULY_UNSIGNED_COMPARE_P (br_code))
        {
          cc_status.mdep = CC_VALID_FOR_UNSIGNED;
          return \"ucmpw %0,%1\";
        }
      return \"cmpw %0,%1\";
    }

  if (TRULY_UNSIGNED_COMPARE_P (br_code))
    {
      cc_status.mdep = CC_VALID_FOR_UNSIGNED;
      return \"ucmpw %1,%0\";
    }
  return \"cmpw %1,%0\";
}")

(define_insn "cmpsi"
  [(set (cc0)
        (compare (match_operand:SI 0 "nonimmediate_operand" "r,g")
                 (match_operand:SI 1 "general_operand" "g,r")))]
  ""
  "*
{
  rtx br_insn = NEXT_INSN (insn);
  RTX_CODE br_code;

  if (GET_CODE (br_insn) != JUMP_INSN)
    abort();
  br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn), 1), 0));

  if (which_alternative != 0)
    {
      /* Alternative 1 has the operands in reversed order.  */
      cc_status.flags = CC_REVERSED;
      if (TRULY_UNSIGNED_COMPARE_P (br_code))
        {
          cc_status.mdep = CC_VALID_FOR_UNSIGNED;
          return \"ucmpw %0,%1\";
        }
      return \"cmpw %0,%1\";
    }

  if (TRULY_UNSIGNED_COMPARE_P (br_code))
    {
      cc_status.mdep = CC_VALID_FOR_UNSIGNED;
      return \"ucmpw %1,%0\";
    }
  return \"cmpw %1,%0\";
}")

;; Register test.  mtstw serves for signed tests; an unsigned test needs
;; an explicit compare against zero.
(define_insn ""
  [(set (cc0)
        (match_operand:SI 0 "general_operand" "r"))]
  ""
  "*
{
#if 0
  cc_status.flags |= CC_NO_OVERFLOW;
  return \"cmpw $0,%0\";
#endif
  rtx br_insn = NEXT_INSN (insn);
  RTX_CODE br_code;

  if (GET_CODE (br_insn) != JUMP_INSN)
    abort();
  br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn), 1), 0));

  if (TRULY_UNSIGNED_COMPARE_P (br_code))
    {
      cc_status.mdep = CC_VALID_FOR_UNSIGNED;
      return \"ucmpw $0,%0\";
    }
  return \"mtstw %0,%0\";
}")

;; HImode and QImode compares cannot always be emitted directly (there
;; is, e.g., no unsigned halfword compare).  The expanders below only
;; record the operands and mode; extend_and_branch, run from the branch
;; expanders, emits the necessary extensions and the real compare.
(define_expand "cmphi"
  [(set (cc0)
        (compare (match_operand:HI 0 "nonimmediate_operand" "")
                 (match_operand:HI 1 "general_operand" "")))]
  ""
  "
{
  extern rtx test_op0, test_op1;  extern enum machine_mode test_mode;
  test_op0 = copy_rtx (operands[0]);
  test_op1 = copy_rtx (operands[1]);
  test_mode = HImode;
  DONE;
}")

(define_expand "tsthi"
  [(set (cc0)
        (match_operand:HI 0 "general_operand" ""))]
  ""
  "
{
  extern rtx test_op0;  extern enum machine_mode test_mode;
  test_op0 = copy_rtx (operands[0]);
  test_mode = HImode;
  DONE;
}")

;; Memory-memory HImode compare (signed only; see the comment above).
(define_insn ""
  [(set (cc0)
        (compare (match_operand:HI 0 "memory_operand" "m")
                 (match_operand:HI 1 "memory_operand" "m")))]
  "weird_memory_memory (operands[0], operands[1])"
  "*
{
  rtx br_insn = NEXT_INSN (insn);

  if (GET_CODE (br_insn) != JUMP_INSN)
    abort();

  weird_memory_memory (operands[0], operands[1]);

  if (swap_operands)
    {
      cc_status.flags = CC_REVERSED;
      return \"cmph %0,%1\";
    }

  return \"cmph %1,%0\";
}")

;; Register-memory HImode compare; the condition excludes the
;; memory-memory case handled above.
(define_insn ""
  [(set (cc0)
        (compare (match_operand:HI 0 "nonimmediate_operand" "r,m")
                 (match_operand:HI 1 "nonimmediate_operand" "m,r")))]
  "(GET_CODE (operands[0]) != GET_CODE (operands[1]))"
  "*
{
  rtx br_insn = NEXT_INSN (insn);

  if (GET_CODE (br_insn) != JUMP_INSN)
    abort();

  if (which_alternative != 0)
    {
      cc_status.flags = CC_REVERSED;
      return \"cmph %0,%1\";
    }

  return \"cmph %1,%0\";
}")

(define_expand "cmpqi"
  [(set (cc0)
        (compare (match_operand:QI 0 "nonimmediate_operand" "")
                 (match_operand:QI 1 "general_operand" "")))]
  ""
  "
{
  extern rtx test_op0, test_op1;  extern enum machine_mode test_mode;
  test_op0 = copy_rtx (operands[0]);
  test_op1 = copy_rtx (operands[1]);
  test_mode = QImode;
  DONE;
}")

(define_expand "tstqi"
  [(set (cc0)
        (match_operand:QI 0 "general_operand" ""))]
  ""
  "
{
  extern rtx test_op0;  extern enum machine_mode test_mode;
  test_op0 = copy_rtx (operands[0]);
  test_mode = QImode;
  DONE;
}")

;; Memory-memory QImode compare; the following branch selects signed
;; versus unsigned, as for SImode.
(define_insn ""
  [(set (cc0)
        (compare (match_operand:QI 0 "memory_operand" "m")
                 (match_operand:QI 1 "memory_operand" "m")))]
  "weird_memory_memory (operands[0], operands[1])"
  "*
{
  rtx br_insn = NEXT_INSN (insn);
  RTX_CODE br_code;

  if (GET_CODE (br_insn) != JUMP_INSN)
    abort();
  br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn), 1), 0));

  weird_memory_memory (operands[0], operands[1]);

  if (swap_operands)
    {
      cc_status.flags = CC_REVERSED;
      if (TRULY_UNSIGNED_COMPARE_P (br_code))
        {
          cc_status.mdep = CC_VALID_FOR_UNSIGNED;
          return \"ucmpb %0,%1\";
        }
      return \"cmpb %0,%1\";
    }

  if (TRULY_UNSIGNED_COMPARE_P (br_code))
    {
      cc_status.mdep = CC_VALID_FOR_UNSIGNED;
      return \"ucmpb %1,%0\";
    }
  return \"cmpb %1,%0\";
}")

;; Register-memory QImode compare.
(define_insn ""
  [(set (cc0)
        (compare (match_operand:QI 0 "nonimmediate_operand" "r,m")
                 (match_operand:QI 1 "nonimmediate_operand" "m,r")))]
  "(GET_CODE (operands[0]) != GET_CODE (operands[1]))"
  "*
{
  rtx br_insn = NEXT_INSN (insn);
  RTX_CODE br_code;

  if (GET_CODE (br_insn) != JUMP_INSN)
    abort();
  br_code = GET_CODE (XEXP (XEXP (PATTERN (br_insn), 1), 0));

  if (which_alternative != 0)
    {
      cc_status.flags = CC_REVERSED;
      if (TRULY_UNSIGNED_COMPARE_P (br_code))
        {
          cc_status.mdep = CC_VALID_FOR_UNSIGNED;
          return \"ucmpb %0,%1\";
        }
      return \"cmpb %0,%1\";
    }

  if (TRULY_UNSIGNED_COMPARE_P (br_code))
    {
      cc_status.mdep = CC_VALID_FOR_UNSIGNED;
      return \"ucmpb %1,%0\";
    }
  return \"cmpb %1,%0\";
}")

;; Conditional-branch expanders.  Signed branches run extend_and_branch
;; with SIGN_EXTEND, unsigned ones with ZERO_EXTEND, so that any compare
;; recorded by the cmphi/cmpqi/tsthi/tstqi expanders above is emitted
;; with the proper extensions before the branch.
(define_expand "bgt"
  [(set (pc) (if_then_else (gt (cc0) (const_int 0))
                           (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (SIGN_EXTEND);")

(define_expand "blt"
  [(set (pc) (if_then_else (lt (cc0) (const_int 0))
                           (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (SIGN_EXTEND);")

(define_expand "bge"
  [(set (pc) (if_then_else (ge (cc0) (const_int 0))
                           (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (SIGN_EXTEND);")

(define_expand "ble"
  [(set (pc) (if_then_else (le (cc0) (const_int 0))
                           (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (SIGN_EXTEND);")

(define_expand "beq"
  [(set (pc) (if_then_else (eq (cc0) (const_int 0))
                           (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (SIGN_EXTEND);")

(define_expand "bne"
  [(set (pc) (if_then_else (ne (cc0) (const_int 0))
                           (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (SIGN_EXTEND);")

(define_expand "bgtu"
  [(set (pc) (if_then_else (gtu (cc0) (const_int 0))
                           (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (ZERO_EXTEND);")

(define_expand "bltu"
  [(set (pc) (if_then_else (ltu (cc0) (const_int 0))
                           (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (ZERO_EXTEND);")

(define_expand "bgeu"
  [(set (pc) (if_then_else (geu (cc0) (const_int 0))
                           (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (ZERO_EXTEND);")

(define_expand "bleu"
  [(set (pc) (if_then_else (leu (cc0) (const_int 0))
                           (label_ref (match_operand 0 "" "")) (pc)))]
  "" "extend_and_branch (ZERO_EXTEND);")

;; Floating-point compares and tests are register-register only.
(define_insn "cmpdf"
  [(set (cc0)
        (compare (match_operand:DF 0 "register_operand" "r")
                 (match_operand:DF 1 "register_operand" "r")))]
  ""
  "cmpd %1,%0")

(define_insn "cmpsf"
  [(set (cc0)
        (compare (match_operand:SF 0 "register_operand" "r")
                 (match_operand:SF 1 "register_operand" "r")))]
  ""
  "cmpf %1,%0")

(define_insn "tstdf"
  [(set (cc0)
        (match_operand:DF 0 "register_operand" "r"))]
  ""
  "mtstd %0,%0")

(define_insn "tstsf"
  [(set (cc0)
        (match_operand:SF 0 "register_operand" "r"))]
  ""
  "mtstf %0,%0")

;______________________________________________________________________
;
;	Fixed-point Arithmetic.
;______________________________________________________________________

;; Addition.  Alternative 1 uses mova (address computation) to add two
;; registers into a third without touching either source; that form
;; does not set the condition codes, hence forget_cc_if_dependent.
(define_insn "addsi3"
  [(set (match_operand:SI 0 "register_operand" "=r,!r")
        (plus:SI (match_operand:SI 1 "general_operand" "%0,r")
                 (match_operand:SI 2 "general_operand" "g,rJ")))]
  ""
  "*
{
  if (which_alternative == 0)
    return (GET_CODE (operands[2]) == CONST_INT && INTVAL (operands[2]) == 32
            ? \"subw %n2,%0\" : \"addw %2,%0\");
  else
    {
      forget_cc_if_dependent (operands[0]);
      return \"mova %a2[%1*1],%0\";
    }
}")

;; Subtraction; rsubw handles the reversed-operand alternative.
(define_insn "subsi3"
  [(set (match_operand:SI 0 "register_operand" "=r,r")
        (minus:SI (match_operand:SI 1 "general_operand" "0,g")
                  (match_operand:SI 2 "general_operand" "g,0")))]
  ""
  "* return (which_alternative == 0) ? \"subw %2,%0\" : \"rsubw %1,%0\";")

(define_insn "mulsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
        (mult:SI (match_operand:SI 1 "general_operand" "%0")
                 (match_operand:SI 2 "general_operand" "g")))]
  ""
  "mulw %2,%0")

(define_insn "divsi3"
  [(set (match_operand:SI 0 "register_operand" "=r,r")
        (div:SI (match_operand:SI 1 "general_operand" "0,g")
                (match_operand:SI 2 "general_operand" "g,0")))]
  ""
  "* return (which_alternative == 0) ? \"divw %2,%0\" : \"rdivw %1,%0\";")

(define_insn "udivsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
        (udiv:SI (match_operand:SI 1 "register_operand" "0")
                 (match_operand:SI 2 "general_operand" "g")))]
  ""
  "udivw %2,%0")

(define_insn "modsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
        (mod:SI (match_operand:SI 1 "register_operand" "0")
                (match_operand:SI 2 "general_operand" "g")))]
  ""
  "modw %2,%0")

(define_insn "umodsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
        (umod:SI (match_operand:SI 1 "register_operand" "0")
                 (match_operand:SI 2 "general_operand" "g")))]
  ""
  "umodw %2,%0")

(define_insn "negsi2"
  [(set (match_operand:SI 0 "register_operand" "=r")
        (neg:SI (match_operand:SI 1 "nonimmediate_operand" "rm")))]
  ""
  "mnegw %1,%0")

(define_insn "one_cmplsi2"
  [(set (match_operand:SI 0 "register_operand" "=r")
        (not:SI (match_operand:SI 1 "nonimmediate_operand" "rm")))]
  ""
  "mcomw %1,%0")

(define_insn "abssi2"
  [(set (match_operand:SI 0 "register_operand" "=r")
        (abs:SI (match_operand:SI 1 "nonimmediate_operand" "rm")))]
  ""
  "mabsw %1,%0")

;______________________________________________________________________
;
;	Floating-point Arithmetic.
;______________________________________________________________________

;; Floating-point arithmetic is register-register, two-operand
;; (destination doubles as first source).
(define_insn "adddf3"
  [(set (match_operand:DF 0 "register_operand" "=r")
        (plus:DF (match_operand:DF 1 "register_operand" "%0")
                 (match_operand:DF 2 "register_operand" "r")))]
  ""
  "addd %2,%0")

(define_insn "addsf3"
  [(set (match_operand:SF 0 "register_operand" "=r")
        (plus:SF (match_operand:SF 1 "register_operand" "%0")
                 (match_operand:SF 2 "register_operand" "r")))]
  ""
  "addf %2,%0")

(define_insn "subdf3"
  [(set (match_operand:DF 0 "register_operand" "=r")
        (minus:DF (match_operand:DF 1 "register_operand" "0")
                  (match_operand:DF 2 "register_operand" "r")))]
  ""
  "subd %2,%0")

(define_insn "subsf3"
  [(set (match_operand:SF 0 "register_operand" "=r")
        (minus:SF (match_operand:SF 1 "register_operand" "0")
                  (match_operand:SF 2 "register_operand" "r")))]
  ""
  "subf %2,%0")

(define_insn "muldf3"
  [(set (match_operand:DF 0 "register_operand" "=r")
        (mult:DF (match_operand:DF 1 "register_operand" "%0")
                 (match_operand:DF 2 "register_operand" "r")))]
  ""
  "muld %2,%0")

(define_insn "mulsf3"
  [(set (match_operand:SF 0 "register_operand" "=r")
        (mult:SF (match_operand:SF 1 "register_operand" "%0")
                 (match_operand:SF 2 "register_operand" "r")))]
  ""
  "mulf %2,%0")

(define_insn "divdf3"
  [(set (match_operand:DF 0 "register_operand" "=r")
        (div:DF (match_operand:DF 1 "register_operand" "0")
                (match_operand:DF 2 "register_operand" "r")))]
  ""
  "divd %2,%0")

(define_insn "divsf3"
  [(set (match_operand:SF 0 "register_operand" "=r")
        (div:SF (match_operand:SF 1 "register_operand" "0")
                (match_operand:SF 2 "register_operand" "r")))]
  ""
  "divf %2,%0")

(define_insn "negdf2"
  [(set (match_operand:DF 0 "register_operand" "=r")
        (neg:DF (match_operand:DF 1 "register_operand" "r")))]
  ""
  "mnegd %1,%0")

(define_insn "negsf2"
  [(set (match_operand:SF 0 "register_operand" "=r")
        (neg:SF (match_operand:SF 1 "register_operand" "r")))]
  ""
  "mnegf %1,%0")

(define_insn "absdf2"
  [(set (match_operand:DF 0 "register_operand" "=r")
        (abs:DF (match_operand:DF 1 "register_operand" "r")))]
  ""
  "mabsd %1,%0")

(define_insn "abssf2"
  [(set (match_operand:SF 0 "register_operand" "=r")
        (abs:SF (match_operand:SF 1 "register_operand" "r")))]
  ""
  "mabsf %1,%0")

;______________________________________________________________________
;
;	Logical and Shift Instructions.
;______________________________________________________________________

;; AND used purely for its condition codes (bit test).
(define_insn ""
  [(set (cc0)
        (and:SI (match_operand:SI 0 "general_operand" "%r")
                (match_operand:SI 1 "general_operand" "g")))]
  ""
  "*
{
  cc_status.flags |= CC_NO_OVERFLOW;
  return \"bitw %1,%0\";
}")

;; AND.  Masks of 0xff/0xffff (constraint K) become zero-extending
;; moves instead.
(define_insn "andsi3"
  [(set (match_operand:SI 0 "register_operand" "=r,r")
        (and:SI (match_operand:SI 1 "general_operand" "%0,r")
                (match_operand:SI 2 "general_operand" "g,K")))]
  ""
  "*
{
  if (which_alternative == 0)
    return \"andw %2,%0\";

  cc_status.flags = CC_NOT_NEGATIVE;
  return (INTVAL (operands[2]) == 255
          ? \"movzbw %1,%0\" : \"movzhw %1,%0\");
}")

;; Bit-clear: dst &= ~src.
(define_insn ""
  [(set (match_operand:SI 0 "register_operand" "=r")
        (and:SI (not:SI (match_operand:SI 1 "general_operand" "g"))
                (match_operand:SI 2 "register_operand" "0")))]
  ""
  "bicw %1,%0")

(define_insn "iorsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
        (ior:SI (match_operand:SI 1 "general_operand" "%0")
                (match_operand:SI 2 "general_operand" "g")))]
  ""
  "orw %2,%0")

(define_insn "xorsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
        (xor:SI (match_operand:SI 1 "general_operand" "%0")
                (match_operand:SI 2 "general_operand" "g")))]
  ""
  "xorw %2,%0")

; The arithmetic left shift instructions work strangely on pyramids.
; They fail to modify the sign bit.  Therefore, use logic shifts.

(define_insn "ashlsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
        (ashift:SI (match_operand:SI 1 "register_operand" "0")
                   (match_operand:SI 2 "general_operand" "rnm")))]
  ""
  "* return output_shift (\"lshlw %2,%0\", operands[2], 32); ")

(define_insn "ashrsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
        (ashiftrt:SI (match_operand:SI 1 "register_operand" "0")
                     (match_operand:SI 2 "general_operand" "rnm")))]
  ""
  "* return output_shift (\"ashrw %2,%0\", operands[2], 32); ")

(define_insn "ashrdi3"
  [(set (match_operand:DI 0 "register_operand" "=r")
        (ashiftrt:DI (match_operand:DI 1 "register_operand" "0")
                     (match_operand:SI 2 "general_operand" "rnm")))]
  ""
  "* return output_shift (\"ashrl %2,%0\", operands[2], 64); ")

(define_insn "lshrsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
        (lshiftrt:SI (match_operand:SI 1 "register_operand" "0")
                     (match_operand:SI 2 "general_operand" "rnm")))]
  ""
  "* return output_shift (\"lshrw %2,%0\", operands[2], 32); ")

(define_insn "rotlsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
        (rotate:SI (match_operand:SI 1 "register_operand" "0")
                   (match_operand:SI 2 "general_operand" "rnm")))]
  ""
  "* return output_shift (\"rotlw %2,%0\", operands[2], 32); ")

(define_insn "rotrsi3"
  [(set (match_operand:SI 0 "register_operand" "=r")
        (rotatert:SI (match_operand:SI 1 "register_operand" "0")
                     (match_operand:SI 2 "general_operand" "rnm")))]
  ""
  "* return output_shift (\"rotrw %2,%0\", operands[2], 32); ")

;______________________________________________________________________
;
;	Fixed and Floating Moves.
;______________________________________________________________________

;; If the destination is a memory operand, indexed source operands are
;; disallowed.  Big DImode constants are always loaded into a reg pair,
;; although offsetable memory addresses really could be dealt with.

;; DImode store.  The condition accepts only CONST_DOUBLEs whose value
;; fits in a sign-extended 32-bit word, which movl can materialize.
(define_insn ""
  [(set (match_operand:DI 0 "memory_operand" "=m")
        (match_operand:DI 1 "nonindexed_operand" "gF"))]
  "(GET_CODE (operands[1]) == CONST_DOUBLE
    ? ((CONST_DOUBLE_HIGH (operands[1]) == 0
        && CONST_DOUBLE_LOW (operands[1]) >= 0)
       || (CONST_DOUBLE_HIGH (operands[1]) == -1
           && CONST_DOUBLE_LOW (operands[1]) < 0))
    : 1)"
  "*
{
  if (GET_CODE (operands[1]) == CONST_DOUBLE)
    operands[1] = gen_rtx (CONST_INT, VOIDmode,
                           CONST_DOUBLE_LOW (operands[1]));
  return \"movl %1,%0\";
}")

;; Force the destination to a register, so all source operands are allowed.

(define_insn "movdi"
  [(set (match_operand:DI 0 "general_operand" "=r")
        (match_operand:DI 1 "general_operand" "gF"))]
  ""
  "* return output_move_double (operands); ")

;; If the destination is a memory address, indexed source operands are
;; disallowed.

(define_insn ""
  [(set (match_operand:SI 0 "memory_operand" "=m")
        (match_operand:SI 1 "nonindexed_operand" "g"))]
  ""
  "movw %1,%0")

;; Force the destination to a register, so all source operands are allowed.

(define_insn "movsi"
  [(set (match_operand:SI 0 "general_operand" "=r")
        (match_operand:SI 1 "general_operand" "g"))]
  ""
  "movw %1,%0")

;; If the destination is a memory address, indexed source operands are
;; disallowed.

(define_insn ""
  [(set (match_operand:HI 0 "memory_operand" "=m")
        (match_operand:HI 1 "nonindexed_operand" "g"))]
  ""
  "*
{
  if (REG_P (operands[1]))
    return \"cvtwh %1,%0\";		/* reg -> mem */
  else
    return \"movh %1,%0\";		/* mem imm -> mem */
}")

;; Force the destination to a register, so all source operands are allowed.

(define_insn "movhi"
  [(set (match_operand:HI 0 "general_operand" "=r")
        (match_operand:HI 1 "general_operand" "g"))]
  ""
  "*
{
  if (GET_CODE (operands[1]) != MEM)
    return \"movw %1,%0\";		/* reg imm -> reg  */
  return \"cvthw %1,%0\";		/* mem -> reg */
}")

;; If the destination is a memory address, indexed source operands are
;; disallowed.

(define_insn ""
  [(set (match_operand:QI 0 "memory_operand" "=m")
        (match_operand:QI 1 "nonindexed_operand" "g"))]
  ""
  "*
{
  if (REG_P (operands[1]))
    return \"cvtwb %1,%0\";		/* reg -> mem */
  else
    return \"movb %1,%0\";		/* mem imm -> mem */
}")

;; Force the destination to a register, so all source operands are allowed.

(define_insn "movqi"
  [(set (match_operand:QI 0 "general_operand" "=r")
        (match_operand:QI 1 "general_operand" "g"))]
  ""
  "*
{
  if (GET_CODE (operands[1]) != MEM)
    return \"movw %1,%0\";		/* reg imm -> reg  */
  return \"cvtbw %1,%0\";		/* mem -> reg */
}")

;; If the destination is a memory address, indexed source operands are
;; disallowed.

(define_insn ""
  [(set (match_operand:DF 0 "memory_operand" "=m")
        (match_operand:DF 1 "nonindexed_operand" "g"))]
  "GET_CODE (operands[1]) != CONST_DOUBLE"
  "movl %1,%0")

;; Force the destination to a register, so all source operands are allowed.

(define_insn "movdf"
  [(set (match_operand:DF 0 "general_operand" "=r")
        (match_operand:DF 1 "general_operand" "gF"))]
  ""
  "* return output_move_double (operands); ")

;; If the destination is a memory address, indexed source operands are
;; disallowed.

(define_insn ""
  [(set (match_operand:SF 0 "memory_operand" "=m")
        (match_operand:SF 1 "nonindexed_operand" "g"))]
  ""
  "movw %1,%0")

;; Force the destination to a register, so all source operands are allowed.

(define_insn "movsf"
  [(set (match_operand:SF 0 "general_operand" "=r")
        (match_operand:SF 1 "general_operand" "g"))]
  ""
  "movw %1,%0")

;; Load-effective-address; mova does not set the condition codes.
(define_insn ""
  [(set (match_operand:SI 0 "register_operand" "=r")
        (match_operand:QI 1 "address_operand" "p"))]
  ""
  "*
{
  forget_cc_if_dependent (operands[0]);
  return \"mova %a1,%0\";
}")

;______________________________________________________________________
;
;	Conversion patterns.
;______________________________________________________________________

;; The trunc patterns are used only when non compile-time constants are used.

;; Truncation to QImode is a plain word move; when source and
;; destination are the same register it is a no-op and the previous
;; condition codes remain valid.
(define_insn "truncsiqi2"
  [(set (match_operand:QI 0 "register_operand" "=r")
        (truncate:QI (match_operand:SI 1 "nonimmediate_operand" "rm")))]
  ""
  "*
{
  if (REG_P (operands[0]) && REG_P (operands[1])
      && REGNO (operands[0]) == REGNO (operands[1]))
    {
      cc_status = cc_prev_status;
      return \"\";
    }
  forget_cc_if_dependent (operands[0]);
  return \"movw %1,%0\";
}")

(define_insn "truncsihi2"
  [(set (match_operand:HI 0 "register_operand" "=r")
        (truncate:HI (match_operand:SI 1 "nonimmediate_operand" "rm")))]
  ""
  "*
{
  if (REG_P (operands[0]) && REG_P (operands[1])
      && REGNO (operands[0]) == REGNO (operands[1]))
    {
      cc_status = cc_prev_status;
      return \"\";
    }
  forget_cc_if_dependent (operands[0]);
  return \"movw %1,%0\";
}")

;; Sign extension; omitted entirely when a backward scan shows the
;; register is already sign-extended.
(define_insn "extendhisi2"
  [(set (match_operand:SI 0 "general_operand" "=r,m")
        (sign_extend:SI (match_operand:HI 1 "nonimmediate_operand" "rm,r")))]
  ""
  "*
{
  extern int optimize;
  if (optimize && REG_P (operands[0]) && REG_P (operands[1])
      && REGNO (operands[0]) == REGNO (operands[1])
      && already_sign_extended (insn, HImode, operands[0]))
    {
      cc_status = cc_prev_status;
      return \"\";
    }
  return \"cvthw %1,%0\";
}")

(define_insn "extendqisi2"
  [(set (match_operand:SI 0 "general_operand" "=r,m")
        (sign_extend:SI (match_operand:QI 1 "nonimmediate_operand" "rm,r")))]
  ""
  "*
{
  extern int optimize;
  if (optimize && REG_P (operands[0]) && REG_P (operands[1])
      && REGNO (operands[0]) == REGNO (operands[1])
      && already_sign_extended (insn, QImode, operands[0]))
    {
      cc_status = cc_prev_status;
      return \"\";
    }
  return \"cvtbw %1,%0\";
}")

; Pyramid doesn't have insns *called* "cvtbh" or "movzbh".
; But we can cvtbw/movzbw into a register, where there is no distinction
; between words and halfwords.

(define_insn "extendqihi2"
  [(set (match_operand:HI 0 "register_operand" "=r")
        (sign_extend:HI (match_operand:QI 1 "nonimmediate_operand" "rm")))]
  ""
  "cvtbw %1,%0")

(define_insn "zero_extendhisi2"
  [(set (match_operand:SI 0 "register_operand" "=r")
        (zero_extend:SI (match_operand:HI 1 "nonimmediate_operand" "rm")))]
  ""
  "*
{
  cc_status.flags = CC_NOT_NEGATIVE;
  return \"movzhw %1,%0\";
}")

(define_insn "zero_extendqisi2"
  [(set (match_operand:SI 0 "register_operand" "=r")
        (zero_extend:SI (match_operand:QI 1 "nonimmediate_operand" "rm")))]
  ""
  "*
{
  cc_status.flags = CC_NOT_NEGATIVE;
  return \"movzbw %1,%0\";
}")

(define_insn "zero_extendqihi2"
  [(set (match_operand:HI 0 "register_operand" "=r")
        (zero_extend:HI (match_operand:QI 1 "nonimmediate_operand" "rm")))]
  ""
  "*
{
  cc_status.flags = CC_NOT_NEGATIVE;
  return \"movzbw %1,%0\";
}")

;; Float/integer conversions.  The register alternative is earlyclobber
;; (&r) because source and destination widths differ.
(define_insn "extendsfdf2"
  [(set (match_operand:DF 0 "general_operand" "=&r,m")
        (float_extend:DF (match_operand:SF 1 "nonimmediate_operand" "rm,r")))]
  ""
  "cvtfd %1,%0")

(define_insn "truncdfsf2"
  [(set (match_operand:SF 0 "general_operand" "=&r,m")
        (float_truncate:SF (match_operand:DF 1 "nonimmediate_operand" "rm,r")))]
  ""
  "cvtdf %1,%0")

(define_insn "floatsisf2"
  [(set (match_operand:SF 0 "general_operand" "=&r,m")
        (float:SF (match_operand:SI 1 "nonimmediate_operand" "rm,r")))]
  ""
  "cvtwf %1,%0")

(define_insn "floatsidf2"
  [(set (match_operand:DF 0 "general_operand" "=&r,m")
        (float:DF (match_operand:SI 1 "nonimmediate_operand" "rm,r")))]
  ""
  "cvtwd %1,%0")

(define_insn "fix_truncsfsi2"
  [(set (match_operand:SI 0 "general_operand" "=&r,m")
        (fix:SI (fix:SF (match_operand:SF 1 "nonimmediate_operand" "rm,r"))))]
  ""
  "cvtfw %1,%0")

(define_insn "fix_truncdfsi2"
  [(set (match_operand:SI 0 "general_operand" "=&r,m")
        (fix:SI (fix:DF (match_operand:DF 1 "nonimmediate_operand" "rm,r"))))]
  ""
  "cvtdw %1,%0")

;______________________________________________________________________
;
;	Flow Control Patterns.
;______________________________________________________________________

;; Prefer "br" to "jump" for unconditional jumps, since it's faster.
;; (The assembler can manage with out-of-range branches.)

982(define_insn "jump"
983 [(set (pc)
984 (label_ref (match_operand 0 "" "")))]
985 ""
986 "br %l0")
987
988(define_insn ""
989 [(set (pc)
990 (if_then_else (match_operator 0 "relop" [(cc0) (const_int 0)])
991 (label_ref (match_operand 1 "" ""))
992 (pc)))]
993 ""
994 "*
995{
996 extern int optimize;
997 if (optimize)
998 switch (GET_CODE (operands[0]))
999 {
1000 case EQ: case NE:
1001 break;
1002 case LT: case LE: case GE: case GT:
1003 if (cc_prev_status.mdep == CC_VALID_FOR_UNSIGNED)
1004 return 0;
1005 break;
1006 case LTU: case LEU: case GEU: case GTU:
1007 if (cc_prev_status.mdep != CC_VALID_FOR_UNSIGNED)
1008 return 0;
1009 break;
1010 }
1011
1012 return \"b%N0 %l1\";
1013}")
1014
1015(define_insn ""
1016 [(set (pc)
1017 (if_then_else (match_operator 0 "relop" [(cc0) (const_int 0)])
1018 (pc)
1019 (label_ref (match_operand 1 "" ""))))]
1020 ""
1021 "*
1022{
1023 extern int optimize;
1024 if (optimize)
1025 switch (GET_CODE (operands[0]))
1026 {
1027 case EQ: case NE:
1028 break;
1029 case LT: case LE: case GE: case GT:
1030 if (cc_prev_status.mdep == CC_VALID_FOR_UNSIGNED)
1031 return 0;
1032 break;
1033 case LTU: case LEU: case GEU: case GTU:
1034 if (cc_prev_status.mdep != CC_VALID_FOR_UNSIGNED)
1035 return 0;
1036 break;
1037 }
1038
1039 return \"b%C0 %l1\";
1040}")
1041
1042(define_insn "call"
1043 [(call (match_operand:QI 0 "memory_operand" "m")
1044 (match_operand:SI 1 "immediate_operand" "n"))]
1045 ""
1046 "call %0")
1047
1048(define_insn "call_value"
1049 [(set (match_operand 0 "" "=r")
1050 (call (match_operand:QI 1 "memory_operand" "m")
1051 (match_operand:SI 2 "immediate_operand" "n")))]
1052 ;; Operand 2 not really used on Pyramid architecture.
1053 ""
1054 "call %1")
1055
1056(define_insn "return"
1057 [(return)]
1058 ""
1059 "*
1060{
1061 if (get_frame_size () + current_function_pretend_args_size
1062 + current_function_args_size != 0
1063 || current_function_calls_alloca)
1064 {
1065 int dealloc_size = current_function_pretend_args_size;
1066 if (current_function_pops_args)
1067 dealloc_size += current_function_args_size;
1068 operands[0] = gen_rtx (CONST_INT, VOIDmode, dealloc_size);
1069 return \"retd %0\";
1070 }
1071 else
1072 return \"ret\";
1073}")
1074
;; Jump-table dispatch: an indirect jump through the address already
;; computed into register operand 0.  The (use (label_ref ...)) of the
;; table's label keeps the dispatch table from being deleted as dead.
(define_insn "tablejump"
  [(set (pc) (match_operand:SI 0 "register_operand" "r"))
   (use (label_ref (match_operand 1 "" "")))]
  ""
  "jump (%0)")
1080
;; No-operation.  The Pyramid has no dedicated nop mnemonic, so a
;; harmless register-to-itself move of gr0 is used.
(define_insn "nop"
  [(const_int 0)]
  ""
  "movw gr0,gr0  # nop")
1085\f
1086;______________________________________________________________________
1087;
1088; Peep-hole Optimization Patterns.
1089;______________________________________________________________________
1090
1091;; Optimize fullword move followed by a test of the moved value.
1092
;; Fold a fullword move followed by a test of the moved value into a
;; single "mtstw" (move-and-test-word).  The condition requires that
;; the tested operand is either the destination or the source of the
;; move, so the flags set by mtstw describe exactly the tested value.
;; CC_NO_OVERFLOW is set because a move/test cannot overflow, letting
;; later branches rely on the simpler flag interpretation.
(define_peephole
  [(set (match_operand:SI 0 "register_operand" "=r")
	(match_operand:SI 1 "nonimmediate_operand" "rm"))
   (set (cc0) (match_operand:SI 2 "nonimmediate_operand" "rm"))]
  "rtx_equal_p (operands[2], operands[0])
   || rtx_equal_p (operands[2], operands[1])"
  "*
  cc_status.flags |= CC_NO_OVERFLOW;
  return \"mtstw %1,%0\";
")
1103
1104;; Same for HI and QI mode move-test as well.
1105
;; Fold a halfword move, a sign-extension of the (same) halfword value
;; to a scratch register, and a test of that scratch into one "cvthw",
;; which performs the widening move and sets the condition codes.  The
;; scratch (operand 2) must be dead afterwards, since the combined
;; instruction no longer produces it; the extended operand must be the
;; move's source or destination so the tested value is the moved one.
(define_peephole
  [(set (match_operand:HI 0 "register_operand" "=r")
	(match_operand:HI 1 "nonimmediate_operand" "rm"))
   (set (match_operand:SI 2 "register_operand" "=r")
	(sign_extend:SI (match_operand:HI 3 "nonimmediate_operand" "rm")))
   (set (cc0) (match_dup 2))]
  "dead_or_set_p (insn, operands[2])
   && (rtx_equal_p (operands[3], operands[0])
       || rtx_equal_p (operands[3], operands[1]))"
  "*
  cc_status.flags |= CC_NO_OVERFLOW;
  return \"cvthw %1,%0\";
")
1119
;; Same optimization as the HImode peephole above, for QImode: byte
;; move + sign-extension + test becomes a single "cvtbw".  Operand 2
;; (the sign-extension scratch) must be dead after the test.
(define_peephole
  [(set (match_operand:QI 0 "register_operand" "=r")
	(match_operand:QI 1 "nonimmediate_operand" "rm"))
   (set (match_operand:SI 2 "register_operand" "=r")
	(sign_extend:SI (match_operand:QI 3 "nonimmediate_operand" "rm")))
   (set (cc0) (match_dup 2))]
  "dead_or_set_p (insn, operands[2])
   && (rtx_equal_p (operands[3], operands[0])
       || rtx_equal_p (operands[3], operands[1]))"
  "*
  cc_status.flags |= CC_NO_OVERFLOW;
  return \"cvtbw %1,%0\";
")
1133
1134;; Optimize loops with an incremented/decremented variable.
1135
;; Loop tail: decrement a register, compare it, and branch on a signed
;; condition -- folded into "dcmpw" (decrement-and-compare) plus the
;; branch.  The decremented register must be one side of the compare.
;; If it is the first operand, the branch uses %N3; if the second, the
;; swapped-operand form %R3 is used so the condition matches the
;; operand order actually emitted.
;; NOTE(review): on this port %N appears to print the condition
;; unreversed and %R the operand-swapped condition (cf. the branch
;; patterns earlier in this file) -- confirm against PRINT_OPERAND in
;; pyr.c.
;; NOTE(review): the CONST_INT guard accepts values with
;; (unsigned)val + 32 >= 64, i.e. constants OUTSIDE [-32, 31] --
;; presumably the in-range "quick" constants are better handled by
;; other patterns; confirm the intended direction of this range test.
(define_peephole
  [(set (match_operand:SI 0 "register_operand" "=r")
	(plus:SI (match_dup 0)
		 (const_int -1)))
   (set (cc0)
	(compare (match_operand:SI 1 "register_operand" "r")
		 (match_operand:SI 2 "nonmemory_operand" "ri")))
   (set (pc)
	(if_then_else (match_operator:SI 3 "signed_comparison"
				[(cc0) (const_int 0)])
		      (label_ref (match_operand 4 "" ""))
		      (pc)))]
  "(GET_CODE (operands[2]) == CONST_INT
    ? (unsigned)INTVAL (operands[2]) + 32 >= 64
    : 1) && (rtx_equal_p (operands[0], operands[1])
	     || rtx_equal_p (operands[0], operands[2]))"
  "*
  if (rtx_equal_p (operands[0], operands[1]))
    {
      output_asm_insn (\"dcmpw %2,%0\", operands);
      return \"b%N3 %l4\";
    }
  else
    {
      output_asm_insn (\"dcmpw %1,%0\", operands);
      return \"b%R3 %l4\";
    }
")
1164
;; Mirror of the preceding peephole for an INCREMENTED loop variable:
;; increment, compare, signed branch become "icmpw"
;; (increment-and-compare) plus the branch.  Operand-order handling
;; (%N3 vs. %R3) and the CONST_INT range guard are identical to the
;; dcmpw peephole above; see the review notes there.
(define_peephole
  [(set (match_operand:SI 0 "register_operand" "=r")
	(plus:SI (match_dup 0)
		 (const_int 1)))
   (set (cc0)
	(compare (match_operand:SI 1 "register_operand" "r")
		 (match_operand:SI 2 "nonmemory_operand" "ri")))
   (set (pc)
	(if_then_else (match_operator:SI 3 "signed_comparison"
				[(cc0) (const_int 0)])
		      (label_ref (match_operand 4 "" ""))
		      (pc)))]
  "(GET_CODE (operands[2]) == CONST_INT
    ? (unsigned)INTVAL (operands[2]) + 32 >= 64
    : 1) && (rtx_equal_p (operands[0], operands[1])
	     || rtx_equal_p (operands[0], operands[2]))"
  "*
  if (rtx_equal_p (operands[0], operands[1]))
    {
      output_asm_insn (\"icmpw %2,%0\", operands);
      return \"b%N3 %l4\";
    }
  else
    {
      output_asm_insn (\"icmpw %1,%0\", operands);
      return \"b%R3 %l4\";
    }
")
1193
1194;; Combine two word moves with consecutive operands into one long move.
1195;; Also combines immediate moves, if the high-order destination operand
1196;; is loaded with 0 or -1 and the low-order destination operand is loaded
1197;; with a constant with the same sign.
1198
;; Combine two SImode moves with consecutive operands into one "movl"
;; (long move).  movdi_possible() (in pyr.c) decides whether the two
;; moves can be fused; it is called again in the output template for
;; its side effect of setting the global `swap_operands', which tells
;; us which of the two orderings to emit.  NOTE(review): that global
;; protocol is assumed from usage here -- confirm against pyr.c.
;; The "# COMBINE" comments record the two original moves in the
;; assembly output for debugging.
(define_peephole
  [(set (match_operand:SI 0 "general_operand" "=g")
	(match_operand:SI 1 "general_operand" "g"))
   (set (match_operand:SI 2 "general_operand" "=g")
	(match_operand:SI 3 "general_operand" "g"))]
  "movdi_possible (operands)"
  "*
  output_asm_insn (\"# COMBINE movw %1,%0\", operands);
  output_asm_insn (\"# COMBINE movw %3,%2\", operands);
  movdi_possible (operands);
  if (CONSTANT_P (operands[1]))
    return (swap_operands) ? \"movl %3,%0\" : \"movl %1,%2\";

  return (swap_operands) ? \"movl %1,%0\" : \"movl %3,%2\";
")
1214
1215;; Optimize certain tests after memory stores.
1216
;; Fold a narrowing store (register to QI/HI memory), a sign-extension
;; of the same register into a scratch, and a test of that scratch into
;; a single "cvtwb"/"cvtwh" (convert word to byte/halfword), which does
;; the narrowing store and sets the condition codes.  The scratch
;; register (operand 2) must be dead, as it is no longer produced.
(define_peephole
  [(set (match_operand 0 "memory_operand" "=m")
	(match_operand 1 "register_operand" "r"))
   (set (match_operand:SI 2 "register_operand" "=r")
	(sign_extend:SI (match_dup 1)))
   (set (cc0)
	(match_dup 2))]
  "dead_or_set_p (insn, operands[2])"
  "*
  cc_status.flags |= CC_NO_OVERFLOW;
  if (GET_MODE (operands[0]) == QImode)
    return \"cvtwb %1,%0\";
  else
    return \"cvtwh %1,%0\";
")
1232\f
1233;______________________________________________________________________
1234;
1235; DImode Patterns.
1236;______________________________________________________________________
1237
;; Sign-extend SImode to DImode in three SI moves: load the source into
;; the low word (subreg 1 -- on this word-big-endian target word 1 of a
;; DI register pair is the least significant), copy it to the high word
;; (subreg 0), then arithmetic-shift the high word right by 31 so it
;; becomes 0 or -1 according to the sign bit.
(define_expand "extendsidi2"
  [(set (subreg:SI (match_operand:DI 0 "register_operand" "=r") 1)
	(match_operand:SI 1 "general_operand" "g"))
   (set (subreg:SI (match_dup 0) 0)
	(subreg:SI (match_dup 0) 1))
   (set (subreg:SI (match_dup 0) 0)
	(ashiftrt:SI (subreg:SI (match_dup 0) 0)
		     (const_int 31)))]
  ""
  "")
1248
;; 64-bit add as two 32-bit operations: "addw" on the low words, then
;; "addwc" (add with carry) on the high words.  xoperands[] is built to
;; hold the low-word halves: for a register pair that is REGNO + 1
;; (high word first on this target), and for a CONST_DOUBLE the low
;; half as a CONST_INT, with operands[2] rewritten to the high half for
;; the second instruction.  CC_STATUS_INIT because the two-instruction
;; sequence leaves condition codes that do not describe the full DI
;; result.
(define_insn "adddi3"
  [(set (match_operand:DI 0 "register_operand" "=r")
	(plus:DI (match_operand:DI 1 "nonmemory_operand" "%0")
		 (match_operand:DI 2 "nonmemory_operand" "rF")))]
  ""
  "*
{
  rtx xoperands[2];
  CC_STATUS_INIT;
  xoperands[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
  if (REG_P (operands[2]))
    xoperands[1] = gen_rtx (REG, SImode, REGNO (operands[2]) + 1);
  else
    {
      xoperands[1] = gen_rtx (CONST_INT, VOIDmode,
			      CONST_DOUBLE_LOW (operands[2]));
      operands[2] = gen_rtx (CONST_INT, VOIDmode,
			     CONST_DOUBLE_HIGH (operands[2]));
    }
  output_asm_insn (\"addw %1,%0\", xoperands);
  return \"addwc %2,%0\";
}")
1271
;; 64-bit subtract as two 32-bit operations: "subw" on the low words,
;; then "subwb" (subtract with borrow) on the high words.  The operand
;; splitting mirrors adddi3: xoperands[] holds the low-word halves
;; (REGNO + 1 for a register pair, CONST_DOUBLE_LOW for a constant),
;; and operands[2] is rewritten to the high half for the second
;; instruction.  CC_STATUS_INIT because the flags after the pair do not
;; describe the full DI result.
(define_insn "subdi3"
  [(set (match_operand:DI 0 "register_operand" "=r")
	(minus:DI (match_operand:DI 1 "register_operand" "0")
		  (match_operand:DI 2 "nonmemory_operand" "rF")))]
  ""
  "*
{
  rtx xoperands[2];
  CC_STATUS_INIT;
  xoperands[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
  if (REG_P (operands[2]))
    xoperands[1] = gen_rtx (REG, SImode, REGNO (operands[2]) + 1);
  else
    {
      xoperands[1] = gen_rtx (CONST_INT, VOIDmode,
			      CONST_DOUBLE_LOW (operands[2]));
      operands[2] = gen_rtx (CONST_INT, VOIDmode,
			     CONST_DOUBLE_HIGH (operands[2]));
    }
  output_asm_insn (\"subw %1,%0\", xoperands);
  return \"subwb %2,%0\";
}")
1294
;; 64-bit inclusive-or as two independent 32-bit "orw" instructions
;; (bitwise ops need no carry between words).  Operand splitting is the
;; same scheme as adddi3: xoperands[] gets the low-word halves and
;; operands[2] is rewritten to the high half for the second orw.
(define_insn "iordi3"
  [(set (match_operand:DI 0 "register_operand" "=r")
	(ior:DI (match_operand:DI 1 "nonmemory_operand" "%0")
		(match_operand:DI 2 "nonmemory_operand" "rF")))]
  ""
  "*
{
  rtx xoperands[2];
  CC_STATUS_INIT;
  xoperands[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
  if (REG_P (operands[2]))
    xoperands[1] = gen_rtx (REG, SImode, REGNO (operands[2]) + 1);
  else
    {
      xoperands[1] = gen_rtx (CONST_INT, VOIDmode,
			      CONST_DOUBLE_LOW (operands[2]));
      operands[2] = gen_rtx (CONST_INT, VOIDmode,
			     CONST_DOUBLE_HIGH (operands[2]));
    }
  output_asm_insn (\"orw %1,%0\", xoperands);
  return \"orw %2,%0\";
}")
1317
;; 64-bit bitwise-and as two independent 32-bit "andw" instructions;
;; operand splitting as in adddi3 (low-word halves into xoperands[],
;; operands[2] rewritten to the high half for the second andw).
(define_insn "anddi3"
  [(set (match_operand:DI 0 "register_operand" "=r")
	(and:DI (match_operand:DI 1 "nonmemory_operand" "%0")
		(match_operand:DI 2 "nonmemory_operand" "rF")))]
  ""
  "*
{
  rtx xoperands[2];
  CC_STATUS_INIT;
  xoperands[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
  if (REG_P (operands[2]))
    xoperands[1] = gen_rtx (REG, SImode, REGNO (operands[2]) + 1);
  else
    {
      xoperands[1] = gen_rtx (CONST_INT, VOIDmode,
			      CONST_DOUBLE_LOW (operands[2]));
      operands[2] = gen_rtx (CONST_INT, VOIDmode,
			     CONST_DOUBLE_HIGH (operands[2]));
    }
  output_asm_insn (\"andw %1,%0\", xoperands);
  return \"andw %2,%0\";
}")
1340
;; 64-bit exclusive-or as two independent 32-bit "xorw" instructions;
;; operand splitting as in adddi3 (low-word halves into xoperands[],
;; operands[2] rewritten to the high half for the second xorw).
(define_insn "xordi3"
  [(set (match_operand:DI 0 "register_operand" "=r")
	(xor:DI (match_operand:DI 1 "nonmemory_operand" "%0")
		(match_operand:DI 2 "nonmemory_operand" "rF")))]
  ""
  "*
{
  rtx xoperands[2];
  CC_STATUS_INIT;
  xoperands[0] = gen_rtx (REG, SImode, REGNO (operands[0]) + 1);
  if (REG_P (operands[2]))
    xoperands[1] = gen_rtx (REG, SImode, REGNO (operands[2]) + 1);
  else
    {
      xoperands[1] = gen_rtx (CONST_INT, VOIDmode,
			      CONST_DOUBLE_LOW (operands[2]));
      operands[2] = gen_rtx (CONST_INT, VOIDmode,
			     CONST_DOUBLE_HIGH (operands[2]));
    }
  output_asm_insn (\"xorw %1,%0\", xoperands);
  return \"xorw %2,%0\";
}")
1363\f
1364;;- Local variables:
1365;;- mode:emacs-lisp
1366;;- comment-start: ";;- "
1367;;- eval: (set-syntax-table (copy-sequence (syntax-table)))
1368;;- eval: (modify-syntax-entry ?] ")[")
1369;;- eval: (modify-syntax-entry ?{ "(}")
1370;;- eval: (modify-syntax-entry ?} "){")
1371;;- End:
1372
This page took 0.179158 seconds and 5 git commands to generate.