/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1   PARAMS ((rtx *, rtx, rtx, rtx));
static rtx *find_single_use_1        PARAMS ((rtx, rtx *));
static rtx *find_constant_term_loc   PARAMS ((rtx *));
static void validate_replace_src_1   PARAMS ((rtx *, void *));

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile ()
{
  volatile_ok = 0;
}

void
init_recog ()
{
  volatile_ok = 1;
}
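
/* Illustrative sketch (not part of the original file): per the comment on
   volatile_ok above, a pass that generates new rtl runs with volatile
   operands disallowed and restores the permissive setting before insns
   are matched again.  The pass function below is hypothetical.  */
#if 0
static void
run_generation_pass_example ()
{
  init_recog_no_volatile ();	/* Reject volatile MEMs while generating.  */
  /* ... emit new rtl here ...  */
  init_recog ();		/* Recognize all valid insns afterwards.  */
}
#endif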

/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one.  (The only exception is in combine.c.)  */

int
recog_memoized_1 (insn)
     rtx insn;
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
  return INSN_CODE (insn);
}
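
/* Illustrative sketch (not part of the original file): callers normally
   test the memoized code (typically via the recog_memoized wrapper in
   recog.h) rather than calling `recog' directly; a negative code means
   the pattern was not recognized.  */
#if 0
static int
insn_is_recognizable_example (insn)
     rtx insn;
{
  return recog_memoized_1 (insn) >= 0;
}
#endif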
\f
/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (x)
     rtx x;
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = (rtx *) alloca (noperands * sizeof (rtx));
  constraints = (const char **) alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL_PTR, constraints, NULL_PTR);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
	c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
	c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
	return 0;
    }

  return 1;
}
\f
/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is non-zero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (object, loc, new, in_group)
     rtx object;
     rtx *loc;
     rtx new;
     int in_group;
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  if (in_group == 0 && num_changes != 0)
    abort ();

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
	/* This value allows for repeated substitutions inside complex
	   indexed addresses, or changes in up to 5 insns.  */
	changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
	changes_allocated *= 2;

      changes =
	(change_t *) xrealloc (changes,
			       sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && GET_CODE (object) != MEM)
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
	 case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
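
/* Illustrative sketch (not part of the original file): a single,
   self-validating change.  With IN_GROUP == 0 the replacement is kept
   only if INSN is still recognizable; INSN and NEW_SRC are hypothetical.  */
#if 0
static int
replace_src_example (insn, new_src)
     rtx insn, new_src;
{
  /* Returns 1 and keeps the change if the modified insn still matches
     some pattern; otherwise restores the old SET_SRC and returns 0.  */
  return validate_change (insn, &SET_SRC (PATTERN (insn)), new_src, 0);
}
#endif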

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (insn)
     rtx insn;
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
		     (GET_CODE (pat) == SET
		      && ! reload_completed && ! reload_in_progress)
		     ? &num_clobbers : NULL_PTR);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
	return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
	return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group ()
{
  int i;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (object == 0)
	continue;

      if (GET_CODE (object) == MEM)
	{
	  if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
	    break;
	}
      else if (insn_invalid_p (object))
	{
	  rtx pat = PATTERN (object);

	  /* Perhaps we couldn't recognize the insn because there were
	     extra CLOBBERs at the end.  If so, try to re-recognize
	     without the last CLOBBER (later iterations will cause each of
	     them to be eliminated, in turn).  But don't do this if we
	     have an ASM_OPERAND.  */
	  if (GET_CODE (pat) == PARALLEL
	      && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
	      && asm_noperands (PATTERN (object)) < 0)
	    {
	      rtx newpat;

	      if (XVECLEN (pat, 0) == 2)
		newpat = XVECEXP (pat, 0, 0);
	      else
		{
		  int j;

		  newpat
		    = gen_rtx_PARALLEL (VOIDmode,
					rtvec_alloc (XVECLEN (pat, 0) - 1));
		  for (j = 0; j < XVECLEN (newpat, 0); j++)
		    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
		}

	      /* Add a new change to this group to replace the pattern
		 with this new pattern.  Then consider this change
		 as having succeeded.  The change we added will
		 cause the entire call to fail if things remain invalid.

		 Note that this can lose if a later change than the one
		 we are processing specified &XVECEXP (PATTERN (object), 0, X)
		 but this shouldn't occur.  */

	      validate_change (object, &PATTERN (object), newpat, 1);
	    }
	  else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
	    /* If this insn is a CLOBBER or USE, it is always valid, but is
	       never recognized.  */
	    continue;
	  else
	    break;
	}
    }

  if (i == num_changes)
    {
      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes ()
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (num)
     int num;
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && GET_CODE (changes[i].object) != MEM)
	INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
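
/* Illustrative sketch (not part of the original file): the grouped-change
   protocol.  Changes queued with IN_GROUP == 1 are committed atomically by
   apply_change_group; a checkpoint from num_validated_changes lets a caller
   that keeps queueing retract only its own tentative changes with
   cancel_changes (checkpoint).  The rtx arguments are hypothetical, with
   OP0 and OP1 pointing into INSN's pattern.  */
#if 0
static int
swap_operands_example (insn, op0, op1)
     rtx insn;
     rtx *op0, *op1;
{
  int checkpoint = num_validated_changes ();
  rtx tem = *op0;

  validate_change (insn, op0, *op1, 1);
  validate_change (insn, op1, tem, 1);

  /* Commit both changes, or neither; on failure apply_change_group
     itself calls cancel_changes (0).  A longer-lived group would use
     cancel_changes (checkpoint) instead.  */
  return apply_change_group ();
}
#endif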

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (loc, from, to, object)
     rtx *loc;
     rtx from, to, object;
{
  register int i, j;
  register const char *fmt;
  register rtx x = *loc;
  enum rtx_code code;

  if (!x)
    return;
  code = GET_CODE (x);
  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (GET_CODE (x) == REG && GET_CODE (from) == REG
	  && GET_MODE (x) == GET_MODE (from)
	  && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
	  && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* For commutative or comparison operations, try replacing each argument
     separately and seeing if we made any changes.  If so, put a constant
     argument last.  */
  if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
    {
      int prev_changes = num_changes;

      validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
      validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
      if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
	{
	  validate_change (object, loc,
			   gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
					   : swap_condition (code),
					   GET_MODE (x), XEXP (x, 1),
					   XEXP (x, 0)),
			   1);
	  x = *loc;
	  code = GET_CODE (x);
	}
    }

  /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
     done the substitution, otherwise we won't.  */

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
	 plus_constant to try to simplify it.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
	validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
			 1);
      return;

    case MINUS:
      if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
	{
	  validate_change (object, loc,
			   plus_constant (XEXP (x, 0), - INTVAL (to)),
			   1);
	  return;
	}
      break;

    case ZERO_EXTEND:
    case SIGN_EXTEND:
      /* In these cases, the operation to be performed depends on the mode
	 of the operand.  If we are replacing the operand with a VOIDmode
	 constant, we lose the information.  So try to simplify the operation
	 in that case.  */
      if (GET_MODE (to) == VOIDmode
	  && (rtx_equal_p (XEXP (x, 0), from)
	      || (GET_CODE (XEXP (x, 0)) == SUBREG
		  && rtx_equal_p (SUBREG_REG (XEXP (x, 0)), from))))
	{
	  rtx new = NULL_RTX;

	  /* If there is a subreg involved, crop to the portion of the
	     constant that we are interested in.  */
	  if (GET_CODE (XEXP (x, 0)) == SUBREG)
	    {
	      if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) <= UNITS_PER_WORD)
		to = operand_subword (to, SUBREG_WORD (XEXP (x, 0)),
				      0, GET_MODE (from));
	      else if (GET_MODE_CLASS (GET_MODE (from)) == MODE_INT
		       && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
			   <= HOST_BITS_PER_WIDE_INT))
		{
		  int i = SUBREG_WORD (XEXP (x, 0)) * BITS_PER_WORD;
		  HOST_WIDE_INT valh;
		  unsigned HOST_WIDE_INT vall;

		  if (GET_CODE (to) == CONST_INT)
		    {
		      vall = INTVAL (to);
		      valh = (HOST_WIDE_INT) vall < 0 ? ~0 : 0;
		    }
		  else
		    {
		      vall = CONST_DOUBLE_LOW (to);
		      valh = CONST_DOUBLE_HIGH (to);
		    }

		  if (WORDS_BIG_ENDIAN)
		    i = (GET_MODE_BITSIZE (GET_MODE (from))
			 - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - i);
		  if (i > 0 && i < HOST_BITS_PER_WIDE_INT)
		    vall = vall >> i | valh << (HOST_BITS_PER_WIDE_INT - i);
		  else if (i >= HOST_BITS_PER_WIDE_INT)
		    vall = valh >> (i - HOST_BITS_PER_WIDE_INT);
		  to = GEN_INT (trunc_int_for_mode (vall,
						    GET_MODE (XEXP (x, 0))));
		}
	      else
		to = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
	    }

	  /* If the above didn't fail, perform the extension from the
	     mode of the operand (and not the mode of FROM).  */
	  if (to)
	    new = simplify_unary_operation (code, GET_MODE (x), to,
					    GET_MODE (XEXP (x, 0)));

	  /* If any of the above failed, substitute in something that
	     we know won't be recognized.  */
	  if (!new)
	    new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);

	  validate_change (object, loc, new, 1);
	  return;
	}
      break;

    case SUBREG:
      /* If we are replacing the operand with a constant, attempt to
	 simplify the result to a non-SUBREG expression.  We can't do this
	 later, since information about the inner mode may be lost.  */
      if (CONSTANT_P (to) && rtx_equal_p (SUBREG_REG (x), from))
	{
	  if (GET_MODE_SIZE (GET_MODE (x)) == UNITS_PER_WORD
	      && GET_MODE_SIZE (GET_MODE (from)) > UNITS_PER_WORD
	      && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT)
	    {
	      rtx temp = operand_subword (to, SUBREG_WORD (x),
					  0, GET_MODE (from));
	      if (temp)
		{
		  validate_change (object, loc, temp, 1);
		  return;
		}
	    }
	  if (subreg_lowpart_p (x))
	    {
	      rtx new = gen_lowpart_if_possible (GET_MODE (x), to);
	      if (new)
		{
		  validate_change (object, loc, new, 1);
		  return;
		}
	    }

	  /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
	     since we are saying that the high bits don't matter.  */
	  if (GET_MODE (to) == VOIDmode
	      && GET_MODE_SIZE (GET_MODE (x)) > GET_MODE_SIZE (GET_MODE (from)))
	    {
	      validate_change (object, loc, to, 1);
	      return;
	    }
	}

      /* Changing mode twice with SUBREG => just change it once,
	 or not at all if changing back to starting mode.  */
      if (GET_CODE (to) == SUBREG
	  && rtx_equal_p (SUBREG_REG (x), from))
	{
	  if (GET_MODE (x) == GET_MODE (SUBREG_REG (to))
	      && SUBREG_WORD (x) == 0 && SUBREG_WORD (to) == 0)
	    {
	      validate_change (object, loc, SUBREG_REG (to), 1);
	      return;
	    }

	  validate_change (object, loc,
			   gen_rtx_SUBREG (GET_MODE (x), SUBREG_REG (to),
					   SUBREG_WORD (x) + SUBREG_WORD (to)), 1);
	  return;
	}

      /* If we have a SUBREG of a register that we are replacing and we are
	 replacing it with a MEM, make a new MEM and try replacing the
	 SUBREG with it.  Don't do this if the MEM has a mode-dependent address
	 or if we would be widening it.  */

      if (GET_CODE (from) == REG
	  && GET_CODE (to) == MEM
	  && rtx_equal_p (SUBREG_REG (x), from)
	  && ! mode_dependent_address_p (XEXP (to, 0))
	  && ! MEM_VOLATILE_P (to)
	  && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
	{
	  int offset = SUBREG_WORD (x) * UNITS_PER_WORD;
	  enum machine_mode mode = GET_MODE (x);
	  rtx new;

	  if (BYTES_BIG_ENDIAN)
	    offset += (MIN (UNITS_PER_WORD,
			    GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
		       - MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode)));

	  new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
	  MEM_COPY_ATTRIBUTES (new, to);
	  validate_change (object, loc, new, 1);
	  return;
	}
      break;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
	 to be the mode required for memory in extract operations (this isn't
	 likely to be an insertion operation; if it was, nothing bad will
	 happen, we might just fail in some cases).  */

      if (GET_CODE (from) == REG && GET_CODE (to) == MEM
	  && rtx_equal_p (XEXP (x, 0), from)
	  && GET_CODE (XEXP (x, 1)) == CONST_INT
	  && GET_CODE (XEXP (x, 2)) == CONST_INT
	  && ! mode_dependent_address_p (XEXP (to, 0))
	  && ! MEM_VOLATILE_P (to))
	{
	  enum machine_mode wanted_mode = VOIDmode;
	  enum machine_mode is_mode = GET_MODE (to);
	  int pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
	  if (code == ZERO_EXTRACT)
	    {
	      wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
	      if (wanted_mode == VOIDmode)
		wanted_mode = word_mode;
	    }
#endif
#ifdef HAVE_extv
	  if (code == SIGN_EXTRACT)
	    {
	      wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
	      if (wanted_mode == VOIDmode)
		wanted_mode = word_mode;
	    }
#endif

	  /* If we have a narrower mode, we can do something.  */
	  if (wanted_mode != VOIDmode
	      && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
	    {
	      int offset = pos / BITS_PER_UNIT;
	      rtx newmem;

	      /* If the bytes and bits are counted differently, we
		 must adjust the offset.  */
	      if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
		offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
			  - offset);

	      pos %= GET_MODE_BITSIZE (wanted_mode);

	      newmem = gen_rtx_MEM (wanted_mode,
				    plus_constant (XEXP (to, 0), offset));
	      MEM_COPY_ATTRIBUTES (newmem, to);

	      validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
	      validate_change (object, &XEXP (x, 0), newmem, 1);
	    }
	}

      break;

    default:
      break;
    }

  /* For commutative or comparison operations we've already performed
     replacements.  Don't try to perform them again.  */
  if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
    {
      fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	{
	  if (fmt[i] == 'e')
	    validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
	  else if (fmt[i] == 'E')
	    for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	      validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
	}
    }
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (from, to, insn, loc)
     rtx from, to, insn, *loc;
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
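
/* Illustrative sketch (not part of the original file): a common use of
   validate_replace_rtx is forward-propagating a register's known value
   into one of its users.  INSN, REG and VAL are hypothetical.  */
#if 0
static int
propagate_value_example (insn, reg, val)
     rtx insn, reg, val;
{
  /* Replace every use of REG in INSN by VAL; if the rewritten insn no
     longer matches a pattern, all replacements are undone and 0 is
     returned.  */
  return validate_replace_rtx (reg, val, insn);
}
#endif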

/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;			/* Old RTX */
  rtx to;			/* New RTX */
  rtx insn;			/* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (x, data)
     rtx *x;
     void *data;
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  After all changes have been made, validate by seeing if
   INSN is still valid.  */

int
validate_replace_src (from, to, insn)
     rtx from, to, insn;
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
  return apply_change_group ();
}
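
/* Illustrative sketch (not part of the original file): the note_uses
   callback convention used above generalizes to other walks.  A caller
   packs its parameters into a small struct and threads it through the
   opaque DATA pointer; the struct and counting callback below are
   hypothetical.  */
#if 0
struct count_reg_data
{
  rtx reg;			/* Register to look for.  */
  int count;			/* Number of uses seen so far.  */
};

static void
count_reg_uses_1 (x, data)
     rtx *x;
     void *data;
{
  struct count_reg_data *d = (struct count_reg_data *) data;

  if (reg_mentioned_p (d->reg, *x))
    d->count++;
}
#endif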
\f
#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (insn)
     rtx insn;
{
  register rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
	   || GET_CODE (next) == INSN
	   || GET_CODE (next) == CALL_INSN)
	  && ! inequality_comparisons_p (PATTERN (next)));
}

#if 0  /* This is useless since the insn that sets the cc's
	  must be followed immediately by the use of them.  */
/* Return 1 if the CC value set up by INSN is not used.  */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  register rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      if (GET_CODE (next) == CODE_LABEL
	  || GET_CODE (next) == BARRIER)
	return 1;
      if (GET_CODE (next) == NOTE)
	continue;
      if (inequality_comparisons_p (PATTERN (next)))
	return 0;
      if (sets_cc0_p (PATTERN (next)) == 1)
	return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
	return 1;
    }
  return 1;
}
#endif
#endif
\f
/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (dest, loc)
     rtx dest;
     rtx *loc;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
	 of a REG that occupies all of the REG, the insn uses DEST if
	 it is mentioned in the destination or the source.  Otherwise, we
	 need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
	  && GET_CODE (SET_DEST (x)) != PC
	  && GET_CODE (SET_DEST (x)) != REG
	  && ! (GET_CODE (SET_DEST (x)) == SUBREG
		&& GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
	break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (dest == XEXP (x, i)
	      || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
		  && REGNO (dest) == REGNO (XEXP (x, i))))
	    this_result = loc;
	  else
	    this_result = find_single_use_1 (dest, &XEXP (x, i));

	  if (result == 0)
	    result = this_result;
	  else if (this_result)
	    /* Duplicate usage.  */
	    return 0;
	}
      else if (fmt[i] == 'E')
	{
	  int j;

	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    {
	      if (XVECEXP (x, i, j) == dest
		  || (GET_CODE (dest) == REG
		      && GET_CODE (XVECEXP (x, i, j)) == REG
		      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
		this_result = loc;
	      else
		this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

	      if (result == 0)
		result = this_result;
	      else if (this_result)
		return 0;
	    }
	}
    }

  return result;
}
\f
/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is non-zero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called
   (because there will be no LOG_LINKS notes) or after reload (because the
   REG_DEAD note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (dest, insn, ploc)
     rtx dest;
     rtx insn;
     rtx *ploc;
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
	  || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
	return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
	*ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && GET_CODE (next) != CODE_LABEL;
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
	for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
	  if (XEXP (link, 0) == insn)
	    break;

	if (link)
	  {
	    result = find_single_use_1 (dest, &PATTERN (next));
	    if (ploc)
	      *ploc = next;
	    return result;
	  }
      }

  return 0;
}
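
/* Illustrative sketch (not part of the original file): a combine-style
   caller asking whether DEST, set by INSN, has exactly one later use, so
   INSN's computation could be merged into the user.  DEST and INSN are
   hypothetical.  */
#if 0
static void
try_merge_single_use_example (insn, dest)
     rtx insn, dest;
{
  rtx use_insn;
  rtx *use_loc = find_single_use (dest, insn, &use_insn);

  if (use_loc)
    {
      /* *USE_LOC is the innermost expression in USE_INSN mentioning
	 DEST; a caller might now substitute the value computed by INSN
	 there, e.g. with validate_replace_rtx.  */
    }
}
#endif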
\f
/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  register enum rtx_code code = GET_CODE (op);
  int mode_altering_drug = 0;

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	     || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	    && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	    && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
	 references to be explicit, so outlaw paradoxical SUBREGs.  */
      if (GET_CODE (SUBREG_REG (op)) == MEM
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
	return 0;
#endif

      op = SUBREG_REG (op);
      code = GET_CODE (op);
#if 0
      /* No longer needed, since (SUBREG (MEM...))
	 will load the MEM into a reload reg in the MEM's own mode.  */
      mode_altering_drug = 1;
#endif
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
	    || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      register rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
	return 0;

      if (GET_CODE (y) == ADDRESSOF)
	return 1;

      /* Use the mem's mode, since it will be reloaded thus.  */
      mode = GET_MODE (op);
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
    }

  /* Pretend this is an operand for now; we'll run force_operand
     on its replacement in fixup_var_refs_1.  */
  if (code == ADDRESSOF)
    return 1;

  return 0;

 win:
  if (mode_altering_drug)
    return ! mode_dependent_address_p (XEXP (op, 0));
  return 1;
}
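
/* Illustrative sketch (not part of the original file): how the operand
   predicates in this file are typically combined.  The check below
   mirrors what a generic "(set (match_operand:SI 0 "nonimmediate_operand")
   (match_operand:SI 1 "general_operand"))" move pattern would require;
   the function itself is hypothetical.  */
#if 0
static int
valid_move_operands_example (dest, src)
     rtx dest, src;
{
  return (nonimmediate_operand (dest, SImode)
	  && general_operand (src, SImode));
}
#endif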
\f
/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
	return general_operand (op, mode);

#ifdef CLASS_CANNOT_CHANGE_MODE
      if (GET_CODE (SUBREG_REG (op)) == REG
	  && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
	  && (TEST_HARD_REG_BIT
	      (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
	       REGNO (SUBREG_REG (op))))
	  && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (SUBREG_REG (op)))
	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
	return 0;
#endif

      op = SUBREG_REG (op);
    }

  /* If we have an ADDRESSOF, consider it valid since it will be
     converted into something that will not be a MEM.  */
  if (GET_CODE (op) == ADDRESSOF)
    return 1;

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
	  || (GET_CODE (op) == REG
	      && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
     result in 0/1.  It seems a safe assumption that this is
     in range for everyone.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  return (CONSTANT_P (op)
	  && (GET_MODE (op) == mode || mode == VOIDmode
	      || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	  && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	  && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (op, mode)
     register rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return GET_CODE (op) == CONST_INT;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
	  && (mode == VOIDmode || GET_MODE (op) == mode
	      || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
	 if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_INT
	  && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
	return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
	       || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
	 because it is guaranteed to be reloaded into one.
	 Just make sure the MEM is valid in itself.
	 (Ideally, (SUBREG (MEM)...) should not exist after reload,
	 but currently it does result from (SUBREG (REG)...) where the
	 reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
	return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
	  && (REGNO (op) >= FIRST_PSEUDO_REGISTER
	      || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_PUSH_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}
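
/* Illustrative sketch (not part of the original file): on a machine where
   the stack grows downward (STACK_PUSH_CODE is PRE_DEC), push_operand
   accepts a MEM shaped like (mem:SI (pre_dec:SI (reg sp))), and
   pop_operand its POST_INC counterpart.  The constructor below is
   hypothetical.  */
#if 0
static rtx
gen_push_mem_example (mode)
     enum machine_mode mode;
{
  /* Builds (mem:MODE (STACK_PUSH_CODE:Pmode (reg sp))).  */
  return gen_rtx_MEM (mode,
		      gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode,
				     stack_pointer_rtx));
}
#endif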

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (mode, addr)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     register rtx addr;
{
  if (GET_CODE (addr) == ADDRESSOF)
    return 1;

  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return GET_CODE (op) == MEM && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (GET_CODE (inner) == MEM && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      register int offset = SUBREG_WORD (op) * UNITS_PER_WORD;
      rtx inner = SUBREG_REG (op);

      if (BYTES_BIG_ENDIAN)
	offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (op)))
		   - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (inner))));

      if (mode != VOIDmode && GET_MODE (op) != mode)
	return 0;

      /* The only way that we can have a general_operand as the resulting
	 address is if OFFSET is zero and the address already is an operand
	 or if the address is (plus Y (const_int -OFFSET)) and Y is an
	 operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
	      || (GET_CODE (XEXP (inner, 0)) == PLUS
		  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
		  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
		  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (GET_CODE (op) == MEM
	  && memory_operand (op, mode)
	  && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
	  && GET_RTX_CLASS (GET_CODE (op)) == '<');
}
\f
/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (body)
     rtx body;
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
	/* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
	return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
	return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
	  && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
	{
	  /* Multiple output operands, or 1 output plus some clobbers:
	     body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
	  int i;
	  int n_sets;

	  /* Count backwards through CLOBBERs to determine number of SETs.  */
	  for (i = XVECLEN (body, 0); i > 0; i--)
	    {
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
		break;
	      if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
		return -1;
	    }

	  /* N_SETS is now number of output operands.  */
	  n_sets = i;

	  /* Verify that all the SETs we have
	     came from a single original asm_operands insn
	     (so that invalid combinations are blocked).  */
	  for (i = 0; i < n_sets; i++)
	    {
	      rtx elt = XVECEXP (body, 0, i);
	      if (GET_CODE (elt) != SET)
		return -1;
	      if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
		return -1;
	      /* If these ASM_OPERANDS rtx's came from different original insns
		 then they aren't allowed together.  */
	      if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
		  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
		return -1;
	    }
	  return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
		  + n_sets);
	}
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
	{
	  /* 0 outputs, but some clobbers:
	     body is [(asm_operands ...) (clobber (reg ...))...].  */
	  int i;

	  /* Make sure all the other parallel things really are clobbers.  */
	  for (i = XVECLEN (body, 0) - 1; i > 0; i--)
	    if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
	      return -1;

	  return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
	}
      else
	return -1;
    default:
      return -1;
    }
}

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (body, operands, operand_locs, constraints, modes)
     rtx body;
     rtx *operands;
     rtx **operand_locs;
     const char **constraints;
     enum machine_mode *modes;
{
  register int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
	}

      /* The output is in the SET.
	 Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
	operands[0] = SET_DEST (body);
      if (operand_locs)
	operand_locs[0] = &SET_DEST (body);
      if (constraints)
	constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
	modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.
	 Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == SET)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;		/* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
	 Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
	{
	  if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
	    break;		/* Past last SET */

	  if (operands)
	    operands[i] = SET_DEST (XVECEXP (body, 0, i));
	  if (operand_locs)
	    operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
	  if (constraints)
	    constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
	  if (modes)
	    modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
	  nout++;
	}

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
	   && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
	{
	  if (operand_locs)
	    operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
	  if (operands)
	    operands[i] = ASM_OPERANDS_INPUT (asmop, i);
	  if (constraints)
	    constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
	  if (modes)
	    modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
	}

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}
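
/* Illustrative sketch (not part of the original file): the usual calling
   convention for the two routines above, as also used by
   check_asm_operands: size the arrays with asm_noperands, then let
   decode_asm_operands fill in whichever vectors are wanted.  */
#if 0
static void
walk_asm_example (body)
     rtx body;
{
  int noperands = asm_noperands (body);

  if (noperands > 0)
    {
      rtx *operands = (rtx *) alloca (noperands * sizeof (rtx));
      const char **constraints
	= (const char **) alloca (noperands * sizeof (char *));

      /* Pass NULL_PTR for the vectors we don't need.  */
      decode_asm_operands (body, operands, NULL_PTR, constraints, NULL_PTR);
    }
}
#endif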

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (op, constraint)
     rtx op;
     const char *constraint;
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  if (reload_completed)
    abort ();

  while (*constraint)
    {
      char c = *constraint++;
      switch (c)
	{
	case '=':
	case '+':
	case '*':
	case '%':
	case '?':
	case '!':
	case '#':
	case '&':
	case ',':
	  break;

	case '0': case '1': case '2': case '3': case '4':
	case '5': case '6': case '7': case '8': case '9':
	  /* For best results, our caller should have given us the
	     proper matching constraint, but we can't actually fail
	     the check if they didn't.  Indicate that results are
	     inconclusive.  */
	  result = -1;
	  break;

	case 'p':
	  if (address_operand (op, VOIDmode))
	    return 1;
	  break;

	case 'm':
	case 'V': /* non-offsettable */
	  if (memory_operand (op, VOIDmode))
	    return 1;
	  break;

	case 'o': /* offsettable */
	  if (offsettable_nonstrict_memref_p (op))
	    return 1;
	  break;

	case '<':
	  /* ??? Before flow, auto inc/dec insns are not supposed to exist,
	     excepting those that expand_call created.  Further, on some
	     machines which do not have generalized auto inc/dec, an inc/dec
	     is not a memory_operand.

	     Match any memory and hope things are resolved after reload.  */

	  if (GET_CODE (op) == MEM
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_DEC
		  || GET_CODE (XEXP (op, 0)) == POST_DEC))
	    return 1;
	  break;

	case '>':
	  if (GET_CODE (op) == MEM
	      && (1
		  || GET_CODE (XEXP (op, 0)) == PRE_INC
		  || GET_CODE (XEXP (op, 0)) == POST_INC))
	    return 1;
	  break;

	case 'E':
#ifndef REAL_ARITHMETIC
	  /* Match any floating double constant, but only if
	     we can examine the bits of it reliably.  */
	  if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
	       || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
	      && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
	    break;
#endif
	  /* FALLTHRU */

	case 'F':
	  if (GET_CODE (op) == CONST_DOUBLE)
	    return 1;
	  break;

	case 'G':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
	    return 1;
	  break;
	case 'H':
	  if (GET_CODE (op) == CONST_DOUBLE
	      && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
	    return 1;
	  break;

	case 's':
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    break;
	  /* FALLTHRU */

	case 'i':
	  if (CONSTANT_P (op)
#ifdef LEGITIMATE_PIC_OPERAND_P
	      && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
	      )
	    return 1;
	  break;

	case 'n':
	  if (GET_CODE (op) == CONST_INT
	      || (GET_CODE (op) == CONST_DOUBLE
		  && GET_MODE (op) == VOIDmode))
	    return 1;
	  break;

	case 'I':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
	    return 1;
	  break;
	case 'J':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
	    return 1;
	  break;
	case 'K':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
	    return 1;
	  break;
	case 'L':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
	    return 1;
	  break;
	case 'M':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
	    return 1;
	  break;
	case 'N':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
	    return 1;
	  break;
	case 'O':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
	    return 1;
	  break;
	case 'P':
	  if (GET_CODE (op) == CONST_INT
	      && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
	    return 1;
	  break;

	case 'X':
	  return 1;

	case 'g':
	  if (general_operand (op, VOIDmode))
	    return 1;
	  break;

	default:
	  /* For all other letters, we first check for a register class,
	     otherwise it is an EXTRA_CONSTRAINT.  */
	  if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
	    {
	    case 'r':
	      if (GET_MODE (op) == BLKmode)
		break;
	      if (register_operand (op, VOIDmode))
		return 1;
	    }
#ifdef EXTRA_CONSTRAINT
	  if (EXTRA_CONSTRAINT (op, c))
	    return 1;
#endif
	  break;
	}
    }

  return result;
}
1908 \f
1909 /* Given an rtx *P, if it is a sum containing an integer constant term,
1910 return the location (type rtx *) of the pointer to that constant term.
1911 Otherwise, return a null pointer. */
1912
1913 static rtx *
1914 find_constant_term_loc (p)
1915 rtx *p;
1916 {
1917 register rtx *tem;
1918 register enum rtx_code code = GET_CODE (*p);
1919
1920 /* If *P IS such a constant term, P is its location. */
1921
1922 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
1923 || code == CONST)
1924 return p;
1925
1926 /* Otherwise, if not a sum, it has no constant term. */
1927
1928 if (GET_CODE (*p) != PLUS)
1929 return 0;
1930
1931 /* If one of the summands is constant, return its location. */
1932
1933 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
1934 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
1935 return p;
1936
1937 /* Otherwise, check each summand for containing a constant term. */
1938
1939 if (XEXP (*p, 0) != 0)
1940 {
1941 tem = find_constant_term_loc (&XEXP (*p, 0));
1942 if (tem != 0)
1943 return tem;
1944 }
1945
1946 if (XEXP (*p, 1) != 0)
1947 {
1948 tem = find_constant_term_loc (&XEXP (*p, 1));
1949 if (tem != 0)
1950 return tem;
1951 }
1952
1953 return 0;
1954 }
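
/* Illustrative sketch, not part of the original source: a caller can use
   the location returned above to adjust the constant term of an address
   in place.  Wrapped in `#if 0' so it does not affect compilation; the
   function name is hypothetical.  */
#if 0
static void
example_bump_constant_term (addr, delta)
     rtx addr;
     int delta;
{
  rtx *loc = find_constant_term_loc (&addr);

  /* If ADDR is e.g. (plus (reg) (const_int 4)), LOC points at the
     (const_int 4); replace it with its value plus DELTA.  */
  if (loc != 0)
    *loc = plus_constant (*loc, delta);
}
#endif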
1955 \f
1956 /* Return 1 if OP is a memory reference
1957 whose address contains no side effects
1958 and remains valid after the addition
1959 of a positive integer less than the
1960 size of the object being referenced.
1961
1962 We assume that the original address is valid and do not check it.
1963
1964 This uses strict_memory_address_p as a subroutine, so
1965 don't use it before reload. */
1966
1967 int
1968 offsettable_memref_p (op)
1969 rtx op;
1970 {
1971 return ((GET_CODE (op) == MEM)
1972 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
1973 }
1974
1975 /* Similar, but don't require a strictly valid mem ref:
1976 consider pseudo-regs valid as index or base regs. */
1977
1978 int
1979 offsettable_nonstrict_memref_p (op)
1980 rtx op;
1981 {
1982 return ((GET_CODE (op) == MEM)
1983 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
1984 }
1985
1986 /* Return 1 if Y is a memory address which contains no side effects
1987 and would remain valid after the addition of a positive integer
1988 less than the size of that mode.
1989
1990 We assume that the original address is valid and do not check it.
1991 We do check that it is valid for narrower modes.
1992
1993 If STRICTP is nonzero, we require a strictly valid address,
1994 for the sake of use in reload.c. */
1995
1996 int
1997 offsettable_address_p (strictp, mode, y)
1998 int strictp;
1999 enum machine_mode mode;
2000 register rtx y;
2001 {
2002 register enum rtx_code ycode = GET_CODE (y);
2003 register rtx z;
2004 rtx y1 = y;
2005 rtx *y2;
2006 int (*addressp) PARAMS ((enum machine_mode, rtx)) =
2007 (strictp ? strict_memory_address_p : memory_address_p);
2008 unsigned int mode_sz = GET_MODE_SIZE (mode);
2009
2010 if (CONSTANT_ADDRESS_P (y))
2011 return 1;
2012
2013 /* Adjusting an offsettable address involves changing to a narrower mode.
2014 Make sure that's OK. */
2015
2016 if (mode_dependent_address_p (y))
2017 return 0;
2018
2019 /* ??? How much offset does an offsettable BLKmode reference need?
2020 Clearly that depends on the situation in which it's being used.
2021 However, the current situation in which we test 0xffffffff is
2022 less than ideal. Caveat user. */
2023 if (mode_sz == 0)
2024 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
2025
2026 /* If the expression contains a constant term,
2027 see if it remains valid when max possible offset is added. */
2028
2029 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
2030 {
2031 int good;
2032
2033 y1 = *y2;
2034 *y2 = plus_constant (*y2, mode_sz - 1);
2035 /* Use QImode because an odd displacement may be automatically invalid
2036 for any wider mode. But it should be valid for a single byte. */
2037 good = (*addressp) (QImode, y);
2038
2039 /* In any case, restore old contents of memory. */
2040 *y2 = y1;
2041 return good;
2042 }
2043
2044 if (GET_RTX_CLASS (ycode) == 'a')
2045 return 0;
2046
2047 /* The offset added here is chosen as the maximum offset that
2048 any instruction could need to add when operating on something
2049 of the specified mode. We assume that if Y and Y+c are
2050 valid addresses then so is Y+d for all 0<d<c. */
2051
2052 z = plus_constant_for_output (y, mode_sz - 1);
2053
2054 /* Use QImode because an odd displacement may be automatically invalid
2055 for any wider mode. But it should be valid for a single byte. */
2056 return (*addressp) (QImode, z);
2057 }
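
/* Illustrative sketch, not part of the original source: a strict use of
   the predicate above, mirroring offsettable_memref_p but for a bare
   address.  Wrapped in `#if 0'; the function name is hypothetical.  */
#if 0
static int
example_word_offsettable_p (addr)
     rtx addr;
{
  /* Check strictly, as after reload, that every byte of a word-sized
     object at ADDR is reachable by a constant displacement.  */
  return offsettable_address_p (1, word_mode, addr);
}
#endif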
2058
2059 /* Return 1 if ADDR is an address-expression whose effect depends
2060 on the mode of the memory reference it is used in.
2061
2062 Autoincrement addressing is a typical example of mode-dependence
2063 because the amount of the increment depends on the mode. */
2064
2065 int
2066 mode_dependent_address_p (addr)
2067 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
2068 {
2069 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
2070 return 0;
2071 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2072 win: ATTRIBUTE_UNUSED_LABEL
2073 return 1;
2074 }
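
/* For example (illustrative): on a target with autoincrement addressing,
   (mem:SI (post_inc (reg:SI 1))) advances the register by 4 while
   (mem:QI (post_inc (reg:SI 1))) advances it by 1, so such a target's
   GO_IF_MODE_DEPENDENT_ADDRESS jumps to `win' for a POST_INC address
   and this function returns 1.  */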
2075
2076 /* Return 1 if OP is a general operand
2077 other than a memory ref with a mode dependent address. */
2078
2079 int
2080 mode_independent_operand (op, mode)
2081 enum machine_mode mode;
2082 rtx op;
2083 {
2084 rtx addr;
2085
2086 if (! general_operand (op, mode))
2087 return 0;
2088
2089 if (GET_CODE (op) != MEM)
2090 return 1;
2091
2092 addr = XEXP (op, 0);
2093 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
2094 return 1;
2095 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2096 lose: ATTRIBUTE_UNUSED_LABEL
2097 return 0;
2098 }
2099
2100 /* Given an operand OP that is a valid memory reference which
2101 satisfies offsettable_memref_p, return a new memory reference whose
2102 address has been adjusted by OFFSET. OFFSET should be positive and
2103 less than the size of the object referenced. */
2104
2105 rtx
2106 adj_offsettable_operand (op, offset)
2107 rtx op;
2108 int offset;
2109 {
2110 register enum rtx_code code = GET_CODE (op);
2111
2112 if (code == MEM)
2113 {
2114 register rtx y = XEXP (op, 0);
2115 register rtx new;
2116
2117 if (CONSTANT_ADDRESS_P (y))
2118 {
2119 new = gen_rtx_MEM (GET_MODE (op),
2120 plus_constant_for_output (y, offset));
2121 MEM_COPY_ATTRIBUTES (new, op);
2122 return new;
2123 }
2124
2125 if (GET_CODE (y) == PLUS)
2126 {
2127 rtx z = y;
2128 register rtx *const_loc;
2129
2130 op = copy_rtx (op);
2131 z = XEXP (op, 0);
2132 const_loc = find_constant_term_loc (&z);
2133 if (const_loc)
2134 {
2135 *const_loc = plus_constant_for_output (*const_loc, offset);
2136 return op;
2137 }
2138 }
2139
2140 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
2141 MEM_COPY_ATTRIBUTES (new, op);
2142 return new;
2143 }
2144 abort ();
2145 }
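
/* Illustrative sketch, not part of the original source: fetching the high
   word of a double-word memory operand OP, which is assumed to satisfy
   offsettable_memref_p.  Wrapped in `#if 0'; the name is hypothetical.  */
#if 0
static rtx
example_high_word (op)
     rtx op;
{
  /* UNITS_PER_WORD is positive and less than the size of the double-word
     object, as the function above requires.  */
  return adj_offsettable_operand (op, UNITS_PER_WORD);
}
#endif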
2146 \f
2147 /* Like extract_insn, but save the insn extracted and don't extract it
2148 again when called again for the same insn, expecting that recog_data
2149 still contains valid information.  This is used primarily by the
2150 genattrtab infrastructure, which often extracts the same insn repeatedly. */
2151 void
2152 extract_insn_cached (insn)
2153 rtx insn;
2154 {
2155 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2156 return;
2157 extract_insn (insn);
2158 recog_data.insn = insn;
2159 }
2160 /* Do cached extract_insn, constrain_operands and complain about failures.
2161 Used by insn-attrtab.c. */
2162 void
2163 extract_constrain_insn_cached (insn)
2164 rtx insn;
2165 {
2166 extract_insn_cached (insn);
2167 if (which_alternative == -1
2168 && !constrain_operands (reload_completed))
2169 fatal_insn_not_found (insn);
2170 }
2171 /* Do cached constrain_operands, reusing any alternative already matched. */
2172 int
2173 constrain_operands_cached (strict)
2174 int strict;
2175 {
2176 if (which_alternative == -1)
2177 return constrain_operands (strict);
2178 else
2179 return 1;
2180 }
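
/* Illustrative sketch, not part of the original source: the calling
   pattern the three cached entry points above support, as generated
   attribute code uses it.  Wrapped in `#if 0'; the function name is
   hypothetical.  */
#if 0
static int
hypothetical_attr_value (insn)
     rtx insn;
{
  /* Extract and constrain once; repeated calls for the same insn reuse
     recog_data and which_alternative instead of re-extracting.  */
  extract_constrain_insn_cached (insn);
  return which_alternative;
}
#endif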
2181 \f
2182 /* Analyze INSN and fill in recog_data. */
2183
2184 void
2185 extract_insn (insn)
2186 rtx insn;
2187 {
2188 int i;
2189 int icode;
2190 int noperands;
2191 rtx body = PATTERN (insn);
2192
2193 recog_data.insn = NULL;
2194 recog_data.n_operands = 0;
2195 recog_data.n_alternatives = 0;
2196 recog_data.n_dups = 0;
2197 which_alternative = -1;
2198
2199 switch (GET_CODE (body))
2200 {
2201 case USE:
2202 case CLOBBER:
2203 case ASM_INPUT:
2204 case ADDR_VEC:
2205 case ADDR_DIFF_VEC:
2206 return;
2207
2208 case SET:
2209 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2210 goto asm_insn;
2211 else
2212 goto normal_insn;
2213 case PARALLEL:
2214 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2215 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2216 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2217 goto asm_insn;
2218 else
2219 goto normal_insn;
2220 case ASM_OPERANDS:
2221 asm_insn:
2222 recog_data.n_operands = noperands = asm_noperands (body);
2223 if (noperands >= 0)
2224 {
2225 /* This insn is an `asm' with operands. */
2226
2227 /* expand_asm_operands makes sure there aren't too many operands. */
2228 if (noperands > MAX_RECOG_OPERANDS)
2229 abort ();
2230
2231 /* Now get the operand values and constraints out of the insn. */
2232 decode_asm_operands (body, recog_data.operand,
2233 recog_data.operand_loc,
2234 recog_data.constraints,
2235 recog_data.operand_mode);
2236 if (noperands > 0)
2237 {
2238 const char *p = recog_data.constraints[0];
2239 recog_data.n_alternatives = 1;
2240 while (*p)
2241 recog_data.n_alternatives += (*p++ == ',');
2242 }
2243 break;
2244 }
2245 fatal_insn_not_found (insn);
2246
2247 default:
2248 normal_insn:
2249 /* Ordinary insn: recognize it, get the operands via insn_extract
2250 and get the constraints. */
2251
2252 icode = recog_memoized (insn);
2253 if (icode < 0)
2254 fatal_insn_not_found (insn);
2255
2256 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2257 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2258 recog_data.n_dups = insn_data[icode].n_dups;
2259
2260 insn_extract (insn);
2261
2262 for (i = 0; i < noperands; i++)
2263 {
2264 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
2265 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
2266 /* VOIDmode match_operands get their mode from the real operand. */
2267 if (recog_data.operand_mode[i] == VOIDmode)
2268 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
2269 }
2270 }
2271 for (i = 0; i < noperands; i++)
2272 recog_data.operand_type[i]
2273 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2274 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2275 : OP_IN);
2276
2277 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
2278 abort ();
2279 }
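
/* Illustrative sketch, not part of the original source: consuming
   recog_data after extraction.  Wrapped in `#if 0'; the function name is
   hypothetical.  */
#if 0
static void
example_dump_operands (insn)
     rtx insn;
{
  int i;

  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    /* Each operand, its location, mode and constraint string are now
       available in the recog_data arrays.  */
    debug_rtx (recog_data.operand[i]);
}
#endif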
2280
2281 /* After calling extract_insn, you can use this function to extract some
2282 information from the constraint strings into a more usable form.
2283 The collected data is stored in recog_op_alt. */
2284 void
2285 preprocess_constraints ()
2286 {
2287 int i;
2288
2289 memset (recog_op_alt, 0, sizeof recog_op_alt);
2290 for (i = 0; i < recog_data.n_operands; i++)
2291 {
2292 int j;
2293 struct operand_alternative *op_alt;
2294 const char *p = recog_data.constraints[i];
2295
2296 op_alt = recog_op_alt[i];
2297
2298 for (j = 0; j < recog_data.n_alternatives; j++)
2299 {
2300 op_alt[j].class = NO_REGS;
2301 op_alt[j].constraint = p;
2302 op_alt[j].matches = -1;
2303 op_alt[j].matched = -1;
2304
2305 if (*p == '\0' || *p == ',')
2306 {
2307 op_alt[j].anything_ok = 1;
2308 continue;
2309 }
2310
2311 for (;;)
2312 {
2313 char c = *p++;
2314 if (c == '#')
2315 do
2316 c = *p++;
2317 while (c != ',' && c != '\0');
2318 if (c == ',' || c == '\0')
2319 break;
2320
2321 switch (c)
2322 {
2323 case '=': case '+': case '*': case '%':
2324 case 'E': case 'F': case 'G': case 'H':
2325 case 's': case 'i': case 'n':
2326 case 'I': case 'J': case 'K': case 'L':
2327 case 'M': case 'N': case 'O': case 'P':
2328 /* These don't say anything we care about. */
2329 break;
2330
2331 case '?':
2332 op_alt[j].reject += 6;
2333 break;
2334 case '!':
2335 op_alt[j].reject += 600;
2336 break;
2337 case '&':
2338 op_alt[j].earlyclobber = 1;
2339 break;
2340
2341 case '0': case '1': case '2': case '3': case '4':
2342 case '5': case '6': case '7': case '8': case '9':
2343 op_alt[j].matches = c - '0';
2344 recog_op_alt[op_alt[j].matches][j].matched = i;
2345 break;
2346
2347 case 'm':
2348 op_alt[j].memory_ok = 1;
2349 break;
2350 case '<':
2351 op_alt[j].decmem_ok = 1;
2352 break;
2353 case '>':
2354 op_alt[j].incmem_ok = 1;
2355 break;
2356 case 'V':
2357 op_alt[j].nonoffmem_ok = 1;
2358 break;
2359 case 'o':
2360 op_alt[j].offmem_ok = 1;
2361 break;
2362 case 'X':
2363 op_alt[j].anything_ok = 1;
2364 break;
2365
2366 case 'p':
2367 op_alt[j].is_address = 1;
2368 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
2369 break;
2370
2371 case 'g': case 'r':
2372 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2373 break;
2374
2375 default:
2376 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
2377 break;
2378 }
2379 }
2380 }
2381 }
2382 }
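
/* Illustrative sketch, not part of the original source: querying the
   summarized constraint data.  Wrapped in `#if 0'; the function name is
   hypothetical.  */
#if 0
static int
example_operand_allows_mem_p (opno, alt)
     int opno, alt;
{
  /* Valid only after extract_insn and preprocess_constraints have run
     for the current insn.  */
  struct operand_alternative *oa = &recog_op_alt[opno][alt];

  return oa->memory_ok || oa->offmem_ok || oa->nonoffmem_ok
	 || oa->anything_ok;
}
#endif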
2383
2384 /* Check the operands of an insn against the insn's operand constraints
2385 and return 1 if they are valid.
2386 The information about the insn's operands, constraints, operand modes
2387 etc. is obtained from the global variables set up by extract_insn.
2388
2389 WHICH_ALTERNATIVE is set to a number which indicates which
2390 alternative of constraints was matched: 0 for the first alternative,
2391 1 for the next, etc.
2392
2393 In addition, when two operands are required to match
2394 and it happens that the output operand is (reg) while the
2395 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2396 make the output operand look like the input.
2397 This is because the output operand is the one the template will print.
2398
2399 This is used in final, just before printing the assembler code, and
2400 by the routines that determine an insn's attributes.
2401
2402 If STRICT is positive, it means that we have been
2403 called after reload has been completed. In that case, we must
2404 do all checks strictly. If it is zero, it means that we have been called
2405 before reload has completed. In that case, we first try to see if we can
2406 find an alternative that matches strictly. If not, we try again, this
2407 time assuming that reload will fix up the insn. This provides a "best
2408 guess" for the alternative and is used to compute attributes of insns prior
2409 to reload. A negative value of STRICT is used for this internal call. */
2410
2411 struct funny_match
2412 {
2413 int this, other;
2414 };
2415
2416 int
2417 constrain_operands (strict)
2418 int strict;
2419 {
2420 const char *constraints[MAX_RECOG_OPERANDS];
2421 int matching_operands[MAX_RECOG_OPERANDS];
2422 int earlyclobber[MAX_RECOG_OPERANDS];
2423 register int c;
2424
2425 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2426 int funny_match_index;
2427
2428 which_alternative = 0;
2429 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2430 return 1;
2431
2432 for (c = 0; c < recog_data.n_operands; c++)
2433 {
2434 constraints[c] = recog_data.constraints[c];
2435 matching_operands[c] = -1;
2436 }
2437
2438 do
2439 {
2440 register int opno;
2441 int lose = 0;
2442 funny_match_index = 0;
2443
2444 for (opno = 0; opno < recog_data.n_operands; opno++)
2445 {
2446 register rtx op = recog_data.operand[opno];
2447 enum machine_mode mode = GET_MODE (op);
2448 register const char *p = constraints[opno];
2449 int offset = 0;
2450 int win = 0;
2451 int val;
2452
2453 earlyclobber[opno] = 0;
2454
2455 /* A unary operator may be accepted by the predicate, but it
2456 is irrelevant for matching constraints. */
2457 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2458 op = XEXP (op, 0);
2459
2460 if (GET_CODE (op) == SUBREG)
2461 {
2462 if (GET_CODE (SUBREG_REG (op)) == REG
2463 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
2464 offset = SUBREG_WORD (op);
2465 op = SUBREG_REG (op);
2466 }
2467
2468 /* An empty constraint or empty alternative
2469 allows anything which matched the pattern. */
2470 if (*p == 0 || *p == ',')
2471 win = 1;
2472
2473 while (*p && (c = *p++) != ',')
2474 switch (c)
2475 {
2476 case '?': case '!': case '*': case '%':
2477 case '=': case '+':
2478 break;
2479
2480 case '#':
2481 /* Ignore rest of this alternative as far as
2482 constraint checking is concerned. */
2483 while (*p && *p != ',')
2484 p++;
2485 break;
2486
2487 case '&':
2488 earlyclobber[opno] = 1;
2489 break;
2490
2491 case '0': case '1': case '2': case '3': case '4':
2492 case '5': case '6': case '7': case '8': case '9':
2493
2494 /* This operand must be the same as a previous one.
2495 This kind of constraint is used for instructions such
2496 as add when they take only two operands.
2497
2498 Note that the lower-numbered operand is passed first.
2499
2500 If we are not testing strictly, assume that this constraint
2501 will be satisfied. */
2502 if (strict < 0)
2503 val = 1;
2504 else
2505 {
2506 rtx op1 = recog_data.operand[c - '0'];
2507 rtx op2 = recog_data.operand[opno];
2508
2509 /* A unary operator may be accepted by the predicate,
2510 but it is irrelevant for matching constraints. */
2511 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2512 op1 = XEXP (op1, 0);
2513 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2514 op2 = XEXP (op2, 0);
2515
2516 val = operands_match_p (op1, op2);
2517 }
2518
2519 matching_operands[opno] = c - '0';
2520 matching_operands[c - '0'] = opno;
2521
2522 if (val != 0)
2523 win = 1;
2524 /* If output is *x and input is *--x,
2525 arrange later to change the output to *--x as well,
2526 since the output op is the one that will be printed. */
2527 if (val == 2 && strict > 0)
2528 {
2529 funny_match[funny_match_index].this = opno;
2530 funny_match[funny_match_index++].other = c - '0';
2531 }
2532 break;
2533
2534 case 'p':
2535 /* p is used for address_operands. When we are called by
2536 gen_reload, no one will have checked that the address is
2537 strictly valid, i.e., that all pseudos requiring hard regs
2538 have gotten them. */
2539 if (strict <= 0
2540 || (strict_memory_address_p (recog_data.operand_mode[opno],
2541 op)))
2542 win = 1;
2543 break;
2544
2545 /* No need to check general_operand again;
2546 it was done in insn-recog.c. */
2547 case 'g':
2548 /* Anything goes unless it is a REG and really has a hard reg
2549 but the hard reg is not in the class GENERAL_REGS. */
2550 if (strict < 0
2551 || GENERAL_REGS == ALL_REGS
2552 || GET_CODE (op) != REG
2553 || (reload_in_progress
2554 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2555 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2556 win = 1;
2557 break;
2558
2559 case 'X':
2560 /* This is used for a MATCH_SCRATCH in the cases when
2561 we don't actually need anything. So anything goes
2562 any time. */
2563 win = 1;
2564 break;
2565
2566 case 'm':
2567 if (GET_CODE (op) == MEM
2568 /* Before reload, accept what reload can turn into mem. */
2569 || (strict < 0 && CONSTANT_P (op))
2570 /* During reload, accept a pseudo. */
2571 || (reload_in_progress && GET_CODE (op) == REG
2572 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2573 win = 1;
2574 break;
2575
2576 case '<':
2577 if (GET_CODE (op) == MEM
2578 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2579 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2580 win = 1;
2581 break;
2582
2583 case '>':
2584 if (GET_CODE (op) == MEM
2585 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2586 || GET_CODE (XEXP (op, 0)) == POST_INC))
2587 win = 1;
2588 break;
2589
2590 case 'E':
2591 #ifndef REAL_ARITHMETIC
2592 /* Match any CONST_DOUBLE, but only if
2593 we can examine the bits of it reliably. */
2594 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
2595 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
2596 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2597 break;
2598 #endif
2599 if (GET_CODE (op) == CONST_DOUBLE)
2600 win = 1;
2601 break;
2602
2603 case 'F':
2604 if (GET_CODE (op) == CONST_DOUBLE)
2605 win = 1;
2606 break;
2607
2608 case 'G':
2609 case 'H':
2610 if (GET_CODE (op) == CONST_DOUBLE
2611 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2612 win = 1;
2613 break;
2614
2615 case 's':
2616 if (GET_CODE (op) == CONST_INT
2617 || (GET_CODE (op) == CONST_DOUBLE
2618 && GET_MODE (op) == VOIDmode))
2619 break;
/* FALLTHRU */
2620 case 'i':
2621 if (CONSTANT_P (op))
2622 win = 1;
2623 break;
2624
2625 case 'n':
2626 if (GET_CODE (op) == CONST_INT
2627 || (GET_CODE (op) == CONST_DOUBLE
2628 && GET_MODE (op) == VOIDmode))
2629 win = 1;
2630 break;
2631
2632 case 'I':
2633 case 'J':
2634 case 'K':
2635 case 'L':
2636 case 'M':
2637 case 'N':
2638 case 'O':
2639 case 'P':
2640 if (GET_CODE (op) == CONST_INT
2641 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2642 win = 1;
2643 break;
2644
2645 case 'V':
2646 if (GET_CODE (op) == MEM
2647 && ((strict > 0 && ! offsettable_memref_p (op))
2648 || (strict < 0
2649 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2650 || (reload_in_progress
2651 && !(GET_CODE (op) == REG
2652 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2653 win = 1;
2654 break;
2655
2656 case 'o':
2657 if ((strict > 0 && offsettable_memref_p (op))
2658 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2659 /* Before reload, accept what reload can handle. */
2660 || (strict < 0
2661 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2662 /* During reload, accept a pseudo. */
2663 || (reload_in_progress && GET_CODE (op) == REG
2664 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2665 win = 1;
2666 break;
2667
2668 default:
2669 {
2670 enum reg_class class;
2671
2672 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2673 if (class != NO_REGS)
2674 {
2675 if (strict < 0
2676 || (strict == 0
2677 && GET_CODE (op) == REG
2678 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2679 || (strict == 0 && GET_CODE (op) == SCRATCH)
2680 || (GET_CODE (op) == REG
2681 && reg_fits_class_p (op, class, offset, mode)))
2682 win = 1;
2683 }
2684 #ifdef EXTRA_CONSTRAINT
2685 else if (EXTRA_CONSTRAINT (op, c))
2686 win = 1;
2687 #endif
2688 break;
2689 }
2690 }
2691
2692 constraints[opno] = p;
2693 /* If this operand did not win somehow,
2694 this alternative loses. */
2695 if (! win)
2696 lose = 1;
2697 }
2698 /* This alternative won; the operands are ok.
2699 Change whichever operands this alternative says to change. */
2700 if (! lose)
2701 {
2702 int opno, eopno;
2703
2704 /* See if any earlyclobber operand conflicts with some other
2705 operand. */
2706
2707 if (strict > 0)
2708 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
2709 /* Ignore earlyclobber operands now in memory,
2710 because we would often report failure when we have
2711 two memory operands, one of which was formerly a REG. */
2712 if (earlyclobber[eopno]
2713 && GET_CODE (recog_data.operand[eopno]) == REG)
2714 for (opno = 0; opno < recog_data.n_operands; opno++)
2715 if ((GET_CODE (recog_data.operand[opno]) == MEM
2716 || recog_data.operand_type[opno] != OP_OUT)
2717 && opno != eopno
2718 /* Ignore things like match_operator operands. */
2719 && *recog_data.constraints[opno] != 0
2720 && ! (matching_operands[opno] == eopno
2721 && operands_match_p (recog_data.operand[opno],
2722 recog_data.operand[eopno]))
2723 && ! safe_from_earlyclobber (recog_data.operand[opno],
2724 recog_data.operand[eopno]))
2725 lose = 1;
2726
2727 if (! lose)
2728 {
2729 while (--funny_match_index >= 0)
2730 {
2731 recog_data.operand[funny_match[funny_match_index].other]
2732 = recog_data.operand[funny_match[funny_match_index].this];
2733 }
2734
2735 return 1;
2736 }
2737 }
2738
2739 which_alternative++;
2740 }
2741 while (which_alternative < recog_data.n_alternatives);
2742
2743 which_alternative = -1;
2744 /* If we are about to reject this, but we are not to test strictly,
2745 try a very loose test. Only return failure if it fails also. */
2746 if (strict == 0)
2747 return constrain_operands (-1);
2748 else
2749 return 0;
2750 }
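
/* Illustrative sketch, not part of the original source: re-verifying an
   insn after modifying it in place, strictly once reload has completed.
   This mirrors calls made elsewhere in the compiler; the function name is
   hypothetical.  */
#if 0
static void
example_reverify_insn (insn)
     rtx insn;
{
  extract_insn (insn);
  if (! constrain_operands (reload_completed))
    fatal_insn_not_found (insn);
}
#endif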
2751
2752 /* Return 1 iff OPERAND (assumed to be a REG rtx)
2753 is a hard reg in class CLASS when its regno is offset by OFFSET
2754 and changed to mode MODE.
2755 If REG occupies multiple hard regs, all of them must be in CLASS. */
2756
2757 int
2758 reg_fits_class_p (operand, class, offset, mode)
2759 rtx operand;
2760 register enum reg_class class;
2761 int offset;
2762 enum machine_mode mode;
2763 {
2764 register int regno = REGNO (operand);
2765 if (regno < FIRST_PSEUDO_REGISTER
2766 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2767 regno + offset))
2768 {
2769 register int sr;
2770 regno += offset;
2771 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2772 sr > 0; sr--)
2773 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2774 regno + sr))
2775 break;
2776 return sr == 0;
2777 }
2778
2779 return 0;
2780 }
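
/* For example (illustrative): on a target where HARD_REGNO_NREGS yields 2
   for DImode, reg_fits_class_p ((reg:DI 3), CLASS, 0, DImode) returns 1
   only if both hard regs 3 and 4 are in CLASS.  */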
2781 \f
2782 /* Split all insns in the function. If UPD_LIFE, update life info after. */
2783
2784 void
2785 split_all_insns (upd_life)
2786 int upd_life;
2787 {
2788 sbitmap blocks;
2789 int changed;
2790 int i;
2791
2792 blocks = sbitmap_alloc (n_basic_blocks);
2793 sbitmap_zero (blocks);
2794 changed = 0;
2795
2796 for (i = n_basic_blocks - 1; i >= 0; --i)
2797 {
2798 basic_block bb = BASIC_BLOCK (i);
2799 rtx insn, next;
2800
2801 for (insn = bb->head; insn ; insn = next)
2802 {
2803 rtx set;
2804
2805 /* Can't use `next_real_insn' because that might skip across
2806 CODE_LABELs and hence bypass basic block boundaries. */
2807 next = NEXT_INSN (insn);
2808 if (! INSN_P (insn))
2809 ;
2810
2811 /* Don't split no-op move insns. These should silently
2812 disappear later in final. Splitting such insns would
2813 break the code that handles REG_NO_CONFLICT blocks. */
2814
2815 else if ((set = single_set (insn)) != NULL
2816 && rtx_equal_p (SET_SRC (set), SET_DEST (set)))
2817 {
2818 /* Nops get in the way while scheduling, so delete them
2819 now if register allocation has already been done. It
2820 is too risky to try to do this before register
2821 allocation, and there are unlikely to be very many
2822 nops then anyways. */
2823 if (reload_completed)
2824 {
2825 PUT_CODE (insn, NOTE);
2826 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2827 NOTE_SOURCE_FILE (insn) = 0;
2828 }
2829 }
2830 else
2831 {
2832 /* Split insns here to get max fine-grain parallelism. */
2833 rtx first = PREV_INSN (insn);
2834 rtx last = try_split (PATTERN (insn), insn, 1);
2835
2836 if (last != insn)
2837 {
2838 SET_BIT (blocks, i);
2839 changed = 1;
2840
2841 /* try_split returns the NOTE that INSN became. */
2842 PUT_CODE (insn, NOTE);
2843 NOTE_SOURCE_FILE (insn) = 0;
2844 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2845
2846 /* ??? Coddle to md files that generate subregs in post-
2847 reload splitters instead of computing the proper
2848 hard register. */
2849 if (reload_completed && first != last)
2850 {
2851 first = NEXT_INSN (first);
2852 while (1)
2853 {
2854 if (INSN_P (first))
2855 cleanup_subreg_operands (first);
2856 if (first == last)
2857 break;
2858 first = NEXT_INSN (first);
2859 }
2860 }
2861
2862 if (insn == bb->end)
2863 {
2864 bb->end = last;
2865 break;
2866 }
2867 }
2868 }
2869
2870 if (insn == bb->end)
2871 break;
2872 }
2873
2874 /* ??? When we're called from just after reload, the CFG is in bad
2875 shape, and we may have fallen off the end. This could be fixed
2876 by having reload not try to delete unreachable code. Otherwise
2877 assert we found the end insn. */
2878 if (insn == NULL && upd_life)
2879 abort ();
2880 }
2881
2882 if (changed && upd_life)
2883 {
2884 compute_bb_for_insn (get_max_uid ());
2885 count_or_remove_death_notes (blocks, 1);
2886 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
2887 }
2888
2889 sbitmap_free (blocks);
2890 }
2891 \f
2892 #ifdef HAVE_peephole2
2893 struct peep2_insn_data
2894 {
2895 rtx insn;
2896 regset live_before;
2897 };
2898
2899 static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2900 static int peep2_current;
2901
2902 /* A non-insn marker indicating the last insn of the block.
2903 The live_before regset for this element is correct, indicating
2904 global_live_at_end for the block. */
2905 #define PEEP2_EOB pc_rtx
2906
2907 /* Return the Nth non-note insn after `current', or return NULL_RTX if it
2908 does not exist. Used by the recognizer to find the next insn to match
2909 in a multi-insn pattern. */
2910
2911 rtx
2912 peep2_next_insn (n)
2913 int n;
2914 {
2915 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2916 abort ();
2917
2918 n += peep2_current;
2919 if (n >= MAX_INSNS_PER_PEEP2 + 1)
2920 n -= MAX_INSNS_PER_PEEP2 + 1;
2921
2922 if (peep2_insn_data[n].insn == PEEP2_EOB)
2923 return NULL_RTX;
2924 return peep2_insn_data[n].insn;
2925 }
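
/* For example (illustrative): if MAX_INSNS_PER_PEEP2 were 5 and
   peep2_current were 4, peep2_next_insn (3) would look at slot
   (4 + 3) - (MAX_INSNS_PER_PEEP2 + 1) = 1 of the circular buffer.  */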
2926
2927 /* Return true if REGNO is dead before the Nth non-note insn
2928 after `current'. */
2929
2930 int
2931 peep2_regno_dead_p (ofs, regno)
2932 int ofs;
2933 int regno;
2934 {
2935 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2936 abort ();
2937
2938 ofs += peep2_current;
2939 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2940 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2941
2942 if (peep2_insn_data[ofs].insn == NULL_RTX)
2943 abort ();
2944
2945 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
2946 }
2947
2948 /* Similarly for a REG. */
2949
2950 int
2951 peep2_reg_dead_p (ofs, reg)
2952 int ofs;
2953 rtx reg;
2954 {
2955 int regno, n;
2956
2957 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2958 abort ();
2959
2960 ofs += peep2_current;
2961 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
2962 ofs -= MAX_INSNS_PER_PEEP2 + 1;
2963
2964 if (peep2_insn_data[ofs].insn == NULL_RTX)
2965 abort ();
2966
2967 regno = REGNO (reg);
2968 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
2969 while (--n >= 0)
2970 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
2971 return 0;
2972 return 1;
2973 }
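
/* For example (illustrative): a define_peephole2 condition can test
   peep2_reg_dead_p (1, operands[0]) to ask whether the register in
   operand 0 is dead before the first non-note insn after the current
   one, i.e. whether the current insn is its last use.  */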
2974
2975 /* Try to find a hard register of mode MODE, matching the register class in
2976 CLASS_STR, which is available over the insns at window offsets FROM
2977 through TO (offsets are counted as in peep2_next_insn). If FROM equals
2978 TO, the only condition is that the register must be available before
2979 that insn.
2980 Registers that already have bits set in REG_SET will not be considered.
2981
2982 If an appropriate register is available, it will be returned and the
2983 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
2984 returned. */
2985
2986 rtx
2987 peep2_find_free_register (from, to, class_str, mode, reg_set)
2988 int from, to;
2989 const char *class_str;
2990 enum machine_mode mode;
2991 HARD_REG_SET *reg_set;
2992 {
2993 static int search_ofs;
2994 enum reg_class class;
2995 HARD_REG_SET live;
2996 int i;
2997
2998 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
2999 abort ();
3000
3001 from += peep2_current;
3002 if (from >= MAX_INSNS_PER_PEEP2 + 1)
3003 from -= MAX_INSNS_PER_PEEP2 + 1;
3004 to += peep2_current;
3005 if (to >= MAX_INSNS_PER_PEEP2 + 1)
3006 to -= MAX_INSNS_PER_PEEP2 + 1;
3007
3008 if (peep2_insn_data[from].insn == NULL_RTX)
3009 abort ();
3010 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3011
3012 while (from != to)
3013 {
3014 HARD_REG_SET this_live;
3015
3016 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
3017 from = 0;
3018 if (peep2_insn_data[from].insn == NULL_RTX)
3019 abort ();
3020 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
3021 IOR_HARD_REG_SET (live, this_live);
3022 }
3023
3024 class = (class_str[0] == 'r' ? GENERAL_REGS
3025 : REG_CLASS_FROM_LETTER (class_str[0]));
3026
3027 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3028 {
3029 int raw_regno, regno, success, j;
3030
3031 /* Distribute the free registers as much as possible. */
3032 raw_regno = search_ofs + i;
3033 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3034 raw_regno -= FIRST_PSEUDO_REGISTER;
3035 #ifdef REG_ALLOC_ORDER
3036 regno = reg_alloc_order[raw_regno];
3037 #else
3038 regno = raw_regno;
3039 #endif
3040
3041 /* Don't allocate fixed registers. */
3042 if (fixed_regs[regno])
3043 continue;
3044 /* Make sure the register is of the right class. */
3045 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
3046 continue;
3047 /* And can support the mode we need. */
3048 if (! HARD_REGNO_MODE_OK (regno, mode))
3049 continue;
3050 /* And that we don't create an extra save/restore. */
3051 if (! call_used_regs[regno] && ! regs_ever_live[regno])
3052 continue;
3053 /* And we don't clobber traceback for noreturn functions. */
3054 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3055 && (! reload_completed || frame_pointer_needed))
3056 continue;
3057
3058 success = 1;
3059 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3060 {
3061 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3062 || TEST_HARD_REG_BIT (live, regno + j))
3063 {
3064 success = 0;
3065 break;
3066 }
3067 }
3068 if (success)
3069 {
3070 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3071 SET_HARD_REG_BIT (*reg_set, regno + j);
3072
3073 /* Start the next search with the next register. */
3074 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3075 raw_regno = 0;
3076 search_ofs = raw_regno;
3077
3078 return gen_rtx_REG (mode, regno);
3079 }
3080 }
3081
3082 search_ofs = 0;
3083 return NULL_RTX;
3084 }
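
/* Illustrative sketch, not part of the original source: asking for a free
   SImode general register over the first three insns of the matched
   window.  Wrapped in `#if 0'; the function name is hypothetical.  */
#if 0
static rtx
example_grab_scratch ()
{
  HARD_REG_SET used;

  CLEAR_HARD_REG_SET (used);
  /* "r" selects GENERAL_REGS; NULL_RTX is returned when nothing is
     free, and callers must be prepared for that.  */
  return peep2_find_free_register (0, 2, "r", SImode, &used);
}
#endif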
3085
3086 /* Perform the peephole2 optimization pass. */
3087
3088 void
3089 peephole2_optimize (dump_file)
3090 FILE *dump_file ATTRIBUTE_UNUSED;
3091 {
3092 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
3093 rtx insn, prev;
3094 regset live;
3095 int i, b;
3096 #ifdef HAVE_conditional_execution
3097 sbitmap blocks;
3098 int changed;
3099 #endif
3100
3101 /* Initialize the regsets we're going to use. */
3102 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3103 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3104 live = INITIALIZE_REG_SET (rs_heads[i]);
3105
3106 #ifdef HAVE_conditional_execution
3107 blocks = sbitmap_alloc (n_basic_blocks);
3108 sbitmap_zero (blocks);
3109 changed = 0;
3110 #else
3111 count_or_remove_death_notes (NULL, 1);
3112 #endif
3113
3114 for (b = n_basic_blocks - 1; b >= 0; --b)
3115 {
3116 basic_block bb = BASIC_BLOCK (b);
3117 struct propagate_block_info *pbi;
3118
3119 /* Indicate that all slots except the last hold invalid data. */
3120 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3121 peep2_insn_data[i].insn = NULL_RTX;
3122
3123 /* Indicate that the last slot contains live_after data. */
3124 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3125 peep2_current = MAX_INSNS_PER_PEEP2;
3126
3127 /* Start up propagation. */
3128 COPY_REG_SET (live, bb->global_live_at_end);
3129 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3130
3131 #ifdef HAVE_conditional_execution
3132 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
3133 #else
3134 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
3135 #endif
3136
3137 for (insn = bb->end; ; insn = prev)
3138 {
3139 prev = PREV_INSN (insn);
3140 if (INSN_P (insn))
3141 {
3142 rtx try;
3143 int match_len;
3144
3145 /* Record this insn. */
3146 if (--peep2_current < 0)
3147 peep2_current = MAX_INSNS_PER_PEEP2;
3148 peep2_insn_data[peep2_current].insn = insn;
3149 propagate_one_insn (pbi, insn);
3150 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3151
3152 /* Match the peephole. */
3153 try = peephole2_insns (PATTERN (insn), insn, &match_len);
3154 if (try != NULL)
3155 {
3156 i = match_len + peep2_current;
3157 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3158 i -= MAX_INSNS_PER_PEEP2 + 1;
3159
3160 /* Replace the old sequence with the new. */
3161 flow_delete_insn_chain (insn, peep2_insn_data[i].insn);
3162 try = emit_insn_after (try, prev);
3163
3164 /* Adjust the basic block boundaries. */
3165 if (peep2_insn_data[i].insn == bb->end)
3166 bb->end = try;
3167 if (insn == bb->head)
3168 bb->head = NEXT_INSN (prev);
3169
3170 #ifdef HAVE_conditional_execution
3171 /* With conditional execution, we cannot back up the
3172 live information so easily, since the conditional
3173 death data structures are not so self-contained.
3174 So record that we've made a modification to this
3175 block and update life information at the end. */
3176 SET_BIT (blocks, b);
3177 changed = 1;
3178
3179 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3180 peep2_insn_data[i].insn = NULL_RTX;
3181 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3182 #else
3183 /* Back up lifetime information past the end of the
3184 newly created sequence. */
3185 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3186 i = 0;
3187 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3188
3189 /* Update life information for the new sequence. */
3190 do
3191 {
3192 if (INSN_P (try))
3193 {
3194 if (--i < 0)
3195 i = MAX_INSNS_PER_PEEP2;
3196 peep2_insn_data[i].insn = try;
3197 propagate_one_insn (pbi, try);
3198 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3199 }
3200 try = PREV_INSN (try);
3201 }
3202 while (try != prev);
3203
3204 /* ??? Should verify that LIVE now matches what we
3205 had before the new sequence. */
3206
3207 peep2_current = i;
3208 #endif
3209 }
3210 }
3211
3212 if (insn == bb->head)
3213 break;
3214 }
3215
3216 free_propagate_block_info (pbi);
3217 }
3218
3219 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3220 FREE_REG_SET (peep2_insn_data[i].live_before);
3221 FREE_REG_SET (live);
3222
3223 #ifdef HAVE_conditional_execution
3224 count_or_remove_death_notes (blocks, 1);
3225 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
3226 sbitmap_free (blocks);
3227 #endif
3228 }
3229 #endif /* HAVE_peephole2 */