/* Subroutines used by or related to instruction recognition.
   Copyright (C) 1987, 1988, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "hard-reg-set.h"
#include "recog.h"
#include "regs.h"
#include "function.h"
#include "flags.h"
#include "real.h"
#include "toplev.h"
#include "basic-block.h"
#include "output.h"
#include "reload.h"

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

#ifndef STACK_POP_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_POP_CODE POST_INC
#else
#define STACK_POP_CODE POST_DEC
#endif
#endif

static void validate_replace_rtx_1	PARAMS ((rtx *, rtx, rtx, rtx));
static rtx *find_single_use_1		PARAMS ((rtx, rtx *));
static rtx *find_constant_term_loc	PARAMS ((rtx *));
static void validate_replace_src_1	PARAMS ((rtx *, void *));

/* Nonzero means allow operands to be volatile.
   This should be 0 if you are generating rtl, such as if you are calling
   the functions in optabs.c and expmed.c (most of the time).
   This should be 1 if all valid insns need to be recognized,
   such as in regclass.c and final.c and reload.c.

   init_recog and init_recog_no_volatile are responsible for setting this.  */

int volatile_ok;

struct recog_data recog_data;

/* Contains a vector of operand_alternative structures for every operand.
   Set up by preprocess_constraints.  */
struct operand_alternative recog_op_alt[MAX_RECOG_OPERANDS][MAX_RECOG_ALTERNATIVES];

/* On return from `constrain_operands', indicate which alternative
   was satisfied.  */

int which_alternative;

/* Nonzero after end of reload pass.
   Set to 1 or 0 by toplev.c.
   Controls the significance of (SUBREG (MEM)).  */

int reload_completed;

/* Initialize data used by the function `recog'.
   This must be called once in the compilation of a function
   before any insn recognition may be done in the function.  */

void
init_recog_no_volatile ()
{
  volatile_ok = 0;
}

void
init_recog ()
{
  volatile_ok = 1;
}

/* Try recognizing the instruction INSN,
   and return the code number that results.
   Remember the code so that repeated calls do not
   need to spend the time for actual rerecognition.

   This function is the normal interface to instruction recognition.
   The automatically-generated function `recog' is normally called
   through this one.  (The only exception is in combine.c.)  */

int
recog_memoized_1 (insn)
     rtx insn;
{
  if (INSN_CODE (insn) < 0)
    INSN_CODE (insn) = recog (PATTERN (insn), insn, NULL_PTR);
  return INSN_CODE (insn);
}

/* Check that X is an insn-body for an `asm' with operands
   and that the operands mentioned in it are legitimate.  */

int
check_asm_operands (x)
     rtx x;
{
  int noperands;
  rtx *operands;
  const char **constraints;
  int i;

  /* Post-reload, be more strict with things.  */
  if (reload_completed)
    {
      /* ??? Doh!  We've not got the wrapping insn.  Cook one up.  */
      extract_insn (make_insn_raw (x));
      constrain_operands (1);
      return which_alternative >= 0;
    }

  noperands = asm_noperands (x);
  if (noperands < 0)
    return 0;
  if (noperands == 0)
    return 1;

  operands = (rtx *) alloca (noperands * sizeof (rtx));
  constraints = (const char **) alloca (noperands * sizeof (char *));

  decode_asm_operands (x, operands, NULL_PTR, constraints, NULL_PTR);

  for (i = 0; i < noperands; i++)
    {
      const char *c = constraints[i];
      if (c[0] == '%')
        c++;
      if (ISDIGIT ((unsigned char) c[0]) && c[1] == '\0')
        c = constraints[c[0] - '0'];

      if (! asm_operand_ok (operands[i], c))
        return 0;
    }

  return 1;
}

/* Static data for the next two routines.  */

typedef struct change_t
{
  rtx object;
  int old_code;
  rtx *loc;
  rtx old;
} change_t;

static change_t *changes;
static int changes_allocated;

static int num_changes = 0;

/* Validate a proposed change to OBJECT.  LOC is the location in the rtl
   at which NEW will be placed.  If OBJECT is zero, no validation is done,
   the change is simply made.

   Two types of objects are supported:  If OBJECT is a MEM, memory_address_p
   will be called with the address and mode as parameters.  If OBJECT is
   an INSN, CALL_INSN, or JUMP_INSN, the insn will be re-recognized with
   the change in place.

   IN_GROUP is non-zero if this is part of a group of changes that must be
   performed as a group.  In that case, the changes will be stored.  The
   function `apply_change_group' will validate and apply the changes.

   If IN_GROUP is zero, this is a single change.  Try to recognize the insn
   or validate the memory reference with the change applied.  If the result
   is not valid for the machine, suppress the change and return zero.
   Otherwise, perform the change and return 1.  */

int
validate_change (object, loc, new, in_group)
     rtx object;
     rtx *loc;
     rtx new;
     int in_group;
{
  rtx old = *loc;

  if (old == new || rtx_equal_p (old, new))
    return 1;

  if (in_group == 0 && num_changes != 0)
    abort ();

  *loc = new;

  /* Save the information describing this change.  */
  if (num_changes >= changes_allocated)
    {
      if (changes_allocated == 0)
        /* This value allows for repeated substitutions inside complex
           indexed addresses, or changes in up to 5 insns.  */
        changes_allocated = MAX_RECOG_OPERANDS * 5;
      else
        changes_allocated *= 2;

      changes =
        (change_t*) xrealloc (changes,
                              sizeof (change_t) * changes_allocated);
    }

  changes[num_changes].object = object;
  changes[num_changes].loc = loc;
  changes[num_changes].old = old;

  if (object && GET_CODE (object) != MEM)
    {
      /* Set INSN_CODE to force rerecognition of insn.  Save old code in
         case invalid.  */
      changes[num_changes].old_code = INSN_CODE (object);
      INSN_CODE (object) = -1;
    }

  num_changes++;

  /* If we are making a group of changes, return 1.  Otherwise, validate the
     change group we made.  */

  if (in_group)
    return 1;
  else
    return apply_change_group ();
}
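
/* Illustrative sketch of typical usage (the caller, insn and replacement
   rtxes here are hypothetical, not taken from this file): a pass that wants
   two operand replacements in INSN to stand or fall together queues both
   with IN_GROUP set and lets apply_change_group decide.  */
#if 0
static int
try_rewrite_both_operands (insn, new0, new1)
     rtx insn, new0, new1;
{
  /* Assumes PATTERN (insn) is a SET whose source is a binary operation.  */
  rtx src = SET_SRC (PATTERN (insn));

  validate_change (insn, &XEXP (src, 0), new0, 1);
  validate_change (insn, &XEXP (src, 1), new1, 1);

  /* Either both substitutions are kept, or both are undone.  */
  return apply_change_group ();
}
#endif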

/* This subroutine of apply_change_group verifies whether the changes to INSN
   were valid; i.e. whether INSN can still be recognized.  */

int
insn_invalid_p (insn)
     rtx insn;
{
  rtx pat = PATTERN (insn);
  int num_clobbers = 0;
  /* If we are before reload and the pattern is a SET, see if we can add
     clobbers.  */
  int icode = recog (pat, insn,
                     (GET_CODE (pat) == SET
                      && ! reload_completed && ! reload_in_progress)
                     ? &num_clobbers : NULL_PTR);
  int is_asm = icode < 0 && asm_noperands (PATTERN (insn)) >= 0;

  /* If this is an asm and the operands aren't legal, then fail.  Likewise if
     this is not an asm and the insn wasn't recognized.  */
  if ((is_asm && ! check_asm_operands (PATTERN (insn)))
      || (!is_asm && icode < 0))
    return 1;

  /* If we have to add CLOBBERs, fail if we have to add ones that reference
     hard registers since our callers can't know if they are live or not.
     Otherwise, add them.  */
  if (num_clobbers > 0)
    {
      rtx newpat;

      if (added_clobbers_hard_reg_p (icode))
        return 1;

      newpat = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (num_clobbers + 1));
      XVECEXP (newpat, 0, 0) = pat;
      add_clobbers (newpat, icode);
      PATTERN (insn) = pat = newpat;
    }

  /* After reload, verify that all constraints are satisfied.  */
  if (reload_completed)
    {
      extract_insn (insn);

      if (! constrain_operands (1))
        return 1;
    }

  INSN_CODE (insn) = icode;
  return 0;
}

/* Apply a group of changes previously issued with `validate_change'.
   Return 1 if all changes are valid, zero otherwise.  */

int
apply_change_group ()
{
  int i;

  /* The changes have been applied and all INSN_CODEs have been reset to force
     rerecognition.

     The changes are valid if we aren't given an object, or if we are
     given a MEM and it still is a valid address, or if this is an insn
     and it is recognized.  In the latter case, if reload has completed,
     we also require that the operands meet the constraints for
     the insn.  */

  for (i = 0; i < num_changes; i++)
    {
      rtx object = changes[i].object;

      if (object == 0)
        continue;

      if (GET_CODE (object) == MEM)
        {
          if (! memory_address_p (GET_MODE (object), XEXP (object, 0)))
            break;
        }
      else if (insn_invalid_p (object))
        {
          rtx pat = PATTERN (object);

          /* Perhaps we couldn't recognize the insn because there were
             extra CLOBBERs at the end.  If so, try to re-recognize
             without the last CLOBBER (later iterations will cause each of
             them to be eliminated, in turn).  But don't do this if we
             have an ASM_OPERAND.  */
          if (GET_CODE (pat) == PARALLEL
              && GET_CODE (XVECEXP (pat, 0, XVECLEN (pat, 0) - 1)) == CLOBBER
              && asm_noperands (PATTERN (object)) < 0)
            {
              rtx newpat;

              if (XVECLEN (pat, 0) == 2)
                newpat = XVECEXP (pat, 0, 0);
              else
                {
                  int j;

                  newpat
                    = gen_rtx_PARALLEL (VOIDmode,
                                        rtvec_alloc (XVECLEN (pat, 0) - 1));
                  for (j = 0; j < XVECLEN (newpat, 0); j++)
                    XVECEXP (newpat, 0, j) = XVECEXP (pat, 0, j);
                }

              /* Add a new change to this group to replace the pattern
                 with this new pattern.  Then consider this change
                 as having succeeded.  The change we added will
                 cause the entire call to fail if things remain invalid.

                 Note that this can lose if a later change than the one
                 we are processing specified &XVECEXP (PATTERN (object), 0, X)
                 but this shouldn't occur.  */

              validate_change (object, &PATTERN (object), newpat, 1);
            }
          else if (GET_CODE (pat) == USE || GET_CODE (pat) == CLOBBER)
            /* If this insn is a CLOBBER or USE, it is always valid, but is
               never recognized.  */
            continue;
          else
            break;
        }
    }

  if (i == num_changes)
    {
      num_changes = 0;
      return 1;
    }
  else
    {
      cancel_changes (0);
      return 0;
    }
}

/* Return the number of changes so far in the current group.  */

int
num_validated_changes ()
{
  return num_changes;
}

/* Retract the changes numbered NUM and up.  */

void
cancel_changes (num)
     int num;
{
  int i;

  /* Back out all the changes.  Do this in the opposite order in which
     they were made.  */
  for (i = num_changes - 1; i >= num; i--)
    {
      *changes[i].loc = changes[i].old;
      if (changes[i].object && GET_CODE (changes[i].object) != MEM)
        INSN_CODE (changes[i].object) = changes[i].old_code;
    }
  num_changes = num;
}
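
/* Illustrative sketch (hypothetical caller and profitability test, not from
   this file): num_validated_changes and cancel_changes give a caller a
   rollback point inside a larger group, so a speculative tail of
   substitutions can be retracted without disturbing the changes queued
   before it.  */
#if 0
static void
try_speculative_change (insn, loc, speculative_rtx)
     rtx insn, *loc, speculative_rtx;
{
  int checkpoint = num_validated_changes ();

  validate_change (insn, loc, speculative_rtx, 1);
  if (! worth_keeping_p ())     /* Hypothetical profitability test.  */
    /* Drop only the changes queued after the checkpoint.  */
    cancel_changes (checkpoint);
}
#endif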

/* Replace every occurrence of FROM in X with TO.  Mark each change with
   validate_change passing OBJECT.  */

static void
validate_replace_rtx_1 (loc, from, to, object)
     rtx *loc;
     rtx from, to, object;
{
  register int i, j;
  register const char *fmt;
  register rtx x = *loc;
  enum rtx_code code;

  if (!x)
    return;
  code = GET_CODE (x);
  /* X matches FROM if it is the same rtx or they are both referring to the
     same register in the same mode.  Avoid calling rtx_equal_p unless the
     operands look similar.  */

  if (x == from
      || (GET_CODE (x) == REG && GET_CODE (from) == REG
          && GET_MODE (x) == GET_MODE (from)
          && REGNO (x) == REGNO (from))
      || (GET_CODE (x) == GET_CODE (from) && GET_MODE (x) == GET_MODE (from)
          && rtx_equal_p (x, from)))
    {
      validate_change (object, loc, to, 1);
      return;
    }

  /* For commutative or comparison operations, try replacing each argument
     separately and seeing if we made any changes.  If so, put a constant
     argument last.  */
  if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
    {
      int prev_changes = num_changes;

      validate_replace_rtx_1 (&XEXP (x, 0), from, to, object);
      validate_replace_rtx_1 (&XEXP (x, 1), from, to, object);
      if (prev_changes != num_changes && CONSTANT_P (XEXP (x, 0)))
        {
          validate_change (object, loc,
                           gen_rtx_fmt_ee (GET_RTX_CLASS (code) == 'c' ? code
                                           : swap_condition (code),
                                           GET_MODE (x), XEXP (x, 1),
                                           XEXP (x, 0)),
                           1);
          x = *loc;
          code = GET_CODE (x);
        }
    }

  /* Note that if CODE's RTX_CLASS is "c" or "<" we will have already
     done the substitution, otherwise we won't.  */

  switch (code)
    {
    case PLUS:
      /* If we have a PLUS whose second operand is now a CONST_INT, use
         plus_constant to try to simplify it.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && XEXP (x, 1) == to)
        validate_change (object, loc, plus_constant (XEXP (x, 0), INTVAL (to)),
                         1);
      return;

    case MINUS:
      if (GET_CODE (to) == CONST_INT && XEXP (x, 1) == from)
        {
          validate_change (object, loc,
                           plus_constant (XEXP (x, 0), - INTVAL (to)),
                           1);
          return;
        }
      break;

    case ZERO_EXTEND:
    case SIGN_EXTEND:
      /* In these cases, the operation to be performed depends on the mode
         of the operand.  If we are replacing the operand with a VOIDmode
         constant, we lose the information.  So try to simplify the operation
         in that case.  */
      if (GET_MODE (to) == VOIDmode
          && (rtx_equal_p (XEXP (x, 0), from)
              || (GET_CODE (XEXP (x, 0)) == SUBREG
                  && rtx_equal_p (SUBREG_REG (XEXP (x, 0)), from))))
        {
          rtx new = NULL_RTX;

          /* If there is a subreg involved, crop to the portion of the
             constant that we are interested in.  */
          if (GET_CODE (XEXP (x, 0)) == SUBREG)
            {
              if (GET_MODE_SIZE (GET_MODE (XEXP (x, 0))) <= UNITS_PER_WORD)
                to = operand_subword (to,
                                      (SUBREG_BYTE (XEXP (x, 0))
                                       / UNITS_PER_WORD),
                                      0, GET_MODE (from));
              else if (GET_MODE_CLASS (GET_MODE (from)) == MODE_INT
                       && (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
                           <= HOST_BITS_PER_WIDE_INT))
                {
                  int i = SUBREG_BYTE (XEXP (x, 0)) * BITS_PER_UNIT;
                  HOST_WIDE_INT valh;
                  unsigned HOST_WIDE_INT vall;

                  if (GET_CODE (to) == CONST_INT)
                    {
                      vall = INTVAL (to);
                      valh = (HOST_WIDE_INT) vall < 0 ? ~0 : 0;
                    }
                  else
                    {
                      vall = CONST_DOUBLE_LOW (to);
                      valh = CONST_DOUBLE_HIGH (to);
                    }

                  if (WORDS_BIG_ENDIAN)
                    i = (GET_MODE_BITSIZE (GET_MODE (from))
                         - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - i);
                  if (i > 0 && i < HOST_BITS_PER_WIDE_INT)
                    vall = vall >> i | valh << (HOST_BITS_PER_WIDE_INT - i);
                  else if (i >= HOST_BITS_PER_WIDE_INT)
                    vall = valh >> (i - HOST_BITS_PER_WIDE_INT);
                  to = GEN_INT (trunc_int_for_mode (vall,
                                                    GET_MODE (XEXP (x, 0))));
                }
              else
                to = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
            }

          /* If the above didn't fail, perform the extension from the
             mode of the operand (and not the mode of FROM).  */
          if (to)
            new = simplify_unary_operation (code, GET_MODE (x), to,
                                            GET_MODE (XEXP (x, 0)));

          /* If any of the above failed, substitute in something that
             we know won't be recognized.  */
          if (!new)
            new = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);

          validate_change (object, loc, new, 1);
          return;
        }
      break;

    case SUBREG:
      /* In case we are replacing by constant, attempt to simplify it to
         non-SUBREG expression.  We can't do this later, since the information
         about inner mode may be lost.  */
      if (CONSTANT_P (to) && rtx_equal_p (SUBREG_REG (x), from))
        {
          int offset, part;
          unsigned HOST_WIDE_INT val;

          /* A paradoxical SUBREG of a VOIDmode constant is the same constant,
             since we are saying that the high bits don't matter.  */
          if (GET_MODE (to) == VOIDmode
              && (GET_MODE_SIZE (GET_MODE (x))
                  >= GET_MODE_SIZE (GET_MODE (from))))
            {
              rtx new = gen_lowpart_if_possible (GET_MODE (x), to);
              if (new)
                {
                  validate_change (object, loc, new, 1);
                  return;
                }
            }

          offset = SUBREG_BYTE (x) * BITS_PER_UNIT;
          switch (GET_CODE (to))
            {
            case CONST_DOUBLE:
              if (GET_MODE (to) != VOIDmode)
                break;

              part = offset >= HOST_BITS_PER_WIDE_INT;
              if ((BITS_PER_WORD > HOST_BITS_PER_WIDE_INT
                   && BYTES_BIG_ENDIAN)
                  || (BITS_PER_WORD <= HOST_BITS_PER_WIDE_INT
                      && WORDS_BIG_ENDIAN))
                part = !part;
              val = part ? CONST_DOUBLE_HIGH (to) : CONST_DOUBLE_LOW (to);
              offset %= HOST_BITS_PER_WIDE_INT;

              /* FALLTHROUGH */
            case CONST_INT:
              if (GET_CODE (to) == CONST_INT)
                val = INTVAL (to);

              {
                /* Avoid creating bogus SUBREGs */
                enum machine_mode mode = GET_MODE (x);
                enum machine_mode inner_mode = GET_MODE (from);

                /* We've already picked the word we want from a double, so
                   pretend this is actually an integer.  */
                if (GET_CODE (to) == CONST_DOUBLE)
                  inner_mode = SImode;

                if (GET_MODE_CLASS (mode) != MODE_INT)
                  {
                    /* Substitute in something that we know won't be
                       recognized.  */
                    to = gen_rtx_CLOBBER (GET_MODE (x), const0_rtx);
                    validate_change (object, loc, to, 1);
                    return;
                  }

                if (BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN)
                  {
                    if (WORDS_BIG_ENDIAN)
                      offset = GET_MODE_BITSIZE (inner_mode)
                               - GET_MODE_BITSIZE (mode) - offset;
                    if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
                        && GET_MODE_SIZE (mode) < UNITS_PER_WORD)
                      offset = offset + BITS_PER_WORD - GET_MODE_BITSIZE (mode)
                               - 2 * (offset % BITS_PER_WORD);
                  }

                if (offset >= HOST_BITS_PER_WIDE_INT)
                  to = ((HOST_WIDE_INT) val < 0) ? constm1_rtx : const0_rtx;
                else
                  {
                    val >>= offset;
                    if (GET_MODE_BITSIZE (mode) < HOST_BITS_PER_WIDE_INT)
                      val = trunc_int_for_mode (val, mode);
                    to = GEN_INT (val);
                  }

                validate_change (object, loc, to, 1);
                return;
              }

            default:
              break;
            }
        }

      /* Changing mode twice with SUBREG => just change it once,
         or not at all if changing back to starting mode.  */
      if (GET_CODE (to) == SUBREG
          && rtx_equal_p (SUBREG_REG (x), from))
        {
          if (GET_MODE (x) == GET_MODE (SUBREG_REG (to))
              && SUBREG_BYTE (x) == 0 && SUBREG_BYTE (to) == 0)
            {
              validate_change (object, loc, SUBREG_REG (to), 1);
              return;
            }

          /* Make sure the 2 byte counts added together are an even unit
             of x's mode, and combine them if so.  Otherwise we run
             into problems with something like:
                (subreg:HI (subreg:QI (SI:55) 3) 0)
             we end up with an odd offset into a HI which is invalid.  */

          if (SUBREG_BYTE (to) % GET_MODE_SIZE (GET_MODE (x)) == 0)
            validate_change (object, loc,
                             gen_rtx_SUBREG (GET_MODE (x), SUBREG_REG (to),
                                             SUBREG_BYTE (x) + SUBREG_BYTE (to)),
                             1);
          else
            validate_change (object, loc, to, 1);

          return;
        }

      /* If we have a SUBREG of a register that we are replacing and we are
         replacing it with a MEM, make a new MEM and try replacing the
         SUBREG with it.  Don't do this if the MEM has a mode-dependent address
         or if we would be widening it.  */

      if (GET_CODE (from) == REG
          && GET_CODE (to) == MEM
          && rtx_equal_p (SUBREG_REG (x), from)
          && ! mode_dependent_address_p (XEXP (to, 0))
          && ! MEM_VOLATILE_P (to)
          && GET_MODE_SIZE (GET_MODE (x)) <= GET_MODE_SIZE (GET_MODE (to)))
        {
          int offset = SUBREG_BYTE (x);
          enum machine_mode mode = GET_MODE (x);
          rtx new;

          new = gen_rtx_MEM (mode, plus_constant (XEXP (to, 0), offset));
          MEM_COPY_ATTRIBUTES (new, to);
          validate_change (object, loc, new, 1);
          return;
        }
      break;

    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* If we are replacing a register with memory, try to change the memory
         to be the mode required for memory in extract operations (this isn't
         likely to be an insertion operation; if it was, nothing bad will
         happen, we might just fail in some cases).  */

      if (GET_CODE (from) == REG && GET_CODE (to) == MEM
          && rtx_equal_p (XEXP (x, 0), from)
          && GET_CODE (XEXP (x, 1)) == CONST_INT
          && GET_CODE (XEXP (x, 2)) == CONST_INT
          && ! mode_dependent_address_p (XEXP (to, 0))
          && ! MEM_VOLATILE_P (to))
        {
          enum machine_mode wanted_mode = VOIDmode;
          enum machine_mode is_mode = GET_MODE (to);
          int pos = INTVAL (XEXP (x, 2));

#ifdef HAVE_extzv
          if (code == ZERO_EXTRACT)
            {
              wanted_mode = insn_data[(int) CODE_FOR_extzv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }
#endif
#ifdef HAVE_extv
          if (code == SIGN_EXTRACT)
            {
              wanted_mode = insn_data[(int) CODE_FOR_extv].operand[1].mode;
              if (wanted_mode == VOIDmode)
                wanted_mode = word_mode;
            }
#endif

          /* If we have a narrower mode, we can do something.  */
          if (wanted_mode != VOIDmode
              && GET_MODE_SIZE (wanted_mode) < GET_MODE_SIZE (is_mode))
            {
              int offset = pos / BITS_PER_UNIT;
              rtx newmem;

              /* If the bytes and bits are counted differently, we
                 must adjust the offset.  */
              if (BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN)
                offset = (GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (wanted_mode)
                          - offset);

              pos %= GET_MODE_BITSIZE (wanted_mode);

              newmem = gen_rtx_MEM (wanted_mode,
                                    plus_constant (XEXP (to, 0), offset));
              MEM_COPY_ATTRIBUTES (newmem, to);

              validate_change (object, &XEXP (x, 2), GEN_INT (pos), 1);
              validate_change (object, &XEXP (x, 0), newmem, 1);
            }
        }

      break;

    default:
      break;
    }

  /* For commutative or comparison operations we've already performed
     replacements.  Don't try to perform them again.  */
  if (GET_RTX_CLASS (code) != '<' && GET_RTX_CLASS (code) != 'c')
    {
      fmt = GET_RTX_FORMAT (code);
      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        {
          if (fmt[i] == 'e')
            validate_replace_rtx_1 (&XEXP (x, i), from, to, object);
          else if (fmt[i] == 'E')
            for (j = XVECLEN (x, i) - 1; j >= 0; j--)
              validate_replace_rtx_1 (&XVECEXP (x, i, j), from, to, object);
        }
    }
}

/* Try replacing every occurrence of FROM in subexpression LOC of INSN
   with TO.  After all changes have been made, validate by seeing
   if INSN is still valid.  */

int
validate_replace_rtx_subexp (from, to, insn, loc)
     rtx from, to, insn, *loc;
{
  validate_replace_rtx_1 (loc, from, to, insn);
  return apply_change_group ();
}

/* Try replacing every occurrence of FROM in INSN with TO.  After all
   changes have been made, validate by seeing if INSN is still valid.  */

int
validate_replace_rtx (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
  return apply_change_group ();
}
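
/* Illustrative sketch of a typical call (REG_A and INSN are hypothetical):
   substitute one rtx for another throughout an insn and let recognition
   decide whether the rewritten insn is still valid.  */
#if 0
  /* Try to rewrite every use of REG_A in INSN as the constant 4.  If the
     rewritten insn no longer matches any pattern, the substitution is
     undone and INSN is left unchanged.  */
  if (validate_replace_rtx (reg_a, GEN_INT (4), insn))
    ;                           /* INSN was rewritten and re-recognized.  */
#endif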

/* Try replacing every occurrence of FROM in INSN with TO.  */

void
validate_replace_rtx_group (from, to, insn)
     rtx from, to, insn;
{
  validate_replace_rtx_1 (&PATTERN (insn), from, to, insn);
}

/* Function called by note_uses to replace used subexpressions.  */
struct validate_replace_src_data
{
  rtx from;                     /* Old RTX */
  rtx to;                       /* New RTX */
  rtx insn;                     /* Insn in which substitution is occurring.  */
};

static void
validate_replace_src_1 (x, data)
     rtx *x;
     void *data;
{
  struct validate_replace_src_data *d
    = (struct validate_replace_src_data *) data;

  validate_replace_rtx_1 (x, d->from, d->to, d->insn);
}

/* Try replacing every occurrence of FROM in INSN with TO, avoiding
   SET_DESTs.  After all changes have been made, validate by seeing if
   INSN is still valid.  */

int
validate_replace_src (from, to, insn)
     rtx from, to, insn;
{
  struct validate_replace_src_data d;

  d.from = from;
  d.to = to;
  d.insn = insn;
  note_uses (&PATTERN (insn), validate_replace_src_1, &d);
  return apply_change_group ();
}

#ifdef HAVE_cc0
/* Return 1 if the insn using CC0 set by INSN does not contain
   any ordered tests applied to the condition codes.
   EQ and NE tests do not count.  */

int
next_insn_tests_no_inequality (insn)
     rtx insn;
{
  register rtx next = next_cc0_user (insn);

  /* If there is no next insn, we have to take the conservative choice.  */
  if (next == 0)
    return 0;

  return ((GET_CODE (next) == JUMP_INSN
           || GET_CODE (next) == INSN
           || GET_CODE (next) == CALL_INSN)
          && ! inequality_comparisons_p (PATTERN (next)));
}

#if 0  /* This is useless since the insn that sets the cc's
          must be followed immediately by the use of them.  */
/* Return 1 if the CC value set up by INSN is not used.  */

int
next_insns_test_no_inequality (insn)
     rtx insn;
{
  register rtx next = NEXT_INSN (insn);

  for (; next != 0; next = NEXT_INSN (next))
    {
      if (GET_CODE (next) == CODE_LABEL
          || GET_CODE (next) == BARRIER)
        return 1;
      if (GET_CODE (next) == NOTE)
        continue;
      if (inequality_comparisons_p (PATTERN (next)))
        return 0;
      if (sets_cc0_p (PATTERN (next)) == 1)
        return 1;
      if (! reg_mentioned_p (cc0_rtx, PATTERN (next)))
        return 1;
    }
  return 1;
}
#endif
#endif

/* This is used by find_single_use to locate an rtx that contains exactly one
   use of DEST, which is typically either a REG or CC0.  It returns a
   pointer to the innermost rtx expression containing DEST.  Appearances of
   DEST that are being used to totally replace it are not counted.  */

static rtx *
find_single_use_1 (dest, loc)
     rtx dest;
     rtx *loc;
{
  rtx x = *loc;
  enum rtx_code code = GET_CODE (x);
  rtx *result = 0;
  rtx *this_result;
  int i;
  const char *fmt;

  switch (code)
    {
    case CONST_INT:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_DOUBLE:
    case CLOBBER:
      return 0;

    case SET:
      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn uses DEST if
         it is mentioned in the destination or the source.  Otherwise, we
         need just check the source.  */
      if (GET_CODE (SET_DEST (x)) != CC0
          && GET_CODE (SET_DEST (x)) != PC
          && GET_CODE (SET_DEST (x)) != REG
          && ! (GET_CODE (SET_DEST (x)) == SUBREG
                && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
        break;

      return find_single_use_1 (dest, &SET_SRC (x));

    case MEM:
    case SUBREG:
      return find_single_use_1 (dest, &XEXP (x, 0));

    default:
      break;
    }

  /* If it wasn't one of the common cases above, check each expression and
     vector of this code.  Look for a unique usage of DEST.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (dest == XEXP (x, i)
              || (GET_CODE (dest) == REG && GET_CODE (XEXP (x, i)) == REG
                  && REGNO (dest) == REGNO (XEXP (x, i))))
            this_result = loc;
          else
            this_result = find_single_use_1 (dest, &XEXP (x, i));

          if (result == 0)
            result = this_result;
          else if (this_result)
            /* Duplicate usage.  */
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            {
              if (XVECEXP (x, i, j) == dest
                  || (GET_CODE (dest) == REG
                      && GET_CODE (XVECEXP (x, i, j)) == REG
                      && REGNO (XVECEXP (x, i, j)) == REGNO (dest)))
                this_result = loc;
              else
                this_result = find_single_use_1 (dest, &XVECEXP (x, i, j));

              if (result == 0)
                result = this_result;
              else if (this_result)
                return 0;
            }
        }
    }

  return result;
}

/* See if DEST, produced in INSN, is used only a single time in the
   sequel.  If so, return a pointer to the innermost rtx expression in which
   it is used.

   If PLOC is non-zero, *PLOC is set to the insn containing the single use.

   This routine will usually return zero either before flow is called (because
   there will be no LOG_LINKS notes) or after reload (because the REG_DEAD
   note can't be trusted).

   If DEST is cc0_rtx, we look only at the next insn.  In that case, we don't
   care about REG_DEAD notes or LOG_LINKS.

   Otherwise, we find the single use by finding an insn that has a
   LOG_LINKS pointing at INSN and has a REG_DEAD note for DEST.  If DEST is
   only referenced once in that insn, we know that it must be the first
   and last insn referencing DEST.  */

rtx *
find_single_use (dest, insn, ploc)
     rtx dest;
     rtx insn;
     rtx *ploc;
{
  rtx next;
  rtx *result;
  rtx link;

#ifdef HAVE_cc0
  if (dest == cc0_rtx)
    {
      next = NEXT_INSN (insn);
      if (next == 0
          || (GET_CODE (next) != INSN && GET_CODE (next) != JUMP_INSN))
        return 0;

      result = find_single_use_1 (dest, &PATTERN (next));
      if (result && ploc)
        *ploc = next;
      return result;
    }
#endif

  if (reload_completed || reload_in_progress || GET_CODE (dest) != REG)
    return 0;

  for (next = next_nonnote_insn (insn);
       next != 0 && GET_CODE (next) != CODE_LABEL;
       next = next_nonnote_insn (next))
    if (INSN_P (next) && dead_or_set_p (next, dest))
      {
        for (link = LOG_LINKS (next); link; link = XEXP (link, 1))
          if (XEXP (link, 0) == insn)
            break;

        if (link)
          {
            result = find_single_use_1 (dest, &PATTERN (next));
            if (ploc)
              *ploc = next;
            return result;
          }
      }

  return 0;
}
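
/* Illustrative sketch of a combine-like caller (DEST, INSN and NEW are
   hypothetical): check whether the value set by INSN is consumed exactly
   once, and if so obtain the location of that use for an in-place
   replacement.  */
#if 0
  rtx use_insn;
  rtx *use_loc = find_single_use (dest, insn, &use_insn);

  if (use_loc != 0
      && validate_change (use_insn, use_loc, new, 0))
    ;           /* The single use was rewritten and USE_INSN still
                   matches a pattern.  */
#endif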

/* Return 1 if OP is a valid general operand for machine mode MODE.
   This is either a register reference, a memory reference,
   or a constant.  In the case of a memory reference, the address
   is checked for general validity for the target machine.

   Register and memory references must have mode MODE in order to be valid,
   but some constants have no machine mode and are valid for any mode.

   If MODE is VOIDmode, OP is checked for validity for whatever mode
   it has.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   For an explanation of this function's behavior for registers of
   class NO_REGS, see the comment for `register_operand'.  */

int
general_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  register enum rtx_code code = GET_CODE (op);

  if (mode == VOIDmode)
    mode = GET_MODE (op);

  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  if (CONSTANT_P (op))
    return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
             || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
            && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
            && LEGITIMATE_CONSTANT_P (op));

  /* Except for certain constants with VOIDmode, already checked for,
     OP's mode must match MODE if MODE specifies a mode.  */

  if (GET_MODE (op) != mode)
    return 0;

  if (code == SUBREG)
    {
#ifdef INSN_SCHEDULING
      /* On machines that have insn scheduling, we want all memory
         references to be explicit, so outlaw paradoxical SUBREGs.  */
      if (GET_CODE (SUBREG_REG (op)) == MEM
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op))))
        return 0;
#endif

      op = SUBREG_REG (op);
      code = GET_CODE (op);
    }

  if (code == REG)
    /* A register whose class is NO_REGS is not a general operand.  */
    return (REGNO (op) >= FIRST_PSEUDO_REGISTER
            || REGNO_REG_CLASS (REGNO (op)) != NO_REGS);

  if (code == MEM)
    {
      register rtx y = XEXP (op, 0);

      if (! volatile_ok && MEM_VOLATILE_P (op))
        return 0;

      if (GET_CODE (y) == ADDRESSOF)
        return 1;

      /* Use the mem's mode, since it will be reloaded thus.  */
      mode = GET_MODE (op);
      GO_IF_LEGITIMATE_ADDRESS (mode, y, win);
    }

  /* Pretend this is an operand for now; we'll run force_operand
     on its replacement in fixup_var_refs_1.  */
  if (code == ADDRESSOF)
    return 1;

  return 0;

 win:
  return 1;
}
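
/* Illustrative sketch (a made-up pattern, not from any particular machine
   description): as the comment above says, this predicate normally appears
   inside a match_operand, for example

     (define_insn "*movsi_example"
       [(set (match_operand:SI 0 "general_operand" "=r,m")
             (match_operand:SI 1 "general_operand" "rm,r"))]
       ""
       "...")

   From C, passes call it directly, e.g. general_operand (op, SImode),
   to ask whether OP could stand in such an operand slot.  */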

/* Return 1 if OP is a valid memory address for a memory reference
   of mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
address_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  return memory_address_p (mode, op);
}

/* Return 1 if OP is a register reference of mode MODE.
   If MODE is VOIDmode, accept a register in any mode.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.

   As a special exception, registers whose class is NO_REGS are
   not accepted by `register_operand'.  The reason for this change
   is to allow the representation of special architecture artifacts
   (such as a condition code register) without extending the rtl
   definitions.  Since registers of class NO_REGS cannot be used
   as registers in any case where register classes are examined,
   it is most consistent to keep this function from accepting them.  */

int
register_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
        return general_operand (op, mode);

#ifdef CLASS_CANNOT_CHANGE_MODE
      if (GET_CODE (SUBREG_REG (op)) == REG
          && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER
          && (TEST_HARD_REG_BIT
              (reg_class_contents[(int) CLASS_CANNOT_CHANGE_MODE],
               REGNO (SUBREG_REG (op))))
          && CLASS_CANNOT_CHANGE_MODE_P (mode, GET_MODE (SUBREG_REG (op)))
          && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (GET_MODE (SUBREG_REG (op))) != MODE_COMPLEX_FLOAT)
        return 0;
#endif

      op = SUBREG_REG (op);
    }

  /* If we have an ADDRESSOF, consider it valid since it will be
     converted into something that will not be a MEM.  */
  if (GET_CODE (op) == ADDRESSOF)
    return 1;

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 for a register in Pmode; ignore the tested mode.  */

int
pmode_register_operand (op, mode)
     rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return register_operand (op, Pmode);
}

/* Return 1 if OP should match a MATCH_SCRATCH, i.e., if it is a SCRATCH
   or a hard register.  */

int
scratch_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  return (GET_CODE (op) == SCRATCH
          || (GET_CODE (op) == REG
              && REGNO (op) < FIRST_PSEUDO_REGISTER));
}

/* Return 1 if OP is a valid immediate operand for mode MODE.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
immediate_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  if (GET_CODE (op) == CONST_INT
      && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
    return 0;

  /* Accept CONSTANT_P_RTX, since it will be gone by CSE1 and
     result in 0/1.  It seems a safe assumption that this is
     in range for everyone.  */
  if (GET_CODE (op) == CONSTANT_P_RTX)
    return 1;

  return (CONSTANT_P (op)
          && (GET_MODE (op) == mode || mode == VOIDmode
              || GET_MODE (op) == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
          && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
          && LEGITIMATE_CONSTANT_P (op));
}

/* Returns 1 if OP is an operand that is a CONST_INT.  */

int
const_int_operand (op, mode)
     register rtx op;
     enum machine_mode mode ATTRIBUTE_UNUSED;
{
  return GET_CODE (op) == CONST_INT;
}

/* Returns 1 if OP is an operand that is a constant integer or constant
   floating-point number.  */

int
const_double_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Don't accept CONST_INT or anything similar
     if the caller wants something floating.  */
  if (GET_MODE (op) == VOIDmode && mode != VOIDmode
      && GET_MODE_CLASS (mode) != MODE_INT
      && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
    return 0;

  return ((GET_CODE (op) == CONST_DOUBLE || GET_CODE (op) == CONST_INT)
          && (mode == VOIDmode || GET_MODE (op) == mode
              || GET_MODE (op) == VOIDmode));
}

/* Return 1 if OP is a general operand that is not an immediate operand.  */

int
nonimmediate_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  return (general_operand (op, mode) && ! CONSTANT_P (op));
}

/* Return 1 if OP is a register reference or immediate value of mode MODE.  */

int
nonmemory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  if (CONSTANT_P (op))
    {
      /* Don't accept CONST_INT or anything similar
         if the caller wants something floating.  */
      if (GET_MODE (op) == VOIDmode && mode != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_INT
          && GET_MODE_CLASS (mode) != MODE_PARTIAL_INT)
        return 0;

      if (GET_CODE (op) == CONST_INT
          && trunc_int_for_mode (INTVAL (op), mode) != INTVAL (op))
        return 0;

      return ((GET_MODE (op) == VOIDmode || GET_MODE (op) == mode
               || mode == VOIDmode)
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
              && LEGITIMATE_CONSTANT_P (op));
    }

  if (GET_MODE (op) != mode && mode != VOIDmode)
    return 0;

  if (GET_CODE (op) == SUBREG)
    {
      /* Before reload, we can allow (SUBREG (MEM...)) as a register operand
         because it is guaranteed to be reloaded into one.
         Just make sure the MEM is valid in itself.
         (Ideally, (SUBREG (MEM)...) should not exist after reload,
         but currently it does result from (SUBREG (REG)...) where the
         reg went on the stack.)  */
      if (! reload_completed && GET_CODE (SUBREG_REG (op)) == MEM)
        return general_operand (op, mode);
      op = SUBREG_REG (op);
    }

  /* We don't consider registers whose class is NO_REGS
     to be a register operand.  */
  return (GET_CODE (op) == REG
          && (REGNO (op) >= FIRST_PSEUDO_REGISTER
              || REGNO_REG_CLASS (REGNO (op)) != NO_REGS));
}

/* Return 1 if OP is a valid operand that stands for pushing a
   value of mode MODE onto the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
push_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  unsigned int rounded_size = GET_MODE_SIZE (mode);

#ifdef PUSH_ROUNDING
  rounded_size = PUSH_ROUNDING (rounded_size);
#endif

  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (rounded_size == GET_MODE_SIZE (mode))
    {
      if (GET_CODE (op) != STACK_PUSH_CODE)
        return 0;
    }
  else
    {
      if (GET_CODE (op) != PRE_MODIFY
          || GET_CODE (XEXP (op, 1)) != PLUS
          || XEXP (XEXP (op, 1), 0) != XEXP (op, 0)
          || GET_CODE (XEXP (XEXP (op, 1), 1)) != CONST_INT
#ifdef STACK_GROWS_DOWNWARD
          || INTVAL (XEXP (XEXP (op, 1), 1)) != - (int) rounded_size
#else
          || INTVAL (XEXP (XEXP (op, 1), 1)) != rounded_size
#endif
          )
        return 0;
    }

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if OP is a valid operand that stands for popping a
   value of mode MODE off the stack.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
pop_operand (op, mode)
     rtx op;
     enum machine_mode mode;
{
  if (GET_CODE (op) != MEM)
    return 0;

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  op = XEXP (op, 0);

  if (GET_CODE (op) != STACK_POP_CODE)
    return 0;

  return XEXP (op, 0) == stack_pointer_rtx;
}

/* Return 1 if ADDR is a valid memory address for mode MODE.  */

int
memory_address_p (mode, addr)
     enum machine_mode mode ATTRIBUTE_UNUSED;
     register rtx addr;
{
  if (GET_CODE (addr) == ADDRESSOF)
    return 1;

  GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
  return 0;

 win:
  return 1;
}

/* Return 1 if OP is a valid memory reference with mode MODE,
   including a valid address.

   The main use of this function is as a predicate in match_operand
   expressions in the machine description.  */

int
memory_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  rtx inner;

  if (! reload_completed)
    /* Note that no SUBREG is a memory operand before end of reload pass,
       because (SUBREG (MEM...)) forces reloading into a register.  */
    return GET_CODE (op) == MEM && general_operand (op, mode);

  if (mode != VOIDmode && GET_MODE (op) != mode)
    return 0;

  inner = op;
  if (GET_CODE (inner) == SUBREG)
    inner = SUBREG_REG (inner);

  return (GET_CODE (inner) == MEM && general_operand (op, mode));
}

/* Return 1 if OP is a valid indirect memory reference with mode MODE;
   that is, a memory reference whose address is a general_operand.  */

int
indirect_operand (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  /* Before reload, a SUBREG isn't in memory (see memory_operand, above).  */
  if (! reload_completed
      && GET_CODE (op) == SUBREG && GET_CODE (SUBREG_REG (op)) == MEM)
    {
      register int offset = SUBREG_BYTE (op);
      rtx inner = SUBREG_REG (op);

      if (mode != VOIDmode && GET_MODE (op) != mode)
        return 0;

      /* The only way that we can have a general_operand as the resulting
         address is if OFFSET is zero and the address already is an operand
         or if the address is (plus Y (const_int -OFFSET)) and Y is an
         operand.  */

      return ((offset == 0 && general_operand (XEXP (inner, 0), Pmode))
              || (GET_CODE (XEXP (inner, 0)) == PLUS
                  && GET_CODE (XEXP (XEXP (inner, 0), 1)) == CONST_INT
                  && INTVAL (XEXP (XEXP (inner, 0), 1)) == -offset
                  && general_operand (XEXP (XEXP (inner, 0), 0), Pmode)));
    }

  return (GET_CODE (op) == MEM
          && memory_operand (op, mode)
          && general_operand (XEXP (op, 0), Pmode));
}

/* Return 1 if this is a comparison operator.  This allows the use of
   MATCH_OPERATOR to recognize all the branch insns.  */

int
comparison_operator (op, mode)
     register rtx op;
     enum machine_mode mode;
{
  return ((mode == VOIDmode || GET_MODE (op) == mode)
          && GET_RTX_CLASS (GET_CODE (op)) == '<');
}

/* If BODY is an insn body that uses ASM_OPERANDS,
   return the number of operands (both input and output) in the insn.
   Otherwise return -1.  */

int
asm_noperands (body)
     rtx body;
{
  switch (GET_CODE (body))
    {
    case ASM_OPERANDS:
      /* No output operands: return number of input operands.  */
      return ASM_OPERANDS_INPUT_LENGTH (body);
    case SET:
      if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
        /* Single output operand: BODY is (set OUTPUT (asm_operands ...)).  */
        return ASM_OPERANDS_INPUT_LENGTH (SET_SRC (body)) + 1;
      else
        return -1;
    case PARALLEL:
      if (GET_CODE (XVECEXP (body, 0, 0)) == SET
          && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
        {
          /* Multiple output operands, or 1 output plus some clobbers:
             body is [(set OUTPUT (asm_operands ...))... (clobber (reg ...))...].  */
          int i;
          int n_sets;

          /* Count backwards through CLOBBERs to determine number of SETs.  */
          for (i = XVECLEN (body, 0); i > 0; i--)
            {
              if (GET_CODE (XVECEXP (body, 0, i - 1)) == SET)
                break;
              if (GET_CODE (XVECEXP (body, 0, i - 1)) != CLOBBER)
                return -1;
            }

          /* N_SETS is now number of output operands.  */
          n_sets = i;

          /* Verify that all the SETs we have
             came from a single original asm_operands insn
             (so that invalid combinations are blocked).  */
          for (i = 0; i < n_sets; i++)
            {
              rtx elt = XVECEXP (body, 0, i);
              if (GET_CODE (elt) != SET)
                return -1;
              if (GET_CODE (SET_SRC (elt)) != ASM_OPERANDS)
                return -1;
              /* If these ASM_OPERANDS rtx's came from different original insns
                 then they aren't allowed together.  */
              if (ASM_OPERANDS_INPUT_VEC (SET_SRC (elt))
                  != ASM_OPERANDS_INPUT_VEC (SET_SRC (XVECEXP (body, 0, 0))))
                return -1;
            }
          return (ASM_OPERANDS_INPUT_LENGTH (SET_SRC (XVECEXP (body, 0, 0)))
                  + n_sets);
        }
      else if (GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
        {
          /* 0 outputs, but some clobbers:
             body is [(asm_operands ...) (clobber (reg ...))...].  */
          int i;

          /* Make sure all the other parallel things really are clobbers.  */
          for (i = XVECLEN (body, 0) - 1; i > 0; i--)
            if (GET_CODE (XVECEXP (body, 0, i)) != CLOBBER)
              return -1;

          return ASM_OPERANDS_INPUT_LENGTH (XVECEXP (body, 0, 0));
        }
      else
        return -1;
    default:
      return -1;
    }
}

/* Assuming BODY is an insn body that uses ASM_OPERANDS,
   copy its operands (both input and output) into the vector OPERANDS,
   the locations of the operands within the insn into the vector OPERAND_LOCS,
   and the constraints for the operands into CONSTRAINTS.
   Write the modes of the operands into MODES.
   Return the assembler-template.

   If MODES, OPERAND_LOCS, CONSTRAINTS or OPERANDS is 0,
   we don't store that info.  */

const char *
decode_asm_operands (body, operands, operand_locs, constraints, modes)
     rtx body;
     rtx *operands;
     rtx **operand_locs;
     const char **constraints;
     enum machine_mode *modes;
{
  register int i;
  int noperands;
  const char *template = 0;

  if (GET_CODE (body) == SET && GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
    {
      rtx asmop = SET_SRC (body);
      /* Single output operand: BODY is (set OUTPUT (asm_operands ....)).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop) + 1;

      for (i = 1; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i - 1);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i - 1);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i - 1);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i - 1);
        }

      /* The output is in the SET.
         Its constraint is in the ASM_OPERANDS itself.  */
      if (operands)
        operands[0] = SET_DEST (body);
      if (operand_locs)
        operand_locs[0] = &SET_DEST (body);
      if (constraints)
        constraints[0] = ASM_OPERANDS_OUTPUT_CONSTRAINT (asmop);
      if (modes)
        modes[0] = GET_MODE (SET_DEST (body));
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == ASM_OPERANDS)
    {
      rtx asmop = body;
      /* No output operands: BODY is (asm_operands ....).  */

      noperands = ASM_OPERANDS_INPUT_LENGTH (asmop);

      /* The input operands are found in the 1st element vector.  */
      /* Constraints for inputs are in the 2nd element vector.  */
      for (i = 0; i < noperands; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }
      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == SET)
    {
      rtx asmop = SET_SRC (XVECEXP (body, 0, 0));
      int nparallel = XVECLEN (body, 0); /* Includes CLOBBERs.  */
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);
      int nout = 0;             /* Does not include CLOBBERs.  */

      /* At least one output, plus some CLOBBERs.  */

      /* The outputs are in the SETs.
         Their constraints are in the ASM_OPERANDS itself.  */
      for (i = 0; i < nparallel; i++)
        {
          if (GET_CODE (XVECEXP (body, 0, i)) == CLOBBER)
            break;              /* Past last SET */

          if (operands)
            operands[i] = SET_DEST (XVECEXP (body, 0, i));
          if (operand_locs)
            operand_locs[i] = &SET_DEST (XVECEXP (body, 0, i));
          if (constraints)
            constraints[i] = XSTR (SET_SRC (XVECEXP (body, 0, i)), 1);
          if (modes)
            modes[i] = GET_MODE (SET_DEST (XVECEXP (body, 0, i)));
          nout++;
        }

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i + nout] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i + nout] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i + nout] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i + nout] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }
  else if (GET_CODE (body) == PARALLEL
           && GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
    {
      /* No outputs, but some CLOBBERs.  */

      rtx asmop = XVECEXP (body, 0, 0);
      int nin = ASM_OPERANDS_INPUT_LENGTH (asmop);

      for (i = 0; i < nin; i++)
        {
          if (operand_locs)
            operand_locs[i] = &ASM_OPERANDS_INPUT (asmop, i);
          if (operands)
            operands[i] = ASM_OPERANDS_INPUT (asmop, i);
          if (constraints)
            constraints[i] = ASM_OPERANDS_INPUT_CONSTRAINT (asmop, i);
          if (modes)
            modes[i] = ASM_OPERANDS_INPUT_MODE (asmop, i);
        }

      template = ASM_OPERANDS_TEMPLATE (asmop);
    }

  return template;
}
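
/* Illustrative sketch of a caller (BODY is assumed to be an asm insn body;
   the fragment mirrors what check_asm_operands does above): size the arrays
   with asm_noperands, then let decode_asm_operands fill them in.  */
#if 0
  int n = asm_noperands (body);
  if (n > 0)
    {
      rtx *ops = (rtx *) alloca (n * sizeof (rtx));
      const char **cons = (const char **) alloca (n * sizeof (char *));
      const char *tmpl = decode_asm_operands (body, ops, NULL_PTR,
                                              cons, NULL_PTR);
      /* ops[i] / cons[i] now describe operand I; TMPL is the template.  */
    }
#endif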

/* Check if an asm_operand matches its constraints.
   Return > 0 if ok, = 0 if bad, < 0 if inconclusive.  */

int
asm_operand_ok (op, constraint)
     rtx op;
     const char *constraint;
{
  int result = 0;

  /* Use constrain_operands after reload.  */
  if (reload_completed)
    abort ();

  while (*constraint)
    {
      char c = *constraint++;
      switch (c)
        {
        case '=':
        case '+':
        case '*':
        case '%':
        case '?':
        case '!':
        case '#':
        case '&':
        case ',':
          break;

        case '0': case '1': case '2': case '3': case '4':
        case '5': case '6': case '7': case '8': case '9':
          /* For best results, our caller should have given us the
             proper matching constraint, but we can't actually fail
             the check if they didn't.  Indicate that results are
             inconclusive.  */
          result = -1;
          break;

        case 'p':
          if (address_operand (op, VOIDmode))
            return 1;
          break;

        case 'm':
        case 'V': /* non-offsettable */
          if (memory_operand (op, VOIDmode))
            return 1;
          break;

        case 'o': /* offsettable */
          if (offsettable_nonstrict_memref_p (op))
            return 1;
          break;

        case '<':
          /* ??? Before flow, auto inc/dec insns are not supposed to exist,
             excepting those that expand_call created.  Further, on some
             machines which do not have generalized auto inc/dec, an inc/dec
             is not a memory_operand.

             Match any memory and hope things are resolved after reload.  */

          if (GET_CODE (op) == MEM
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_DEC
                  || GET_CODE (XEXP (op, 0)) == POST_DEC))
            return 1;
          break;

        case '>':
          if (GET_CODE (op) == MEM
              && (1
                  || GET_CODE (XEXP (op, 0)) == PRE_INC
                  || GET_CODE (XEXP (op, 0)) == POST_INC))
            return 1;
          break;

        case 'E':
#ifndef REAL_ARITHMETIC
          /* Match any floating double constant, but only if
             we can examine the bits of it reliably.  */
          if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
               || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
              && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
            break;
#endif
          /* FALLTHRU */

        case 'F':
          if (GET_CODE (op) == CONST_DOUBLE)
            return 1;
          break;

        case 'G':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'G'))
            return 1;
          break;
        case 'H':
          if (GET_CODE (op) == CONST_DOUBLE
              && CONST_DOUBLE_OK_FOR_LETTER_P (op, 'H'))
            return 1;
          break;

        case 's':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            break;
          /* FALLTHRU */

        case 'i':
          if (CONSTANT_P (op)
#ifdef LEGITIMATE_PIC_OPERAND_P
              && (! flag_pic || LEGITIMATE_PIC_OPERAND_P (op))
#endif
              )
            return 1;
          break;

        case 'n':
          if (GET_CODE (op) == CONST_INT
              || (GET_CODE (op) == CONST_DOUBLE
                  && GET_MODE (op) == VOIDmode))
            return 1;
          break;

        case 'I':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'I'))
            return 1;
          break;
        case 'J':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'J'))
            return 1;
          break;
        case 'K':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'K'))
            return 1;
          break;
        case 'L':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'L'))
            return 1;
          break;
        case 'M':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'M'))
            return 1;
          break;
        case 'N':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'N'))
            return 1;
          break;
        case 'O':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'O'))
            return 1;
          break;
        case 'P':
          if (GET_CODE (op) == CONST_INT
              && CONST_OK_FOR_LETTER_P (INTVAL (op), 'P'))
            return 1;
          break;

        case 'X':
          return 1;

        case 'g':
          if (general_operand (op, VOIDmode))
1968 return 1;
1969 break;
1970
c2cba7a9
RH
1971 default:
1972 /* For all other letters, we first check for a register class,
1973 otherwise it is an EXTRA_CONSTRAINT. */
1974 if (REG_CLASS_FROM_LETTER (c) != NO_REGS)
1975 {
1976 case 'r':
1977 if (GET_MODE (op) == BLKmode)
1978 break;
1979 if (register_operand (op, VOIDmode))
1980 return 1;
1981 }
1f06ee8d 1982#ifdef EXTRA_CONSTRAINT
c2cba7a9 1983 if (EXTRA_CONSTRAINT (op, c))
1f06ee8d 1984 return 1;
1f06ee8d 1985#endif
1f06ee8d
RH
1986 break;
1987 }
1988 }
1989
1afbe1c4 1990 return result;
1f06ee8d 1991}
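
A minimal sketch of a pre-reload caller, in the spirit of the asm expander; the helper name and error text below are illustrative, not quoted from GCC.

static void
check_one_asm_operand (op, constraint)
     rtx op;
     const char *constraint;
{
  /* A result of 0 means the constraint can never be satisfied; -1
     means we cannot tell before reload, so give the operand the
     benefit of the doubt.  */
  if (asm_operand_ok (op, constraint) == 0)
    error ("impossible constraint in `asm'");
}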
2055cea7 1992\f
2055cea7
RK
1993/* Given an rtx *P, if it is a sum containing an integer constant term,
1994 return the location (type rtx *) of the pointer to that constant term.
1995 Otherwise, return a null pointer. */
1996
1997static rtx *
1998find_constant_term_loc (p)
1999 rtx *p;
2000{
2001 register rtx *tem;
2002 register enum rtx_code code = GET_CODE (*p);
2003
2004 /* If *P IS such a constant term, P is its location. */
2005
2006 if (code == CONST_INT || code == SYMBOL_REF || code == LABEL_REF
2007 || code == CONST)
2008 return p;
2009
2010 /* Otherwise, if not a sum, it has no constant term. */
2011
2012 if (GET_CODE (*p) != PLUS)
2013 return 0;
2014
2015 /* If one of the summands is constant, return its location. */
2016
2017 if (XEXP (*p, 0) && CONSTANT_P (XEXP (*p, 0))
2018 && XEXP (*p, 1) && CONSTANT_P (XEXP (*p, 1)))
2019 return p;
2020
2021 /* Otherwise, check each summand for containing a constant term. */
2022
2023 if (XEXP (*p, 0) != 0)
2024 {
2025 tem = find_constant_term_loc (&XEXP (*p, 0));
2026 if (tem != 0)
2027 return tem;
2028 }
2029
2030 if (XEXP (*p, 1) != 0)
2031 {
2032 tem = find_constant_term_loc (&XEXP (*p, 1));
2033 if (tem != 0)
2034 return tem;
2035 }
2036
2037 return 0;
2038}
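
For illustration only, this is the adjust-test-restore idiom that find_constant_term_loc enables, and that offsettable_address_p below applies; the helper name is hypothetical.

static int
valid_with_offset (addr, delta)
     rtx addr;
     int delta;
{
  rtx *loc = find_constant_term_loc (&addr);
  rtx saved;
  int ok;

  if (loc == 0)
    return 0;

  /* Bump the constant term in place, test the result, then restore
     the original rtl so the caller's address is unchanged.  */
  saved = *loc;
  *loc = plus_constant (*loc, delta);
  ok = memory_address_p (QImode, addr);
  *loc = saved;
  return ok;
}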
2039\f
2040/* Return 1 if OP is a memory reference
2041 whose address contains no side effects
2042 and remains valid after the addition
2043 of a positive integer less than the
2044 size of the object being referenced.
2045
2046 We assume that the original address is valid and do not check it.
2047
2048 This uses strict_memory_address_p as a subroutine, so
2049 don't use it before reload. */
2050
2051int
2052offsettable_memref_p (op)
2053 rtx op;
2054{
2055 return ((GET_CODE (op) == MEM)
2056 && offsettable_address_p (1, GET_MODE (op), XEXP (op, 0)));
2057}
2058
2059/* Similar, but don't require a strictly valid mem ref:
2060 consider pseudo-regs valid as index or base regs. */
2061
2062int
2063offsettable_nonstrict_memref_p (op)
2064 rtx op;
2065{
2066 return ((GET_CODE (op) == MEM)
2067 && offsettable_address_p (0, GET_MODE (op), XEXP (op, 0)));
2068}
2069
2070/* Return 1 if Y is a memory address which contains no side effects
2071 and would remain valid after the addition of a positive integer
2072 less than the size of that mode.
2073
2074 We assume that the original address is valid and do not check it.
2075 We do check that it is valid for narrower modes.
2076
2077 If STRICTP is nonzero, we require a strictly valid address,
2078 for the sake of use in reload.c. */
2079
2080int
2081offsettable_address_p (strictp, mode, y)
2082 int strictp;
2083 enum machine_mode mode;
2084 register rtx y;
2085{
2086 register enum rtx_code ycode = GET_CODE (y);
2087 register rtx z;
2088 rtx y1 = y;
2089 rtx *y2;
13536812 2090 int (*addressp) PARAMS ((enum machine_mode, rtx)) =
341a243e 2091 (strictp ? strict_memory_address_p : memory_address_p);
7bdebc3a 2092 unsigned int mode_sz = GET_MODE_SIZE (mode);
2055cea7
RK
2093
2094 if (CONSTANT_ADDRESS_P (y))
2095 return 1;
2096
2097 /* Adjusting an offsettable address involves changing to a narrower mode.
2098 Make sure that's OK. */
2099
2100 if (mode_dependent_address_p (y))
2101 return 0;
2102
7bdebc3a
RH
2103 /* ??? How much offset does an offsettable BLKmode reference need?
2104 Clearly that depends on the situation in which it's being used.
2105 However, the current situation in which we test 0xffffffff is
2106 less than ideal. Caveat user. */
2107 if (mode_sz == 0)
2108 mode_sz = BIGGEST_ALIGNMENT / BITS_PER_UNIT;
2109
2055cea7
RK
2110 /* If the expression contains a constant term,
2111 see if it remains valid when max possible offset is added. */
2112
2113 if ((ycode == PLUS) && (y2 = find_constant_term_loc (&y1)))
2114 {
2115 int good;
2116
2117 y1 = *y2;
7bdebc3a 2118 *y2 = plus_constant (*y2, mode_sz - 1);
2055cea7
RK
2119 /* Use QImode because an odd displacement may be automatically invalid
2120 for any wider mode. But it should be valid for a single byte. */
2121 good = (*addressp) (QImode, y);
2122
2123 /* In any case, restore old contents of memory. */
2124 *y2 = y1;
2125 return good;
2126 }
2127
4b983fdc 2128 if (GET_RTX_CLASS (ycode) == 'a')
2055cea7
RK
2129 return 0;
2130
2131 /* The offset added here is chosen as the maximum offset that
2132 any instruction could need to add when operating on something
2133 of the specified mode. We assume that if Y and Y+c are
2134 valid addresses then so is Y+d for all 0<d<c. */
2135
7bdebc3a 2136 z = plus_constant_for_output (y, mode_sz - 1);
2055cea7
RK
2137
2138 /* Use QImode because an odd displacement may be automatically invalid
2139 for any wider mode. But it should be valid for a single byte. */
2140 return (*addressp) (QImode, z);
2141}
2142
2143/* Return 1 if ADDR is an address-expression whose effect depends
2144 on the mode of the memory reference it is used in.
2145
2146 Autoincrement addressing is a typical example of mode-dependence
2147 because the amount of the increment depends on the mode. */
2148
2149int
2150mode_dependent_address_p (addr)
47c3ed98 2151 rtx addr ATTRIBUTE_UNUSED; /* Maybe used in GO_IF_MODE_DEPENDENT_ADDRESS. */
2055cea7
RK
2152{
2153 GO_IF_MODE_DEPENDENT_ADDRESS (addr, win);
2154 return 0;
47c3ed98
KG
2155 /* Label `win' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2156 win: ATTRIBUTE_UNUSED_LABEL
2055cea7
RK
2157 return 1;
2158}
2159
2160/* Return 1 if OP is a general operand
2161 other than a memory ref with a mode dependent address. */
2162
2163int
2164mode_independent_operand (op, mode)
2165 enum machine_mode mode;
2166 rtx op;
2167{
2168 rtx addr;
2169
2170 if (! general_operand (op, mode))
2171 return 0;
2172
2173 if (GET_CODE (op) != MEM)
2174 return 1;
2175
2176 addr = XEXP (op, 0);
2177 GO_IF_MODE_DEPENDENT_ADDRESS (addr, lose);
2178 return 1;
47c3ed98
KG
2179 /* Label `lose' might (not) be used via GO_IF_MODE_DEPENDENT_ADDRESS. */
2180 lose: ATTRIBUTE_UNUSED_LABEL
2055cea7
RK
2181 return 0;
2182}
2183
3bdf5ad1
RK
2184/* Given an operand OP that is a valid memory reference which
2185 satisfies offsettable_memref_p, return a new memory reference whose
2186 address has been adjusted by OFFSET. OFFSET should be positive and
2187 less than the size of the object referenced. */
2055cea7
RK
2188
2189rtx
2190adj_offsettable_operand (op, offset)
2191 rtx op;
2192 int offset;
2193{
2194 register enum rtx_code code = GET_CODE (op);
2195
2196 if (code == MEM)
2197 {
2198 register rtx y = XEXP (op, 0);
2199 register rtx new;
2200
2201 if (CONSTANT_ADDRESS_P (y))
2202 {
c5c76735
JL
2203 new = gen_rtx_MEM (GET_MODE (op),
2204 plus_constant_for_output (y, offset));
3bdf5ad1 2205 MEM_COPY_ATTRIBUTES (new, op);
2055cea7
RK
2206 return new;
2207 }
2208
2209 if (GET_CODE (y) == PLUS)
2210 {
2211 rtx z = y;
2212 register rtx *const_loc;
2213
2214 op = copy_rtx (op);
2215 z = XEXP (op, 0);
2216 const_loc = find_constant_term_loc (&z);
2217 if (const_loc)
2218 {
2219 *const_loc = plus_constant_for_output (*const_loc, offset);
2220 return op;
2221 }
2222 }
2223
38a448ca 2224 new = gen_rtx_MEM (GET_MODE (op), plus_constant_for_output (y, offset));
3bdf5ad1 2225 MEM_COPY_ATTRIBUTES (new, op);
2055cea7
RK
2226 return new;
2227 }
2228 abort ();
2229}
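
An illustrative sketch of how reload-era code might use these routines together to reach the second word of an offsettable multi-word reference; the function name is hypothetical, and the returned MEM keeps the original (multi-word) mode.

static rtx
second_word_ref (mem)
     rtx mem;
{
  /* adj_offsettable_operand requires an offsettable reference and an
     offset smaller than the object, so check and abort otherwise.  */
  if (GET_CODE (mem) != MEM || ! offsettable_memref_p (mem))
    abort ();
  return adj_offsettable_operand (mem, UNITS_PER_WORD);
}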
2230\f
d90ffc8d
JH
2231/* Like extract_insn, but save the insn extracted and don't extract it again
2232 when called again for the same insn, expecting that recog_data still
2233 contains valid information.  This is used primarily by the gen_attr
2234 infrastructure, which often extracts the same insn again and again. */
2235void
2236extract_insn_cached (insn)
2237 rtx insn;
2238{
2239 if (recog_data.insn == insn && INSN_CODE (insn) >= 0)
2240 return;
2241 extract_insn (insn);
2242 recog_data.insn = insn;
2243}
2244/* Do cached extract_insn, constrain_operands and complain about failures.
2245 Used by insn_attrtab. */
2246void
2247extract_constrain_insn_cached (insn)
2248 rtx insn;
2249{
2250 extract_insn_cached (insn);
2251 if (which_alternative == -1
2252 && !constrain_operands (reload_completed))
2253 fatal_insn_not_found (insn);
2254}
6c698a6d
JH
2255/* Do cached constrain_operands; return success at once if an alternative was already found. */
2256int
2257constrain_operands_cached (strict)
2258 int strict;
2259{
2260 if (which_alternative == -1)
2261 return constrain_operands (strict);
2262 else
2263 return 1;
2264}
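
The sketch below illustrates the idiom these cached entry points exist for, as generated attribute code might use it; the wrapper name is hypothetical.

static int
insn_alternative (insn)
     rtx insn;
{
  /* Extract and constrain INSN at most once, then report which
     constraint alternative matched.  */
  extract_constrain_insn_cached (insn);
  return which_alternative;
}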
d90ffc8d 2265\f
1ccbefce
RH
2266/* Analyze INSN and fill in recog_data. */
2267
0a578fee
BS
2268void
2269extract_insn (insn)
2270 rtx insn;
2271{
2272 int i;
2273 int icode;
2274 int noperands;
2275 rtx body = PATTERN (insn);
2276
d90ffc8d 2277 recog_data.insn = NULL;
1ccbefce
RH
2278 recog_data.n_operands = 0;
2279 recog_data.n_alternatives = 0;
2280 recog_data.n_dups = 0;
d90ffc8d 2281 which_alternative = -1;
0a578fee
BS
2282
2283 switch (GET_CODE (body))
2284 {
2285 case USE:
2286 case CLOBBER:
2287 case ASM_INPUT:
2288 case ADDR_VEC:
2289 case ADDR_DIFF_VEC:
2290 return;
2291
2292 case SET:
6c698a6d
JH
2293 if (GET_CODE (SET_SRC (body)) == ASM_OPERANDS)
2294 goto asm_insn;
2295 else
2296 goto normal_insn;
0a578fee 2297 case PARALLEL:
6c698a6d
JH
2298 if ((GET_CODE (XVECEXP (body, 0, 0)) == SET
2299 && GET_CODE (SET_SRC (XVECEXP (body, 0, 0))) == ASM_OPERANDS)
2300 || GET_CODE (XVECEXP (body, 0, 0)) == ASM_OPERANDS)
2301 goto asm_insn;
2302 else
2303 goto normal_insn;
0a578fee 2304 case ASM_OPERANDS:
6c698a6d 2305 asm_insn:
1ccbefce 2306 recog_data.n_operands = noperands = asm_noperands (body);
0a578fee
BS
2307 if (noperands >= 0)
2308 {
0a578fee
BS
2309 /* This insn is an `asm' with operands. */
2310
2311 /* expand_asm_operands makes sure there aren't too many operands. */
2312 if (noperands > MAX_RECOG_OPERANDS)
2313 abort ();
2314
2315 /* Now get the operand values and constraints out of the insn. */
1ccbefce
RH
2316 decode_asm_operands (body, recog_data.operand,
2317 recog_data.operand_loc,
2318 recog_data.constraints,
2319 recog_data.operand_mode);
0a578fee
BS
2320 if (noperands > 0)
2321 {
1ccbefce
RH
2322 const char *p = recog_data.constraints[0];
2323 recog_data.n_alternatives = 1;
0a578fee 2324 while (*p)
1ccbefce 2325 recog_data.n_alternatives += (*p++ == ',');
0a578fee 2326 }
0a578fee
BS
2327 break;
2328 }
6c698a6d 2329 fatal_insn_not_found (insn);
0a578fee
BS
2330
2331 default:
6c698a6d 2332 normal_insn:
0a578fee
BS
2333 /* Ordinary insn: recognize it, get the operands via insn_extract
2334 and get the constraints. */
2335
2336 icode = recog_memoized (insn);
2337 if (icode < 0)
2338 fatal_insn_not_found (insn);
2339
a995e389
RH
2340 recog_data.n_operands = noperands = insn_data[icode].n_operands;
2341 recog_data.n_alternatives = insn_data[icode].n_alternatives;
2342 recog_data.n_dups = insn_data[icode].n_dups;
0a578fee
BS
2343
2344 insn_extract (insn);
2345
2346 for (i = 0; i < noperands; i++)
2347 {
a995e389 2348 recog_data.constraints[i] = insn_data[icode].operand[i].constraint;
a995e389 2349 recog_data.operand_mode[i] = insn_data[icode].operand[i].mode;
e7adb6fb
JH
2350 /* VOIDmode match_operands get their mode from the real operand. */
2351 if (recog_data.operand_mode[i] == VOIDmode)
2352 recog_data.operand_mode[i] = GET_MODE (recog_data.operand[i]);
0a578fee
BS
2353 }
2354 }
0eadeb15 2355 for (i = 0; i < noperands; i++)
1ccbefce
RH
2356 recog_data.operand_type[i]
2357 = (recog_data.constraints[i][0] == '=' ? OP_OUT
2358 : recog_data.constraints[i][0] == '+' ? OP_INOUT
2359 : OP_IN);
f62a15e3 2360
1ccbefce 2361 if (recog_data.n_alternatives > MAX_RECOG_ALTERNATIVES)
f62a15e3 2362 abort ();
0a578fee
BS
2363}
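
As a hedged usage sketch, the helper below (hypothetical) walks the recog_data that extract_insn fills in.

static int
count_output_operands (insn)
     rtx insn;
{
  int i, n = 0;

  extract_insn (insn);
  for (i = 0; i < recog_data.n_operands; i++)
    if (recog_data.operand_type[i] == OP_OUT
        || recog_data.operand_type[i] == OP_INOUT)
      n++;
  return n;
}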
2364
f62a15e3
BS
2365/* After calling extract_insn, you can use this function to extract some
2366 information from the constraint strings into a more usable form.
2367 The collected data is stored in recog_op_alt. */
2368void
2369preprocess_constraints ()
2370{
2371 int i;
2372
341a243e 2373 memset (recog_op_alt, 0, sizeof recog_op_alt);
1ccbefce 2374 for (i = 0; i < recog_data.n_operands; i++)
f62a15e3
BS
2375 {
2376 int j;
2377 struct operand_alternative *op_alt;
1ccbefce 2378 const char *p = recog_data.constraints[i];
f62a15e3
BS
2379
2380 op_alt = recog_op_alt[i];
2381
1ccbefce 2382 for (j = 0; j < recog_data.n_alternatives; j++)
f62a15e3
BS
2383 {
2384 op_alt[j].class = NO_REGS;
2385 op_alt[j].constraint = p;
2386 op_alt[j].matches = -1;
2387 op_alt[j].matched = -1;
2388
2389 if (*p == '\0' || *p == ',')
2390 {
2391 op_alt[j].anything_ok = 1;
2392 continue;
2393 }
2394
2395 for (;;)
2396 {
2397 char c = *p++;
2398 if (c == '#')
2399 do
2400 c = *p++;
2401 while (c != ',' && c != '\0');
2402 if (c == ',' || c == '\0')
2403 break;
2404
2405 switch (c)
2406 {
2407 case '=': case '+': case '*': case '%':
2408 case 'E': case 'F': case 'G': case 'H':
2409 case 's': case 'i': case 'n':
2410 case 'I': case 'J': case 'K': case 'L':
2411 case 'M': case 'N': case 'O': case 'P':
f62a15e3
BS
2412 /* These don't say anything we care about. */
2413 break;
2414
2415 case '?':
2416 op_alt[j].reject += 6;
2417 break;
2418 case '!':
2419 op_alt[j].reject += 600;
2420 break;
2421 case '&':
2422 op_alt[j].earlyclobber = 1;
2423 break;
2424
2425 case '0': case '1': case '2': case '3': case '4':
2426 case '5': case '6': case '7': case '8': case '9':
2427 op_alt[j].matches = c - '0';
37c5269a 2428 recog_op_alt[op_alt[j].matches][j].matched = i;
f62a15e3
BS
2429 break;
2430
2431 case 'm':
2432 op_alt[j].memory_ok = 1;
2433 break;
2434 case '<':
2435 op_alt[j].decmem_ok = 1;
2436 break;
2437 case '>':
2438 op_alt[j].incmem_ok = 1;
2439 break;
2440 case 'V':
2441 op_alt[j].nonoffmem_ok = 1;
2442 break;
2443 case 'o':
2444 op_alt[j].offmem_ok = 1;
2445 break;
2446 case 'X':
2447 op_alt[j].anything_ok = 1;
2448 break;
2449
2450 case 'p':
541f7d56 2451 op_alt[j].is_address = 1;
f62a15e3
BS
2452 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) BASE_REG_CLASS];
2453 break;
2454
2455 case 'g': case 'r':
2456 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) GENERAL_REGS];
2457 break;
2458
2459 default:
973838fd 2460 op_alt[j].class = reg_class_subunion[(int) op_alt[j].class][(int) REG_CLASS_FROM_LETTER ((unsigned char)c)];
f62a15e3
BS
2461 break;
2462 }
2463 }
2464 }
2465 }
2466}
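
For illustration, a hypothetical query over the preprocessed data; the caller must have run extract_insn and preprocess_constraints on the insn first.

static int
operand_may_be_memory (opno, alt)
     int opno, alt;
{
  /* True if alternative ALT lets operand OPNO live in memory.  */
  return (recog_op_alt[opno][alt].memory_ok
          || recog_op_alt[opno][alt].offmem_ok
          || recog_op_alt[opno][alt].anything_ok);
}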
2467
0eadeb15 2468/* Check the operands of an insn against the insn's operand constraints
2055cea7 2469 and return 1 if they are valid.
0eadeb15
BS
2470 The information about the insn's operands, constraints, operand modes
2471 etc. is obtained from the global variables set up by extract_insn.
2055cea7
RK
2472
2473 WHICH_ALTERNATIVE is set to a number which indicates which
2474 alternative of constraints was matched: 0 for the first alternative,
2475 1 for the next, etc.
2476
2477 In addition, when two operands match
2478 and it happens that the output operand is (reg) while the
2479 input operand is --(reg) or ++(reg) (a pre-inc or pre-dec),
2480 make the output operand look like the input.
2481 This is because the output operand is the one the template will print.
2482
2483 This is used in final, just before printing the assembler code and by
2484 the routines that determine an insn's attribute.
2485
2486 If STRICT is positive, it means that we have been
2487 called after reload has been completed. In that case, we must
2488 do all checks strictly. If it is zero, it means that we have been called
2489 before reload has completed. In that case, we first try to see if we can
2490 find an alternative that matches strictly. If not, we try again, this
2491 time assuming that reload will fix up the insn. This provides a "best
2492 guess" for the alternative and is used to compute attributes of insns prior
2493 to reload. A negative value of STRICT is used for this internal call. */
2494
2495struct funny_match
2496{
2497 int this, other;
2498};
2499
2500int
0eadeb15 2501constrain_operands (strict)
2055cea7
RK
2502 int strict;
2503{
9b3142b3 2504 const char *constraints[MAX_RECOG_OPERANDS];
9e21be9d 2505 int matching_operands[MAX_RECOG_OPERANDS];
9e21be9d 2506 int earlyclobber[MAX_RECOG_OPERANDS];
2055cea7 2507 register int c;
2055cea7
RK
2508
2509 struct funny_match funny_match[MAX_RECOG_OPERANDS];
2510 int funny_match_index;
2055cea7 2511
4667f705 2512 which_alternative = 0;
1ccbefce 2513 if (recog_data.n_operands == 0 || recog_data.n_alternatives == 0)
2055cea7
RK
2514 return 1;
2515
1ccbefce 2516 for (c = 0; c < recog_data.n_operands; c++)
9e21be9d 2517 {
1ccbefce 2518 constraints[c] = recog_data.constraints[c];
9e21be9d 2519 matching_operands[c] = -1;
9e21be9d 2520 }
2055cea7 2521
4667f705 2522 do
2055cea7
RK
2523 {
2524 register int opno;
2525 int lose = 0;
2526 funny_match_index = 0;
2527
1ccbefce 2528 for (opno = 0; opno < recog_data.n_operands; opno++)
2055cea7 2529 {
1ccbefce 2530 register rtx op = recog_data.operand[opno];
2055cea7 2531 enum machine_mode mode = GET_MODE (op);
9b3142b3 2532 register const char *p = constraints[opno];
2055cea7
RK
2533 int offset = 0;
2534 int win = 0;
2535 int val;
2536
9e21be9d
RK
2537 earlyclobber[opno] = 0;
2538
b85f21c0 2539 /* A unary operator may be accepted by the predicate, but it
38a448ca 2540 is irrelevant for matching constraints. */
b85f21c0
ILT
2541 if (GET_RTX_CLASS (GET_CODE (op)) == '1')
2542 op = XEXP (op, 0);
2543
2055cea7
RK
2544 if (GET_CODE (op) == SUBREG)
2545 {
2546 if (GET_CODE (SUBREG_REG (op)) == REG
2547 && REGNO (SUBREG_REG (op)) < FIRST_PSEUDO_REGISTER)
ddef6bc7
JJ
2548 offset = subreg_regno_offset (REGNO (SUBREG_REG (op)),
2549 GET_MODE (SUBREG_REG (op)),
2550 SUBREG_BYTE (op),
2551 GET_MODE (op));
2055cea7
RK
2552 op = SUBREG_REG (op);
2553 }
2554
2555 /* An empty constraint or empty alternative
2556 allows anything which matched the pattern. */
2557 if (*p == 0 || *p == ',')
2558 win = 1;
2559
2560 while (*p && (c = *p++) != ',')
2561 switch (c)
2562 {
c5c76735
JL
2563 case '?': case '!': case '*': case '%':
2564 case '=': case '+':
2055cea7
RK
2565 break;
2566
4d3067db
RK
2567 case '#':
2568 /* Ignore rest of this alternative as far as
2569 constraint checking is concerned. */
2570 while (*p && *p != ',')
2571 p++;
2572 break;
2573
9e21be9d
RK
2574 case '&':
2575 earlyclobber[opno] = 1;
2576 break;
2577
c5c76735
JL
2578 case '0': case '1': case '2': case '3': case '4':
2579 case '5': case '6': case '7': case '8': case '9':
2580
2055cea7
RK
2581 /* This operand must be the same as a previous one.
2582 This kind of constraint is used for instructions such
2583 as add when they take only two operands.
2584
2585 Note that the lower-numbered operand is passed first.
2586
2587 If we are not testing strictly, assume that this constraint
2588 will be satisfied. */
2589 if (strict < 0)
2590 val = 1;
2591 else
62674ffe 2592 {
1ccbefce
RH
2593 rtx op1 = recog_data.operand[c - '0'];
2594 rtx op2 = recog_data.operand[opno];
62674ffe
JH
2595
2596 /* A unary operator may be accepted by the predicate,
2597 but it is irrelevant for matching constraints. */
2598 if (GET_RTX_CLASS (GET_CODE (op1)) == '1')
2599 op1 = XEXP (op1, 0);
2600 if (GET_RTX_CLASS (GET_CODE (op2)) == '1')
2601 op2 = XEXP (op2, 0);
2602
2603 val = operands_match_p (op1, op2);
2604 }
2055cea7 2605
9e21be9d
RK
2606 matching_operands[opno] = c - '0';
2607 matching_operands[c - '0'] = opno;
2608
2055cea7
RK
2609 if (val != 0)
2610 win = 1;
2611 /* If output is *x and input is *--x,
2612 arrange later to change the output to *--x as well,
2613 since the output op is the one that will be printed. */
2614 if (val == 2 && strict > 0)
2615 {
2616 funny_match[funny_match_index].this = opno;
2617 funny_match[funny_match_index++].other = c - '0';
2618 }
2619 break;
2620
2621 case 'p':
2622 /* p is used for address_operands. When we are called by
a8647766
RK
2623 gen_reload, no one will have checked that the address is
2624 strictly valid, i.e., that all pseudos requiring hard regs
2625 have gotten them. */
2055cea7 2626 if (strict <= 0
1ccbefce 2627 || (strict_memory_address_p (recog_data.operand_mode[opno],
0eadeb15 2628 op)))
2055cea7
RK
2629 win = 1;
2630 break;
2631
2632 /* No need to check general_operand again;
2633 it was done in insn-recog.c. */
2634 case 'g':
2635 /* Anything goes unless it is a REG and really has a hard reg
2636 but the hard reg is not in the class GENERAL_REGS. */
2637 if (strict < 0
2638 || GENERAL_REGS == ALL_REGS
2639 || GET_CODE (op) != REG
3c3eeea6
RK
2640 || (reload_in_progress
2641 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2055cea7
RK
2642 || reg_fits_class_p (op, GENERAL_REGS, offset, mode))
2643 win = 1;
2644 break;
2645
2055cea7 2646 case 'X':
0f41302f
MS
2647 /* This is used for a MATCH_SCRATCH in the cases when
2648 we don't actually need anything. So anything goes
2649 any time. */
2055cea7
RK
2650 win = 1;
2651 break;
2652
2653 case 'm':
2654 if (GET_CODE (op) == MEM
2655 /* Before reload, accept what reload can turn into mem. */
3c3eeea6
RK
2656 || (strict < 0 && CONSTANT_P (op))
2657 /* During reload, accept a pseudo */
2658 || (reload_in_progress && GET_CODE (op) == REG
2659 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2055cea7
RK
2660 win = 1;
2661 break;
2662
2663 case '<':
2664 if (GET_CODE (op) == MEM
2665 && (GET_CODE (XEXP (op, 0)) == PRE_DEC
2666 || GET_CODE (XEXP (op, 0)) == POST_DEC))
2667 win = 1;
2668 break;
2669
2670 case '>':
2671 if (GET_CODE (op) == MEM
2672 && (GET_CODE (XEXP (op, 0)) == PRE_INC
2673 || GET_CODE (XEXP (op, 0)) == POST_INC))
2674 win = 1;
2675 break;
2676
2677 case 'E':
b990f635 2678#ifndef REAL_ARITHMETIC
2055cea7
RK
2679 /* Match any CONST_DOUBLE, but only if
2680 we can examine the bits of it reliably. */
2681 if ((HOST_FLOAT_FORMAT != TARGET_FLOAT_FORMAT
9e4223f2 2682 || HOST_BITS_PER_WIDE_INT != BITS_PER_WORD)
d1b765a5 2683 && GET_MODE (op) != VOIDmode && ! flag_pretend_float)
2055cea7 2684 break;
b990f635 2685#endif
2055cea7
RK
2686 if (GET_CODE (op) == CONST_DOUBLE)
2687 win = 1;
2688 break;
2689
2690 case 'F':
2691 if (GET_CODE (op) == CONST_DOUBLE)
2692 win = 1;
2693 break;
2694
2695 case 'G':
2696 case 'H':
2697 if (GET_CODE (op) == CONST_DOUBLE
2698 && CONST_DOUBLE_OK_FOR_LETTER_P (op, c))
2699 win = 1;
2700 break;
2701
2702 case 's':
2703 if (GET_CODE (op) == CONST_INT
2704 || (GET_CODE (op) == CONST_DOUBLE
2705 && GET_MODE (op) == VOIDmode))
2706 break;
2707 case 'i':
2708 if (CONSTANT_P (op))
2709 win = 1;
2710 break;
2711
2712 case 'n':
2713 if (GET_CODE (op) == CONST_INT
2714 || (GET_CODE (op) == CONST_DOUBLE
2715 && GET_MODE (op) == VOIDmode))
2716 win = 1;
2717 break;
2718
2719 case 'I':
2720 case 'J':
2721 case 'K':
2722 case 'L':
2723 case 'M':
2724 case 'N':
2725 case 'O':
2726 case 'P':
2727 if (GET_CODE (op) == CONST_INT
2728 && CONST_OK_FOR_LETTER_P (INTVAL (op), c))
2729 win = 1;
2730 break;
2731
2055cea7
RK
2732 case 'V':
2733 if (GET_CODE (op) == MEM
69f724c0
JL
2734 && ((strict > 0 && ! offsettable_memref_p (op))
2735 || (strict < 0
2736 && !(CONSTANT_P (op) || GET_CODE (op) == MEM))
2737 || (reload_in_progress
2738 && !(GET_CODE (op) == REG
2739 && REGNO (op) >= FIRST_PSEUDO_REGISTER))))
2055cea7
RK
2740 win = 1;
2741 break;
2742
2743 case 'o':
2744 if ((strict > 0 && offsettable_memref_p (op))
2745 || (strict == 0 && offsettable_nonstrict_memref_p (op))
2746 /* Before reload, accept what reload can handle. */
2747 || (strict < 0
3c3eeea6
RK
2748 && (CONSTANT_P (op) || GET_CODE (op) == MEM))
2749 /* During reload, accept a pseudo */
2750 || (reload_in_progress && GET_CODE (op) == REG
2751 && REGNO (op) >= FIRST_PSEUDO_REGISTER))
2055cea7
RK
2752 win = 1;
2753 break;
2754
2755 default:
c2cba7a9
RH
2756 {
2757 enum reg_class class;
2758
2759 class = (c == 'r' ? GENERAL_REGS : REG_CLASS_FROM_LETTER (c));
2760 if (class != NO_REGS)
2761 {
2762 if (strict < 0
2763 || (strict == 0
2764 && GET_CODE (op) == REG
2765 && REGNO (op) >= FIRST_PSEUDO_REGISTER)
2766 || (strict == 0 && GET_CODE (op) == SCRATCH)
2767 || (GET_CODE (op) == REG
2768 && reg_fits_class_p (op, class, offset, mode)))
2769 win = 1;
2770 }
2771#ifdef EXTRA_CONSTRAINT
2772 else if (EXTRA_CONSTRAINT (op, c))
2773 win = 1;
2774#endif
2775 break;
2776 }
2055cea7
RK
2777 }
2778
2779 constraints[opno] = p;
2780 /* If this operand did not win somehow,
2781 this alternative loses. */
2782 if (! win)
2783 lose = 1;
2784 }
2785 /* This alternative won; the operands are ok.
2786 Change whichever operands this alternative says to change. */
2787 if (! lose)
2788 {
9e21be9d
RK
2789 int opno, eopno;
2790
2791 /* See if any earlyclobber operand conflicts with some other
2792 operand. */
2793
2794 if (strict > 0)
1ccbefce 2795 for (eopno = 0; eopno < recog_data.n_operands; eopno++)
62946075
RS
2796 /* Ignore earlyclobber operands now in memory,
2797 because we would often report failure when we have
2798 two memory operands, one of which was formerly a REG. */
2799 if (earlyclobber[eopno]
1ccbefce
RH
2800 && GET_CODE (recog_data.operand[eopno]) == REG)
2801 for (opno = 0; opno < recog_data.n_operands; opno++)
2802 if ((GET_CODE (recog_data.operand[opno]) == MEM
2803 || recog_data.operand_type[opno] != OP_OUT)
9e21be9d 2804 && opno != eopno
0f41302f 2805 /* Ignore things like match_operator operands. */
1ccbefce 2806 && *recog_data.constraints[opno] != 0
9e21be9d 2807 && ! (matching_operands[opno] == eopno
1ccbefce
RH
2808 && operands_match_p (recog_data.operand[opno],
2809 recog_data.operand[eopno]))
2810 && ! safe_from_earlyclobber (recog_data.operand[opno],
2811 recog_data.operand[eopno]))
9e21be9d
RK
2812 lose = 1;
2813
2814 if (! lose)
2055cea7 2815 {
9e21be9d
RK
2816 while (--funny_match_index >= 0)
2817 {
1ccbefce
RH
2818 recog_data.operand[funny_match[funny_match_index].other]
2819 = recog_data.operand[funny_match[funny_match_index].this];
9e21be9d
RK
2820 }
2821
2822 return 1;
2055cea7 2823 }
2055cea7
RK
2824 }
2825
2826 which_alternative++;
2827 }
4667f705 2828 while (which_alternative < recog_data.n_alternatives);
2055cea7 2829
d90ffc8d 2830 which_alternative = -1;
2055cea7
RK
2831 /* If we are about to reject this, but we are not to test strictly,
2832 try a very loose test. Only return failure if it fails also. */
2833 if (strict == 0)
0eadeb15 2834 return constrain_operands (-1);
2055cea7
RK
2835 else
2836 return 0;
2837}
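
A minimal sketch, in the spirit of final's checking, of strict use after reload; the wrapper name is illustrative.

static void
check_insn_strictly (insn)
     rtx insn;
{
  extract_insn (insn);
  /* After reload every insn must satisfy some alternative exactly.  */
  if (! constrain_operands (1))
    fatal_insn_not_found (insn);
}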
2838
2839/* Return 1 iff OPERAND (assumed to be a REG rtx)
38a448ca 2840 is a hard reg in class CLASS when its regno is offset by OFFSET
2055cea7
RK
2841 and changed to mode MODE.
2842 If REG occupies multiple hard regs, all of them must be in CLASS. */
2843
2844int
2845reg_fits_class_p (operand, class, offset, mode)
2846 rtx operand;
2847 register enum reg_class class;
2848 int offset;
2849 enum machine_mode mode;
2850{
2851 register int regno = REGNO (operand);
2852 if (regno < FIRST_PSEUDO_REGISTER
2853 && TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2854 regno + offset))
2855 {
2856 register int sr;
2857 regno += offset;
2858 for (sr = HARD_REGNO_NREGS (regno, mode) - 1;
2859 sr > 0; sr--)
2860 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) class],
2861 regno + sr))
2862 break;
2863 return sr == 0;
2864 }
2865
2866 return 0;
2867}
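
An illustrative wrapper (hypothetical name) showing a typical query; the REG test simply guards the call, since the function assumes its operand is a REG.

static int
fits_general_word_reg (op)
     rtx op;
{
  return (GET_CODE (op) == REG
          && reg_fits_class_p (op, GENERAL_REGS, 0, word_mode));
}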
ca545bb5 2868\f
d3a923ee 2869/* Split all insns in the function. If UPD_LIFE, update life info after. */
ca545bb5
BM
2870
2871void
d3a923ee
RH
2872split_all_insns (upd_life)
2873 int upd_life;
ca545bb5 2874{
d3a923ee
RH
2875 sbitmap blocks;
2876 int changed;
2877 int i;
2878
2879 blocks = sbitmap_alloc (n_basic_blocks);
2880 sbitmap_zero (blocks);
2881 changed = 0;
ca545bb5 2882
d3a923ee 2883 for (i = n_basic_blocks - 1; i >= 0; --i)
ca545bb5 2884 {
d3a923ee
RH
2885 basic_block bb = BASIC_BLOCK (i);
2886 rtx insn, next;
ca545bb5 2887
d3a923ee 2888 for (insn = bb->head; insn ; insn = next)
ca545bb5 2889 {
d3a923ee 2890 rtx set;
ca545bb5 2891
d3a923ee
RH
2892 /* Can't use `next_real_insn' because that might go across
2893 CODE_LABELS and short-out basic blocks. */
2894 next = NEXT_INSN (insn);
5527bf14 2895 if (! INSN_P (insn))
d3a923ee 2896 ;
ca545bb5 2897
d3a923ee
RH
2898 /* Don't split no-op move insns. These should silently
2899 disappear later in final. Splitting such insns would
2900 break the code that handles REG_NO_CONFLICT blocks. */
ca545bb5 2901
d3a923ee 2902 else if ((set = single_set (insn)) != NULL
7142e318 2903 && set_noop_p (set))
ca545bb5 2904 {
d3a923ee
RH
2905 /* Nops get in the way while scheduling, so delete them
2906 now if register allocation has already been done. It
2907 is too risky to try to do this before register
2908 allocation, and there are unlikely to be very many
2909 nops then anyway. */
2910 if (reload_completed)
2911 {
2912 PUT_CODE (insn, NOTE);
2913 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2914 NOTE_SOURCE_FILE (insn) = 0;
2915 }
ca545bb5 2916 }
d3a923ee 2917 else
ca545bb5 2918 {
d3a923ee
RH
2919 /* Split insns here to get max fine-grain parallelism. */
2920 rtx first = PREV_INSN (insn);
2921 rtx last = try_split (PATTERN (insn), insn, 1);
2922
2923 if (last != insn)
ca545bb5 2924 {
d3a923ee
RH
2925 SET_BIT (blocks, i);
2926 changed = 1;
2927
2928 /* try_split returns the NOTE that INSN became. */
d3a923ee
RH
2929 PUT_CODE (insn, NOTE);
2930 NOTE_SOURCE_FILE (insn) = 0;
2931 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
2932
1d766db0
BS
2933 /* ??? Coddle to md files that generate subregs in post-
2934 reload splitters instead of computing the proper
2935 hard register. */
2936 if (reload_completed && first != last)
2937 {
2938 first = NEXT_INSN (first);
2939 while (1)
2940 {
2941 if (INSN_P (first))
2942 cleanup_subreg_operands (first);
2943 if (first == last)
2944 break;
2945 first = NEXT_INSN (first);
2946 }
2947 }
2948
d3a923ee
RH
2949 if (insn == bb->end)
2950 {
2951 bb->end = last;
2952 break;
2953 }
ca545bb5
BM
2954 }
2955 }
d3a923ee
RH
2956
2957 if (insn == bb->end)
2958 break;
ca545bb5
BM
2959 }
2960
d3a923ee
RH
2961 /* ??? When we're called from just after reload, the CFG is in bad
2962 shape, and we may have fallen off the end. This could be fixed
2963 by having reload not try to delete unreachable code. Otherwise
2964 assert we found the end insn. */
2965 if (insn == NULL && upd_life)
2966 abort ();
ca545bb5 2967 }
d3a923ee
RH
2968
2969 if (changed && upd_life)
2970 {
c88e8206 2971 compute_bb_for_insn (get_max_uid ());
d3a923ee 2972 count_or_remove_death_notes (blocks, 1);
49c3bb12 2973 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
d3a923ee
RH
2974 }
2975
2976 sbitmap_free (blocks);
ca545bb5 2977}
ede7cd44
RH
2978\f
2979#ifdef HAVE_peephole2
23280139
RH
2980struct peep2_insn_data
2981{
2982 rtx insn;
2983 regset live_before;
2984};
2985
2986static struct peep2_insn_data peep2_insn_data[MAX_INSNS_PER_PEEP2 + 1];
2987static int peep2_current;
2988
2989/* A non-insn marker indicating the last insn of the block.
2990 The live_before regset for this element is correct, indicating
2991 global_live_at_end for the block. */
2992#define PEEP2_EOB pc_rtx
2993
2994/* Return the Nth non-note insn after `current', or return NULL_RTX if it
2995 does not exist. Used by the recognizer to find the next insn to match
2996 in a multi-insn pattern. */
d3a923ee 2997
ede7cd44 2998rtx
23280139 2999peep2_next_insn (n)
ede7cd44
RH
3000 int n;
3001{
23280139
RH
3002 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3003 abort ();
3004
3005 n += peep2_current;
3006 if (n >= MAX_INSNS_PER_PEEP2 + 1)
3007 n -= MAX_INSNS_PER_PEEP2 + 1;
3008
3009 if (peep2_insn_data[n].insn == PEEP2_EOB)
3010 return NULL_RTX;
3011 return peep2_insn_data[n].insn;
3012}
3013
3014/* Return true if REGNO is dead before the Nth non-note insn
3015 after `current'. */
3016
3017int
3018peep2_regno_dead_p (ofs, regno)
3019 int ofs;
3020 int regno;
3021{
3022 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
3023 abort ();
3024
3025 ofs += peep2_current;
3026 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
3027 ofs -= MAX_INSNS_PER_PEEP2 + 1;
3028
3029 if (peep2_insn_data[ofs].insn == NULL_RTX)
3030 abort ();
3031
3032 return ! REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno);
3033}
3034
3035/* Similarly for a REG. */
3036
3037int
3038peep2_reg_dead_p (ofs, reg)
3039 int ofs;
3040 rtx reg;
3041{
3042 int regno, n;
3043
3044 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
3045 abort ();
3046
3047 ofs += peep2_current;
3048 if (ofs >= MAX_INSNS_PER_PEEP2 + 1)
3049 ofs -= MAX_INSNS_PER_PEEP2 + 1;
3050
3051 if (peep2_insn_data[ofs].insn == NULL_RTX)
3052 abort ();
3053
3054 regno = REGNO (reg);
3055 n = HARD_REGNO_NREGS (regno, GET_MODE (reg));
3056 while (--n >= 0)
3057 if (REGNO_REG_SET_P (peep2_insn_data[ofs].live_before, regno + n))
3058 return 0;
3059 return 1;
3060}
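
These liveness predicates are meant to be called from define_peephole2 conditions in a machine description; the sketch below shows such a condition as it would look once expanded into C, with a hypothetical helper name.

static int
scratch_dies_after_pair (operands)
     rtx *operands;
{
  /* Allow the replacement only if operand 0 is dead once the two
     matched insns have been passed.  */
  return peep2_reg_dead_p (2, operands[0]);
}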
3061
3062/* Try to find a hard register of mode MODE, matching the register class in
3063 CLASS_STR, which is available at the beginning of insn CURRENT_INSN and
3064 remains available until the end of LAST_INSN. LAST_INSN may be NULL_RTX,
3065 in which case the only condition is that the register must be available
3066 before CURRENT_INSN.
3067 Registers that already have bits set in REG_SET will not be considered.
3068
3069 If an appropriate register is available, it will be returned and the
3070 corresponding bit(s) in REG_SET will be set; otherwise, NULL_RTX is
3071 returned. */
3072
3073rtx
3074peep2_find_free_register (from, to, class_str, mode, reg_set)
3075 int from, to;
3076 const char *class_str;
3077 enum machine_mode mode;
3078 HARD_REG_SET *reg_set;
3079{
3080 static int search_ofs;
3081 enum reg_class class;
3082 HARD_REG_SET live;
3083 int i;
3084
3085 if (from >= MAX_INSNS_PER_PEEP2 + 1 || to >= MAX_INSNS_PER_PEEP2 + 1)
3086 abort ();
3087
3088 from += peep2_current;
3089 if (from >= MAX_INSNS_PER_PEEP2 + 1)
3090 from -= MAX_INSNS_PER_PEEP2 + 1;
3091 to += peep2_current;
3092 if (to >= MAX_INSNS_PER_PEEP2 + 1)
3093 to -= MAX_INSNS_PER_PEEP2 + 1;
3094
3095 if (peep2_insn_data[from].insn == NULL_RTX)
3096 abort ();
3097 REG_SET_TO_HARD_REG_SET (live, peep2_insn_data[from].live_before);
3098
3099 while (from != to)
ede7cd44 3100 {
23280139
RH
3101 HARD_REG_SET this_live;
3102
3103 if (++from >= MAX_INSNS_PER_PEEP2 + 1)
3104 from = 0;
3105 if (peep2_insn_data[from].insn == NULL_RTX)
3106 abort ();
3107 REG_SET_TO_HARD_REG_SET (this_live, peep2_insn_data[from].live_before);
3108 IOR_HARD_REG_SET (live, this_live);
3109 }
3110
3111 class = (class_str[0] == 'r' ? GENERAL_REGS
3112 : REG_CLASS_FROM_LETTER (class_str[0]));
3113
3114 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
3115 {
3116 int raw_regno, regno, success, j;
3117
3118 /* Distribute the free registers as much as possible. */
3119 raw_regno = search_ofs + i;
3120 if (raw_regno >= FIRST_PSEUDO_REGISTER)
3121 raw_regno -= FIRST_PSEUDO_REGISTER;
3122#ifdef REG_ALLOC_ORDER
3123 regno = reg_alloc_order[raw_regno];
3124#else
3125 regno = raw_regno;
3126#endif
3127
3128 /* Don't allocate fixed registers. */
3129 if (fixed_regs[regno])
3130 continue;
3131 /* Make sure the register is of the right class. */
3132 if (! TEST_HARD_REG_BIT (reg_class_contents[class], regno))
3133 continue;
3134 /* And can support the mode we need. */
3135 if (! HARD_REGNO_MODE_OK (regno, mode))
3136 continue;
3137 /* And that we don't create an extra save/restore. */
3138 if (! call_used_regs[regno] && ! regs_ever_live[regno])
3139 continue;
3140 /* And we don't clobber traceback for noreturn functions. */
3141 if ((regno == FRAME_POINTER_REGNUM || regno == HARD_FRAME_POINTER_REGNUM)
3142 && (! reload_completed || frame_pointer_needed))
3143 continue;
3144
3145 success = 1;
3146 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3147 {
3148 if (TEST_HARD_REG_BIT (*reg_set, regno + j)
3149 || TEST_HARD_REG_BIT (live, regno + j))
3150 {
3151 success = 0;
3152 break;
3153 }
3154 }
3155 if (success)
d3a923ee 3156 {
23280139
RH
3157 for (j = HARD_REGNO_NREGS (regno, mode) - 1; j >= 0; j--)
3158 SET_HARD_REG_BIT (*reg_set, regno + j);
ede7cd44 3159
23280139
RH
3160 /* Start the next search with the next register. */
3161 if (++raw_regno >= FIRST_PSEUDO_REGISTER)
3162 raw_regno = 0;
3163 search_ofs = raw_regno;
ede7cd44 3164
23280139 3165 return gen_rtx_REG (mode, regno);
d3a923ee 3166 }
ede7cd44
RH
3167 }
3168
23280139
RH
3169 search_ofs = 0;
3170 return NULL_RTX;
ede7cd44
RH
3171}
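
A hedged sketch of a typical request from a peephole2 pattern: a word-mode scratch in class "r" that stays free across both matched insns; the helper name is hypothetical.

static rtx
get_peep2_scratch ()
{
  HARD_REG_SET used;

  CLEAR_HARD_REG_SET (used);
  return peep2_find_free_register (0, 1, "r", word_mode, &used);
}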
3172
3173/* Perform the peephole2 optimization pass. */
23280139 3174
ede7cd44
RH
3175void
3176peephole2_optimize (dump_file)
3177 FILE *dump_file ATTRIBUTE_UNUSED;
3178{
23280139 3179 regset_head rs_heads[MAX_INSNS_PER_PEEP2 + 2];
d3a923ee 3180 rtx insn, prev;
23280139
RH
3181 regset live;
3182 int i, b;
3183#ifdef HAVE_conditional_execution
d3a923ee 3184 sbitmap blocks;
23280139
RH
3185 int changed;
3186#endif
ede7cd44 3187
23280139
RH
3188 /* Initialize the regsets we're going to use. */
3189 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3190 peep2_insn_data[i].live_before = INITIALIZE_REG_SET (rs_heads[i]);
3191 live = INITIALIZE_REG_SET (rs_heads[i]);
ede7cd44 3192
23280139 3193#ifdef HAVE_conditional_execution
d3a923ee
RH
3194 blocks = sbitmap_alloc (n_basic_blocks);
3195 sbitmap_zero (blocks);
3196 changed = 0;
23280139
RH
3197#else
3198 count_or_remove_death_notes (NULL, 1);
3199#endif
d3a923ee 3200
23280139 3201 for (b = n_basic_blocks - 1; b >= 0; --b)
ede7cd44 3202 {
23280139
RH
3203 basic_block bb = BASIC_BLOCK (b);
3204 struct propagate_block_info *pbi;
3205
3206 /* Indicate that all slots except the last hold invalid data. */
3207 for (i = 0; i < MAX_INSNS_PER_PEEP2; ++i)
3208 peep2_insn_data[i].insn = NULL_RTX;
3209
3210 /* Indicate that the last slot contains live_after data. */
3211 peep2_insn_data[MAX_INSNS_PER_PEEP2].insn = PEEP2_EOB;
3212 peep2_current = MAX_INSNS_PER_PEEP2;
d3a923ee 3213
23280139
RH
3214 /* Start up propagation. */
3215 COPY_REG_SET (live, bb->global_live_at_end);
3216 COPY_REG_SET (peep2_insn_data[MAX_INSNS_PER_PEEP2].live_before, live);
3217
3218#ifdef HAVE_conditional_execution
7dfc0fbe 3219 pbi = init_propagate_block_info (bb, live, NULL, NULL, 0);
23280139 3220#else
7dfc0fbe 3221 pbi = init_propagate_block_info (bb, live, NULL, NULL, PROP_DEATH_NOTES);
23280139 3222#endif
ede7cd44 3223
d3a923ee
RH
3224 for (insn = bb->end; ; insn = prev)
3225 {
3226 prev = PREV_INSN (insn);
88741818 3227 if (INSN_P (insn))
ede7cd44 3228 {
23280139
RH
3229 rtx try;
3230 int match_len;
3231
3232 /* Record this insn. */
3233 if (--peep2_current < 0)
3234 peep2_current = MAX_INSNS_PER_PEEP2;
3235 peep2_insn_data[peep2_current].insn = insn;
3236 propagate_one_insn (pbi, insn);
3237 COPY_REG_SET (peep2_insn_data[peep2_current].live_before, live);
3238
3239 /* Match the peephole. */
3240 try = peephole2_insns (PATTERN (insn), insn, &match_len);
d3a923ee
RH
3241 if (try != NULL)
3242 {
23280139
RH
3243 i = match_len + peep2_current;
3244 if (i >= MAX_INSNS_PER_PEEP2 + 1)
3245 i -= MAX_INSNS_PER_PEEP2 + 1;
3246
3247 /* Replace the old sequence with the new. */
3248 flow_delete_insn_chain (insn, peep2_insn_data[i].insn);
d3a923ee
RH
3249 try = emit_insn_after (try, prev);
3250
23280139
RH
3251 /* Adjust the basic block boundaries. */
3252 if (peep2_insn_data[i].insn == bb->end)
d3a923ee
RH
3253 bb->end = try;
3254 if (insn == bb->head)
3255 bb->head = NEXT_INSN (prev);
3256
23280139
RH
3257#ifdef HAVE_conditional_execution
3258 /* With conditional execution, we cannot back up the
3259 live information so easily, since the conditional
3260 death data structures are not so self-contained.
3261 So record that we've made a modification to this
3262 block and update life information at the end. */
3263 SET_BIT (blocks, b);
d3a923ee 3264 changed = 1;
23280139
RH
3265
3266 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3267 peep2_insn_data[i].insn = NULL_RTX;
3268 peep2_insn_data[peep2_current].insn = PEEP2_EOB;
3269#else
3270 /* Back up lifetime information past the end of the
3271 newly created sequence. */
3272 if (++i >= MAX_INSNS_PER_PEEP2 + 1)
3273 i = 0;
3274 COPY_REG_SET (live, peep2_insn_data[i].live_before);
3275
3276 /* Update life information for the new sequence. */
3277 do
3278 {
3279 if (INSN_P (try))
3280 {
3281 if (--i < 0)
3282 i = MAX_INSNS_PER_PEEP2;
3283 peep2_insn_data[i].insn = try;
3284 propagate_one_insn (pbi, try);
3285 COPY_REG_SET (peep2_insn_data[i].live_before, live);
3286 }
3287 try = PREV_INSN (try);
3288 }
3289 while (try != prev);
3290
3291 /* ??? Should verify that LIVE now matches what we
3292 had before the new sequence. */
3293
3294 peep2_current = i;
3295#endif
d3a923ee 3296 }
ede7cd44 3297 }
d3a923ee
RH
3298
3299 if (insn == bb->head)
3300 break;
ede7cd44 3301 }
23280139
RH
3302
3303 free_propagate_block_info (pbi);
ede7cd44
RH
3304 }
3305
23280139
RH
3306 for (i = 0; i < MAX_INSNS_PER_PEEP2 + 1; ++i)
3307 FREE_REG_SET (peep2_insn_data[i].live_before);
3308 FREE_REG_SET (live);
d3a923ee 3309
23280139 3310#ifdef HAVE_conditional_execution
d3a923ee 3311 count_or_remove_death_notes (blocks, 1);
49c3bb12 3312 update_life_info (blocks, UPDATE_LIFE_LOCAL, PROP_DEATH_NOTES);
23280139 3313 sbitmap_free (blocks);
ede7cd44 3314#endif
23280139
RH
3315}
3316#endif /* HAVE_peephole2 */