1 /* Optimize by combining instructions for GNU compiler.
2 Copyright (C) 1987, 1988, 1992 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
21 /* This module is essentially the "combiner" phase of the U. of Arizona
22 Portable Optimizer, but redone to work on our list-structured
23 representation for RTL instead of their string representation.
25 The LOG_LINKS of each insn identify the most recent assignment
26 to each REG used in the insn. It is a list of previous insns,
27 each of which contains a SET for a REG that is used in this insn
28 and not used or set in between. LOG_LINKs never cross basic blocks.
29 They were set up by the preceding pass (lifetime analysis).
31 We try to combine each pair of insns joined by a logical link.
32 We also try to combine triples of insns A, B and C when
33 C has a link back to B and B has a link back to A.
35 LOG_LINKS does not have links for use of the CC0. They don't
36 need to, because the insn that sets the CC0 is always immediately
37 before the insn that tests it. So we always regard a branch
38 insn as having a logical link to the preceding insn. The same is true
39 for an insn explicitly using CC0.
41 We check (with use_crosses_set_p) to avoid combining in such a way
42 as to move a computation to a place where its value would be different.
44 Combination is done by mathematically substituting the previous
45 insn(s) values for the regs they set into the expressions in
46 the later insns that refer to these regs. If the result is a valid insn
47 for our target machine, according to the machine description,
48 we install it, delete the earlier insns, and update the data flow
49 information (LOG_LINKS and REG_NOTES) for what we did.
51 There are a few exceptions where the dataflow information created by
52 flow.c aren't completely updated:
54 - reg_live_length is not updated
55 - reg_n_refs is not adjusted in the rare case when a register is
56 no longer required in a computation
- there are extremely rare cases (see distribute_regnotes) when a
REG_DEAD note is lost
- a LOG_LINKS entry that refers to an insn with multiple SETs may be
removed because there is no way to know which register it was
linking to
63 To simplify substitution, we combine only when the earlier insn(s)
64 consist of only a single assignment. To simplify updating afterward,
65 we never combine when a subroutine call appears in the middle.
67 Since we do not represent assignments to CC0 explicitly except when that
68 is all an insn does, there is no LOG_LINKS entry in an insn that uses
69 the condition code for the insn that set the condition code.
70 Fortunately, these two insns must be consecutive.
71 Therefore, every JUMP_INSN is taken to have an implicit logical link
72 to the preceding insn. This is not quite right, since non-jumps can
also use the condition code; but in practice such insns would not
combine anyway.
84 #include "basic-block.h"
85 #include "insn-config.h"
86 #include "insn-flags.h"
87 #include "insn-codes.h"
88 #include "insn-attr.h"
92 /* It is not safe to use ordinary gen_lowpart in combine.
93 Use gen_lowpart_for_combine instead. See comments there. */
94 #define gen_lowpart dont_use_gen_lowpart_you_dummy
96 /* Number of attempts to combine instructions in this function. */
98 static int combine_attempts
;
100 /* Number of attempts that got as far as substitution in this function. */
102 static int combine_merges
;
104 /* Number of instructions combined with added SETs in this function. */
106 static int combine_extras
;
108 /* Number of instructions combined in this function. */
110 static int combine_successes
;
112 /* Totals over entire compilation. */
114 static int total_attempts
, total_merges
, total_extras
, total_successes
;
116 /* Vector mapping INSN_UIDs to cuids.
117 The cuids are like uids but increase monotonically always.
118 Combine always uses cuids so that it can compare them.
119 But actually renumbering the uids, which we used to do,
120 proves to be a bad idea because it makes it hard to compare
121 the dumps produced by earlier passes with those from later passes. */
123 static int *uid_cuid
;
125 /* Get the cuid of an insn. */
127 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
129 /* Maximum register number, which is the size of the tables below. */
131 static int combine_max_regno
;
133 /* Record last point of death of (hard or pseudo) register n. */
135 static rtx
*reg_last_death
;
137 /* Record last point of modification of (hard or pseudo) register n. */
139 static rtx
*reg_last_set
;
141 /* Record the cuid of the last insn that invalidated memory
142 (anything that writes memory, and subroutine calls, but not pushes). */
144 static int mem_last_set
;
146 /* Record the cuid of the last CALL_INSN
147 so we can tell whether a potential combination crosses any calls. */
149 static int last_call_cuid
;
151 /* When `subst' is called, this is the insn that is being modified
152 (by combining in a previous insn). The PATTERN of this insn
153 is still the old pattern partially modified and it should not be
154 looked at, but this may be used to examine the successors of the insn
155 to judge whether a simplification is valid. */
157 static rtx subst_insn
;
159 /* This is the lowest CUID that `subst' is currently dealing with.
160 get_last_value will not return a value if the register was set at or
161 after this CUID. If not for this mechanism, we could get confused if
162 I2 or I1 in try_combine were an insn that used the old value of a register
163 to obtain a new value. In that case, we might erroneously get the
164 new value of the register when we wanted the old one. */
166 static int subst_low_cuid
;
168 /* This is the value of undobuf.num_undo when we started processing this
169 substitution. This will prevent gen_rtx_combine from re-used a piece
170 from the previous expression. Doing so can produce circular rtl
173 static int previous_num_undos
;
175 /* The next group of arrays allows the recording of the last value assigned
176 to (hard or pseudo) register n. We use this information to see if a
177 operation being processed is redundant given a prior operation performed
178 on the register. For example, an `and' with a constant is redundant if
179 all the zero bits are already known to be turned off.
181 We use an approach similar to that used by cse, but change it in the
184 (1) We do not want to reinitialize at each label.
185 (2) It is useful, but not critical, to know the actual value assigned
186 to a register. Often just its form is helpful.
188 Therefore, we maintain the following arrays:
190 reg_last_set_value the last value assigned
191 reg_last_set_label records the value of label_tick when the
192 register was assigned
193 reg_last_set_table_tick records the value of label_tick when a
194 value using the register is assigned
195 reg_last_set_invalid set to non-zero when it is not valid
196 to use the value of this register in some
199 To understand the usage of these tables, it is important to understand
200 the distinction between the value in reg_last_set_value being valid
201 and the register being validly contained in some other expression in the
204 Entry I in reg_last_set_value is valid if it is non-zero, and either
205 reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.
207 Register I may validly appear in any expression returned for the value
208 of another register if reg_n_sets[i] is 1. It may also appear in the
209 value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
210 reg_last_set_invalid[j] is zero.
212 If an expression is found in the table containing a register which may
213 not validly appear in an expression, the register is replaced by
214 something that won't match, (clobber (const_int 0)).
216 reg_last_set_invalid[i] is set non-zero when register I is being assigned
217 to and reg_last_set_table_tick[i] == label_tick. */
219 /* Record last value assigned to (hard or pseudo) register n. */
221 static rtx
*reg_last_set_value
;
223 /* Record the value of label_tick when the value for register n is placed in
224 reg_last_set_value[n]. */
226 static short *reg_last_set_label
;
228 /* Record the value of label_tick when an expression involving register n
229 is placed in reg_last_set_value. */
231 static short *reg_last_set_table_tick
;
233 /* Set non-zero if references to register n in expressions should not be
236 static char *reg_last_set_invalid
;
238 /* Incremented for each label. */
240 static short label_tick
;
242 /* Some registers that are set more than once and used in more than one
243 basic block are nevertheless always set in similar ways. For example,
244 a QImode register may be loaded from memory in two places on a machine
245 where byte loads zero extend.
247 We record in the following array what we know about the significant
248 bits of a register, specifically which bits are known to be zero.
250 If an entry is zero, it means that we don't know anything special. */
252 static HOST_WIDE_INT
*reg_significant
;
254 /* Mode used to compute significance in reg_significant. It is the largest
255 integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
257 static enum machine_mode significant_mode
;
259 /* Nonzero if we know that a register has some leading bits that are always
260 equal to the sign bit. */
262 static char *reg_sign_bit_copies
;
264 /* Nonzero when reg_significant and reg_sign_bit_copies can be safely used.
265 It is zero while computing them. This prevents propagating values based
266 on previously set values, which can be incorrect if a variable
267 is modified in a loop. */
269 static int significant_valid
;
271 /* Record one modification to rtl structure
272 to be undone by storing old_contents into *where.
273 is_int is 1 if the contents are an int. */
289 /* Record a bunch of changes to be undone, up to MAX_UNDO of them.
290 num_undo says how many are currently recorded.
292 storage is nonzero if we must undo the allocation of new storage.
293 The value of storage is what to pass to obfree.
295 other_insn is nonzero if we have modified some other insn in the process
296 of working on subst_insn. It must be verified too. */
304 struct undo undo
[MAX_UNDO
];
308 static struct undobuf undobuf
;
310 /* Substitute NEWVAL, an rtx expression, into INTO, a place in some
311 insn. The substitution can be undone by undo_all. If INTO is already
312 set to NEWVAL, do not record this change. Because computing NEWVAL might
313 also call SUBST, we have to compute it before we put anything into
316 #define SUBST(INTO, NEWVAL) \
317 do { rtx _new = (NEWVAL); \
318 if (undobuf.num_undo < MAX_UNDO) \
320 undobuf.undo[undobuf.num_undo].where = &INTO; \
321 undobuf.undo[undobuf.num_undo].old_contents = INTO; \
322 undobuf.undo[undobuf.num_undo].is_int = 0; \
324 if (undobuf.undo[undobuf.num_undo].old_contents != INTO) \
325 undobuf.num_undo++; \
329 /* Similar to SUBST, but NEWVAL is an int. INTO will normally be an XINT
331 Note that substitution for the value of a CONST_INT is not safe. */
333 #define SUBST_INT(INTO, NEWVAL) \
334 do { if (undobuf.num_undo < MAX_UNDO) \
337 = (struct undo_int *)&undobuf.undo[undobuf.num_undo]; \
338 u->where = (int *) &INTO; \
339 u->old_contents = INTO; \
342 if (u->old_contents != INTO) \
343 undobuf.num_undo++; \
347 /* Number of times the pseudo being substituted for
348 was found and replaced. */
350 static int n_occurrences
;
352 static void set_significant ();
353 static void move_deaths ();
355 static void record_value_for_reg ();
356 static void record_dead_and_set_regs ();
357 static int use_crosses_set_p ();
358 static rtx
try_combine ();
359 static rtx
*find_split_point ();
361 static void undo_all ();
362 static int reg_dead_at_p ();
363 static rtx
expand_compound_operation ();
364 static rtx
expand_field_assignment ();
365 static rtx
make_extraction ();
366 static int get_pos_from_mask ();
367 static rtx
force_to_mode ();
368 static rtx
make_field_assignment ();
369 static rtx
make_compound_operation ();
370 static rtx
apply_distributive_law ();
371 static rtx
simplify_and_const_int ();
372 static unsigned HOST_WIDE_INT
significant_bits ();
373 static int num_sign_bit_copies ();
374 static int merge_outer_ops ();
375 static rtx
simplify_shift_const ();
376 static int recog_for_combine ();
377 static rtx
gen_lowpart_for_combine ();
378 static rtx
gen_rtx_combine ();
379 static rtx
gen_binary ();
380 static rtx
gen_unary ();
381 static enum rtx_code
simplify_comparison ();
382 static int reversible_comparison_p ();
383 static int get_last_value_validate ();
384 static rtx
get_last_value ();
385 static void distribute_notes ();
386 static void distribute_links ();
388 /* Main entry point for combiner. F is the first insn of the function.
389 NREGS is the first unused pseudo-reg number. */
392 combine_instructions (f
, nregs
)
396 register rtx insn
, next
, prev
;
398 register rtx links
, nextlinks
;
400 combine_attempts
= 0;
403 combine_successes
= 0;
405 combine_max_regno
= nregs
;
407 reg_last_death
= (rtx
*) alloca (nregs
* sizeof (rtx
));
408 reg_last_set
= (rtx
*) alloca (nregs
* sizeof (rtx
));
409 reg_last_set_value
= (rtx
*) alloca (nregs
* sizeof (rtx
));
410 reg_last_set_table_tick
= (short *) alloca (nregs
* sizeof (short));
411 reg_last_set_label
= (short *) alloca (nregs
* sizeof (short));
412 reg_last_set_invalid
= (char *) alloca (nregs
* sizeof (char));
413 reg_significant
= (HOST_WIDE_INT
*) alloca (nregs
* sizeof (HOST_WIDE_INT
));
414 reg_sign_bit_copies
= (char *) alloca (nregs
* sizeof (char));
416 bzero (reg_last_death
, nregs
* sizeof (rtx
));
417 bzero (reg_last_set
, nregs
* sizeof (rtx
));
418 bzero (reg_last_set_value
, nregs
* sizeof (rtx
));
419 bzero (reg_last_set_table_tick
, nregs
* sizeof (short));
420 bzero (reg_last_set_invalid
, nregs
* sizeof (char));
421 bzero (reg_significant
, nregs
* sizeof (HOST_WIDE_INT
));
422 bzero (reg_sign_bit_copies
, nregs
* sizeof (char));
424 init_recog_no_volatile ();
426 /* Compute maximum uid value so uid_cuid can be allocated. */
428 for (insn
= f
, i
= 0; insn
; insn
= NEXT_INSN (insn
))
429 if (INSN_UID (insn
) > i
)
432 uid_cuid
= (int *) alloca ((i
+ 1) * sizeof (int));
434 significant_mode
= mode_for_size (HOST_BITS_PER_WIDE_INT
, MODE_INT
, 0);
436 /* Don't use reg_significant when computing it. This can cause problems
437 when, for example, we have j <<= 1 in a loop. */
439 significant_valid
= 0;
441 /* Compute the mapping from uids to cuids.
442 Cuids are numbers assigned to insns, like uids,
443 except that cuids increase monotonically through the code.
445 Scan all SETs and see if we can deduce anything about what
446 bits are significant for some registers. */
448 for (insn
= f
, i
= 0; insn
; insn
= NEXT_INSN (insn
))
450 INSN_CUID (insn
) = ++i
;
451 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
452 note_stores (PATTERN (insn
), set_significant
);
455 significant_valid
= 1;
457 /* Now scan all the insns in forward order. */
463 for (insn
= f
; insn
; insn
= next
? next
: NEXT_INSN (insn
))
467 if (GET_CODE (insn
) == CODE_LABEL
)
470 else if (GET_CODE (insn
) == INSN
471 || GET_CODE (insn
) == CALL_INSN
472 || GET_CODE (insn
) == JUMP_INSN
)
474 /* Try this insn with each insn it links back to. */
476 for (links
= LOG_LINKS (insn
); links
; links
= XEXP (links
, 1))
477 if ((next
= try_combine (insn
, XEXP (links
, 0), NULL_RTX
)) != 0)
480 /* Try each sequence of three linked insns ending with this one. */
482 for (links
= LOG_LINKS (insn
); links
; links
= XEXP (links
, 1))
483 for (nextlinks
= LOG_LINKS (XEXP (links
, 0)); nextlinks
;
484 nextlinks
= XEXP (nextlinks
, 1))
485 if ((next
= try_combine (insn
, XEXP (links
, 0),
486 XEXP (nextlinks
, 0))) != 0)
490 /* Try to combine a jump insn that uses CC0
491 with a preceding insn that sets CC0, and maybe with its
492 logical predecessor as well.
493 This is how we make decrement-and-branch insns.
494 We need this special code because data flow connections
495 via CC0 do not get entered in LOG_LINKS. */
497 if (GET_CODE (insn
) == JUMP_INSN
498 && (prev
= prev_nonnote_insn (insn
)) != 0
499 && GET_CODE (prev
) == INSN
500 && sets_cc0_p (PATTERN (prev
)))
502 if ((next
= try_combine (insn
, prev
, NULL_RTX
)) != 0)
505 for (nextlinks
= LOG_LINKS (prev
); nextlinks
;
506 nextlinks
= XEXP (nextlinks
, 1))
507 if ((next
= try_combine (insn
, prev
,
508 XEXP (nextlinks
, 0))) != 0)
512 /* Do the same for an insn that explicitly references CC0. */
513 if (GET_CODE (insn
) == INSN
514 && (prev
= prev_nonnote_insn (insn
)) != 0
515 && GET_CODE (prev
) == INSN
516 && sets_cc0_p (PATTERN (prev
))
517 && GET_CODE (PATTERN (insn
)) == SET
518 && reg_mentioned_p (cc0_rtx
, SET_SRC (PATTERN (insn
))))
520 if ((next
= try_combine (insn
, prev
, NULL_RTX
)) != 0)
523 for (nextlinks
= LOG_LINKS (prev
); nextlinks
;
524 nextlinks
= XEXP (nextlinks
, 1))
525 if ((next
= try_combine (insn
, prev
,
526 XEXP (nextlinks
, 0))) != 0)
530 /* Finally, see if any of the insns that this insn links to
531 explicitly references CC0. If so, try this insn, that insn,
532 and its predecessor if it sets CC0. */
533 for (links
= LOG_LINKS (insn
); links
; links
= XEXP (links
, 1))
534 if (GET_CODE (XEXP (links
, 0)) == INSN
535 && GET_CODE (PATTERN (XEXP (links
, 0))) == SET
536 && reg_mentioned_p (cc0_rtx
, SET_SRC (PATTERN (XEXP (links
, 0))))
537 && (prev
= prev_nonnote_insn (XEXP (links
, 0))) != 0
538 && GET_CODE (prev
) == INSN
539 && sets_cc0_p (PATTERN (prev
))
540 && (next
= try_combine (insn
, XEXP (links
, 0), prev
)) != 0)
544 /* Try combining an insn with two different insns whose results it
546 for (links
= LOG_LINKS (insn
); links
; links
= XEXP (links
, 1))
547 for (nextlinks
= XEXP (links
, 1); nextlinks
;
548 nextlinks
= XEXP (nextlinks
, 1))
549 if ((next
= try_combine (insn
, XEXP (links
, 0),
550 XEXP (nextlinks
, 0))) != 0)
553 if (GET_CODE (insn
) != NOTE
)
554 record_dead_and_set_regs (insn
);
561 total_attempts
+= combine_attempts
;
562 total_merges
+= combine_merges
;
563 total_extras
+= combine_extras
;
564 total_successes
+= combine_successes
;
567 /* Called via note_stores. If X is a pseudo that is used in more than
568 one basic block, is narrower that HOST_BITS_PER_WIDE_INT, and is being
569 set, record what bits are significant. If we are clobbering X,
570 ignore this "set" because the clobbered value won't be used.
572 If we are setting only a portion of X and we can't figure out what
573 portion, assume all bits will be used since we don't know what will
576 Similarly, set how many bits of X are known to be copies of the sign bit
577 at all locations in the function. This is the smallest number implied
581 set_significant (x
, set
)
587 if (GET_CODE (x
) == REG
588 && REGNO (x
) >= FIRST_PSEUDO_REGISTER
589 && reg_n_sets
[REGNO (x
)] > 1
590 && reg_basic_block
[REGNO (x
)] < 0
591 && GET_MODE_BITSIZE (GET_MODE (x
)) <= HOST_BITS_PER_WIDE_INT
)
593 if (GET_CODE (set
) == CLOBBER
)
596 /* If this is a complex assignment, see if we can convert it into a
597 simple assignment. */
598 set
= expand_field_assignment (set
);
599 if (SET_DEST (set
) == x
)
601 reg_significant
[REGNO (x
)]
602 |= significant_bits (SET_SRC (set
), significant_mode
);
603 num
= num_sign_bit_copies (SET_SRC (set
), GET_MODE (x
));
604 if (reg_sign_bit_copies
[REGNO (x
)] == 0
605 || reg_sign_bit_copies
[REGNO (x
)] > num
)
606 reg_sign_bit_copies
[REGNO (x
)] = num
;
610 reg_significant
[REGNO (x
)] = GET_MODE_MASK (GET_MODE (x
));
611 reg_sign_bit_copies
[REGNO (x
)] = 0;
616 /* See if INSN can be combined into I3. PRED and SUCC are optionally
617 insns that were previously combined into I3 or that will be combined
618 into the merger of INSN and I3.
620 Return 0 if the combination is not allowed for any reason.
622 If the combination is allowed, *PDEST will be set to the single
623 destination of INSN and *PSRC to the single source, and this function
627 can_combine_p (insn
, i3
, pred
, succ
, pdest
, psrc
)
634 rtx set
= 0, src
, dest
;
636 int all_adjacent
= (succ
? (next_active_insn (insn
) == succ
637 && next_active_insn (succ
) == i3
)
638 : next_active_insn (insn
) == i3
);
640 /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0.
641 or a PARALLEL consisting of such a SET and CLOBBERs.
643 If INSN has CLOBBER parallel parts, ignore them for our processing.
644 By definition, these happen during the execution of the insn. When it
645 is merged with another insn, all bets are off. If they are, in fact,
646 needed and aren't also supplied in I3, they may be added by
647 recog_for_combine. Otherwise, it won't match.
649 We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
652 Get the source and destination of INSN. If more than one, can't
655 if (GET_CODE (PATTERN (insn
)) == SET
)
656 set
= PATTERN (insn
);
657 else if (GET_CODE (PATTERN (insn
)) == PARALLEL
658 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
660 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
662 rtx elt
= XVECEXP (PATTERN (insn
), 0, i
);
664 switch (GET_CODE (elt
))
666 /* We can ignore CLOBBERs. */
671 /* Ignore SETs whose result isn't used but not those that
672 have side-effects. */
673 if (find_reg_note (insn
, REG_UNUSED
, SET_DEST (elt
))
674 && ! side_effects_p (elt
))
677 /* If we have already found a SET, this is a second one and
678 so we cannot combine with this insn. */
686 /* Anything else means we can't combine. */
692 /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
693 so don't do anything with it. */
694 || GET_CODE (SET_SRC (set
)) == ASM_OPERANDS
)
703 set
= expand_field_assignment (set
);
704 src
= SET_SRC (set
), dest
= SET_DEST (set
);
706 /* Don't eliminate a store in the stack pointer. */
707 if (dest
== stack_pointer_rtx
708 /* Don't install a subreg involving two modes not tieable.
709 It can worsen register allocation, and can even make invalid reload
710 insns, since the reg inside may need to be copied from in the
711 outside mode, and that may be invalid if it is an fp reg copied in
712 integer mode. As a special exception, we can allow this if
713 I3 is simply copying DEST, a REG, to CC0. */
714 || (GET_CODE (src
) == SUBREG
715 && ! MODES_TIEABLE_P (GET_MODE (src
), GET_MODE (SUBREG_REG (src
)))
717 && ! (GET_CODE (i3
) == INSN
&& GET_CODE (PATTERN (i3
)) == SET
718 && SET_DEST (PATTERN (i3
)) == cc0_rtx
719 && GET_CODE (dest
) == REG
&& dest
== SET_SRC (PATTERN (i3
)))
722 /* If we couldn't eliminate a field assignment, we can't combine. */
723 || GET_CODE (dest
) == ZERO_EXTRACT
|| GET_CODE (dest
) == STRICT_LOW_PART
724 /* Don't combine with an insn that sets a register to itself if it has
725 a REG_EQUAL note. This may be part of a REG_NO_CONFLICT sequence. */
726 || (rtx_equal_p (src
, dest
) && find_reg_note (insn
, REG_EQUAL
, NULL_RTX
))
727 /* Can't merge a function call. */
728 || GET_CODE (src
) == CALL
729 /* Don't substitute into an incremented register. */
730 || FIND_REG_INC_NOTE (i3
, dest
)
731 || (succ
&& FIND_REG_INC_NOTE (succ
, dest
))
732 /* Don't combine the end of a libcall into anything. */
733 || find_reg_note (insn
, REG_RETVAL
, NULL_RTX
)
734 /* Make sure that DEST is not used after SUCC but before I3. */
735 || (succ
&& ! all_adjacent
736 && reg_used_between_p (dest
, succ
, i3
))
737 /* Make sure that the value that is to be substituted for the register
738 does not use any registers whose values alter in between. However,
739 If the insns are adjacent, a use can't cross a set even though we
740 think it might (this can happen for a sequence of insns each setting
741 the same destination; reg_last_set of that register might point to
742 a NOTE). Also, don't move a volatile asm across any other insns. */
744 && (use_crosses_set_p (src
, INSN_CUID (insn
))
745 || (GET_CODE (src
) == ASM_OPERANDS
&& MEM_VOLATILE_P (src
))))
746 /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
747 better register allocation by not doing the combine. */
748 || find_reg_note (i3
, REG_NO_CONFLICT
, dest
)
749 || (succ
&& find_reg_note (succ
, REG_NO_CONFLICT
, dest
))
750 /* Don't combine across a CALL_INSN, because that would possibly
751 change whether the life span of some REGs crosses calls or not,
752 and it is a pain to update that information.
753 Exception: if source is a constant, moving it later can't hurt.
754 Accept that special case, because it helps -fforce-addr a lot. */
755 || (INSN_CUID (insn
) < last_call_cuid
&& ! CONSTANT_P (src
)))
758 /* DEST must either be a REG or CC0. */
759 if (GET_CODE (dest
) == REG
)
761 /* If register alignment is being enforced for multi-word items in all
762 cases except for parameters, it is possible to have a register copy
763 insn referencing a hard register that is not allowed to contain the
764 mode being copied and which would not be valid as an operand of most
765 insns. Eliminate this problem by not combining with such an insn.
767 Also, on some machines we don't want to extend the life of a hard
770 if (GET_CODE (src
) == REG
771 && ((REGNO (dest
) < FIRST_PSEUDO_REGISTER
772 && ! HARD_REGNO_MODE_OK (REGNO (dest
), GET_MODE (dest
)))
773 #ifdef SMALL_REGISTER_CLASSES
774 /* Don't extend the life of a hard register. */
775 || REGNO (src
) < FIRST_PSEUDO_REGISTER
777 || (REGNO (src
) < FIRST_PSEUDO_REGISTER
778 && ! HARD_REGNO_MODE_OK (REGNO (src
), GET_MODE (src
)))
783 else if (GET_CODE (dest
) != CC0
)
786 /* Don't substitute for a register intended as a clobberable operand. */
787 if (GET_CODE (PATTERN (i3
)) == PARALLEL
)
788 for (i
= XVECLEN (PATTERN (i3
), 0) - 1; i
>= 0; i
--)
789 if (GET_CODE (XVECEXP (PATTERN (i3
), 0, i
)) == CLOBBER
790 && rtx_equal_p (XEXP (XVECEXP (PATTERN (i3
), 0, i
), 0), dest
))
793 /* If INSN contains anything volatile, or is an `asm' (whether volatile
794 or not), reject, unless nothing volatile comes between it and I3,
795 with the exception of SUCC. */
797 if (GET_CODE (src
) == ASM_OPERANDS
|| volatile_refs_p (src
))
798 for (p
= NEXT_INSN (insn
); p
!= i3
; p
= NEXT_INSN (p
))
799 if (GET_RTX_CLASS (GET_CODE (p
)) == 'i'
800 && p
!= succ
&& volatile_refs_p (PATTERN (p
)))
803 /* If INSN or I2 contains an autoincrement or autodecrement,
804 make sure that register is not used between there and I3,
805 and not already used in I3 either.
806 Also insist that I3 not be a jump; if it were one
807 and the incremented register were spilled, we would lose. */
810 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
811 if (REG_NOTE_KIND (link
) == REG_INC
812 && (GET_CODE (i3
) == JUMP_INSN
813 || reg_used_between_p (XEXP (link
, 0), insn
, i3
)
814 || reg_overlap_mentioned_p (XEXP (link
, 0), PATTERN (i3
))))
819 /* Don't combine an insn that follows a CC0-setting insn.
820 An insn that uses CC0 must not be separated from the one that sets it.
821 We do, however, allow I2 to follow a CC0-setting insn if that insn
822 is passed as I1; in that case it will be deleted also.
823 We also allow combining in this case if all the insns are adjacent
824 because that would leave the two CC0 insns adjacent as well.
825 It would be more logical to test whether CC0 occurs inside I1 or I2,
826 but that would be much slower, and this ought to be equivalent. */
828 p
= prev_nonnote_insn (insn
);
829 if (p
&& p
!= pred
&& GET_CODE (p
) == INSN
&& sets_cc0_p (PATTERN (p
))
834 /* If we get here, we have passed all the tests and the combination is
843 /* LOC is the location within I3 that contains its pattern or the component
844 of a PARALLEL of the pattern. We validate that it is valid for combining.
846 One problem is if I3 modifies its output, as opposed to replacing it
847 entirely, we can't allow the output to contain I2DEST or I1DEST as doing
848 so would produce an insn that is not equivalent to the original insns.
852 (set (reg:DI 101) (reg:DI 100))
853 (set (subreg:SI (reg:DI 101) 0) <foo>)
855 This is NOT equivalent to:
857 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
858 (set (reg:DI 101) (reg:DI 100))])
860 Not only does this modify 100 (in which case it might still be valid
861 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
863 We can also run into a problem if I2 sets a register that I1
864 uses and I1 gets directly substituted into I3 (not via I2). In that
865 case, we would be getting the wrong value of I2DEST into I3, so we
866 must reject the combination. This case occurs when I2 and I1 both
867 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
868 If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
869 of a SET must prevent combination from occurring.
871 On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
872 if the destination of a SET is a hard register.
874 Before doing the above check, we first try to expand a field assignment
875 into a set of logical operations.
877 If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
878 we place a register that is both set and used within I3. If more than one
879 such register is detected, we fail.
881 Return 1 if the combination is valid, zero otherwise. */
884 combinable_i3pat (i3
, loc
, i2dest
, i1dest
, i1_not_in_src
, pi3dest_killed
)
894 if (GET_CODE (x
) == SET
)
896 rtx set
= expand_field_assignment (x
);
897 rtx dest
= SET_DEST (set
);
898 rtx src
= SET_SRC (set
);
899 rtx inner_dest
= dest
, inner_src
= src
;
903 while (GET_CODE (inner_dest
) == STRICT_LOW_PART
904 || GET_CODE (inner_dest
) == SUBREG
905 || GET_CODE (inner_dest
) == ZERO_EXTRACT
)
906 inner_dest
= XEXP (inner_dest
, 0);
908 /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
911 while (GET_CODE (inner_src
) == STRICT_LOW_PART
912 || GET_CODE (inner_src
) == SUBREG
913 || GET_CODE (inner_src
) == ZERO_EXTRACT
)
914 inner_src
= XEXP (inner_src
, 0);
916 /* If it is better that two different modes keep two different pseudos,
917 avoid combining them. This avoids producing the following pattern
919 (set (subreg:SI (reg/v:QI 21) 0)
920 (lshiftrt:SI (reg/v:SI 20)
922 If that were made, reload could not handle the pair of
923 reg 20/21, since it would try to get any GENERAL_REGS
924 but some of them don't handle QImode. */
926 if (rtx_equal_p (inner_src
, i2dest
)
927 && GET_CODE (inner_dest
) == REG
928 && ! MODES_TIEABLE_P (GET_MODE (i2dest
), GET_MODE (inner_dest
)))
932 /* Check for the case where I3 modifies its output, as
934 if ((inner_dest
!= dest
935 && (reg_overlap_mentioned_p (i2dest
, inner_dest
)
936 || (i1dest
&& reg_overlap_mentioned_p (i1dest
, inner_dest
))))
937 /* This is the same test done in can_combine_p except that we
938 allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
940 || (GET_CODE (inner_dest
) == REG
941 && REGNO (inner_dest
) < FIRST_PSEUDO_REGISTER
942 #ifdef SMALL_REGISTER_CLASSES
943 && GET_CODE (src
) != CALL
945 && ! HARD_REGNO_MODE_OK (REGNO (inner_dest
),
946 GET_MODE (inner_dest
))
950 || (i1_not_in_src
&& reg_overlap_mentioned_p (i1dest
, src
)))
953 /* If DEST is used in I3, it is being killed in this insn,
954 so record that for later. */
955 if (pi3dest_killed
&& GET_CODE (dest
) == REG
956 && reg_referenced_p (dest
, PATTERN (i3
)))
961 *pi3dest_killed
= dest
;
965 else if (GET_CODE (x
) == PARALLEL
)
969 for (i
= 0; i
< XVECLEN (x
, 0); i
++)
970 if (! combinable_i3pat (i3
, &XVECEXP (x
, 0, i
), i2dest
, i1dest
,
971 i1_not_in_src
, pi3dest_killed
))
978 /* Try to combine the insns I1 and I2 into I3.
979 Here I1 and I2 appear earlier than I3.
980 I1 can be zero; then we combine just I2 into I3.
982 If we are combining three insns and the resulting insn is not recognized,
983 try splitting it into two insns. If that happens, I2 and I3 are retained
984 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
987 If we created two insns, return I2; otherwise return I3.
988 Return 0 if the combination does not work. Then nothing is changed. */
991 try_combine (i3
, i2
, i1
)
992 register rtx i3
, i2
, i1
;
994 /* New patterns for I3 and I2, respectively. */
995 rtx newpat
, newi2pat
= 0;
996 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
997 int added_sets_1
, added_sets_2
;
998 /* Total number of SETs to put into I3. */
1000 /* Nonzero if I2's body now appears in I3. */
1002 /* INSN_CODEs for new I3, new I2, and user of condition code. */
1003 int insn_code_number
, i2_code_number
, other_code_number
;
1004 /* Contains I3 if the destination of I3 is used in its source, which means
1005 that the old life of I3 is being killed. If that usage is placed into
1006 I2 and not in I3, a REG_DEAD note must be made. */
1007 rtx i3dest_killed
= 0;
1008 /* SET_DEST and SET_SRC of I2 and I1. */
1009 rtx i2dest
, i2src
, i1dest
= 0, i1src
= 0;
1010 /* PATTERN (I2), or a copy of it in certain cases. */
1012 /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC. */
1013 int i2dest_in_i2src
, i1dest_in_i1src
= 0, i2dest_in_i1src
= 0;
1014 int i1_feeds_i3
= 0;
1015 /* Notes that must be added to REG_NOTES in I3 and I2. */
1016 rtx new_i3_notes
, new_i2_notes
;
1023 /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
1024 This can occur when flow deletes an insn that it has merged into an
1025 auto-increment address. We also can't do anything if I3 has a
1026 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1029 if (GET_RTX_CLASS (GET_CODE (i3
)) != 'i'
1030 || GET_RTX_CLASS (GET_CODE (i2
)) != 'i'
1031 || (i1
&& GET_RTX_CLASS (GET_CODE (i1
)) != 'i')
1032 || find_reg_note (i3
, REG_LIBCALL
, NULL_RTX
))
1037 undobuf
.num_undo
= previous_num_undos
= 0;
1038 undobuf
.other_insn
= 0;
1040 /* Save the current high-water-mark so we can free storage if we didn't
1041 accept this combination. */
1042 undobuf
.storage
= (char *) oballoc (0);
1044 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1045 code below, set I1 to be the earlier of the two insns. */
1046 if (i1
&& INSN_CUID (i1
) > INSN_CUID (i2
))
1047 temp
= i1
, i1
= i2
, i2
= temp
;
1049 /* First check for one important special-case that the code below will
1050 not handle. Namely, the case where I1 is zero, I2 has multiple sets,
1051 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1052 we may be able to replace that destination with the destination of I3.
1053 This occurs in the common code where we compute both a quotient and
1054 remainder into a structure, in which case we want to do the computation
1055 directly into the structure to avoid register-register copies.
1057 We make very conservative checks below and only try to handle the
1058 most common cases of this. For example, we only handle the case
1059 where I2 and I3 are adjacent to avoid making difficult register
1062 if (i1
== 0 && GET_CODE (i3
) == INSN
&& GET_CODE (PATTERN (i3
)) == SET
1063 && GET_CODE (SET_SRC (PATTERN (i3
))) == REG
1064 && REGNO (SET_SRC (PATTERN (i3
))) >= FIRST_PSEUDO_REGISTER
1065 #ifdef SMALL_REGISTER_CLASSES
1066 && (GET_CODE (SET_DEST (PATTERN (i3
))) != REG
1067 || REGNO (SET_DEST (PATTERN (i3
))) >= FIRST_PSEUDO_REGISTER
)
1069 && find_reg_note (i3
, REG_DEAD
, SET_SRC (PATTERN (i3
)))
1070 && GET_CODE (PATTERN (i2
)) == PARALLEL
1071 && ! side_effects_p (SET_DEST (PATTERN (i3
)))
1072 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1073 below would need to check what is inside (and reg_overlap_mentioned_p
1074 doesn't support those codes anyway). Don't allow those destinations;
1075 the resulting insn isn't likely to be recognized anyway. */
1076 && GET_CODE (SET_DEST (PATTERN (i3
))) != ZERO_EXTRACT
1077 && GET_CODE (SET_DEST (PATTERN (i3
))) != STRICT_LOW_PART
1078 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3
)),
1079 SET_DEST (PATTERN (i3
)))
1080 && next_real_insn (i2
) == i3
)
1082 rtx p2
= PATTERN (i2
);
1084 /* Make sure that the destination of I3,
1085 which we are going to substitute into one output of I2,
1086 is not used within another output of I2. We must avoid making this:
1087 (parallel [(set (mem (reg 69)) ...)
1088 (set (reg 69) ...)])
1089 which is not well-defined as to order of actions.
1090 (Besides, reload can't handle output reloads for this.)
1092 The problem can also happen if the dest of I3 is a memory ref,
1093 if another dest in I2 is an indirect memory ref. */
1094 for (i
= 0; i
< XVECLEN (p2
, 0); i
++)
1095 if (GET_CODE (XVECEXP (p2
, 0, i
)) == SET
1096 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3
)),
1097 SET_DEST (XVECEXP (p2
, 0, i
))))
1100 if (i
== XVECLEN (p2
, 0))
1101 for (i
= 0; i
< XVECLEN (p2
, 0); i
++)
1102 if (SET_DEST (XVECEXP (p2
, 0, i
)) == SET_SRC (PATTERN (i3
)))
1107 subst_low_cuid
= INSN_CUID (i2
);
1110 i2dest
= SET_SRC (PATTERN (i3
));
1112 /* Replace the dest in I2 with our dest and make the resulting
1113 insn the new pattern for I3. Then skip to where we
1114 validate the pattern. Everything was set up above. */
1115 SUBST (SET_DEST (XVECEXP (p2
, 0, i
)),
1116 SET_DEST (PATTERN (i3
)));
1119 goto validate_replacement
;
1124 /* If we have no I1 and I2 looks like:
1125 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1127 make up a dummy I1 that is
1130 (set (reg:CC X) (compare:CC Y (const_int 0)))
1132 (We can ignore any trailing CLOBBERs.)
1134 This undoes a previous combination and allows us to match a branch-and-
1137 if (i1
== 0 && GET_CODE (PATTERN (i2
)) == PARALLEL
1138 && XVECLEN (PATTERN (i2
), 0) >= 2
1139 && GET_CODE (XVECEXP (PATTERN (i2
), 0, 0)) == SET
1140 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2
), 0, 0))))
1142 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2
), 0, 0))) == COMPARE
1143 && XEXP (SET_SRC (XVECEXP (PATTERN (i2
), 0, 0)), 1) == const0_rtx
1144 && GET_CODE (XVECEXP (PATTERN (i2
), 0, 1)) == SET
1145 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2
), 0, 1))) == REG
1146 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2
), 0, 0)), 0),
1147 SET_SRC (XVECEXP (PATTERN (i2
), 0, 1))))
1149 for (i
= XVECLEN (PATTERN (i2
), 0) - 1; i
>= 2; i
--)
1150 if (GET_CODE (XVECEXP (PATTERN (i2
), 0, i
)) != CLOBBER
)
1155 /* We make I1 with the same INSN_UID as I2. This gives it
1156 the same INSN_CUID for value tracking. Our fake I1 will
1157 never appear in the insn stream so giving it the same INSN_UID
1158 as I2 will not cause a problem. */
1160 i1
= gen_rtx (INSN
, VOIDmode
, INSN_UID (i2
), 0, i2
,
1161 XVECEXP (PATTERN (i2
), 0, 1), -1, 0, 0);
1163 SUBST (PATTERN (i2
), XVECEXP (PATTERN (i2
), 0, 0));
1164 SUBST (XEXP (SET_SRC (PATTERN (i2
)), 0),
1165 SET_DEST (PATTERN (i1
)));
1170 /* Verify that I2 and I1 are valid for combining. */
1171 if (! can_combine_p (i2
, i3
, i1
, NULL_RTX
, &i2dest
, &i2src
)
1172 || (i1
&& ! can_combine_p (i1
, i3
, NULL_RTX
, i2
, &i1dest
, &i1src
)))
1178 /* Record whether I2DEST is used in I2SRC and similarly for the other
1179 cases. Knowing this will help in register status updating below. */
1180 i2dest_in_i2src
= reg_overlap_mentioned_p (i2dest
, i2src
);
1181 i1dest_in_i1src
= i1
&& reg_overlap_mentioned_p (i1dest
, i1src
);
1182 i2dest_in_i1src
= i1
&& reg_overlap_mentioned_p (i2dest
, i1src
);
1184 /* See if I1 directly feeds into I3. It does if I1DEST is not used
1186 i1_feeds_i3
= i1
&& ! reg_overlap_mentioned_p (i1dest
, i2src
);
1188 /* Ensure that I3's pattern can be the destination of combines. */
1189 if (! combinable_i3pat (i3
, &PATTERN (i3
), i2dest
, i1dest
,
1190 i1
&& i2dest_in_i1src
&& i1_feeds_i3
,
1197 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1198 We used to do this EXCEPT in one case: I3 has a post-inc in an
1199 output operand. However, that exception can give rise to insns like
1201 which is a famous insn on the PDP-11 where the value of r3 used as the
1202 source was model-dependent. Avoid this sort of thing. */
1205 if (!(GET_CODE (PATTERN (i3
)) == SET
1206 && GET_CODE (SET_SRC (PATTERN (i3
))) == REG
1207 && GET_CODE (SET_DEST (PATTERN (i3
))) == MEM
1208 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3
)), 0)) == POST_INC
1209 || GET_CODE (XEXP (SET_DEST (PATTERN (i3
)), 0)) == POST_DEC
)))
1210 /* It's not the exception. */
1213 for (link
= REG_NOTES (i3
); link
; link
= XEXP (link
, 1))
1214 if (REG_NOTE_KIND (link
) == REG_INC
1215 && (reg_overlap_mentioned_p (XEXP (link
, 0), PATTERN (i2
))
1217 && reg_overlap_mentioned_p (XEXP (link
, 0), PATTERN (i1
)))))
1224 /* See if the SETs in I1 or I2 need to be kept around in the merged
1225 instruction: whenever the value set there is still needed past I3.
1226 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1228 For the SET in I1, we have two cases: If I1 and I2 independently
1229 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1230 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1231 in I1 needs to be kept around unless I1DEST dies or is set in either
1232 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1233 I1DEST. If so, we know I1 feeds into I2. */
1235 added_sets_2
= ! dead_or_set_p (i3
, i2dest
);
1238 = i1
&& ! (i1_feeds_i3
? dead_or_set_p (i3
, i1dest
)
1239 : (dead_or_set_p (i3
, i1dest
) || dead_or_set_p (i2
, i1dest
)));
1241 /* If the set in I2 needs to be kept around, we must make a copy of
1242 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1243 PATTERN (I2), we are only substituting for the original I1DEST, not into
1244 an already-substituted copy. This also prevents making self-referential
1245 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1248 i2pat
= (GET_CODE (PATTERN (i2
)) == PARALLEL
1249 ? gen_rtx (SET
, VOIDmode
, i2dest
, i2src
)
1253 i2pat
= copy_rtx (i2pat
);
1257 /* Substitute in the latest insn for the regs set by the earlier ones. */
1259 maxreg
= max_reg_num ();
1263 /* It is possible that the source of I2 or I1 may be performing an
1264 unneeded operation, such as a ZERO_EXTEND of something that is known
1265 to have the high part zero. Handle that case by letting subst look at
1266 the innermost one of them.
1268 Another way to do this would be to have a function that tries to
1269 simplify a single insn instead of merging two or more insns. We don't
1270 do this because of the potential of infinite loops and because
1271 of the potential extra memory required. However, doing it the way
1272 we are is a bit of a kludge and doesn't catch all cases.
1274 But only do this if -fexpensive-optimizations since it slows things down
1275 and doesn't usually win. */
1277 if (flag_expensive_optimizations
)
1279 /* Pass pc_rtx so no substitutions are done, just simplifications.
1280 The cases that we are interested in here do not involve the few
1281 cases where is_replaced is checked. */
1284 subst_low_cuid
= INSN_CUID (i1
);
1285 i1src
= subst (i1src
, pc_rtx
, pc_rtx
, 0, 0);
1289 subst_low_cuid
= INSN_CUID (i2
);
1290 i2src
= subst (i2src
, pc_rtx
, pc_rtx
, 0, 0);
1293 previous_num_undos
= undobuf
.num_undo
;
1297 /* Many machines that don't use CC0 have insns that can both perform an
1298 arithmetic operation and set the condition code. These operations will
1299 be represented as a PARALLEL with the first element of the vector
1300 being a COMPARE of an arithmetic operation with the constant zero.
1301 The second element of the vector will set some pseudo to the result
1302 of the same arithmetic operation. If we simplify the COMPARE, we won't
1303 match such a pattern and so will generate an extra insn. Here we test
1304 for this case, where both the comparison and the operation result are
1305 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1306 I2SRC. Later we will make the PARALLEL that contains I2. */
1308 if (i1
== 0 && added_sets_2
&& GET_CODE (PATTERN (i3
)) == SET
1309 && GET_CODE (SET_SRC (PATTERN (i3
))) == COMPARE
1310 && XEXP (SET_SRC (PATTERN (i3
)), 1) == const0_rtx
1311 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3
)), 0), i2dest
))
1314 enum machine_mode compare_mode
;
1316 newpat
= PATTERN (i3
);
1317 SUBST (XEXP (SET_SRC (newpat
), 0), i2src
);
1321 #ifdef EXTRA_CC_MODES
1322 /* See if a COMPARE with the operand we substituted in should be done
1323 with the mode that is currently being used. If not, do the same
1324 processing we do in `subst' for a SET; namely, if the destination
1325 is used only once, try to replace it with a register of the proper
1326 mode and also replace the COMPARE. */
1327 if (undobuf
.other_insn
== 0
1328 && (cc_use
= find_single_use (SET_DEST (newpat
), i3
,
1329 &undobuf
.other_insn
))
1330 && ((compare_mode
= SELECT_CC_MODE (GET_CODE (*cc_use
),
1332 != GET_MODE (SET_DEST (newpat
))))
1334 int regno
= REGNO (SET_DEST (newpat
));
1335 rtx new_dest
= gen_rtx (REG
, compare_mode
, regno
);
1337 if (regno
< FIRST_PSEUDO_REGISTER
1338 || (reg_n_sets
[regno
] == 1 && ! added_sets_2
1339 && ! REG_USERVAR_P (SET_DEST (newpat
))))
1341 if (regno
>= FIRST_PSEUDO_REGISTER
)
1342 SUBST (regno_reg_rtx
[regno
], new_dest
);
1344 SUBST (SET_DEST (newpat
), new_dest
);
1345 SUBST (XEXP (*cc_use
, 0), new_dest
);
1346 SUBST (SET_SRC (newpat
),
1347 gen_rtx_combine (COMPARE
, compare_mode
,
1348 i2src
, const0_rtx
));
1351 undobuf
.other_insn
= 0;
1358 n_occurrences
= 0; /* `subst' counts here */
1360 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1361 need to make a unique copy of I2SRC each time we substitute it
1362 to avoid self-referential rtl. */
1364 subst_low_cuid
= INSN_CUID (i2
);
1365 newpat
= subst (PATTERN (i3
), i2dest
, i2src
, 0,
1366 ! i1_feeds_i3
&& i1dest_in_i1src
);
1367 previous_num_undos
= undobuf
.num_undo
;
1369 /* Record whether i2's body now appears within i3's body. */
1370 i2_is_used
= n_occurrences
;
1373 /* If we already got a failure, don't try to do more. Otherwise,
1374 try to substitute in I1 if we have it. */
1376 if (i1
&& GET_CODE (newpat
) != CLOBBER
)
1378 /* Before we can do this substitution, we must redo the test done
1379 above (see detailed comments there) that ensures that I1DEST
1380 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1382 if (! combinable_i3pat (NULL_RTX
, &newpat
, i1dest
, NULL_RTX
,
1390 subst_low_cuid
= INSN_CUID (i1
);
1391 newpat
= subst (newpat
, i1dest
, i1src
, 0, 0);
1392 previous_num_undos
= undobuf
.num_undo
;
1395 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1396 to count all the ways that I2SRC and I1SRC can be used. */
1397 if ((FIND_REG_INC_NOTE (i2
, NULL_RTX
) != 0
1398 && i2_is_used
+ added_sets_2
> 1)
1399 || (i1
!= 0 && FIND_REG_INC_NOTE (i1
, NULL_RTX
) != 0
1400 && (n_occurrences
+ added_sets_1
+ (added_sets_2
&& ! i1_feeds_i3
)
1402 /* Fail if we tried to make a new register (we used to abort, but there's
1403 really no reason to). */
1404 || max_reg_num () != maxreg
1405 /* Fail if we couldn't do something and have a CLOBBER. */
1406 || GET_CODE (newpat
) == CLOBBER
)
1412 /* If the actions of the earlier insns must be kept
1413 in addition to substituting them into the latest one,
1414 we must make a new PARALLEL for the latest insn
1415 to hold the additional SETs. */
1417 if (added_sets_1
|| added_sets_2
)
1421 if (GET_CODE (newpat
) == PARALLEL
)
1423 rtvec old
= XVEC (newpat
, 0);
1424 total_sets
= XVECLEN (newpat
, 0) + added_sets_1
+ added_sets_2
;
1425 newpat
= gen_rtx (PARALLEL
, VOIDmode
, rtvec_alloc (total_sets
));
1426 bcopy (&old
->elem
[0], &XVECEXP (newpat
, 0, 0),
1427 sizeof (old
->elem
[0]) * old
->num_elem
);
1432 total_sets
= 1 + added_sets_1
+ added_sets_2
;
1433 newpat
= gen_rtx (PARALLEL
, VOIDmode
, rtvec_alloc (total_sets
));
1434 XVECEXP (newpat
, 0, 0) = old
;
1438 XVECEXP (newpat
, 0, --total_sets
)
1439 = (GET_CODE (PATTERN (i1
)) == PARALLEL
1440 ? gen_rtx (SET
, VOIDmode
, i1dest
, i1src
) : PATTERN (i1
));
1444 /* If there is no I1, use I2's body as is. We used to also not do
1445 the subst call below if I2 was substituted into I3,
1446 but that could lose a simplification. */
1448 XVECEXP (newpat
, 0, --total_sets
) = i2pat
;
1450 /* See comment where i2pat is assigned. */
1451 XVECEXP (newpat
, 0, --total_sets
)
1452 = subst (i2pat
, i1dest
, i1src
, 0, 0);
1456 /* We come here when we are replacing a destination in I2 with the
1457 destination of I3. */
1458 validate_replacement
:
1460 /* Is the result of combination a valid instruction? */
1461 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
1463 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1464 the second SET's destination is a register that is unused. In that case,
1465 we just need the first SET. This can occur when simplifying a divmod
1466 insn. We *must* test for this case here because the code below that
1467 splits two independent SETs doesn't handle this case correctly when it
1468 updates the register status. Also check the case where the first
1469 SET's destination is unused. That would not cause incorrect code, but
1470 does cause an unneeded insn to remain. */
1472 if (insn_code_number
< 0 && GET_CODE (newpat
) == PARALLEL
1473 && XVECLEN (newpat
, 0) == 2
1474 && GET_CODE (XVECEXP (newpat
, 0, 0)) == SET
1475 && GET_CODE (XVECEXP (newpat
, 0, 1)) == SET
1476 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 1))) == REG
1477 && find_reg_note (i3
, REG_UNUSED
, SET_DEST (XVECEXP (newpat
, 0, 1)))
1478 && ! side_effects_p (SET_SRC (XVECEXP (newpat
, 0, 1)))
1479 && asm_noperands (newpat
) < 0)
1481 newpat
= XVECEXP (newpat
, 0, 0);
1482 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
1485 else if (insn_code_number
< 0 && GET_CODE (newpat
) == PARALLEL
1486 && XVECLEN (newpat
, 0) == 2
1487 && GET_CODE (XVECEXP (newpat
, 0, 0)) == SET
1488 && GET_CODE (XVECEXP (newpat
, 0, 1)) == SET
1489 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 0))) == REG
1490 && find_reg_note (i3
, REG_UNUSED
, SET_DEST (XVECEXP (newpat
, 0, 0)))
1491 && ! side_effects_p (SET_SRC (XVECEXP (newpat
, 0, 0)))
1492 && asm_noperands (newpat
) < 0)
1494 newpat
= XVECEXP (newpat
, 0, 1);
1495 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
1498 /* See if this is an XOR. If so, perhaps the problem is that the
1499 constant is out of range. Replace it with a complemented XOR with
1500 a complemented constant; it might be in range. */
1502 else if (insn_code_number
< 0 && GET_CODE (newpat
) == SET
1503 && GET_CODE (SET_SRC (newpat
)) == XOR
1504 && GET_CODE (XEXP (SET_SRC (newpat
), 1)) == CONST_INT
1505 && ((temp
= simplify_unary_operation (NOT
,
1506 GET_MODE (SET_SRC (newpat
)),
1507 XEXP (SET_SRC (newpat
), 1),
1508 GET_MODE (SET_SRC (newpat
))))
1511 enum machine_mode i_mode
= GET_MODE (SET_SRC (newpat
));
1513 = gen_rtx_combine (SET
, VOIDmode
, SET_DEST (newpat
),
1514 gen_unary (NOT
, i_mode
,
1515 gen_binary (XOR
, i_mode
,
1516 XEXP (SET_SRC (newpat
), 0),
1519 insn_code_number
= recog_for_combine (&pat
, i3
, &new_i3_notes
);
1520 if (insn_code_number
>= 0)
1524 /* If we were combining three insns and the result is a simple SET
1525 with no ASM_OPERANDS that wasn't recognized, try to split it into two
1526 insns. There are two ways to do this. It can be split using a
1527 machine-specific method (like when you have an addition of a large
1528 constant) or by combine in the function find_split_point. */
1530 if (i1
&& insn_code_number
< 0 && GET_CODE (newpat
) == SET
1531 && asm_noperands (newpat
) < 0)
1533 rtx m_split
, *split
;
1534 rtx ni2dest
= i2dest
;
1536 /* See if the MD file can split NEWPAT. If it can't, see if letting it
1537 use I2DEST as a scratch register will help. In the latter case,
1538 convert I2DEST to the mode of the source of NEWPAT if we can. */
1540 m_split
= split_insns (newpat
, i3
);
1543 /* If I2DEST is a hard register or the only use of a pseudo,
1544 we can change its mode. */
1545 if (GET_MODE (SET_DEST (newpat
)) != GET_MODE (i2dest
)
1546 && GET_MODE (SET_DEST (newpat
)) != VOIDmode
1547 && GET_CODE (i2dest
) == REG
1548 && (REGNO (i2dest
) < FIRST_PSEUDO_REGISTER
1549 || (reg_n_sets
[REGNO (i2dest
)] == 1 && ! added_sets_2
1550 && ! REG_USERVAR_P (i2dest
))))
1551 ni2dest
= gen_rtx (REG
, GET_MODE (SET_DEST (newpat
)),
1554 m_split
= split_insns (gen_rtx (PARALLEL
, VOIDmode
,
1555 gen_rtvec (2, newpat
,
1562 if (m_split
&& GET_CODE (m_split
) == SEQUENCE
1563 && XVECLEN (m_split
, 0) == 2
1564 && (next_real_insn (i2
) == i3
1565 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split
, 0, 0)),
1568 rtx newi3pat
= PATTERN (XVECEXP (m_split
, 0, 1));
1569 newi2pat
= PATTERN (XVECEXP (m_split
, 0, 0));
1571 /* In case we changed the mode of I2DEST, replace it in the
1572 pseudo-register table here. We can't do it above in case this
1573 code doesn't get executed and we do a split the other way. */
1575 if (REGNO (i2dest
) >= FIRST_PSEUDO_REGISTER
)
1576 SUBST (regno_reg_rtx
[REGNO (i2dest
)], ni2dest
);
1578 i2_code_number
= recog_for_combine (&newi2pat
, i2
, &new_i2_notes
);
1579 if (i2_code_number
>= 0)
1580 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
1582 if (insn_code_number
>= 0)
1585 /* It is possible that both insns now set the destination of I3.
1586 If so, we must show an extra use of it and update
1589 if (insn_code_number
>= 0 && GET_CODE (SET_DEST (newpat
)) == REG
1590 && GET_CODE (SET_DEST (newi2pat
)) == REG
1591 && REGNO (SET_DEST (newpat
)) == REGNO (SET_DEST (newi2pat
)))
1593 reg_n_sets
[REGNO (SET_DEST (newpat
))]++;
1594 set_significant (SET_DEST (newi2pat
), newi2pat
);
1595 set_significant (SET_DEST (newpat
), newpat
);
1599 /* If we can split it and use I2DEST, go ahead and see if that
1600 helps things be recognized. Verify that none of the registers
1601 are set between I2 and I3. */
1602 if (insn_code_number
< 0 && (split
= find_split_point (&newpat
, i3
)) != 0
1604 && GET_CODE (i2dest
) == REG
1606 /* We need I2DEST in the proper mode. If it is a hard register
1607 or the only use of a pseudo, we can change its mode. */
1608 && (GET_MODE (*split
) == GET_MODE (i2dest
)
1609 || GET_MODE (*split
) == VOIDmode
1610 || REGNO (i2dest
) < FIRST_PSEUDO_REGISTER
1611 || (reg_n_sets
[REGNO (i2dest
)] == 1 && ! added_sets_2
1612 && ! REG_USERVAR_P (i2dest
)))
1613 && (next_real_insn (i2
) == i3
1614 || ! use_crosses_set_p (*split
, INSN_CUID (i2
)))
1615 /* We can't overwrite I2DEST if its value is still used by
1617 && ! reg_referenced_p (i2dest
, newpat
))
1619 rtx newdest
= i2dest
;
1621 /* Get NEWDEST as a register in the proper mode. We have already
1622 validated that we can do this. */
1623 if (GET_MODE (i2dest
) != GET_MODE (*split
)
1624 && GET_MODE (*split
) != VOIDmode
)
1626 newdest
= gen_rtx (REG
, GET_MODE (*split
), REGNO (i2dest
));
1628 if (REGNO (i2dest
) >= FIRST_PSEUDO_REGISTER
)
1629 SUBST (regno_reg_rtx
[REGNO (i2dest
)], newdest
);
1632 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1633 an ASHIFT. This can occur if it was inside a PLUS and hence
1634 appeared to be a memory address. This is a kludge. */
1635 if (GET_CODE (*split
) == MULT
1636 && GET_CODE (XEXP (*split
, 1)) == CONST_INT
1637 && (i
= exact_log2 (INTVAL (XEXP (*split
, 1)))) >= 0)
1638 SUBST (*split
, gen_rtx_combine (ASHIFT
, GET_MODE (*split
),
1639 XEXP (*split
, 0), GEN_INT (i
)));
1641 #ifdef INSN_SCHEDULING
1642 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1643 be written as a ZERO_EXTEND. */
1644 if (GET_CODE (*split
) == SUBREG
1645 && GET_CODE (SUBREG_REG (*split
)) == MEM
)
1646 SUBST (*split
, gen_rtx_combine (ZERO_EXTEND
, GET_MODE (*split
),
1650 newi2pat
= gen_rtx_combine (SET
, VOIDmode
, newdest
, *split
);
1651 SUBST (*split
, newdest
);
1652 i2_code_number
= recog_for_combine (&newi2pat
, i2
, &new_i2_notes
);
1653 if (i2_code_number
>= 0)
1654 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
1658 /* Check for a case where we loaded from memory in a narrow mode and
1659 then sign extended it, but we need both registers. In that case,
1660 we have a PARALLEL with both loads from the same memory location.
1661 We can split this into a load from memory followed by a register-register
1662 copy. This saves at least one insn, more if register allocation can
1663 eliminate the copy. */
1665 else if (i1
&& insn_code_number
< 0 && asm_noperands (newpat
) < 0
1666 && GET_CODE (newpat
) == PARALLEL
1667 && XVECLEN (newpat
, 0) == 2
1668 && GET_CODE (XVECEXP (newpat
, 0, 0)) == SET
1669 && GET_CODE (SET_SRC (XVECEXP (newpat
, 0, 0))) == SIGN_EXTEND
1670 && GET_CODE (XVECEXP (newpat
, 0, 1)) == SET
1671 && rtx_equal_p (SET_SRC (XVECEXP (newpat
, 0, 1)),
1672 XEXP (SET_SRC (XVECEXP (newpat
, 0, 0)), 0))
1673 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat
, 0, 1)),
1675 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 1))) != ZERO_EXTRACT
1676 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 1))) != STRICT_LOW_PART
1677 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat
, 0, 1)),
1678 SET_SRC (XVECEXP (newpat
, 0, 1)))
1679 && ! find_reg_note (i3
, REG_UNUSED
,
1680 SET_DEST (XVECEXP (newpat
, 0, 0))))
1682 newi2pat
= XVECEXP (newpat
, 0, 0);
1683 newpat
= XVECEXP (newpat
, 0, 1);
1684 SUBST (SET_SRC (newpat
),
1685 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat
)),
1686 SET_DEST (newi2pat
)));
1687 i2_code_number
= recog_for_combine (&newi2pat
, i2
, &new_i2_notes
);
1688 if (i2_code_number
>= 0)
1689 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
1691 if (insn_code_number
>= 0)
1696 /* If we will be able to accept this, we have made a change to the
1697 destination of I3. This can invalidate a LOG_LINKS pointing
1698 to I3. No other part of combine.c makes such a transformation.
1700 The new I3 will have a destination that was previously the
1701 destination of I1 or I2 and which was used in i2 or I3. Call
1702 distribute_links to make a LOG_LINK from the next use of
1703 that destination. */
1705 PATTERN (i3
) = newpat
;
1706 distribute_links (gen_rtx (INSN_LIST
, VOIDmode
, i3
, NULL_RTX
));
1708 /* I3 now uses what used to be its destination and which is
1709 now I2's destination. That means we need a LOG_LINK from
1710 I3 to I2. But we used to have one, so we still will.
1712 However, some later insn might be using I2's dest and have
1713 a LOG_LINK pointing at I3. We must remove this link.
1714 The simplest way to remove the link is to point it at I1,
1715 which we know will be a NOTE. */
1717 for (insn
= NEXT_INSN (i3
);
1718 insn
&& GET_CODE (insn
) != CODE_LABEL
1719 && GET_CODE (PREV_INSN (insn
)) != JUMP_INSN
;
1720 insn
= NEXT_INSN (insn
))
1722 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i'
1723 && reg_referenced_p (SET_DEST (newi2pat
), PATTERN (insn
)))
1725 for (link
= LOG_LINKS (insn
); link
;
1726 link
= XEXP (link
, 1))
1727 if (XEXP (link
, 0) == i3
)
1728 XEXP (link
, 0) = i1
;
1736 /* Similarly, check for a case where we have a PARALLEL of two independent
1737 SETs but we started with three insns. In this case, we can do the sets
1738 as two separate insns. This case occurs when some SET allows two
1739 other insns to combine, but the destination of that SET is still live. */
1741 else if (i1
&& insn_code_number
< 0 && asm_noperands (newpat
) < 0
1742 && GET_CODE (newpat
) == PARALLEL
1743 && XVECLEN (newpat
, 0) == 2
1744 && GET_CODE (XVECEXP (newpat
, 0, 0)) == SET
1745 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 0))) != ZERO_EXTRACT
1746 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 0))) != STRICT_LOW_PART
1747 && GET_CODE (XVECEXP (newpat
, 0, 1)) == SET
1748 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 1))) != ZERO_EXTRACT
1749 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 1))) != STRICT_LOW_PART
1750 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat
, 0, 1)),
1752 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
1753 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 1))) != USE
1754 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 0))) != USE
1755 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat
, 0, 1)),
1756 XVECEXP (newpat
, 0, 0))
1757 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat
, 0, 0)),
1758 XVECEXP (newpat
, 0, 1)))
1760 newi2pat
= XVECEXP (newpat
, 0, 1);
1761 newpat
= XVECEXP (newpat
, 0, 0);
1763 i2_code_number
= recog_for_combine (&newi2pat
, i2
, &new_i2_notes
);
1764 if (i2_code_number
>= 0)
1765 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
1768 /* If it still isn't recognized, fail and change things back the way they
1770 if ((insn_code_number
< 0
1771 /* Is the result a reasonable ASM_OPERANDS? */
1772 && (! check_asm_operands (newpat
) || added_sets_1
|| added_sets_2
)))
1778 /* If we had to change another insn, make sure it is valid also. */
1779 if (undobuf
.other_insn
)
1781 rtx other_notes
= REG_NOTES (undobuf
.other_insn
);
1782 rtx other_pat
= PATTERN (undobuf
.other_insn
);
1783 rtx new_other_notes
;
1786 other_code_number
= recog_for_combine (&other_pat
, undobuf
.other_insn
,
1789 if (other_code_number
< 0 && ! check_asm_operands (other_pat
))
1795 PATTERN (undobuf
.other_insn
) = other_pat
;
1797 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
1798 are still valid. Then add any non-duplicate notes added by
1799 recog_for_combine. */
1800 for (note
= REG_NOTES (undobuf
.other_insn
); note
; note
= next
)
1802 next
= XEXP (note
, 1);
1804 if (REG_NOTE_KIND (note
) == REG_UNUSED
1805 && ! reg_set_p (XEXP (note
, 0), PATTERN (undobuf
.other_insn
)))
1806 remove_note (undobuf
.other_insn
, note
);
1809 distribute_notes (new_other_notes
, undobuf
.other_insn
,
1810 undobuf
.other_insn
, NULL_RTX
, NULL_RTX
, NULL_RTX
);
1813 /* We now know that we can do this combination. Merge the insns and
1814 update the status of registers and LOG_LINKS. */
1817 rtx i3notes
, i2notes
, i1notes
= 0;
1818 rtx i3links
, i2links
, i1links
= 0;
1820 int all_adjacent
= (next_real_insn (i2
) == i3
1821 && (i1
== 0 || next_real_insn (i1
) == i2
));
1823 /* Compute which registers we expect to eliminate. */
1824 rtx elim_i2
= (newi2pat
|| i2dest_in_i2src
|| i2dest_in_i1src
1826 rtx elim_i1
= i1
== 0 || i1dest_in_i1src
? 0 : i1dest
;
1828 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
1830 i3notes
= REG_NOTES (i3
), i3links
= LOG_LINKS (i3
);
1831 i2notes
= REG_NOTES (i2
), i2links
= LOG_LINKS (i2
);
1833 i1notes
= REG_NOTES (i1
), i1links
= LOG_LINKS (i1
);
1835 /* Ensure that we do not have something that should not be shared but
1836 occurs multiple times in the new insns. Check this by first
1837 resetting all the `used' flags and then copying anything that is shared. */
1839 reset_used_flags (i3notes
);
1840 reset_used_flags (i2notes
);
1841 reset_used_flags (i1notes
);
1842 reset_used_flags (newpat
);
1843 reset_used_flags (newi2pat
);
1844 if (undobuf
.other_insn
)
1845 reset_used_flags (PATTERN (undobuf
.other_insn
));
1847 i3notes
= copy_rtx_if_shared (i3notes
);
1848 i2notes
= copy_rtx_if_shared (i2notes
);
1849 i1notes
= copy_rtx_if_shared (i1notes
);
1850 newpat
= copy_rtx_if_shared (newpat
);
1851 newi2pat
= copy_rtx_if_shared (newi2pat
);
1852 if (undobuf
.other_insn
)
1853 reset_used_flags (PATTERN (undobuf
.other_insn
));
1855 INSN_CODE (i3
) = insn_code_number
;
1856 PATTERN (i3
) = newpat
;
1857 if (undobuf
.other_insn
)
1858 INSN_CODE (undobuf
.other_insn
) = other_code_number
;
1860 /* We had one special case above where I2 had more than one set and
1861 we replaced a destination of one of those sets with the destination
1862 of I3. In that case, we have to update LOG_LINKS of insns later
1863 in this basic block. Note that this (expensive) case is rare. */
1865 if (GET_CODE (PATTERN (i2
)) == PARALLEL
)
1866 for (i
= 0; i
< XVECLEN (PATTERN (i2
), 0); i
++)
1867 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2
), 0, i
))) == REG
1868 && SET_DEST (XVECEXP (PATTERN (i2
), 0, i
)) != i2dest
1869 && ! find_reg_note (i2
, REG_UNUSED
,
1870 SET_DEST (XVECEXP (PATTERN (i2
), 0, i
))))
1874 for (insn
= NEXT_INSN (i2
); insn
; insn
= NEXT_INSN (insn
))
1876 if (insn
!= i3
&& GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
1877 for (link
= LOG_LINKS (insn
); link
; link
= XEXP (link
, 1))
1878 if (XEXP (link
, 0) == i2
)
1879 XEXP (link
, 0) = i3
;
1881 if (GET_CODE (insn
) == CODE_LABEL
1882 || GET_CODE (insn
) == JUMP_INSN
)
1894 INSN_CODE (i2
) = i2_code_number
;
1895 PATTERN (i2
) = newi2pat
;
1899 PUT_CODE (i2
, NOTE
);
1900 NOTE_LINE_NUMBER (i2
) = NOTE_INSN_DELETED
;
1901 NOTE_SOURCE_FILE (i2
) = 0;
1908 PUT_CODE (i1
, NOTE
);
1909 NOTE_LINE_NUMBER (i1
) = NOTE_INSN_DELETED
;
1910 NOTE_SOURCE_FILE (i1
) = 0;
1913 /* Get death notes for everything that is now used in either I3 or
1914 I2 and used to die in a previous insn. */
1916 move_deaths (newpat
, i1
? INSN_CUID (i1
) : INSN_CUID (i2
), i3
, &midnotes
);
1918 move_deaths (newi2pat
, INSN_CUID (i1
), i2
, &midnotes
);
1920 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
1922 distribute_notes (i3notes
, i3
, i3
, newi2pat
? i2
: NULL_RTX
,
1925 distribute_notes (i2notes
, i2
, i3
, newi2pat
? i2
: NULL_RTX
,
1928 distribute_notes (i1notes
, i1
, i3
, newi2pat
? i2
: NULL_RTX
,
1931 distribute_notes (midnotes
, NULL_RTX
, i3
, newi2pat
? i2
: NULL_RTX
,
1934 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
1935 know these are REG_UNUSED and want them to go to the desired insn,
1936 so we always pass it as i3. */
1937 if (newi2pat
&& new_i2_notes
)
1938 distribute_notes (new_i2_notes
, i2
, i2
, NULL_RTX
, NULL_RTX
, NULL_RTX
);
1940 distribute_notes (new_i3_notes
, i3
, i3
, NULL_RTX
, NULL_RTX
, NULL_RTX
);
1942 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
1943 put a REG_DEAD note for it somewhere. Similarly for I2 and I1. */
1946 distribute_notes (gen_rtx (EXPR_LIST
, REG_DEAD
, i3dest_killed
, NULL_RTX
),
1947 NULL_RTX
, i3
, newi2pat
? i2
: NULL_RTX
,
1948 NULL_RTX
, NULL_RTX
);
1950 /* For I2 and I1, we have to be careful. If NEWI2PAT exists and sets
1951 I2DEST or I1DEST, the death must be somewhere before I2, not I3. If
1952 we passed I3 in that case, it might delete I2. */
1954 if (i2dest_in_i2src
)
1956 if (newi2pat
&& reg_set_p (i2dest
, newi2pat
))
1957 distribute_notes (gen_rtx (EXPR_LIST
, REG_DEAD
, i2dest
, NULL_RTX
),
1958 NULL_RTX
, i2
, NULL_RTX
, NULL_RTX
, NULL_RTX
);
1960 distribute_notes (gen_rtx (EXPR_LIST
, REG_DEAD
, i2dest
, NULL_RTX
),
1961 NULL_RTX
, i3
, newi2pat
? i2
: NULL_RTX
,
1962 NULL_RTX
, NULL_RTX
);
1965 if (i1dest_in_i1src
)
1967 if (newi2pat
&& reg_set_p (i1dest
, newi2pat
))
1968 distribute_notes (gen_rtx (EXPR_LIST
, REG_DEAD
, i1dest
, NULL_RTX
),
1969 NULL_RTX
, i2
, NULL_RTX
, NULL_RTX
, NULL_RTX
);
1971 distribute_notes (gen_rtx (EXPR_LIST
, REG_DEAD
, i1dest
, NULL_RTX
),
1972 NULL_RTX
, i3
, newi2pat
? i2
: NULL_RTX
,
1973 NULL_RTX
, NULL_RTX
);
1976 distribute_links (i3links
);
1977 distribute_links (i2links
);
1978 distribute_links (i1links
);
1980 if (GET_CODE (i2dest
) == REG
)
1983 rtx i2_insn
= 0, i2_val
= 0, set
;
1985 /* The insn that used to set this register doesn't exist, and
1986 this life of the register may not exist either. See if one of
1987 I3's links points to an insn that sets I2DEST. If it does,
1988 that is now the last known value for I2DEST. If we don't update
1989 this and I2 set the register to a value that depended on its old
1990 contents, we will get confused. If this insn is used, things
1991 will be set correctly in combine_instructions. */
1993 for (link
= LOG_LINKS (i3
); link
; link
= XEXP (link
, 1))
1994 if ((set
= single_set (XEXP (link
, 0))) != 0
1995 && rtx_equal_p (i2dest
, SET_DEST (set
)))
1996 i2_insn
= XEXP (link
, 0), i2_val
= SET_SRC (set
);
1998 record_value_for_reg (i2dest
, i2_insn
, i2_val
);
2000 /* If the reg formerly set in I2 died only once and that was in I3,
2001 zero its use count so it won't make `reload' do any work. */
2002 if (! added_sets_2
&& newi2pat
== 0)
2004 regno
= REGNO (i2dest
);
2005 reg_n_sets
[regno
]--;
2006 if (reg_n_sets
[regno
] == 0
2007 && ! (basic_block_live_at_start
[0][regno
/ REGSET_ELT_BITS
]
2008 & ((REGSET_ELT_TYPE
) 1 << (regno
% REGSET_ELT_BITS
))))
2009 reg_n_refs
[regno
] = 0;
2013 if (i1
&& GET_CODE (i1dest
) == REG
)
2016 rtx i1_insn
= 0, i1_val
= 0, set
;
2018 for (link
= LOG_LINKS (i3
); link
; link
= XEXP (link
, 1))
2019 if ((set
= single_set (XEXP (link
, 0))) != 0
2020 && rtx_equal_p (i1dest
, SET_DEST (set
)))
2021 i1_insn
= XEXP (link
, 0), i1_val
= SET_SRC (set
);
2023 record_value_for_reg (i1dest
, i1_insn
, i1_val
);
2025 regno
= REGNO (i1dest
);
2028 reg_n_sets
[regno
]--;
2029 if (reg_n_sets
[regno
] == 0
2030 && ! (basic_block_live_at_start
[0][regno
/ REGSET_ELT_BITS
]
2031 & ((REGSET_ELT_TYPE
) 1 << (regno
% REGSET_ELT_BITS
))))
2032 reg_n_refs
[regno
] = 0;
2036 /* If I3 is now an unconditional jump, ensure that it has a
2037 BARRIER following it since it may have initially been a
2038 conditional jump. */
2040 if ((GET_CODE (newpat
) == RETURN
|| simplejump_p (i3
))
2041 && GET_CODE (next_nonnote_insn (i3
)) != BARRIER
)
2042 emit_barrier_after (i3
);
2045 combine_successes
++;
2047 return newi2pat
? i2
: i3
;
2050 /* Undo all the modifications recorded in undobuf. */
/* NOTE(review): this file appears to be a line-number-mangled extraction of
   GCC combine.c -- each statement is split across several physical lines and
   prefixed with its original line number, and the function header for
   undo_all (original lines 2051-2055) is missing from this copy.  Code
   tokens below are left untouched; only comments are added.  */
/* Never replay more records than the undo buffer can actually hold.  */
2056 if (undobuf
.num_undo
> MAX_UNDO
)
2057 undobuf
.num_undo
= MAX_UNDO
;
/* Restore each saved value through its recorded address, newest record
   first, so nested modifications of the same location unwind correctly.  */
2058 for (i
= undobuf
.num_undo
- 1; i
>= 0; i
--)
2059 *undobuf
.undo
[i
].where
= undobuf
.undo
[i
].old_contents
;
/* Release the storage recorded in undobuf.storage and mark the undo
   buffer empty so a later combination attempt starts clean.  */
2061 obfree (undobuf
.storage
);
2062 undobuf
.num_undo
= 0;
2065 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
2066 where we have an arithmetic expression and return that point. LOC will
2069 try_combine will call this function to see if an insn can be split into
/* NOTE(review): the body below is a lossy, line-number-mangled copy of
   find_split_point from GCC combine.c.  The return type, the declarations
   of LOC/INSN/X/SPLIT (original lines ~2072-2077), several braces, case
   labels, and the trailing "return 0;" are missing from this extraction.
   Code tokens are left exactly as found; only comments are added/fixed.  */
2073 find_split_point (loc
, insn
)
2078 enum rtx_code code
= GET_CODE (x
);
2080 int len
= 0, pos
, unsignedp
;
2083 /* First special-case some codes. */
2087 #ifdef INSN_SCHEDULING
2088 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2090 if (GET_CODE (SUBREG_REG (x
)) == MEM
)
2093 return find_split_point (&SUBREG_REG (x
), insn
);
2097 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2098 using LO_SUM and HIGH. */
2099 if (GET_CODE (XEXP (x
, 0)) == CONST
2100 || GET_CODE (XEXP (x
, 0)) == SYMBOL_REF
)
2103 gen_rtx_combine (LO_SUM
, Pmode
,
2104 gen_rtx_combine (HIGH
, Pmode
, XEXP (x
, 0)),
2106 return &XEXP (XEXP (x
, 0), 0);
2110 /* If we have a PLUS whose second operand is a constant and the
2111 address is not valid, perhaps we can split it up using
2112 the machine-specific way to split large constants. We use
2113 the first pseudo-reg (one of the virtual regs) as a placeholder;
2114 it will not remain in the result. */
2115 if (GET_CODE (XEXP (x
, 0)) == PLUS
2116 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2117 && ! memory_address_p (GET_MODE (x
), XEXP (x
, 0)))
2119 rtx reg
= regno_reg_rtx
[FIRST_PSEUDO_REGISTER
];
2120 rtx seq
= split_insns (gen_rtx (SET
, VOIDmode
, reg
, XEXP (x
, 0)),
2123 /* This should have produced two insns, each of which sets our
2124 placeholder. If the source of the second is a valid address,
2125 we can put both sources together and make a split point
2128 if (seq
&& XVECLEN (seq
, 0) == 2
2129 && GET_CODE (XVECEXP (seq
, 0, 0)) == INSN
2130 && GET_CODE (PATTERN (XVECEXP (seq
, 0, 0))) == SET
2131 && SET_DEST (PATTERN (XVECEXP (seq
, 0, 0))) == reg
2132 && ! reg_mentioned_p (reg
,
2133 SET_SRC (PATTERN (XVECEXP (seq
, 0, 0))))
2134 && GET_CODE (XVECEXP (seq
, 0, 1)) == INSN
2135 && GET_CODE (PATTERN (XVECEXP (seq
, 0, 1))) == SET
2136 && SET_DEST (PATTERN (XVECEXP (seq
, 0, 1))) == reg
2137 && memory_address_p (GET_MODE (x
),
2138 SET_SRC (PATTERN (XVECEXP (seq
, 0, 1)))))
2140 rtx src1
= SET_SRC (PATTERN (XVECEXP (seq
, 0, 0)));
2141 rtx src2
= SET_SRC (PATTERN (XVECEXP (seq
, 0, 1)));
2143 /* Replace the placeholder in SRC2 with SRC1. If we can
2144 find where in SRC2 it was placed, that can become our
2145 split point and we can replace this address with SRC2.
2146 Just try two obvious places. */
2148 src2
= replace_rtx (src2
, reg
, src1
);
2150 if (XEXP (src2
, 0) == src1
)
2151 split
= &XEXP (src2
, 0);
2152 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2
, 0)))[0] == 'e'
2153 && XEXP (XEXP (src2
, 0), 0) == src1
)
2154 split
= &XEXP (XEXP (src2
, 0), 0);
2158 SUBST (XEXP (x
, 0), src2
);
2167 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2168 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2169 we need to put the operand into a register. So split at that
2172 if (SET_DEST (x
) == cc0_rtx
2173 && GET_CODE (SET_SRC (x
)) != COMPARE
2174 && GET_CODE (SET_SRC (x
)) != ZERO_EXTRACT
2175 && GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) != 'o'
2176 && ! (GET_CODE (SET_SRC (x
)) == SUBREG
2177 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x
)))) == 'o'))
2178 return &SET_SRC (x
);
2181 /* See if we can split SET_SRC as it stands. */
2182 split
= find_split_point (&SET_SRC (x
), insn
);
2183 if (split
&& split
!= &SET_SRC (x
))
2186 /* See if this is a bitfield assignment with everything constant. If
2187 so, this is an IOR of an AND, so split it into that. */
2188 if (GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
2189 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x
), 0)))
2190 <= HOST_BITS_PER_WIDE_INT
)
2191 && GET_CODE (XEXP (SET_DEST (x
), 1)) == CONST_INT
2192 && GET_CODE (XEXP (SET_DEST (x
), 2)) == CONST_INT
2193 && GET_CODE (SET_SRC (x
)) == CONST_INT
2194 && ((INTVAL (XEXP (SET_DEST (x
), 1))
2195 + INTVAL (XEXP (SET_DEST (x
), 2)))
2196 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x
), 0))))
2197 && ! side_effects_p (XEXP (SET_DEST (x
), 0)))
2199 int pos
= INTVAL (XEXP (SET_DEST (x
), 2));
2200 int len
= INTVAL (XEXP (SET_DEST (x
), 1));
2201 int src
= INTVAL (SET_SRC (x
));
2202 rtx dest
= XEXP (SET_DEST (x
), 0);
2203 enum machine_mode mode
= GET_MODE (dest
);
2204 unsigned HOST_WIDE_INT mask
= ((HOST_WIDE_INT
) 1 << len
) - 1;
/* On big-endian bit numbering, convert POS to count from the
   low-order end of the word (the condition guarding this adjustment
   is missing from this copy -- TODO confirm against the original).  */
2207 pos
= GET_MODE_BITSIZE (mode
) - len
- pos
;
2212 gen_binary (IOR
, mode
, dest
, GEN_INT (src
<< pos
)));
2215 gen_binary (IOR
, mode
,
2216 gen_binary (AND
, mode
, dest
,
2217 GEN_INT (~ (mask
<< pos
)
2218 & GET_MODE_MASK (mode
))),
2219 GEN_INT (src
<< pos
)));
2221 SUBST (SET_DEST (x
), dest
);
2223 split
= find_split_point (&SET_SRC (x
), insn
);
2224 if (split
&& split
!= &SET_SRC (x
))
2228 /* Otherwise, see if this is an operation that we can split into two.
2229 If so, try to split that. */
2230 code
= GET_CODE (SET_SRC (x
));
2235 /* If we are AND'ing with a large constant that is only a single
2236 bit and the result is only being used in a context where we
2237 need to know if it is zero or non-zero, replace it with a bit
2238 extraction. This will avoid the large constant, which might
2239 have taken more than one insn to make. If the constant were
2240 not a valid argument to the AND but took only one insn to make,
2241 this is no worse, but if it took more than one insn, it will
2244 if (GET_CODE (XEXP (SET_SRC (x
), 1)) == CONST_INT
2245 && GET_CODE (XEXP (SET_SRC (x
), 0)) == REG
2246 && (pos
= exact_log2 (INTVAL (XEXP (SET_SRC (x
), 1)))) >= 7
2247 && GET_CODE (SET_DEST (x
)) == REG
2248 && (split
= find_single_use (SET_DEST (x
), insn
, NULL_PTR
)) != 0
2249 && (GET_CODE (*split
) == EQ
|| GET_CODE (*split
) == NE
)
2250 && XEXP (*split
, 0) == SET_DEST (x
)
2251 && XEXP (*split
, 1) == const0_rtx
)
2254 make_extraction (GET_MODE (SET_DEST (x
)),
2255 XEXP (SET_SRC (x
), 0),
2256 pos
, NULL_RTX
, 1, 1, 0, 0));
2257 return find_split_point (loc
, insn
);
/* NOTE(review): the case label that governs the next two statements
   (presumably SIGN_EXTEND/ZERO_EXTEND of the inner switch on CODE) is
   missing from this copy -- confirm against the original source.  */
2262 inner
= XEXP (SET_SRC (x
), 0);
2264 len
= GET_MODE_BITSIZE (GET_MODE (inner
));
/* NOTE(review): this looks like the SIGN_EXTRACT/ZERO_EXTRACT case
   (extraction with constant length and position) -- its case label is
   also absent from this copy.  */
2270 if (GET_CODE (XEXP (SET_SRC (x
), 1)) == CONST_INT
2271 && GET_CODE (XEXP (SET_SRC (x
), 2)) == CONST_INT
)
2273 inner
= XEXP (SET_SRC (x
), 0);
2274 len
= INTVAL (XEXP (SET_SRC (x
), 1));
2275 pos
= INTVAL (XEXP (SET_SRC (x
), 2));
2278 pos
= GET_MODE_BITSIZE (GET_MODE (inner
)) - len
- pos
;
/* UNSIGNEDP selects the logical (LSHIFTRT) rather than arithmetic
   (ASHIFTRT) right shift in the rewrites below.  */
2280 unsignedp
= (code
== ZERO_EXTRACT
);
2285 if (len
&& pos
>= 0 && pos
+ len
<= GET_MODE_BITSIZE (GET_MODE (inner
)))
2287 enum machine_mode mode
= GET_MODE (SET_SRC (x
));
2289 /* For unsigned, we have a choice of a shift followed by an
2290 AND or two shifts. Use two shifts for field sizes where the
2291 constant might be too large. We assume here that we can
2292 always at least get 8-bit constants in an AND insn, which is
2293 true for every current RISC. */
2295 if (unsignedp
&& len
<= 8)
2300 gen_rtx_combine (LSHIFTRT
, mode
,
2301 gen_lowpart_for_combine (mode
, inner
),
2303 GEN_INT (((HOST_WIDE_INT
) 1 << len
) - 1)));
2305 split
= find_split_point (&SET_SRC (x
), insn
);
2306 if (split
&& split
!= &SET_SRC (x
))
/* Otherwise rewrite the extraction as a left shift followed by a
   (sign- or zero-extending) right shift, then retry the split.  */
2313 (unsignedp
? LSHIFTRT
: ASHIFTRT
, mode
,
2314 gen_rtx_combine (ASHIFT
, mode
,
2315 gen_lowpart_for_combine (mode
, inner
),
2316 GEN_INT (GET_MODE_BITSIZE (mode
)
2318 GEN_INT (GET_MODE_BITSIZE (mode
) - len
)));
2320 split
= find_split_point (&SET_SRC (x
), insn
);
2321 if (split
&& split
!= &SET_SRC (x
))
2326 /* See if this is a simple operation with a constant as the second
2327 operand. It might be that this constant is out of range and hence
2328 could be used as a split point. */
2329 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) == '2'
2330 || GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) == 'c'
2331 || GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) == '<')
2332 && CONSTANT_P (XEXP (SET_SRC (x
), 1))
2333 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x
), 0))) == 'o'
2334 || (GET_CODE (XEXP (SET_SRC (x
), 0)) == SUBREG
2335 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x
), 0))))
2337 return &XEXP (SET_SRC (x
), 1);
2339 /* Finally, see if this is a simple operation with its first operand
2340 not in a register. The operation might require this operand in a
2341 register, so return it as a split point. We can always do this
2342 because if the first operand were another operation, we would have
2343 already found it as a split point. */
2344 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) == '2'
2345 || GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) == 'c'
2346 || GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) == '<'
2347 || GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) == '1')
2348 && ! register_operand (XEXP (SET_SRC (x
), 0), VOIDmode
))
2349 return &XEXP (SET_SRC (x
), 0);
2355 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2356 it is better to write this as (not (ior A B)) so we can split it.
2357 Similarly for IOR. */
2358 if (GET_CODE (XEXP (x
, 0)) == NOT
&& GET_CODE (XEXP (x
, 1)) == NOT
)
2361 gen_rtx_combine (NOT
, GET_MODE (x
),
2362 gen_rtx_combine (code
== IOR
? AND
: IOR
,
2364 XEXP (XEXP (x
, 0), 0),
2365 XEXP (XEXP (x
, 1), 0))));
2366 return find_split_point (loc
, insn
);
2369 /* Many RISC machines have a large set of logical insns. If the
2370 second operand is a NOT, put it first so we will try to split the
2371 other operand first. */
2372 if (GET_CODE (XEXP (x
, 1)) == NOT
)
2374 rtx tem
= XEXP (x
, 0);
2375 SUBST (XEXP (x
, 0), XEXP (x
, 1));
2376 SUBST (XEXP (x
, 1), tem
);
2381 /* Otherwise, select our actions depending on our rtx class. */
2382 switch (GET_RTX_CLASS (code
))
2384 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2386 split
= find_split_point (&XEXP (x
, 2), insn
);
2389 /* ... fall through ... */
2393 split
= find_split_point (&XEXP (x
, 1), insn
);
2396 /* ... fall through ... */
2398 /* Some machines have (and (shift ...) ...) insns. If X is not
2399 an AND, but XEXP (X, 0) is, use it as our split point. */
2400 if (GET_CODE (x
) != AND
&& GET_CODE (XEXP (x
, 0)) == AND
)
2401 return &XEXP (x
, 0);
2403 split
= find_split_point (&XEXP (x
, 0), insn
);
2409 /* Otherwise, we don't have a split point. */
/* NOTE(review): the function's final "return 0;" and closing brace
   (original lines ~2410-2411) are missing from this copy.  */
2413 /* Throughout X, replace FROM with TO, and return the result.
2414 The result is TO if X is FROM;
2415 otherwise the result is X, but its contents may have been modified.
2416 If they were modified, a record was made in undobuf so that
2417 undo_all will (among other things) return X to its original state.
2419 If the number of changes necessary is too much to record to undo,
2420 the excess changes are not made, so the result is invalid.
2421 The changes already made can still be undone.
2422 undobuf.num_undo is incremented for such changes, so by testing that
2423 the caller can tell whether the result is valid.
2425 `n_occurrences' is incremented each time FROM is replaced.
2427 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2429 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
2430 by copying if `n_occurrences' is non-zero. */
2433 subst (x
, from
, to
, in_dest
, unique_copy
)
2434 register rtx x
, from
, to
;
2439 register int len
, i
;
2440 register enum rtx_code code
= GET_CODE (x
), orig_code
= code
;
2442 enum machine_mode mode
= GET_MODE (x
);
2443 enum machine_mode op0_mode
= VOIDmode
;
2448 /* FAKE_EXTEND_SAFE_P (MODE, FROM) is 1 if (subreg:MODE FROM 0) is a safe
2449 replacement for (zero_extend:MODE FROM) or (sign_extend:MODE FROM).
2450 If it is 0, that cannot be done. We can now do this for any MEM
2451 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be reloaded.
2452 If not for that, MEM's would very rarely be safe. */
2454 /* Reject MODEs bigger than a word, because we might not be able
2455 to reference a two-register group starting with an arbitrary register
2456 (and currently gen_lowpart might crash for a SUBREG). */
2458 #define FAKE_EXTEND_SAFE_P(MODE, FROM) \
2459 (GET_MODE_SIZE (MODE) <= UNITS_PER_WORD)
2461 /* Two expressions are equal if they are identical copies of a shared
2462 RTX or if they are both registers with the same register number
2465 #define COMBINE_RTX_EQUAL_P(X,Y) \
2467 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2468 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2470 if (! in_dest
&& COMBINE_RTX_EQUAL_P (x
, from
))
2473 return (unique_copy
&& n_occurrences
> 1 ? copy_rtx (to
) : to
);
2476 /* If X and FROM are the same register but different modes, they will
2477 not have been seen as equal above. However, flow.c will make a
2478 LOG_LINKS entry for that case. If we do nothing, we will try to
2479 rerecognize our original insn and, when it succeeds, we will
2480 delete the feeding insn, which is incorrect.
2482 So force this insn not to match in this (rare) case. */
2483 if (! in_dest
&& code
== REG
&& GET_CODE (from
) == REG
2484 && REGNO (x
) == REGNO (from
))
2485 return gen_rtx (CLOBBER
, GET_MODE (x
), const0_rtx
);
2487 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2488 of which may contain things that can be combined. */
2489 if (code
!= MEM
&& code
!= LO_SUM
&& GET_RTX_CLASS (code
) == 'o')
2492 /* It is possible to have a subexpression appear twice in the insn.
2493 Suppose that FROM is a register that appears within TO.
2494 Then, after that subexpression has been scanned once by `subst',
2495 the second time it is scanned, TO may be found. If we were
2496 to scan TO here, we would find FROM within it and create a
2497 self-referent rtl structure which is completely wrong. */
2498 if (COMBINE_RTX_EQUAL_P (x
, to
))
2501 len
= GET_RTX_LENGTH (code
);
2502 fmt
= GET_RTX_FORMAT (code
);
2504 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2505 set up to skip this common case. All other cases where we want to
2506 suppress replacing something inside a SET_SRC are handled via the
2509 && (GET_CODE (SET_DEST (x
)) == REG
2510 || GET_CODE (SET_DEST (x
)) == CC0
2511 || GET_CODE (SET_DEST (x
)) == PC
))
2514 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2516 op0_mode
= GET_MODE (XEXP (x
, 0));
2518 for (i
= 0; i
< len
; i
++)
2523 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2526 if (COMBINE_RTX_EQUAL_P (XVECEXP (x
, i
, j
), from
))
2528 new = (unique_copy
&& n_occurrences
? copy_rtx (to
) : to
);
2533 new = subst (XVECEXP (x
, i
, j
), from
, to
, 0, unique_copy
);
2535 /* If this substitution failed, this whole thing fails. */
2536 if (GET_CODE (new) == CLOBBER
&& XEXP (new, 0) == const0_rtx
)
2540 SUBST (XVECEXP (x
, i
, j
), new);
2543 else if (fmt
[i
] == 'e')
2547 if (COMBINE_RTX_EQUAL_P (XEXP (x
, i
), from
))
2549 new = (unique_copy
&& n_occurrences
? copy_rtx (to
) : to
);
2553 /* If we are in a SET_DEST, suppress most cases unless we
2554 have gone inside a MEM, in which case we want to
2555 simplify the address. We assume here that things that
2556 are actually part of the destination have their inner
2557 parts in the first expression. This is true for SUBREG,
2558 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2559 things aside from REG and MEM that should appear in a
2561 new = subst (XEXP (x
, i
), from
, to
,
2563 && (code
== SUBREG
|| code
== STRICT_LOW_PART
2564 || code
== ZERO_EXTRACT
))
2566 && i
== 0), unique_copy
);
2568 /* If we found that we will have to reject this combination,
2569 indicate that by returning the CLOBBER ourselves, rather than
2570 an expression containing it. This will speed things up as
2571 well as prevent accidents where two CLOBBERs are considered
2572 to be equal, thus producing an incorrect simplification. */
2574 if (GET_CODE (new) == CLOBBER
&& XEXP (new, 0) == const0_rtx
)
2577 SUBST (XEXP (x
, i
), new);
2581 /* We come back to here if we have replaced the expression with one of
2582 a different code and it is likely that further simplification will be
2587 code
= GET_CODE (x
);
2589 /* If this is a commutative operation, put a constant last and a complex
2590 expression first. We don't need to do this for comparisons here. */
2591 if (GET_RTX_CLASS (code
) == 'c'
2592 && ((CONSTANT_P (XEXP (x
, 0)) && GET_CODE (XEXP (x
, 1)) != CONST_INT
)
2593 || (GET_RTX_CLASS (GET_CODE (XEXP (x
, 0))) == 'o'
2594 && GET_RTX_CLASS (GET_CODE (XEXP (x
, 1))) != 'o')
2595 || (GET_CODE (XEXP (x
, 0)) == SUBREG
2596 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x
, 0)))) == 'o'
2597 && GET_RTX_CLASS (GET_CODE (XEXP (x
, 1))) != 'o')))
2600 SUBST (XEXP (x
, 0), XEXP (x
, 1));
2601 SUBST (XEXP (x
, 1), temp
);
2604 /* If this is a simple operation applied to an IF_THEN_ELSE, try
2605 applying it to the arms of the IF_THEN_ELSE. This often simplifies
2606 things. Don't deal with operations that change modes here. */
2608 if ((GET_RTX_CLASS (code
) == '2' || GET_RTX_CLASS (code
) == 'c')
2609 && GET_CODE (XEXP (x
, 0)) == IF_THEN_ELSE
)
2611 SUBST (XEXP (XEXP (x
, 0), 1),
2612 subst (gen_binary (code
, mode
, XEXP (XEXP (x
, 0), 1),
2614 pc_rtx
, pc_rtx
, 0));
2615 SUBST (XEXP (XEXP (x
, 0), 2),
2616 subst (gen_binary (code
, mode
, XEXP (XEXP (x
, 0), 2),
2618 pc_rtx
, pc_rtx
, 0));
2624 else if (GET_RTX_CLASS (code
) == '1'
2625 && GET_CODE (XEXP (x
, 0)) == IF_THEN_ELSE
2626 && GET_MODE (XEXP (x
, 0)) == mode
)
2628 SUBST (XEXP (XEXP (x
, 0), 1),
2629 subst (gen_unary (code
, mode
, XEXP (XEXP (x
, 0), 1)),
2630 pc_rtx
, pc_rtx
, 0));
2631 SUBST (XEXP (XEXP (x
, 0), 2),
2632 subst (gen_unary (code
, mode
, XEXP (XEXP (x
, 0), 2)),
2633 pc_rtx
, pc_rtx
, 0));
2639 /* Try to fold this expression in case we have constants that weren't
2642 switch (GET_RTX_CLASS (code
))
2645 temp
= simplify_unary_operation (code
, mode
, XEXP (x
, 0), op0_mode
);
2648 temp
= simplify_relational_operation (code
, op0_mode
,
2649 XEXP (x
, 0), XEXP (x
, 1));
2650 #ifdef FLOAT_STORE_FLAG_VALUE
2651 if (temp
!= 0 && GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
)
2652 temp
= ((temp
== const0_rtx
) ? CONST0_RTX (GET_MODE (x
))
2653 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE
, GET_MODE (x
)));
2658 temp
= simplify_binary_operation (code
, mode
, XEXP (x
, 0), XEXP (x
, 1));
2662 temp
= simplify_ternary_operation (code
, mode
, op0_mode
, XEXP (x
, 0),
2663 XEXP (x
, 1), XEXP (x
, 2));
2668 x
= temp
, code
= GET_CODE (temp
);
2670 /* If we have restarted more than 4 times, we are probably looping, so
2672 if (++n_restarts
> 4)
2675 /* First see if we can apply the inverse distributive law. */
2676 if (code
== PLUS
|| code
== MINUS
|| code
== IOR
|| code
== XOR
)
2678 x
= apply_distributive_law (x
);
2679 code
= GET_CODE (x
);
2682 /* If CODE is an associative operation not otherwise handled, see if we
2683 can associate some operands. This can win if they are constants or
2684 if they are logically related (i.e. (a & b) & a). */
2685 if ((code
== PLUS
|| code
== MINUS
2686 || code
== MULT
|| code
== AND
|| code
== IOR
|| code
== XOR
2687 || code
== DIV
|| code
== UDIV
2688 || code
== SMAX
|| code
== SMIN
|| code
== UMAX
|| code
== UMIN
)
2689 && GET_MODE_CLASS (mode
) == MODE_INT
)
2691 if (GET_CODE (XEXP (x
, 0)) == code
)
2693 rtx other
= XEXP (XEXP (x
, 0), 0);
2694 rtx inner_op0
= XEXP (XEXP (x
, 0), 1);
2695 rtx inner_op1
= XEXP (x
, 1);
2698 /* Make sure we pass the constant operand if any as the second
2699 one if this is a commutative operation. */
2700 if (CONSTANT_P (inner_op0
) && GET_RTX_CLASS (code
) == 'c')
2702 rtx tem
= inner_op0
;
2703 inner_op0
= inner_op1
;
2706 inner
= simplify_binary_operation (code
== MINUS
? PLUS
2707 : code
== DIV
? MULT
2708 : code
== UDIV
? MULT
2710 mode
, inner_op0
, inner_op1
);
2712 /* For commutative operations, try the other pair if that one
2714 if (inner
== 0 && GET_RTX_CLASS (code
) == 'c')
2716 other
= XEXP (XEXP (x
, 0), 1);
2717 inner
= simplify_binary_operation (code
, mode
,
2718 XEXP (XEXP (x
, 0), 0),
2724 x
= gen_binary (code
, mode
, other
, inner
);
2731 /* A little bit of algebraic simplification here. */
2735 /* Ensure that our address has any ASHIFTs converted to MULT in case
2736 address-recognizing predicates are called later. */
2737 temp
= make_compound_operation (XEXP (x
, 0), MEM
);
2738 SUBST (XEXP (x
, 0), temp
);
2742 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
2743 is paradoxical. If we can't do that safely, then it becomes
2744 something nonsensical so that this combination won't take place. */
2746 if (GET_CODE (SUBREG_REG (x
)) == MEM
2747 && (GET_MODE_SIZE (mode
)
2748 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
)))))
2750 rtx inner
= SUBREG_REG (x
);
2751 int endian_offset
= 0;
2752 /* Don't change the mode of the MEM
2753 if that would change the meaning of the address. */
2754 if (MEM_VOLATILE_P (SUBREG_REG (x
))
2755 || mode_dependent_address_p (XEXP (inner
, 0)))
2756 return gen_rtx (CLOBBER
, mode
, const0_rtx
);
2758 #if BYTES_BIG_ENDIAN
2759 if (GET_MODE_SIZE (mode
) < UNITS_PER_WORD
)
2760 endian_offset
+= UNITS_PER_WORD
- GET_MODE_SIZE (mode
);
2761 if (GET_MODE_SIZE (GET_MODE (inner
)) < UNITS_PER_WORD
)
2762 endian_offset
-= UNITS_PER_WORD
- GET_MODE_SIZE (GET_MODE (inner
));
2764 /* Note if the plus_constant doesn't make a valid address
2765 then this combination won't be accepted. */
2766 x
= gen_rtx (MEM
, mode
,
2767 plus_constant (XEXP (inner
, 0),
2768 (SUBREG_WORD (x
) * UNITS_PER_WORD
2770 MEM_VOLATILE_P (x
) = MEM_VOLATILE_P (inner
);
2771 RTX_UNCHANGING_P (x
) = RTX_UNCHANGING_P (inner
);
2772 MEM_IN_STRUCT_P (x
) = MEM_IN_STRUCT_P (inner
);
2776 /* If we are in a SET_DEST, these other cases can't apply. */
2780 /* Changing mode twice with SUBREG => just change it once,
2781 or not at all if changing back to starting mode. */
2782 if (GET_CODE (SUBREG_REG (x
)) == SUBREG
)
2784 if (mode
== GET_MODE (SUBREG_REG (SUBREG_REG (x
)))
2785 && SUBREG_WORD (x
) == 0 && SUBREG_WORD (SUBREG_REG (x
)) == 0)
2786 return SUBREG_REG (SUBREG_REG (x
));
2788 SUBST_INT (SUBREG_WORD (x
),
2789 SUBREG_WORD (x
) + SUBREG_WORD (SUBREG_REG (x
)));
2790 SUBST (SUBREG_REG (x
), SUBREG_REG (SUBREG_REG (x
)));
2793 /* SUBREG of a hard register => just change the register number
2794 and/or mode. If the hard register is not valid in that mode,
2795 suppress this combination. */
2797 if (GET_CODE (SUBREG_REG (x
)) == REG
2798 && REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
)
2800 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x
)) + SUBREG_WORD (x
),
2802 return gen_rtx (REG
, mode
,
2803 REGNO (SUBREG_REG (x
)) + SUBREG_WORD (x
));
2805 return gen_rtx (CLOBBER
, mode
, const0_rtx
);
2808 /* For a constant, try to pick up the part we want. Handle a full
2809 word and low-order part. Only do this if we are narrowing
2810 the constant; if it is being widened, we have no idea what
2811 the extra bits will have been set to. */
2813 if (CONSTANT_P (SUBREG_REG (x
)) && op0_mode
!= VOIDmode
2814 && GET_MODE_SIZE (mode
) == UNITS_PER_WORD
2815 && GET_MODE_SIZE (op0_mode
) < UNITS_PER_WORD
2816 && GET_MODE_CLASS (mode
) == MODE_INT
)
2818 temp
= operand_subword (SUBREG_REG (x
), SUBREG_WORD (x
),
2824 if (CONSTANT_P (SUBREG_REG (x
)) && subreg_lowpart_p (x
)
2825 && GET_MODE_SIZE (mode
) < GET_MODE_SIZE (op0_mode
))
2826 return gen_lowpart_for_combine (mode
, SUBREG_REG (x
));
2828 /* If we are narrowing the object, we need to see if we can simplify
2829 the expression for the object knowing that we only need the
2832 if (GET_MODE_SIZE (mode
) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
)))
2833 && subreg_lowpart_p (x
))
2834 return force_to_mode (SUBREG_REG (x
), mode
, GET_MODE_BITSIZE (mode
),
2839 /* (not (plus X -1)) can become (neg X). */
2840 if (GET_CODE (XEXP (x
, 0)) == PLUS
2841 && XEXP (XEXP (x
, 0), 1) == constm1_rtx
)
2843 x
= gen_rtx_combine (NEG
, mode
, XEXP (XEXP (x
, 0), 0));
2847 /* Similarly, (not (neg X)) is (plus X -1). */
2848 if (GET_CODE (XEXP (x
, 0)) == NEG
)
2850 x
= gen_rtx_combine (PLUS
, mode
, XEXP (XEXP (x
, 0), 0), constm1_rtx
);
2854 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
2855 if (GET_CODE (XEXP (x
, 0)) == XOR
2856 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2857 && (temp
= simplify_unary_operation (NOT
, mode
,
2858 XEXP (XEXP (x
, 0), 1),
2861 SUBST (XEXP (XEXP (x
, 0), 1), temp
);
2865 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
2866 other than 1, but that is not valid. We could do a similar
2867 simplification for (not (lshiftrt C X)) where C is just the sign bit,
2868 but this doesn't seem common enough to bother with. */
2869 if (GET_CODE (XEXP (x
, 0)) == ASHIFT
2870 && XEXP (XEXP (x
, 0), 0) == const1_rtx
)
2872 x
= gen_rtx (ROTATE
, mode
, gen_unary (NOT
, mode
, const1_rtx
),
2873 XEXP (XEXP (x
, 0), 1));
2877 if (GET_CODE (XEXP (x
, 0)) == SUBREG
2878 && subreg_lowpart_p (XEXP (x
, 0))
2879 && (GET_MODE_SIZE (GET_MODE (XEXP (x
, 0)))
2880 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x
, 0)))))
2881 && GET_CODE (SUBREG_REG (XEXP (x
, 0))) == ASHIFT
2882 && XEXP (SUBREG_REG (XEXP (x
, 0)), 0) == const1_rtx
)
2884 enum machine_mode inner_mode
= GET_MODE (SUBREG_REG (XEXP (x
, 0)));
2886 x
= gen_rtx (ROTATE
, inner_mode
,
2887 gen_unary (NOT
, inner_mode
, const1_rtx
),
2888 XEXP (SUBREG_REG (XEXP (x
, 0)), 1));
2889 x
= gen_lowpart_for_combine (mode
, x
);
2893 #if STORE_FLAG_VALUE == -1
2894 /* (not (comparison foo bar)) can be done by reversing the comparison
2896 if (GET_RTX_CLASS (GET_CODE (XEXP (x
, 0))) == '<'
2897 && reversible_comparison_p (XEXP (x
, 0)))
2898 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x
, 0))),
2899 mode
, XEXP (XEXP (x
, 0), 0),
2900 XEXP (XEXP (x
, 0), 1));
2903 /* Apply De Morgan's laws to reduce number of patterns for machines
2904 with negating logical insns (and-not, nand, etc.). If result has
2905 only one NOT, put it first, since that is how the patterns are
2908 if (GET_CODE (XEXP (x
, 0)) == IOR
|| GET_CODE (XEXP (x
, 0)) == AND
)
2910 rtx in1
= XEXP (XEXP (x
, 0), 0), in2
= XEXP (XEXP (x
, 0), 1);
2912 if (GET_CODE (in1
) == NOT
)
2913 in1
= XEXP (in1
, 0);
2915 in1
= gen_rtx_combine (NOT
, GET_MODE (in1
), in1
);
2917 if (GET_CODE (in2
) == NOT
)
2918 in2
= XEXP (in2
, 0);
2919 else if (GET_CODE (in2
) == CONST_INT
2920 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
2921 in2
= GEN_INT (GET_MODE_MASK (mode
) & ~ INTVAL (in2
));
2923 in2
= gen_rtx_combine (NOT
, GET_MODE (in2
), in2
);
2925 if (GET_CODE (in2
) == NOT
)
2928 in2
= in1
; in1
= tem
;
2931 x
= gen_rtx_combine (GET_CODE (XEXP (x
, 0)) == IOR
? AND
: IOR
,
2938 /* (neg (plus X 1)) can become (not X). */
2939 if (GET_CODE (XEXP (x
, 0)) == PLUS
2940 && XEXP (XEXP (x
, 0), 1) == const1_rtx
)
2942 x
= gen_rtx_combine (NOT
, mode
, XEXP (XEXP (x
, 0), 0));
2946 /* Similarly, (neg (not X)) is (plus X 1). */
2947 if (GET_CODE (XEXP (x
, 0)) == NOT
)
2949 x
= gen_rtx_combine (PLUS
, mode
, XEXP (XEXP (x
, 0), 0), const1_rtx
);
2953 /* (neg (minus X Y)) can become (minus Y X). */
2954 if (GET_CODE (XEXP (x
, 0)) == MINUS
2955 && (GET_MODE_CLASS (mode
) != MODE_FLOAT
2956 /* x-y != -(y-x) with IEEE floating point. */
2957 || TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
))
2959 x
= gen_binary (MINUS
, mode
, XEXP (XEXP (x
, 0), 1),
2960 XEXP (XEXP (x
, 0), 0));
2964 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
2965 if (GET_CODE (XEXP (x
, 0)) == XOR
&& XEXP (XEXP (x
, 0), 1) == const1_rtx
2966 && significant_bits (XEXP (XEXP (x
, 0), 0), mode
) == 1)
2968 x
= gen_binary (PLUS
, mode
, XEXP (XEXP (x
, 0), 0), constm1_rtx
);
2972 /* NEG commutes with ASHIFT since it is multiplication. Only do this
2973 if we can then eliminate the NEG (e.g.,
2974 if the operand is a constant). */
2976 if (GET_CODE (XEXP (x
, 0)) == ASHIFT
)
2978 temp
= simplify_unary_operation (NEG
, mode
,
2979 XEXP (XEXP (x
, 0), 0), mode
);
2982 SUBST (XEXP (XEXP (x
, 0), 0), temp
);
2987 temp
= expand_compound_operation (XEXP (x
, 0));
2989 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
2990 replaced by (lshiftrt X C). This will convert
2991 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
2993 if (GET_CODE (temp
) == ASHIFTRT
2994 && GET_CODE (XEXP (temp
, 1)) == CONST_INT
2995 && INTVAL (XEXP (temp
, 1)) == GET_MODE_BITSIZE (mode
) - 1)
2997 x
= simplify_shift_const (temp
, LSHIFTRT
, mode
, XEXP (temp
, 0),
2998 INTVAL (XEXP (temp
, 1)));
3002 /* If X has only a single bit significant, say, bit I, convert
3003 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3004 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3005 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3006 or a SUBREG of one since we'd be making the expression more
3007 complex if it was just a register. */
3009 if (GET_CODE (temp
) != REG
3010 && ! (GET_CODE (temp
) == SUBREG
3011 && GET_CODE (SUBREG_REG (temp
)) == REG
)
3012 && (i
= exact_log2 (significant_bits (temp
, mode
))) >= 0)
3014 rtx temp1
= simplify_shift_const
3015 (NULL_RTX
, ASHIFTRT
, mode
,
3016 simplify_shift_const (NULL_RTX
, ASHIFT
, mode
, temp
,
3017 GET_MODE_BITSIZE (mode
) - 1 - i
),
3018 GET_MODE_BITSIZE (mode
) - 1 - i
);
3020 /* If all we did was surround TEMP with the two shifts, we
3021 haven't improved anything, so don't use it. Otherwise,
3022 we are better off with TEMP1. */
3023 if (GET_CODE (temp1
) != ASHIFTRT
3024 || GET_CODE (XEXP (temp1
, 0)) != ASHIFT
3025 || XEXP (XEXP (temp1
, 0), 0) != temp
)
3033 case FLOAT_TRUNCATE
:
3034 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3035 if (GET_CODE (XEXP (x
, 0)) == FLOAT_EXTEND
3036 && GET_MODE (XEXP (XEXP (x
, 0), 0)) == mode
)
3037 return XEXP (XEXP (x
, 0), 0);
3042 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3043 using cc0, in which case we want to leave it as a COMPARE
3044 so we can distinguish it from a register-register-copy. */
3045 if (XEXP (x
, 1) == const0_rtx
)
3048 /* In IEEE floating point, x-0 is not the same as x. */
3049 if ((TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
3050 || GET_MODE_CLASS (GET_MODE (XEXP (x
, 0))) == MODE_INT
)
3051 && XEXP (x
, 1) == CONST0_RTX (GET_MODE (XEXP (x
, 0))))
3057 /* (const (const X)) can become (const X). Do it this way rather than
3058 returning the inner CONST since CONST can be shared with a
3060 if (GET_CODE (XEXP (x
, 0)) == CONST
)
3061 SUBST (XEXP (x
, 0), XEXP (XEXP (x
, 0), 0));
3066 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3067 can add in an offset. find_split_point will split this address up
3068 again if it doesn't match. */
3069 if (GET_CODE (XEXP (x
, 0)) == HIGH
3070 && rtx_equal_p (XEXP (XEXP (x
, 0), 0), XEXP (x
, 1)))
3076 /* If we have (plus (plus (A const) B)), associate it so that CONST is
3077 outermost. That's because that's the way indexed addresses are
3078 supposed to appear. This code used to check many more cases, but
3079 they are now checked elsewhere. */
3080 if (GET_CODE (XEXP (x
, 0)) == PLUS
3081 && CONSTANT_ADDRESS_P (XEXP (XEXP (x
, 0), 1)))
3082 return gen_binary (PLUS
, mode
,
3083 gen_binary (PLUS
, mode
, XEXP (XEXP (x
, 0), 0),
3085 XEXP (XEXP (x
, 0), 1));
3087 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3088 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3089 bit-field and can be replaced by either a sign_extend or a
3090 sign_extract. The `and' may be a zero_extend. */
3091 if (GET_CODE (XEXP (x
, 0)) == XOR
3092 && GET_CODE (XEXP (x
, 1)) == CONST_INT
3093 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3094 && INTVAL (XEXP (x
, 1)) == - INTVAL (XEXP (XEXP (x
, 0), 1))
3095 && (i
= exact_log2 (INTVAL (XEXP (XEXP (x
, 0), 1)))) >= 0
3096 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
3097 && ((GET_CODE (XEXP (XEXP (x
, 0), 0)) == AND
3098 && GET_CODE (XEXP (XEXP (XEXP (x
, 0), 0), 1)) == CONST_INT
3099 && (INTVAL (XEXP (XEXP (XEXP (x
, 0), 0), 1))
3100 == ((HOST_WIDE_INT
) 1 << (i
+ 1)) - 1))
3101 || (GET_CODE (XEXP (XEXP (x
, 0), 0)) == ZERO_EXTEND
3102 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x
, 0), 0), 0)))
3105 x
= simplify_shift_const
3106 (NULL_RTX
, ASHIFTRT
, mode
,
3107 simplify_shift_const (NULL_RTX
, ASHIFT
, mode
,
3108 XEXP (XEXP (XEXP (x
, 0), 0), 0),
3109 GET_MODE_BITSIZE (mode
) - (i
+ 1)),
3110 GET_MODE_BITSIZE (mode
) - (i
+ 1));
3114 /* If only the low-order bit of X is significant, (plus x -1)
3115 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3116 the bitsize of the mode - 1. This allows simplification of
3117 "a = (b & 8) == 0;" */
3118 if (XEXP (x
, 1) == constm1_rtx
3119 && GET_CODE (XEXP (x
, 0)) != REG
3120 && ! (GET_CODE (XEXP (x
,0)) == SUBREG
3121 && GET_CODE (SUBREG_REG (XEXP (x
, 0))) == REG
)
3122 && significant_bits (XEXP (x
, 0), mode
) == 1)
3124 x
= simplify_shift_const
3125 (NULL_RTX
, ASHIFTRT
, mode
,
3126 simplify_shift_const (NULL_RTX
, ASHIFT
, mode
,
3127 gen_rtx_combine (XOR
, mode
,
3128 XEXP (x
, 0), const1_rtx
),
3129 GET_MODE_BITSIZE (mode
) - 1),
3130 GET_MODE_BITSIZE (mode
) - 1);
3134 /* If we are adding two things that have no bits in common, convert
3135 the addition into an IOR. This will often be further simplified,
3136 for example in cases like ((a & 1) + (a & 2)), which can
3139 if ((significant_bits (XEXP (x
, 0), mode
)
3140 & significant_bits (XEXP (x
, 1), mode
)) == 0)
3142 x
= gen_binary (IOR
, mode
, XEXP (x
, 0), XEXP (x
, 1));
3148 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3149 (and <foo> (const_int pow2-1)) */
3150 if (GET_CODE (XEXP (x
, 1)) == AND
3151 && GET_CODE (XEXP (XEXP (x
, 1), 1)) == CONST_INT
3152 && exact_log2 (- INTVAL (XEXP (XEXP (x
, 1), 1))) >= 0
3153 && rtx_equal_p (XEXP (XEXP (x
, 1), 0), XEXP (x
, 0)))
3155 x
= simplify_and_const_int (NULL_RTX
, mode
, XEXP (x
, 0),
3156 - INTVAL (XEXP (XEXP (x
, 1), 1)) - 1);
3162 /* If we have (mult (plus A B) C), apply the distributive law and then
3163 the inverse distributive law to see if things simplify. This
3164 occurs mostly in addresses, often when unrolling loops. */
3166 if (GET_CODE (XEXP (x
, 0)) == PLUS
)
3168 x
= apply_distributive_law
3169 (gen_binary (PLUS
, mode
,
3170 gen_binary (MULT
, mode
,
3171 XEXP (XEXP (x
, 0), 0), XEXP (x
, 1)),
3172 gen_binary (MULT
, mode
,
3173 XEXP (XEXP (x
, 0), 1), XEXP (x
, 1))));
3175 if (GET_CODE (x
) != MULT
)
3179 /* If this is multiplication by a power of two and its first operand is
3180 a shift, treat the multiply as a shift to allow the shifts to
3181 possibly combine. */
3182 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
3183 && (i
= exact_log2 (INTVAL (XEXP (x
, 1)))) >= 0
3184 && (GET_CODE (XEXP (x
, 0)) == ASHIFT
3185 || GET_CODE (XEXP (x
, 0)) == LSHIFTRT
3186 || GET_CODE (XEXP (x
, 0)) == ASHIFTRT
3187 || GET_CODE (XEXP (x
, 0)) == ROTATE
3188 || GET_CODE (XEXP (x
, 0)) == ROTATERT
))
3190 x
= simplify_shift_const (NULL_RTX
, ASHIFT
, mode
, XEXP (x
, 0), i
);
3194 /* Convert (mult (ashift (const_int 1) A) B) to (ashift B A). */
3195 if (GET_CODE (XEXP (x
, 0)) == ASHIFT
3196 && XEXP (XEXP (x
, 0), 0) == const1_rtx
)
3197 return gen_rtx_combine (ASHIFT
, mode
, XEXP (x
, 1),
3198 XEXP (XEXP (x
, 0), 1));
3202 /* If this is a divide by a power of two, treat it as a shift if
3203 its first operand is a shift. */
3204 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
3205 && (i
= exact_log2 (INTVAL (XEXP (x
, 1)))) >= 0
3206 && (GET_CODE (XEXP (x
, 0)) == ASHIFT
3207 || GET_CODE (XEXP (x
, 0)) == LSHIFTRT
3208 || GET_CODE (XEXP (x
, 0)) == ASHIFTRT
3209 || GET_CODE (XEXP (x
, 0)) == ROTATE
3210 || GET_CODE (XEXP (x
, 0)) == ROTATERT
))
3212 x
= simplify_shift_const (NULL_RTX
, LSHIFTRT
, mode
, XEXP (x
, 0), i
);
3218 case GT
: case GTU
: case GE
: case GEU
:
3219 case LT
: case LTU
: case LE
: case LEU
:
3220 /* If the first operand is a condition code, we can't do anything
3222 if (GET_CODE (XEXP (x
, 0)) == COMPARE
3223 || (GET_MODE_CLASS (GET_MODE (XEXP (x
, 0))) != MODE_CC
3225 && XEXP (x
, 0) != cc0_rtx
3229 rtx op0
= XEXP (x
, 0);
3230 rtx op1
= XEXP (x
, 1);
3231 enum rtx_code new_code
;
3233 if (GET_CODE (op0
) == COMPARE
)
3234 op1
= XEXP (op0
, 1), op0
= XEXP (op0
, 0);
3236 /* Simplify our comparison, if possible. */
3237 new_code
= simplify_comparison (code
, &op0
, &op1
);
3239 #if STORE_FLAG_VALUE == 1
3240 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3241 if only the low-order bit is significant in X (such as when
3242 X is a ZERO_EXTRACT of one bit. Similarly, we can convert
3244 if (new_code
== NE
&& GET_MODE_CLASS (mode
) == MODE_INT
3245 && op1
== const0_rtx
3246 && significant_bits (op0
, GET_MODE (op0
)) == 1)
3247 return gen_lowpart_for_combine (mode
, op0
);
3248 else if (new_code
== EQ
&& GET_MODE_CLASS (mode
) == MODE_INT
3249 && op1
== const0_rtx
3250 && significant_bits (op0
, GET_MODE (op0
)) == 1)
3251 return gen_rtx_combine (XOR
, mode
,
3252 gen_lowpart_for_combine (mode
, op0
),
3256 #if STORE_FLAG_VALUE == -1
3257 /* If STORE_FLAG_VALUE is -1, we can convert (ne x 0)
3258 to (neg x) if only the low-order bit of X is significant.
3259 This converts (ne (zero_extract X 1 Y) 0) to
3260 (sign_extract X 1 Y). */
3261 if (new_code
== NE
&& GET_MODE_CLASS (mode
) == MODE_INT
3262 && op1
== const0_rtx
3263 && significant_bits (op0
, GET_MODE (op0
)) == 1)
3265 x
= gen_rtx_combine (NEG
, mode
,
3266 gen_lowpart_for_combine (mode
, op0
));
3271 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3272 one significant bit, we can convert (ne x 0) to (ashift x c)
3273 where C puts the bit in the sign bit. Remove any AND with
3274 STORE_FLAG_VALUE when we are done, since we are only going to
3275 test the sign bit. */
3276 if (new_code
== NE
&& GET_MODE_CLASS (mode
) == MODE_INT
3277 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
3278 && (STORE_FLAG_VALUE
3279 == (HOST_WIDE_INT
) 1 << (GET_MODE_BITSIZE (mode
) - 1))
3280 && op1
== const0_rtx
3281 && mode
== GET_MODE (op0
)
3282 && (i
= exact_log2 (significant_bits (op0
, GET_MODE (op0
)))) >= 0)
3284 x
= simplify_shift_const (NULL_RTX
, ASHIFT
, mode
, op0
,
3285 GET_MODE_BITSIZE (mode
) - 1 - i
);
3286 if (GET_CODE (x
) == AND
&& XEXP (x
, 1) == const_true_rtx
)
3292 /* If the code changed, return a whole new comparison. */
3293 if (new_code
!= code
)
3294 return gen_rtx_combine (new_code
, mode
, op0
, op1
);
3296 /* Otherwise, keep this operation, but maybe change its operands.
3297 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3298 SUBST (XEXP (x
, 0), op0
);
3299 SUBST (XEXP (x
, 1), op1
);
3304 /* If we are testing a register for equality see if that register is
3305 used in one of the arms. If so, and we know something about its
3306 value in that arm, try to simplify it. */
3308 if ((GET_CODE (XEXP (x
, 0)) == EQ
|| GET_CODE (XEXP (x
, 0)) == NE
)
3309 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == REG
)
3311 /* Get the value being compared and the value it has on the equal
3314 rtx from
= XEXP (XEXP (x
, 0), 0);
3315 rtx val_if_eq
= XEXP (XEXP (x
, 0), 1);
3316 rtx val_if_ne
= from
;
3317 int is_eq
= (GET_CODE (XEXP (x
, 0)) == EQ
);
3319 /* If we are comparing against zero and the expression being tested
3320 has only a single significant bit, that is its value when it is
3321 not equal to zero. Similarly if it is known to be -1 or 0. */
3323 if (val_if_eq
== const0_rtx
3324 && exact_log2 (sig
= significant_bits (from
,
3325 GET_MODE (from
))) >= 0)
3326 val_if_ne
= GEN_INT (sig
);
3327 else if (val_if_eq
== const0_rtx
3328 && (num_sign_bit_copies (from
, GET_MODE (from
))
3329 == GET_MODE_BITSIZE (GET_MODE (from
))))
3330 val_if_ne
= constm1_rtx
;
3332 /* Now simplify an arm if we know the value of the register
3333 in the branch and it is used in the arm. Be careful due to
3334 the potential of locally-shared RTL. */
3336 if ((is_eq
|| val_if_ne
!= from
)
3337 && reg_mentioned_p (from
, XEXP (x
, 1)))
3338 SUBST (XEXP (x
, 1), subst (copy_rtx (XEXP (x
, 1)), from
,
3339 is_eq
? val_if_eq
: val_if_ne
, 0));
3341 if ((! is_eq
|| val_if_ne
!= from
)
3342 && reg_mentioned_p (from
, XEXP (x
, 2)))
3343 SUBST (XEXP (x
, 2), subst (XEXP (x
, 2), from
,
3344 is_eq
? val_if_ne
: val_if_eq
, 0));
3347 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3348 reversed, do so to avoid needing two sets of patterns for
3349 subtract-and-branch insns. Similarly if we have a constant in that
3351 if ((XEXP (x
, 1) == pc_rtx
|| GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3352 && GET_RTX_CLASS (GET_CODE (XEXP (x
, 0))) == '<'
3353 && reversible_comparison_p (XEXP (x
, 0)))
3356 gen_binary (reverse_condition (GET_CODE (XEXP (x
, 0))),
3357 GET_MODE (XEXP (x
, 0)),
3358 XEXP (XEXP (x
, 0), 0), XEXP (XEXP (x
, 0), 1)));
3361 SUBST (XEXP (x
, 1), XEXP (x
, 2));
3362 SUBST (XEXP (x
, 2), temp
);
3370 /* If we are processing SET_DEST, we are done. */
3374 x
= expand_compound_operation (x
);
3375 if (GET_CODE (x
) != code
)
3380 /* (set (pc) (return)) gets written as (return). */
3381 if (GET_CODE (SET_DEST (x
)) == PC
&& GET_CODE (SET_SRC (x
)) == RETURN
)
3384 /* Convert this into a field assignment operation, if possible. */
3385 x
= make_field_assignment (x
);
3387 /* If we are setting CC0 or if the source is a COMPARE, look for the
3388 use of the comparison result and try to simplify it unless we already
3389 have used undobuf.other_insn. */
3390 if ((GET_CODE (SET_SRC (x
)) == COMPARE
3392 || SET_DEST (x
) == cc0_rtx
3395 && (cc_use
= find_single_use (SET_DEST (x
), subst_insn
,
3397 && (undobuf
.other_insn
== 0 || other_insn
== undobuf
.other_insn
)
3398 && GET_RTX_CLASS (GET_CODE (*cc_use
)) == '<'
3399 && XEXP (*cc_use
, 0) == SET_DEST (x
))
3401 enum rtx_code old_code
= GET_CODE (*cc_use
);
3402 enum rtx_code new_code
;
3404 int other_changed
= 0;
3405 enum machine_mode compare_mode
= GET_MODE (SET_DEST (x
));
3407 if (GET_CODE (SET_SRC (x
)) == COMPARE
)
3408 op0
= XEXP (SET_SRC (x
), 0), op1
= XEXP (SET_SRC (x
), 1);
3410 op0
= SET_SRC (x
), op1
= const0_rtx
;
3412 /* Simplify our comparison, if possible. */
3413 new_code
= simplify_comparison (old_code
, &op0
, &op1
);
3415 #if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
3416 /* If this machine has CC modes other than CCmode, check to see
3417 if we need to use a different CC mode here. */
3418 compare_mode
= SELECT_CC_MODE (new_code
, op0
, op1
);
3420 /* If the mode changed, we have to change SET_DEST, the mode
3421 in the compare, and the mode in the place SET_DEST is used.
3422 If SET_DEST is a hard register, just build new versions with
3423 the proper mode. If it is a pseudo, we lose unless it is the only
3424 time we set the pseudo, in which case we can safely change
3426 if (compare_mode
!= GET_MODE (SET_DEST (x
)))
3428 int regno
= REGNO (SET_DEST (x
));
3429 rtx new_dest
= gen_rtx (REG
, compare_mode
, regno
);
3431 if (regno
< FIRST_PSEUDO_REGISTER
3432 || (reg_n_sets
[regno
] == 1
3433 && ! REG_USERVAR_P (SET_DEST (x
))))
3435 if (regno
>= FIRST_PSEUDO_REGISTER
)
3436 SUBST (regno_reg_rtx
[regno
], new_dest
);
3438 SUBST (SET_DEST (x
), new_dest
);
3439 SUBST (XEXP (*cc_use
, 0), new_dest
);
3445 /* If the code changed, we have to build a new comparison
3446 in undobuf.other_insn. */
3447 if (new_code
!= old_code
)
3451 SUBST (*cc_use
, gen_rtx_combine (new_code
, GET_MODE (*cc_use
),
3452 SET_DEST (x
), const0_rtx
));
3454 /* If the only change we made was to change an EQ into an
3455 NE or vice versa, OP0 has only one significant bit,
3456 and OP1 is zero, check if changing the user of the condition
3457 code will produce a valid insn. If it won't, we can keep
3458 the original code in that insn by surrounding our operation
3461 if (((old_code
== NE
&& new_code
== EQ
)
3462 || (old_code
== EQ
&& new_code
== NE
))
3463 && ! other_changed
&& op1
== const0_rtx
3464 && (GET_MODE_BITSIZE (GET_MODE (op0
))
3465 <= HOST_BITS_PER_WIDE_INT
)
3466 && (exact_log2 (mask
= significant_bits (op0
,
3470 rtx pat
= PATTERN (other_insn
), note
= 0;
3472 if ((recog_for_combine (&pat
, undobuf
.other_insn
, ¬e
) < 0
3473 && ! check_asm_operands (pat
)))
3475 PUT_CODE (*cc_use
, old_code
);
3478 op0
= gen_binary (XOR
, GET_MODE (op0
), op0
,
3487 undobuf
.other_insn
= other_insn
;
3490 /* If we are now comparing against zero, change our source if
3491 needed. If we do not use cc0, we always have a COMPARE. */
3492 if (op1
== const0_rtx
&& SET_DEST (x
) == cc0_rtx
)
3493 SUBST (SET_SRC (x
), op0
);
3497 /* Otherwise, if we didn't previously have a COMPARE in the
3498 correct mode, we need one. */
3499 if (GET_CODE (SET_SRC (x
)) != COMPARE
3500 || GET_MODE (SET_SRC (x
)) != compare_mode
)
3501 SUBST (SET_SRC (x
), gen_rtx_combine (COMPARE
, compare_mode
,
3505 /* Otherwise, update the COMPARE if needed. */
3506 SUBST (XEXP (SET_SRC (x
), 0), op0
);
3507 SUBST (XEXP (SET_SRC (x
), 1), op1
);
3512 /* Get SET_SRC in a form where we have placed back any
3513 compound expressions. Then do the checks below. */
3514 temp
= make_compound_operation (SET_SRC (x
), SET
);
3515 SUBST (SET_SRC (x
), temp
);
3518 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some
3519 operation, and X being a REG or (subreg (reg)), we may be able to
3520 convert this to (set (subreg:m2 x) (op)).
3522 We can always do this if M1 is narrower than M2 because that
3523 means that we only care about the low bits of the result.
3525 However, on most machines (those with BYTE_LOADS_ZERO_EXTEND
3526 not defined), we cannot perform a narrower operation than
3527 requested since the high-order bits will be undefined. On
3528 machines where BYTE_LOADS_ZERO_EXTEND is defined, however, this
3529 transformation is safe as long as M1 and M2 have the same number
3532 if (GET_CODE (SET_SRC (x
)) == SUBREG
3533 && subreg_lowpart_p (SET_SRC (x
))
3534 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x
)))) != 'o'
3535 && (((GET_MODE_SIZE (GET_MODE (SET_SRC (x
))) + (UNITS_PER_WORD
- 1))
3537 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x
))))
3538 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
))
3539 #ifndef BYTE_LOADS_ZERO_EXTEND
3540 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x
)))
3541 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x
)))))
3543 && (GET_CODE (SET_DEST (x
)) == REG
3544 || (GET_CODE (SET_DEST (x
)) == SUBREG
3545 && GET_CODE (SUBREG_REG (SET_DEST (x
))) == REG
)))
3547 SUBST (SET_DEST (x
),
3548 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_SRC (x
))),
3550 SUBST (SET_SRC (x
), SUBREG_REG (SET_SRC (x
)));
3553 #ifdef BYTE_LOADS_ZERO_EXTEND
3554 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with
3555 M wider than N, this would require a paradoxical subreg.
3556 Replace the subreg with a zero_extend to avoid the reload that
3557 would otherwise be required. */
3558 if (GET_CODE (SET_SRC (x
)) == SUBREG
3559 && subreg_lowpart_p (SET_SRC (x
))
3560 && SUBREG_WORD (SET_SRC (x
)) == 0
3561 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x
)))
3562 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x
)))))
3563 && GET_CODE (SUBREG_REG (SET_SRC (x
))) == MEM
)
3564 SUBST (SET_SRC (x
), gen_rtx_combine (ZERO_EXTEND
,
3565 GET_MODE (SET_SRC (x
)),
3566 XEXP (SET_SRC (x
), 0)));
3572 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3574 x
= simplify_and_const_int (x
, mode
, XEXP (x
, 0),
3575 INTVAL (XEXP (x
, 1)));
3577 /* If we have (ior (and (X C1) C2)) and the next restart would be
3578 the last, simplify this by making C1 as small as possible
3580 if (n_restarts
>= 3 && GET_CODE (x
) == IOR
3581 && GET_CODE (XEXP (x
, 0)) == AND
3582 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3583 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3585 temp
= gen_binary (AND
, mode
, XEXP (XEXP (x
, 0), 0),
3586 GEN_INT (INTVAL (XEXP (XEXP (x
, 0), 1))
3587 & ~ INTVAL (XEXP (x
, 1))));
3588 return gen_binary (IOR
, mode
, temp
, XEXP (x
, 1));
3591 if (GET_CODE (x
) != AND
)
3595 /* Convert (A | B) & A to A. */
3596 if (GET_CODE (XEXP (x
, 0)) == IOR
3597 && (rtx_equal_p (XEXP (XEXP (x
, 0), 0), XEXP (x
, 1))
3598 || rtx_equal_p (XEXP (XEXP (x
, 0), 1), XEXP (x
, 1)))
3599 && ! side_effects_p (XEXP (XEXP (x
, 0), 0))
3600 && ! side_effects_p (XEXP (XEXP (x
, 0), 1)))
3603 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
3604 insn (and may simplify more). */
3605 else if (GET_CODE (XEXP (x
, 0)) == XOR
3606 && rtx_equal_p (XEXP (XEXP (x
, 0), 0), XEXP (x
, 1))
3607 && ! side_effects_p (XEXP (x
, 1)))
3609 x
= gen_binary (AND
, mode
,
3610 gen_unary (NOT
, mode
, XEXP (XEXP (x
, 0), 1)),
3614 else if (GET_CODE (XEXP (x
, 0)) == XOR
3615 && rtx_equal_p (XEXP (XEXP (x
, 0), 1), XEXP (x
, 1))
3616 && ! side_effects_p (XEXP (x
, 1)))
3618 x
= gen_binary (AND
, mode
,
3619 gen_unary (NOT
, mode
, XEXP (XEXP (x
, 0), 0)),
3624 /* Similarly for (~ (A ^ B)) & A. */
3625 else if (GET_CODE (XEXP (x
, 0)) == NOT
3626 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == XOR
3627 && rtx_equal_p (XEXP (XEXP (XEXP (x
, 0), 0), 0), XEXP (x
, 1))
3628 && ! side_effects_p (XEXP (x
, 1)))
3630 x
= gen_binary (AND
, mode
, XEXP (XEXP (XEXP (x
, 0), 0), 1),
3634 else if (GET_CODE (XEXP (x
, 0)) == NOT
3635 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == XOR
3636 && rtx_equal_p (XEXP (XEXP (XEXP (x
, 0), 0), 1), XEXP (x
, 1))
3637 && ! side_effects_p (XEXP (x
, 1)))
3639 x
= gen_binary (AND
, mode
, XEXP (XEXP (XEXP (x
, 0), 0), 0),
3644 /* If we have (and A B) with A not an object but that is known to
3645 be -1 or 0, this is equivalent to the expression
3646 (if_then_else (ne A (const_int 0)) B (const_int 0))
3647 We make this conversion because it may allow further
3648 simplifications and then allow use of conditional move insns. */
3650 if (GET_RTX_CLASS (GET_CODE (XEXP (x
, 0))) != 'o'
3651 && ! (GET_CODE (XEXP (x
, 0)) == SUBREG
3652 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x
, 0)))) == 'o')
3653 && (num_sign_bit_copies (XEXP (x
, 0), GET_MODE (XEXP (x
, 0)))
3654 == GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0)))))
3656 rtx op0
= XEXP (x
, 0);
3657 rtx op1
= const0_rtx
;
3658 enum rtx_code comp_code
3659 = simplify_comparison (NE
, &op0
, &op1
);
3661 x
= gen_rtx_combine (IF_THEN_ELSE
, mode
,
3662 gen_binary (comp_code
, VOIDmode
, op0
, op1
),
3663 XEXP (x
, 1), const0_rtx
);
3667 /* In the following group of tests (and those in case IOR below),
3668 we start with some combination of logical operations and apply
3669 the distributive law followed by the inverse distributive law.
3670 Most of the time, this results in no change. However, if some of
3671 the operands are the same or inverses of each other, simplifications
3674 For example, (and (ior A B) (not B)) can occur as the result of
3675 expanding a bit field assignment. When we apply the distributive
3676 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
3677 which then simplifies to (and (A (not B))). */
3679 /* If we have (and (ior A B) C), apply the distributive law and then
3680 the inverse distributive law to see if things simplify. */
3682 if (GET_CODE (XEXP (x
, 0)) == IOR
|| GET_CODE (XEXP (x
, 0)) == XOR
)
3684 x
= apply_distributive_law
3685 (gen_binary (GET_CODE (XEXP (x
, 0)), mode
,
3686 gen_binary (AND
, mode
,
3687 XEXP (XEXP (x
, 0), 0), XEXP (x
, 1)),
3688 gen_binary (AND
, mode
,
3689 XEXP (XEXP (x
, 0), 1), XEXP (x
, 1))));
3690 if (GET_CODE (x
) != AND
)
3694 if (GET_CODE (XEXP (x
, 1)) == IOR
|| GET_CODE (XEXP (x
, 1)) == XOR
)
3696 x
= apply_distributive_law
3697 (gen_binary (GET_CODE (XEXP (x
, 1)), mode
,
3698 gen_binary (AND
, mode
,
3699 XEXP (XEXP (x
, 1), 0), XEXP (x
, 0)),
3700 gen_binary (AND
, mode
,
3701 XEXP (XEXP (x
, 1), 1), XEXP (x
, 0))));
3702 if (GET_CODE (x
) != AND
)
3706 /* Similarly, taking advantage of the fact that
3707 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
3709 if (GET_CODE (XEXP (x
, 0)) == NOT
&& GET_CODE (XEXP (x
, 1)) == XOR
)
3711 x
= apply_distributive_law
3712 (gen_binary (XOR
, mode
,
3713 gen_binary (IOR
, mode
, XEXP (XEXP (x
, 0), 0),
3714 XEXP (XEXP (x
, 1), 0)),
3715 gen_binary (IOR
, mode
, XEXP (XEXP (x
, 0), 0),
3716 XEXP (XEXP (x
, 1), 1))));
3717 if (GET_CODE (x
) != AND
)
3721 else if (GET_CODE (XEXP (x
, 1)) == NOT
&& GET_CODE (XEXP (x
, 0)) == XOR
)
3723 x
= apply_distributive_law
3724 (gen_binary (XOR
, mode
,
3725 gen_binary (IOR
, mode
, XEXP (XEXP (x
, 1), 0),
3726 XEXP (XEXP (x
, 0), 0)),
3727 gen_binary (IOR
, mode
, XEXP (XEXP (x
, 1), 0),
3728 XEXP (XEXP (x
, 0), 1))));
3729 if (GET_CODE (x
) != AND
)
3735 /* (ior A C) is C if all significant bits of A are on in C. */
3736 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
3737 && (significant_bits (XEXP (x
, 0), mode
)
3738 & ~ INTVAL (XEXP (x
, 1))) == 0)
3741 /* Convert (A & B) | A to A. */
3742 if (GET_CODE (XEXP (x
, 0)) == AND
3743 && (rtx_equal_p (XEXP (XEXP (x
, 0), 0), XEXP (x
, 1))
3744 || rtx_equal_p (XEXP (XEXP (x
, 0), 1), XEXP (x
, 1)))
3745 && ! side_effects_p (XEXP (XEXP (x
, 0), 0))
3746 && ! side_effects_p (XEXP (XEXP (x
, 0), 1)))
3749 /* If we have (ior (and A B) C), apply the distributive law and then
3750 the inverse distributive law to see if things simplify. */
3752 if (GET_CODE (XEXP (x
, 0)) == AND
)
3754 x
= apply_distributive_law
3755 (gen_binary (AND
, mode
,
3756 gen_binary (IOR
, mode
,
3757 XEXP (XEXP (x
, 0), 0), XEXP (x
, 1)),
3758 gen_binary (IOR
, mode
,
3759 XEXP (XEXP (x
, 0), 1), XEXP (x
, 1))));
3761 if (GET_CODE (x
) != IOR
)
3765 if (GET_CODE (XEXP (x
, 1)) == AND
)
3767 x
= apply_distributive_law
3768 (gen_binary (AND
, mode
,
3769 gen_binary (IOR
, mode
,
3770 XEXP (XEXP (x
, 1), 0), XEXP (x
, 0)),
3771 gen_binary (IOR
, mode
,
3772 XEXP (XEXP (x
, 1), 1), XEXP (x
, 0))));
3774 if (GET_CODE (x
) != IOR
)
3778 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
3779 mode size to (rotate A CX). */
3781 if (((GET_CODE (XEXP (x
, 0)) == ASHIFT
3782 && GET_CODE (XEXP (x
, 1)) == LSHIFTRT
)
3783 || (GET_CODE (XEXP (x
, 1)) == ASHIFT
3784 && GET_CODE (XEXP (x
, 0)) == LSHIFTRT
))
3785 && rtx_equal_p (XEXP (XEXP (x
, 0), 0), XEXP (XEXP (x
, 1), 0))
3786 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3787 && GET_CODE (XEXP (XEXP (x
, 1), 1)) == CONST_INT
3788 && (INTVAL (XEXP (XEXP (x
, 0), 1)) + INTVAL (XEXP (XEXP (x
, 1), 1))
3789 == GET_MODE_BITSIZE (mode
)))
3793 if (GET_CODE (XEXP (x
, 0)) == ASHIFT
)
3794 shift_count
= XEXP (XEXP (x
, 0), 1);
3796 shift_count
= XEXP (XEXP (x
, 1), 1);
3797 x
= gen_rtx (ROTATE
, mode
, XEXP (XEXP (x
, 0), 0), shift_count
);
3803 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
3804 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
3807 int num_negated
= 0;
3808 rtx in1
= XEXP (x
, 0), in2
= XEXP (x
, 1);
3810 if (GET_CODE (in1
) == NOT
)
3811 num_negated
++, in1
= XEXP (in1
, 0);
3812 if (GET_CODE (in2
) == NOT
)
3813 num_negated
++, in2
= XEXP (in2
, 0);
3815 if (num_negated
== 2)
3817 SUBST (XEXP (x
, 0), XEXP (XEXP (x
, 0), 0));
3818 SUBST (XEXP (x
, 1), XEXP (XEXP (x
, 1), 0));
3820 else if (num_negated
== 1)
3822 x
= gen_unary (NOT
, mode
,
3823 gen_binary (XOR
, mode
, in1
, in2
));
3828 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
3829 correspond to a machine insn or result in further simplifications
3830 if B is a constant. */
3832 if (GET_CODE (XEXP (x
, 0)) == AND
3833 && rtx_equal_p (XEXP (XEXP (x
, 0), 1), XEXP (x
, 1))
3834 && ! side_effects_p (XEXP (x
, 1)))
3836 x
= gen_binary (AND
, mode
,
3837 gen_unary (NOT
, mode
, XEXP (XEXP (x
, 0), 0)),
3841 else if (GET_CODE (XEXP (x
, 0)) == AND
3842 && rtx_equal_p (XEXP (XEXP (x
, 0), 0), XEXP (x
, 1))
3843 && ! side_effects_p (XEXP (x
, 1)))
3845 x
= gen_binary (AND
, mode
,
3846 gen_unary (NOT
, mode
, XEXP (XEXP (x
, 0), 1)),
3852 #if STORE_FLAG_VALUE == 1
3853 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
3855 if (XEXP (x
, 1) == const1_rtx
3856 && GET_RTX_CLASS (GET_CODE (XEXP (x
, 0))) == '<'
3857 && reversible_comparison_p (XEXP (x
, 0)))
3858 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x
, 0))),
3859 mode
, XEXP (XEXP (x
, 0), 0),
3860 XEXP (XEXP (x
, 0), 1));
3863 /* (xor (comparison foo bar) (const_int sign-bit))
3864 when STORE_FLAG_VALUE is the sign bit. */
3865 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
3866 && (STORE_FLAG_VALUE
3867 == (HOST_WIDE_INT
) 1 << (GET_MODE_BITSIZE (mode
) - 1))
3868 && XEXP (x
, 1) == const_true_rtx
3869 && GET_RTX_CLASS (GET_CODE (XEXP (x
, 0))) == '<'
3870 && reversible_comparison_p (XEXP (x
, 0)))
3871 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x
, 0))),
3872 mode
, XEXP (XEXP (x
, 0), 0),
3873 XEXP (XEXP (x
, 0), 1));
3877 /* (abs (neg <foo>)) -> (abs <foo>) */
3878 if (GET_CODE (XEXP (x
, 0)) == NEG
)
3879 SUBST (XEXP (x
, 0), XEXP (XEXP (x
, 0), 0));
3881 /* If operand is something known to be positive, ignore the ABS. */
3882 if (GET_CODE (XEXP (x
, 0)) == FFS
|| GET_CODE (XEXP (x
, 0)) == ABS
3883 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0)))
3884 <= HOST_BITS_PER_WIDE_INT
)
3885 && ((significant_bits (XEXP (x
, 0), GET_MODE (XEXP (x
, 0)))
3886 & ((HOST_WIDE_INT
) 1
3887 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0))) - 1)))
3892 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
3893 if (num_sign_bit_copies (XEXP (x
, 0), mode
) == GET_MODE_BITSIZE (mode
))
3895 x
= gen_rtx_combine (NEG
, mode
, XEXP (x
, 0));
3901 /* (ffs (*_extend <X>)) = (ffs <X>) */
3902 if (GET_CODE (XEXP (x
, 0)) == SIGN_EXTEND
3903 || GET_CODE (XEXP (x
, 0)) == ZERO_EXTEND
)
3904 SUBST (XEXP (x
, 0), XEXP (XEXP (x
, 0), 0));
3908 /* (float (sign_extend <X>)) = (float <X>). */
3909 if (GET_CODE (XEXP (x
, 0)) == SIGN_EXTEND
)
3910 SUBST (XEXP (x
, 0), XEXP (XEXP (x
, 0), 0));
3919 /* If this is a shift by a constant amount, simplify it. */
3920 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3922 x
= simplify_shift_const (x
, code
, mode
, XEXP (x
, 0),
3923 INTVAL (XEXP (x
, 1)));
3924 if (GET_CODE (x
) != code
)
3928 #ifdef SHIFT_COUNT_TRUNCATED
3929 else if (GET_CODE (XEXP (x
, 1)) != REG
)
3931 force_to_mode (XEXP (x
, 1), GET_MODE (x
),
3932 exact_log2 (GET_MODE_BITSIZE (GET_MODE (x
))),
3942 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
3943 operations" because they can be replaced with two more basic operations.
3944 ZERO_EXTEND is also considered "compound" because it can be replaced with
3945 an AND operation, which is simpler, though only one operation.
3947 The function expand_compound_operation is called with an rtx expression
3948 and will convert it to the appropriate shifts and AND operations,
3949 simplifying at each stage.
3951 The function make_compound_operation is called to convert an expression
3952 consisting of shifts and ANDs into the equivalent compound expression.
3953 It is the inverse of this function, loosely speaking. */
3956 expand_compound_operation (x
)
3964 switch (GET_CODE (x
))
3969 /* We can't necessarily use a const_int for a multiword mode;
3970 it depends on implicitly extending the value.
3971 Since we don't know the right way to extend it,
3972 we can't tell whether the implicit way is right.
3974 Even for a mode that is no wider than a const_int,
3975 we can't win, because we need to sign extend one of its bits through
3976 the rest of it, and we don't know which bit. */
3977 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
3980 if (! FAKE_EXTEND_SAFE_P (GET_MODE (XEXP (x
, 0)), XEXP (x
, 0)))
3983 len
= GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0)));
3984 /* If the inner object has VOIDmode (the only way this can happen
3985 is if it is a ASM_OPERANDS), we can't do anything since we don't
3986 know how much masking to do. */
3995 /* If the operand is a CLOBBER, just return it. */
3996 if (GET_CODE (XEXP (x
, 0)) == CLOBBER
)
3999 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
4000 || GET_CODE (XEXP (x
, 2)) != CONST_INT
4001 || GET_MODE (XEXP (x
, 0)) == VOIDmode
)
4004 len
= INTVAL (XEXP (x
, 1));
4005 pos
= INTVAL (XEXP (x
, 2));
4007 /* If this goes outside the object being extracted, replace the object
4008 with a (use (mem ...)) construct that only combine understands
4009 and is used only for this purpose. */
4010 if (len
+ pos
> GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0))))
4011 SUBST (XEXP (x
, 0), gen_rtx (USE
, GET_MODE (x
), XEXP (x
, 0)));
4014 pos
= GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0))) - len
- pos
;
4022 /* If we reach here, we want to return a pair of shifts. The inner
4023 shift is a left shift of BITSIZE - POS - LEN bits. The outer
4024 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
4025 logical depending on the value of UNSIGNEDP.
4027 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
4028 converted into an AND of a shift.
4030 We must check for the case where the left shift would have a negative
4031 count. This can happen in a case like (x >> 31) & 255 on machines
4032 that can't shift by a constant. On those machines, we would first
4033 combine the shift with the AND to produce a variable-position
4034 extraction. Then the constant of 31 would be substituted in to produce
4035 a such a position. */
4037 modewidth
= GET_MODE_BITSIZE (GET_MODE (x
));
4038 if (modewidth
>= pos
- len
)
4039 tem
= simplify_shift_const (NULL_RTX
, unsignedp
? LSHIFTRT
: ASHIFTRT
,
4041 simplify_shift_const (NULL_RTX
, ASHIFT
,
4044 modewidth
- pos
- len
),
4047 else if (unsignedp
&& len
< HOST_BITS_PER_WIDE_INT
)
4048 tem
= simplify_and_const_int (NULL_RTX
, GET_MODE (x
),
4049 simplify_shift_const (NULL_RTX
, LSHIFTRT
,
4052 ((HOST_WIDE_INT
) 1 << len
) - 1);
4054 /* Any other cases we can't handle. */
4058 /* If we couldn't do this for some reason, return the original
4060 if (GET_CODE (tem
) == CLOBBER
)
4066 /* X is a SET which contains an assignment of one object into
4067 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
4068 or certain SUBREGS). If possible, convert it into a series of
4071 We half-heartedly support variable positions, but do not at all
4072 support variable lengths. */
4075 expand_field_assignment (x
)
4079 rtx pos
; /* Always counts from low bit. */
4082 enum machine_mode compute_mode
;
4084 /* Loop until we find something we can't simplify. */
4087 if (GET_CODE (SET_DEST (x
)) == STRICT_LOW_PART
4088 && GET_CODE (XEXP (SET_DEST (x
), 0)) == SUBREG
)
4090 inner
= SUBREG_REG (XEXP (SET_DEST (x
), 0));
4091 len
= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x
), 0)));
4094 else if (GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
4095 && GET_CODE (XEXP (SET_DEST (x
), 1)) == CONST_INT
)
4097 inner
= XEXP (SET_DEST (x
), 0);
4098 len
= INTVAL (XEXP (SET_DEST (x
), 1));
4099 pos
= XEXP (SET_DEST (x
), 2);
4101 /* If the position is constant and spans the width of INNER,
4102 surround INNER with a USE to indicate this. */
4103 if (GET_CODE (pos
) == CONST_INT
4104 && INTVAL (pos
) + len
> GET_MODE_BITSIZE (GET_MODE (inner
)))
4105 inner
= gen_rtx (USE
, GET_MODE (SET_DEST (x
)), inner
);
4108 if (GET_CODE (pos
) == CONST_INT
)
4109 pos
= GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner
)) - len
4111 else if (GET_CODE (pos
) == MINUS
4112 && GET_CODE (XEXP (pos
, 1)) == CONST_INT
4113 && (INTVAL (XEXP (pos
, 1))
4114 == GET_MODE_BITSIZE (GET_MODE (inner
)) - len
))
4115 /* If position is ADJUST - X, new position is X. */
4116 pos
= XEXP (pos
, 0);
4118 pos
= gen_binary (MINUS
, GET_MODE (pos
),
4119 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner
))
4125 /* A SUBREG between two modes that occupy the same numbers of words
4126 can be done by moving the SUBREG to the source. */
4127 else if (GET_CODE (SET_DEST (x
)) == SUBREG
4128 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x
)))
4129 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)
4130 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x
))))
4131 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)))
4133 x
= gen_rtx (SET
, VOIDmode
, SUBREG_REG (SET_DEST (x
)),
4134 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x
))),
4141 while (GET_CODE (inner
) == SUBREG
&& subreg_lowpart_p (inner
))
4142 inner
= SUBREG_REG (inner
);
4144 compute_mode
= GET_MODE (inner
);
4146 /* Compute a mask of LEN bits, if we can do this on the host machine. */
4147 if (len
< HOST_BITS_PER_WIDE_INT
)
4148 mask
= GEN_INT (((HOST_WIDE_INT
) 1 << len
) - 1);
4152 /* Now compute the equivalent expression. Make a copy of INNER
4153 for the SET_DEST in case it is a MEM into which we will substitute;
4154 we don't want shared RTL in that case. */
4155 x
= gen_rtx (SET
, VOIDmode
, copy_rtx (inner
),
4156 gen_binary (IOR
, compute_mode
,
4157 gen_binary (AND
, compute_mode
,
4158 gen_unary (NOT
, compute_mode
,
4163 gen_binary (ASHIFT
, compute_mode
,
4164 gen_binary (AND
, compute_mode
,
4165 gen_lowpart_for_combine
4175 /* Return an RTX for a reference to LEN bits of INNER. POS is the starting
4176 bit position (counted from the LSB) if >= 0; otherwise POS_RTX represents
4177 the starting bit position.
4179 INNER may be a USE. This will occur when we started with a bitfield
4180 that went outside the boundary of the object in memory, which is
4181 allowed on most machines. To isolate this case, we produce a USE
4182 whose mode is wide enough and surround the MEM with it. The only
4183 code that understands the USE is this routine. If it is not removed,
4184 it will cause the resulting insn not to match.
4186 UNSIGNEDP is non-zero for an unsigned reference and zero for a
4189 IN_DEST is non-zero if this is a reference in the destination of a
4190 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
4191 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
4194 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
4195 ZERO_EXTRACT should be built even for bits starting at bit 0.
4197 MODE is the desired mode of the result (if IN_DEST == 0). */
4200 make_extraction (mode
, inner
, pos
, pos_rtx
, len
,
4201 unsignedp
, in_dest
, in_compare
)
4202 enum machine_mode mode
;
4208 int in_dest
, in_compare
;
4210 enum machine_mode is_mode
= GET_MODE (inner
);
4211 enum machine_mode inner_mode
;
4212 enum machine_mode wanted_mem_mode
= byte_mode
;
4213 enum machine_mode pos_mode
= word_mode
;
4214 enum machine_mode extraction_mode
= word_mode
;
4215 enum machine_mode tmode
= mode_for_size (len
, MODE_INT
, 1);
4219 /* Get some information about INNER and get the innermost object. */
4220 if (GET_CODE (inner
) == USE
)
4221 /* We don't need to adjust the position because we set up the USE
4222 to pretend that it was a full-word object. */
4223 spans_byte
= 1, inner
= XEXP (inner
, 0);
4224 else if (GET_CODE (inner
) == SUBREG
&& subreg_lowpart_p (inner
))
4225 inner
= SUBREG_REG (inner
);
4227 inner_mode
= GET_MODE (inner
);
4229 if (pos_rtx
&& GET_CODE (pos_rtx
) == CONST_INT
)
4230 pos
= INTVAL (pos_rtx
);
4232 /* See if this can be done without an extraction. We never can if the
4233 width of the field is not the same as that of some integer mode. For
4234 registers, we can only avoid the extraction if the position is at the
4235 low-order bit and this is either not in the destination or we have the
4236 appropriate STRICT_LOW_PART operation available.
4238 For MEM, we can avoid an extract if the field starts on an appropriate
4239 boundary and we can change the mode of the memory reference. However,
4240 we cannot directly access the MEM if we have a USE and the underlying
4241 MEM is not TMODE. This combination means that MEM was being used in a
4242 context where bits outside its mode were being referenced; that is only
4243 valid in bit-field insns. */
4245 if (tmode
!= BLKmode
4246 && ! (spans_byte
&& inner_mode
!= tmode
)
4247 && ((pos
== 0 && GET_CODE (inner
) != MEM
4249 || (GET_CODE (inner
) == REG
4250 && (movstrict_optab
->handlers
[(int) tmode
].insn_code
4251 != CODE_FOR_nothing
))))
4252 || (GET_CODE (inner
) == MEM
&& pos
>= 0
4254 % (STRICT_ALIGNMENT
? GET_MODE_ALIGNMENT (tmode
)
4255 : BITS_PER_UNIT
)) == 0
4256 /* We can't do this if we are widening INNER_MODE (it
4257 may not be aligned, for one thing). */
4258 && GET_MODE_BITSIZE (inner_mode
) >= GET_MODE_BITSIZE (tmode
)
4259 && (inner_mode
== tmode
4260 || (! mode_dependent_address_p (XEXP (inner
, 0))
4261 && ! MEM_VOLATILE_P (inner
))))))
4263 int offset
= pos
/ BITS_PER_UNIT
;
4265 /* If INNER is a MEM, make a new MEM that encompasses just the desired
4266 field. If the original and current mode are the same, we need not
4267 adjust the offset. Otherwise, we do if bytes big endian.
4269 If INNER is not a MEM, get a piece consisting of the just the field
4270 of interest (in this case POS must be 0). */
4272 if (GET_CODE (inner
) == MEM
)
4274 #if BYTES_BIG_ENDIAN
4275 if (inner_mode
!= tmode
)
4276 offset
= (GET_MODE_SIZE (inner_mode
)
4277 - GET_MODE_SIZE (tmode
) - offset
);
4280 new = gen_rtx (MEM
, tmode
, plus_constant (XEXP (inner
, 0), offset
));
4281 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner
);
4282 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner
);
4283 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner
);
4285 else if (GET_CODE (inner
) == REG
)
4286 /* We can't call gen_lowpart_for_combine here since we always want
4287 a SUBREG and it would sometimes return a new hard register. */
4288 new = gen_rtx (SUBREG
, tmode
, inner
,
4290 && GET_MODE_SIZE (is_mode
) > UNITS_PER_WORD
)
4291 ? ((GET_MODE_SIZE (is_mode
) - GET_MODE_SIZE (tmode
)
4295 new = force_to_mode (inner
, tmode
, len
, NULL_RTX
);
4297 /* If this extraction is going into the destination of a SET,
4298 make a STRICT_LOW_PART unless we made a MEM. */
4301 return (GET_CODE (new) == MEM
? new
4302 : (GET_CODE (new) != SUBREG
4303 ? gen_rtx (CLOBBER
, tmode
, const0_rtx
)
4304 : gen_rtx_combine (STRICT_LOW_PART
, VOIDmode
, new)));
4306 /* Otherwise, sign- or zero-extend unless we already are in the
4309 return (mode
== tmode
? new
4310 : gen_rtx_combine (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
,
4314 /* Unless this isin a COMPARE or we have a funny memory reference,
4315 don't do anything with field extracts starting at the low-order
4316 bit since they are simple AND operations. */
4317 if (pos
== 0 && ! in_dest
&& ! in_compare
&& ! spans_byte
)
4320 /* Get the mode to use should INNER be a MEM, the mode for the position,
4321 and the mode for the result. */
4325 wanted_mem_mode
= insn_operand_mode
[(int) CODE_FOR_insv
][0];
4326 pos_mode
= insn_operand_mode
[(int) CODE_FOR_insv
][2];
4327 extraction_mode
= insn_operand_mode
[(int) CODE_FOR_insv
][3];
4332 if (! in_dest
&& unsignedp
)
4334 wanted_mem_mode
= insn_operand_mode
[(int) CODE_FOR_extzv
][1];
4335 pos_mode
= insn_operand_mode
[(int) CODE_FOR_extzv
][3];
4336 extraction_mode
= insn_operand_mode
[(int) CODE_FOR_extzv
][0];
4341 if (! in_dest
&& ! unsignedp
)
4343 wanted_mem_mode
= insn_operand_mode
[(int) CODE_FOR_extv
][1];
4344 pos_mode
= insn_operand_mode
[(int) CODE_FOR_extv
][3];
4345 extraction_mode
= insn_operand_mode
[(int) CODE_FOR_extv
][0];
4349 /* Never narrow an object, since that might not be safe. */
4351 if (mode
!= VOIDmode
4352 && GET_MODE_SIZE (extraction_mode
) < GET_MODE_SIZE (mode
))
4353 extraction_mode
= mode
;
4355 if (pos_rtx
&& GET_MODE (pos_rtx
) != VOIDmode
4356 && GET_MODE_SIZE (pos_mode
) < GET_MODE_SIZE (GET_MODE (pos_rtx
)))
4357 pos_mode
= GET_MODE (pos_rtx
);
4359 /* If this is not from memory or we have to change the mode of memory and
4360 cannot, the desired mode is EXTRACTION_MODE. */
4361 if (GET_CODE (inner
) != MEM
4362 || (inner_mode
!= wanted_mem_mode
4363 && (mode_dependent_address_p (XEXP (inner
, 0))
4364 || MEM_VOLATILE_P (inner
))))
4365 wanted_mem_mode
= extraction_mode
;
4368 /* If position is constant, compute new position. Otherwise, build
4371 pos
= (MAX (GET_MODE_BITSIZE (is_mode
), GET_MODE_BITSIZE (wanted_mem_mode
))
4375 = gen_rtx_combine (MINUS
, GET_MODE (pos_rtx
),
4376 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode
),
4377 GET_MODE_BITSIZE (wanted_mem_mode
))
4382 /* If INNER has a wider mode, make it smaller. If this is a constant
4383 extract, try to adjust the byte to point to the byte containing
4385 if (wanted_mem_mode
!= VOIDmode
4386 && GET_MODE_SIZE (wanted_mem_mode
) < GET_MODE_SIZE (is_mode
)
4387 && ((GET_CODE (inner
) == MEM
4388 && (inner_mode
== wanted_mem_mode
4389 || (! mode_dependent_address_p (XEXP (inner
, 0))
4390 && ! MEM_VOLATILE_P (inner
))))))
4394 /* The computations below will be correct if the machine is big
4395 endian in both bits and bytes or little endian in bits and bytes.
4396 If it is mixed, we must adjust. */
4398 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
4399 if (! spans_byte
&& is_mode
!= wanted_mem_mode
)
4400 offset
= (GET_MODE_SIZE (is_mode
)
4401 - GET_MODE_SIZE (wanted_mem_mode
) - offset
);
4404 /* If bytes are big endian and we had a paradoxical SUBREG, we must
4405 adjust OFFSET to compensate. */
4406 #if BYTES_BIG_ENDIAN
4408 && GET_MODE_SIZE (inner_mode
) < GET_MODE_SIZE (is_mode
))
4409 offset
-= GET_MODE_SIZE (is_mode
) - GET_MODE_SIZE (inner_mode
);
4412 /* If this is a constant position, we can move to the desired byte. */
4415 offset
+= pos
/ BITS_PER_UNIT
;
4416 pos
%= GET_MODE_BITSIZE (wanted_mem_mode
);
4419 if (offset
!= 0 || inner_mode
!= wanted_mem_mode
)
4421 rtx newmem
= gen_rtx (MEM
, wanted_mem_mode
,
4422 plus_constant (XEXP (inner
, 0), offset
));
4423 RTX_UNCHANGING_P (newmem
) = RTX_UNCHANGING_P (inner
);
4424 MEM_VOLATILE_P (newmem
) = MEM_VOLATILE_P (inner
);
4425 MEM_IN_STRUCT_P (newmem
) = MEM_IN_STRUCT_P (inner
);
4430 /* If INNER is not memory, we can always get it into the proper mode. */
4431 else if (GET_CODE (inner
) != MEM
)
4432 inner
= force_to_mode (inner
, extraction_mode
,
4433 (pos
< 0 ? GET_MODE_BITSIZE (extraction_mode
)
4437 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
4438 have to zero extend. Otherwise, we can just use a SUBREG. */
4440 && GET_MODE_SIZE (pos_mode
) > GET_MODE_SIZE (GET_MODE (pos_rtx
)))
4441 pos_rtx
= gen_rtx_combine (ZERO_EXTEND
, pos_mode
, pos_rtx
);
4443 && GET_MODE_SIZE (pos_mode
) < GET_MODE_SIZE (GET_MODE (pos_rtx
)))
4444 pos_rtx
= gen_lowpart_for_combine (pos_mode
, pos_rtx
);
4446 /* Make POS_RTX unless we already have it and it is correct. */
4447 if (pos_rtx
== 0 || (pos
>= 0 && INTVAL (pos_rtx
) != pos
))
4448 pos_rtx
= GEN_INT (pos
);
4450 /* Make the required operation. See if we can use existing rtx. */
4451 new = gen_rtx_combine (unsignedp
? ZERO_EXTRACT
: SIGN_EXTRACT
,
4452 extraction_mode
, inner
, GEN_INT (len
), pos_rtx
);
4454 new = gen_lowpart_for_combine (mode
, new);
4459 /* Look at the expression rooted at X. Look for expressions
4460 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
4461 Form these expressions.
4463 Return the new rtx, usually just X.
4465 Also, for machines like the Vax that don't have logical shift insns,
4466 try to convert logical to arithmetic shift operations in cases where
4467 they are equivalent. This undoes the canonicalizations to logical
4468 shifts done elsewhere.
4470 We try, as much as possible, to re-use rtl expressions to save memory.
4472 IN_CODE says what kind of expression we are processing. Normally, it is
4473 SET. In a memory address (inside a MEM, PLUS or minus, the latter two
4474 being kludges), it is MEM. When processing the arguments of a comparison
4475 or a COMPARE against zero, it is COMPARE. */
4478 make_compound_operation (x
, in_code
)
4480 enum rtx_code in_code
;
4482 enum rtx_code code
= GET_CODE (x
);
4483 enum machine_mode mode
= GET_MODE (x
);
4484 int mode_width
= GET_MODE_BITSIZE (mode
);
4485 enum rtx_code next_code
;
4490 /* Select the code to be used in recursive calls. Once we are inside an
4491 address, we stay there. If we have a comparison, set to COMPARE,
4492 but once inside, go back to our default of SET. */
4494 next_code
= (code
== MEM
|| code
== PLUS
|| code
== MINUS
? MEM
4495 : ((code
== COMPARE
|| GET_RTX_CLASS (code
) == '<')
4496 && XEXP (x
, 1) == const0_rtx
) ? COMPARE
4497 : in_code
== COMPARE
? SET
: in_code
);
4499 /* Process depending on the code of this operation. If NEW is set
4500 non-zero, it will be returned. */
4506 /* Convert shifts by constants into multiplications if inside
4508 if (in_code
== MEM
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
4509 && INTVAL (XEXP (x
, 1)) < HOST_BITS_PER_WIDE_INT
4510 && INTVAL (XEXP (x
, 1)) >= 0)
4511 new = gen_rtx_combine (MULT
, mode
, XEXP (x
, 0),
4512 GEN_INT ((HOST_WIDE_INT
) 1
4513 << INTVAL (XEXP (x
, 1))));
4517 /* If the second operand is not a constant, we can't do anything
4519 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
4522 /* If the constant is a power of two minus one and the first operand
4523 is a logical right shift, make an extraction. */
4524 if (GET_CODE (XEXP (x
, 0)) == LSHIFTRT
4525 && (i
= exact_log2 (INTVAL (XEXP (x
, 1)) + 1)) >= 0)
4526 new = make_extraction (mode
, XEXP (XEXP (x
, 0), 0), -1,
4527 XEXP (XEXP (x
, 0), 1), i
, 1,
4528 0, in_code
== COMPARE
);
4530 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
4531 else if (GET_CODE (XEXP (x
, 0)) == SUBREG
4532 && subreg_lowpart_p (XEXP (x
, 0))
4533 && GET_CODE (SUBREG_REG (XEXP (x
, 0))) == LSHIFTRT
4534 && (i
= exact_log2 (INTVAL (XEXP (x
, 1)) + 1)) >= 0)
4535 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x
, 0))),
4536 XEXP (SUBREG_REG (XEXP (x
, 0)), 0), -1,
4537 XEXP (SUBREG_REG (XEXP (x
, 0)), 1), i
, 1,
4538 0, in_code
== COMPARE
);
4541 /* If we are have (and (rotate X C) M) and C is larger than the number
4542 of bits in M, this is an extraction. */
4544 else if (GET_CODE (XEXP (x
, 0)) == ROTATE
4545 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
4546 && (i
= exact_log2 (INTVAL (XEXP (x
, 1)) + 1)) >= 0
4547 && i
<= INTVAL (XEXP (XEXP (x
, 0), 1)))
4548 new = make_extraction (mode
, XEXP (XEXP (x
, 0), 0),
4549 (GET_MODE_BITSIZE (mode
)
4550 - INTVAL (XEXP (XEXP (x
, 0), 1))),
4551 NULL_RTX
, i
, 1, 0, in_code
== COMPARE
);
4553 /* On machines without logical shifts, if the operand of the AND is
4554 a logical shift and our mask turns off all the propagated sign
4555 bits, we can replace the logical shift with an arithmetic shift. */
4556 else if (ashr_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
4557 && (lshr_optab
->handlers
[(int) mode
].insn_code
4558 == CODE_FOR_nothing
)
4559 && GET_CODE (XEXP (x
, 0)) == LSHIFTRT
4560 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
4561 && INTVAL (XEXP (XEXP (x
, 0), 1)) >= 0
4562 && INTVAL (XEXP (XEXP (x
, 0), 1)) < HOST_BITS_PER_WIDE_INT
4563 && mode_width
<= HOST_BITS_PER_WIDE_INT
)
4565 unsigned HOST_WIDE_INT mask
= GET_MODE_MASK (mode
);
4567 mask
>>= INTVAL (XEXP (XEXP (x
, 0), 1));
4568 if ((INTVAL (XEXP (x
, 1)) & ~mask
) == 0)
4570 gen_rtx_combine (ASHIFTRT
, mode
, XEXP (XEXP (x
, 0), 0),
4571 XEXP (XEXP (x
, 0), 1)));
4574 /* If the constant is one less than a power of two, this might be
4575 representable by an extraction even if no shift is present.
4576 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
4577 we are in a COMPARE. */
4578 else if ((i
= exact_log2 (INTVAL (XEXP (x
, 1)) + 1)) >= 0)
4579 new = make_extraction (mode
, XEXP (x
, 0), 0, NULL_RTX
, i
, 1,
4580 0, in_code
== COMPARE
);
4582 /* If we are in a comparison and this is an AND with a power of two,
4583 convert this into the appropriate bit extract. */
4584 else if (in_code
== COMPARE
4585 && (i
= exact_log2 (INTVAL (XEXP (x
, 1)))) >= 0)
4586 new = make_extraction (mode
, XEXP (x
, 0), i
, NULL_RTX
, 1, 1, 0, 1);
4591 /* If the sign bit is known to be zero, replace this with an
4592 arithmetic shift. */
4593 if (ashr_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
4594 && lshr_optab
->handlers
[(int) mode
].insn_code
!= CODE_FOR_nothing
4595 && mode_width
<= HOST_BITS_PER_WIDE_INT
4596 && (significant_bits (XEXP (x
, 0), mode
)
4597 & (1 << (mode_width
- 1))) == 0)
4599 new = gen_rtx_combine (ASHIFTRT
, mode
, XEXP (x
, 0), XEXP (x
, 1));
4603 /* ... fall through ... */
4606 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
4607 this is a SIGN_EXTRACT. */
4608 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
4609 && GET_CODE (XEXP (x
, 0)) == ASHIFT
4610 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
4611 && INTVAL (XEXP (x
, 1)) >= INTVAL (XEXP (XEXP (x
, 0), 1)))
4612 new = make_extraction (mode
, XEXP (XEXP (x
, 0), 0),
4613 (INTVAL (XEXP (x
, 1))
4614 - INTVAL (XEXP (XEXP (x
, 0), 1))),
4615 NULL_RTX
, mode_width
- INTVAL (XEXP (x
, 1)),
4616 code
== LSHIFTRT
, 0, in_code
== COMPARE
);
4618 /* Similarly if we have (ashifrt (OP (ashift foo C1) C3) C2). In these
4619 cases, we are better off returning a SIGN_EXTEND of the operation. */
4621 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
4622 && (GET_CODE (XEXP (x
, 0)) == IOR
|| GET_CODE (XEXP (x
, 0)) == AND
4623 || GET_CODE (XEXP (x
, 0)) == XOR
4624 || GET_CODE (XEXP (x
, 0)) == PLUS
)
4625 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == ASHIFT
4626 && GET_CODE (XEXP (XEXP (XEXP (x
, 0), 0), 1)) == CONST_INT
4627 && INTVAL (XEXP (x
, 1)) >= INTVAL (XEXP (XEXP (XEXP (x
, 0), 0), 1))
4628 && INTVAL (XEXP (XEXP (XEXP (x
, 0), 0), 1)) < HOST_BITS_PER_WIDE_INT
4629 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
4630 && (INTVAL (XEXP (XEXP (x
, 0), 1))
4631 & (((HOST_WIDE_INT
) 1
4632 << INTVAL (XEXP (XEXP (XEXP (x
, 0), 0), 1))) - 1)) == 0)
4634 HOST_WIDE_INT newop1
4635 = (INTVAL (XEXP (XEXP (x
, 0), 1))
4636 >> INTVAL (XEXP (XEXP (XEXP (x
, 0), 0), 1)));
4638 new = make_extraction (mode
,
4639 gen_binary (GET_CODE (XEXP (x
, 0)), mode
,
4640 XEXP (XEXP (XEXP (x
, 0), 0), 0),
4642 (INTVAL (XEXP (x
, 1))
4643 - INTVAL (XEXP (XEXP (XEXP (x
, 0), 0), 1))),
4644 NULL_RTX
, mode_width
- INTVAL (XEXP (x
, 1)),
4645 code
== LSHIFTRT
, 0, in_code
== COMPARE
);
4653 x
= gen_lowpart_for_combine (mode
, new);
4654 code
= GET_CODE (x
);
4657 /* Now recursively process each operand of this operation. */
4658 fmt
= GET_RTX_FORMAT (code
);
4659 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
4662 new = make_compound_operation (XEXP (x
, i
), next_code
);
4663 SUBST (XEXP (x
, i
), new);
4669 /* Given M see if it is a value that would select a field of bits
4670 within an item, but not the entire word. Return -1 if not.
4671 Otherwise, return the starting position of the field, where 0 is the
4674 *PLEN is set to the length of the field. */
4677 get_pos_from_mask (m
, plen
)
4678 unsigned HOST_WIDE_INT m
;
4681 /* Get the bit number of the first 1 bit from the right, -1 if none. */
4682 int pos
= exact_log2 (m
& - m
);
4687 /* Now shift off the low-order zero bits and see if we have a power of
4689 *plen
= exact_log2 ((m
>> pos
) + 1);
4697 /* Rewrite X so that it is an expression in MODE. We only care about the
4698 low-order BITS bits so we can ignore AND operations that just clear
4701 Also, if REG is non-zero and X is a register equal in value to REG,
4702 replace X with REG. */
4705 force_to_mode (x
, mode
, bits
, reg
)
4707 enum machine_mode mode
;
4711 enum rtx_code code
= GET_CODE (x
);
4712 enum machine_mode op_mode
= mode
;
4714 /* If X is narrower than MODE or if BITS is larger than the size of MODE,
4715 just get X in the proper mode. */
4717 if (GET_MODE_SIZE (GET_MODE (x
)) < GET_MODE_SIZE (mode
)
4718 || bits
> GET_MODE_BITSIZE (mode
))
4719 return gen_lowpart_for_combine (mode
, x
);
4727 x
= expand_compound_operation (x
);
4728 if (GET_CODE (x
) != code
)
4729 return force_to_mode (x
, mode
, bits
, reg
);
4733 if (reg
!= 0 && (rtx_equal_p (get_last_value (reg
), x
)
4734 || rtx_equal_p (reg
, get_last_value (x
))))
4739 if (bits
< HOST_BITS_PER_WIDE_INT
)
4740 x
= GEN_INT (INTVAL (x
) & (((HOST_WIDE_INT
) 1 << bits
) - 1));
4744 /* Ignore low-order SUBREGs. */
4745 if (subreg_lowpart_p (x
))
4746 return force_to_mode (SUBREG_REG (x
), mode
, bits
, reg
);
4750 /* If this is an AND with a constant. Otherwise, we fall through to
4751 do the general binary case. */
4753 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
4755 HOST_WIDE_INT mask
= INTVAL (XEXP (x
, 1));
4756 int len
= exact_log2 (mask
+ 1);
4757 rtx op
= XEXP (x
, 0);
4759 /* If this is masking some low-order bits, we may be able to
4760 impose a stricter constraint on what bits of the operand are
4763 op
= force_to_mode (op
, mode
, len
> 0 ? MIN (len
, bits
) : bits
,
4766 if (bits
< HOST_BITS_PER_WIDE_INT
)
4767 mask
&= ((HOST_WIDE_INT
) 1 << bits
) - 1;
4769 /* If we have no AND in MODE, use the original mode for the
4772 if (and_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
4773 op_mode
= GET_MODE (x
);
4775 x
= simplify_and_const_int (x
, op_mode
, op
, mask
);
4777 /* If X is still an AND, see if it is an AND with a mask that
4778 is just some low-order bits. If so, and it is BITS wide (it
4779 can't be wider), we don't need it. */
4781 if (GET_CODE (x
) == AND
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
4782 && bits
< HOST_BITS_PER_WIDE_INT
4783 && INTVAL (XEXP (x
, 1)) == ((HOST_WIDE_INT
) 1 << bits
) - 1)
4789 /* ... fall through ... */
4796 /* For most binary operations, just propagate into the operation and
4797 change the mode if we have an operation of that mode. */
4800 && add_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
4802 && sub_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
4803 || (code
== MULT
&& (smul_optab
->handlers
[(int) mode
].insn_code
4804 == CODE_FOR_nothing
))
4806 && ior_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
4807 || (code
== XOR
&& (xor_optab
->handlers
[(int) mode
].insn_code
4808 == CODE_FOR_nothing
)))
4809 op_mode
= GET_MODE (x
);
4811 x
= gen_binary (code
, op_mode
,
4812 gen_lowpart_for_combine (op_mode
,
4813 force_to_mode (XEXP (x
, 0),
4816 gen_lowpart_for_combine (op_mode
,
4817 force_to_mode (XEXP (x
, 1),
4824 /* For left shifts, do the same, but just for the first operand.
4825 If the shift count is a constant, we need even fewer bits of the
4828 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
&& INTVAL (XEXP (x
, 1)) < bits
)
4829 bits
-= INTVAL (XEXP (x
, 1));
4832 && ashl_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
4833 || (code
== LSHIFT
&& (lshl_optab
->handlers
[(int) mode
].insn_code
4834 == CODE_FOR_nothing
)))
4835 op_mode
= GET_MODE (x
);
4837 x
= gen_binary (code
, op_mode
,
4838 gen_lowpart_for_combine (op_mode
,
4839 force_to_mode (XEXP (x
, 0),
4846 /* Here we can only do something if the shift count is a constant and
4847 the count plus BITS is no larger than the width of MODE, we can do
4848 the shift in MODE. */
4850 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
4851 && INTVAL (XEXP (x
, 1)) + bits
<= GET_MODE_BITSIZE (mode
))
4853 rtx inner
= force_to_mode (XEXP (x
, 0), mode
,
4854 bits
+ INTVAL (XEXP (x
, 1)), reg
);
4856 if (lshr_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
4857 op_mode
= GET_MODE (x
);
4859 x
= gen_binary (LSHIFTRT
, op_mode
,
4860 gen_lowpart_for_combine (op_mode
, inner
),
4866 /* If this is a sign-extension operation that just affects bits
4867 we don't care about, remove it. */
4869 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
4870 && INTVAL (XEXP (x
, 1)) >= 0
4871 && INTVAL (XEXP (x
, 1)) <= GET_MODE_BITSIZE (GET_MODE (x
)) - bits
4872 && GET_CODE (XEXP (x
, 0)) == ASHIFT
4873 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
4874 && INTVAL (XEXP (XEXP (x
, 0), 1)) == INTVAL (XEXP (x
, 1)))
4875 return force_to_mode (XEXP (XEXP (x
, 0), 0), mode
, bits
, reg
);
4881 && neg_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
4882 || (code
== NOT
&& (one_cmpl_optab
->handlers
[(int) mode
].insn_code
4883 == CODE_FOR_nothing
)))
4884 op_mode
= GET_MODE (x
);
4886 /* Handle these similarly to the way we handle most binary operations. */
4887 x
= gen_unary (code
, op_mode
,
4888 gen_lowpart_for_combine (op_mode
,
4889 force_to_mode (XEXP (x
, 0), mode
,
4894 /* We have no way of knowing if the IF_THEN_ELSE can itself be
4895 written in a narrower mode. We play it safe and do not do so. */
4898 gen_lowpart_for_combine (GET_MODE (x
),
4899 force_to_mode (XEXP (x
, 1), mode
,
4902 gen_lowpart_for_combine (GET_MODE (x
),
4903 force_to_mode (XEXP (x
, 2), mode
,
4908 /* Ensure we return a value of the proper mode. */
4909 return gen_lowpart_for_combine (mode
, x
);
4912 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
4913 Return that assignment if so.
4915 We only handle the most common cases. */
4918 make_field_assignment (x
)
4921 rtx dest
= SET_DEST (x
);
4922 rtx src
= SET_SRC (x
);
4928 enum machine_mode mode
;
4930 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
4931 a clear of a one-bit field. We will have changed it to
4932 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
4935 if (GET_CODE (src
) == AND
&& GET_CODE (XEXP (src
, 0)) == ROTATE
4936 && GET_CODE (XEXP (XEXP (src
, 0), 0)) == CONST_INT
4937 && INTVAL (XEXP (XEXP (src
, 0), 0)) == -2
4938 && (rtx_equal_p (dest
, XEXP (src
, 1))
4939 || rtx_equal_p (dest
, get_last_value (XEXP (src
, 1)))
4940 || rtx_equal_p (get_last_value (dest
), XEXP (src
, 1))))
4942 assign
= make_extraction (VOIDmode
, dest
, -1, XEXP (XEXP (src
, 0), 1),
4944 return gen_rtx (SET
, VOIDmode
, assign
, const0_rtx
);
4947 else if (GET_CODE (src
) == AND
&& GET_CODE (XEXP (src
, 0)) == SUBREG
4948 && subreg_lowpart_p (XEXP (src
, 0))
4949 && (GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)))
4950 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src
, 0)))))
4951 && GET_CODE (SUBREG_REG (XEXP (src
, 0))) == ROTATE
4952 && INTVAL (XEXP (SUBREG_REG (XEXP (src
, 0)), 0)) == -2
4953 && (rtx_equal_p (dest
, XEXP (src
, 1))
4954 || rtx_equal_p (dest
, get_last_value (XEXP (src
, 1)))
4955 || rtx_equal_p (get_last_value (dest
), XEXP (src
, 1))))
4957 assign
= make_extraction (VOIDmode
, dest
, -1,
4958 XEXP (SUBREG_REG (XEXP (src
, 0)), 1),
4960 return gen_rtx (SET
, VOIDmode
, assign
, const0_rtx
);
4963 /* If SRC is (ior (ashift (const_int 1) POS DEST)), this is a set of a
4965 else if (GET_CODE (src
) == IOR
&& GET_CODE (XEXP (src
, 0)) == ASHIFT
4966 && XEXP (XEXP (src
, 0), 0) == const1_rtx
4967 && (rtx_equal_p (dest
, XEXP (src
, 1))
4968 || rtx_equal_p (dest
, get_last_value (XEXP (src
, 1)))
4969 || rtx_equal_p (get_last_value (dest
), XEXP (src
, 1))))
4971 assign
= make_extraction (VOIDmode
, dest
, -1, XEXP (XEXP (src
, 0), 1),
4973 return gen_rtx (SET
, VOIDmode
, assign
, const1_rtx
);
4976 /* The other case we handle is assignments into a constant-position
4977 field. They look like (ior (and DEST C1) OTHER). If C1 represents
4978 a mask that has all one bits except for a group of zero bits and
4979 OTHER is known to have zeros where C1 has ones, this is such an
4980 assignment. Compute the position and length from C1. Shift OTHER
4981 to the appropriate position, force it to the required mode, and
4982 make the extraction. Check for the AND in both operands. */
4984 if (GET_CODE (src
) == IOR
&& GET_CODE (XEXP (src
, 0)) == AND
4985 && GET_CODE (XEXP (XEXP (src
, 0), 1)) == CONST_INT
4986 && (rtx_equal_p (XEXP (XEXP (src
, 0), 0), dest
)
4987 || rtx_equal_p (XEXP (XEXP (src
, 0), 0), get_last_value (dest
))
4988 || rtx_equal_p (get_last_value (XEXP (XEXP (src
, 0), 1)), dest
)))
4989 c1
= INTVAL (XEXP (XEXP (src
, 0), 1)), other
= XEXP (src
, 1);
4990 else if (GET_CODE (src
) == IOR
&& GET_CODE (XEXP (src
, 1)) == AND
4991 && GET_CODE (XEXP (XEXP (src
, 1), 1)) == CONST_INT
4992 && (rtx_equal_p (XEXP (XEXP (src
, 1), 0), dest
)
4993 || rtx_equal_p (XEXP (XEXP (src
, 1), 0), get_last_value (dest
))
4994 || rtx_equal_p (get_last_value (XEXP (XEXP (src
, 1), 0)),
4996 c1
= INTVAL (XEXP (XEXP (src
, 1), 1)), other
= XEXP (src
, 0);
5000 pos
= get_pos_from_mask (~c1
, &len
);
5001 if (pos
< 0 || pos
+ len
> GET_MODE_BITSIZE (GET_MODE (dest
))
5002 || (c1
& significant_bits (other
, GET_MODE (other
))) != 0)
5005 assign
= make_extraction (VOIDmode
, dest
, pos
, NULL_RTX
, len
, 1, 1, 0);
5007 /* The mode to use for the source is the mode of the assignment, or of
5008 what is inside a possible STRICT_LOW_PART. */
5009 mode
= (GET_CODE (assign
) == STRICT_LOW_PART
5010 ? GET_MODE (XEXP (assign
, 0)) : GET_MODE (assign
));
5012 /* Shift OTHER right POS places and make it the source, restricting it
5013 to the proper length and mode. */
5015 src
= force_to_mode (simplify_shift_const (NULL_RTX
, LSHIFTRT
,
5016 GET_MODE (src
), other
, pos
),
5019 return gen_rtx_combine (SET
, VOIDmode
, assign
, src
);
5022 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
5026 apply_distributive_law (x
)
5029 enum rtx_code code
= GET_CODE (x
);
5030 rtx lhs
, rhs
, other
;
5032 enum rtx_code inner_code
;
5034 /* The outer operation can only be one of the following: */
5035 if (code
!= IOR
&& code
!= AND
&& code
!= XOR
5036 && code
!= PLUS
&& code
!= MINUS
)
5039 lhs
= XEXP (x
, 0), rhs
= XEXP (x
, 1);
5041 /* If either operand is a primitive we can't do anything, so get out fast. */
5042 if (GET_RTX_CLASS (GET_CODE (lhs
)) == 'o'
5043 || GET_RTX_CLASS (GET_CODE (rhs
)) == 'o')
5046 lhs
= expand_compound_operation (lhs
);
5047 rhs
= expand_compound_operation (rhs
);
5048 inner_code
= GET_CODE (lhs
);
5049 if (inner_code
!= GET_CODE (rhs
))
5052 /* See if the inner and outer operations distribute. */
5059 /* These all distribute except over PLUS. */
5060 if (code
== PLUS
|| code
== MINUS
)
5065 if (code
!= PLUS
&& code
!= MINUS
)
5071 /* These are also multiplies, so they distribute over everything. */
5075 /* Non-paradoxical SUBREGs distributes over all operations, provided
5076 the inner modes and word numbers are the same, this is an extraction
5077 of a low-order part, we don't convert an fp operation to int or
5078 vice versa, and we would not be converting a single-word
5079 operation into a multi-word operation. The latter test is not
5080 required, but it prevents generating unneeded multi-word operations.
5081 Some of the previous tests are redundant given the latter test, but
5082 are retained because they are required for correctness.
5084 We produce the result slightly differently in this case. */
5086 if (GET_MODE (SUBREG_REG (lhs
)) != GET_MODE (SUBREG_REG (rhs
))
5087 || SUBREG_WORD (lhs
) != SUBREG_WORD (rhs
)
5088 || ! subreg_lowpart_p (lhs
)
5089 || (GET_MODE_CLASS (GET_MODE (lhs
))
5090 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs
))))
5091 || (GET_MODE_SIZE (GET_MODE (lhs
))
5092 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs
))))
5093 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs
))) > UNITS_PER_WORD
)
5096 tem
= gen_binary (code
, GET_MODE (SUBREG_REG (lhs
)),
5097 SUBREG_REG (lhs
), SUBREG_REG (rhs
));
5098 return gen_lowpart_for_combine (GET_MODE (x
), tem
);
5104 /* Set LHS and RHS to the inner operands (A and B in the example
5105 above) and set OTHER to the common operand (C in the example).
5106 These is only one way to do this unless the inner operation is
5108 if (GET_RTX_CLASS (inner_code
) == 'c'
5109 && rtx_equal_p (XEXP (lhs
, 0), XEXP (rhs
, 0)))
5110 other
= XEXP (lhs
, 0), lhs
= XEXP (lhs
, 1), rhs
= XEXP (rhs
, 1);
5111 else if (GET_RTX_CLASS (inner_code
) == 'c'
5112 && rtx_equal_p (XEXP (lhs
, 0), XEXP (rhs
, 1)))
5113 other
= XEXP (lhs
, 0), lhs
= XEXP (lhs
, 1), rhs
= XEXP (rhs
, 0);
5114 else if (GET_RTX_CLASS (inner_code
) == 'c'
5115 && rtx_equal_p (XEXP (lhs
, 1), XEXP (rhs
, 0)))
5116 other
= XEXP (lhs
, 1), lhs
= XEXP (lhs
, 0), rhs
= XEXP (rhs
, 1);
5117 else if (rtx_equal_p (XEXP (lhs
, 1), XEXP (rhs
, 1)))
5118 other
= XEXP (lhs
, 1), lhs
= XEXP (lhs
, 0), rhs
= XEXP (rhs
, 0);
5122 /* Form the new inner operation, seeing if it simplifies first. */
5123 tem
= gen_binary (code
, GET_MODE (x
), lhs
, rhs
);
5125 /* There is one exception to the general way of distributing:
5126 (a ^ b) | (a ^ c) -> (~a) & (b ^ c) */
5127 if (code
== XOR
&& inner_code
== IOR
)
5130 other
= gen_unary (NOT
, GET_MODE (x
), other
);
5133 /* We may be able to continuing distributing the result, so call
5134 ourselves recursively on the inner operation before forming the
5135 outer operation, which we return. */
5136 return gen_binary (inner_code
, GET_MODE (x
),
5137 apply_distributive_law (tem
), other
);
5140 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
5143 Return an equivalent form, if different from X. Otherwise, return X. If
5144 X is zero, we are to always construct the equivalent form. */
5147 simplify_and_const_int (x
, mode
, varop
, constop
)
5149 enum machine_mode mode
;
5151 unsigned HOST_WIDE_INT constop
;
5153 register enum machine_mode tmode
;
5155 unsigned HOST_WIDE_INT significant
;
5157 /* There is a large class of optimizations based on the principle that
5158 some operations produce results where certain bits are known to be zero,
5159 and hence are not significant to the AND. For example, if we have just
5160 done a left shift of one bit, the low-order bit is known to be zero and
5161 hence an AND with a mask of ~1 would not do anything.
5163 At the end of the following loop, we set:
5165 VAROP to be the item to be AND'ed with;
5166 CONSTOP to the constant value to AND it with. */
5170 /* If we ever encounter a mode wider than the host machine's widest
5171 integer size, we can't compute the masks accurately, so give up. */
5172 if (GET_MODE_BITSIZE (GET_MODE (varop
)) > HOST_BITS_PER_WIDE_INT
)
5175 /* Unless one of the cases below does a `continue',
5176 a `break' will be executed to exit the loop. */
5178 switch (GET_CODE (varop
))
5181 /* If VAROP is a (clobber (const_int)), return it since we know
5182 we are generating something that won't match. */
5185 #if ! BITS_BIG_ENDIAN
5187 /* VAROP is a (use (mem ..)) that was made from a bit-field
5188 extraction that spanned the boundary of the MEM. If we are
5189 now masking so it is within that boundary, we don't need the
5191 if ((constop
& ~ GET_MODE_MASK (GET_MODE (XEXP (varop
, 0)))) == 0)
5193 varop
= XEXP (varop
, 0);
5200 if (subreg_lowpart_p (varop
)
5201 /* We can ignore the effect this SUBREG if it narrows the mode
5202 or, on machines where byte operations zero extend, if the
5203 constant masks to zero all the bits the mode doesn't have. */
5204 && ((GET_MODE_SIZE (GET_MODE (varop
))
5205 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop
))))
5206 #ifdef BYTE_LOADS_ZERO_EXTEND
5208 & GET_MODE_MASK (GET_MODE (varop
))
5209 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (varop
)))))
5213 varop
= SUBREG_REG (varop
);
5222 /* Try to expand these into a series of shifts and then work
5223 with that result. If we can't, for example, if the extract
5224 isn't at a fixed position, give up. */
5225 temp
= expand_compound_operation (varop
);
5234 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
)
5236 constop
&= INTVAL (XEXP (varop
, 1));
5237 varop
= XEXP (varop
, 0);
5244 /* If VAROP is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
5245 LSHIFT so we end up with an (and (lshiftrt (ior ...) ...) ...)
5246 operation which may be a bitfield extraction. */
5248 if (GET_CODE (XEXP (varop
, 0)) == LSHIFTRT
5249 && GET_CODE (XEXP (XEXP (varop
, 0), 1)) == CONST_INT
5250 && INTVAL (XEXP (XEXP (varop
, 0), 1)) >= 0
5251 && INTVAL (XEXP (XEXP (varop
, 0), 1)) < HOST_BITS_PER_WIDE_INT
5252 && GET_CODE (XEXP (varop
, 1)) == CONST_INT
5253 && (INTVAL (XEXP (varop
, 1))
5254 & ~ significant_bits (XEXP (varop
, 0),
5255 GET_MODE (varop
)) == 0))
5257 temp
= GEN_INT ((INTVAL (XEXP (varop
, 1)) & constop
)
5258 << INTVAL (XEXP (XEXP (varop
, 0), 1)));
5259 temp
= gen_binary (GET_CODE (varop
), GET_MODE (varop
),
5260 XEXP (XEXP (varop
, 0), 0), temp
);
5261 varop
= gen_rtx_combine (LSHIFTRT
, GET_MODE (varop
),
5262 temp
, XEXP (varop
, 1));
5266 /* Apply the AND to both branches of the IOR or XOR, then try to
5267 apply the distributive law. This may eliminate operations
5268 if either branch can be simplified because of the AND.
5269 It may also make some cases more complex, but those cases
5270 probably won't match a pattern either with or without this. */
5272 gen_lowpart_for_combine
5273 (mode
, apply_distributive_law
5275 (GET_CODE (varop
), GET_MODE (varop
),
5276 simplify_and_const_int (NULL_RTX
, GET_MODE (varop
),
5277 XEXP (varop
, 0), constop
),
5278 simplify_and_const_int (NULL_RTX
, GET_MODE (varop
),
5279 XEXP (varop
, 1), constop
))));
5282 /* (and (not FOO)) is (and (xor FOO CONST_OP)) so if FOO is an
5283 LSHIFTRT we can do the same as above. */
5285 if (GET_CODE (XEXP (varop
, 0)) == LSHIFTRT
5286 && GET_CODE (XEXP (XEXP (varop
, 0), 1)) == CONST_INT
5287 && INTVAL (XEXP (XEXP (varop
, 0), 1)) >= 0
5288 && INTVAL (XEXP (XEXP (varop
, 0), 1)) < HOST_BITS_PER_WIDE_INT
)
5290 temp
= GEN_INT (constop
<< INTVAL (XEXP (XEXP (varop
, 0), 1)));
5291 temp
= gen_binary (XOR
, GET_MODE (varop
),
5292 XEXP (XEXP (varop
, 0), 0), temp
);
5293 varop
= gen_rtx_combine (LSHIFTRT
, GET_MODE (varop
),
5294 temp
, XEXP (XEXP (varop
, 0), 1));
5300 /* If we are just looking for the sign bit, we don't need this
5301 shift at all, even if it has a variable count. */
5302 if (constop
== ((HOST_WIDE_INT
) 1
5303 << (GET_MODE_BITSIZE (GET_MODE (varop
)) - 1)))
5305 varop
= XEXP (varop
, 0);
5309 /* If this is a shift by a constant, get a mask that contains
5310 those bits that are not copies of the sign bit. We then have
5311 two cases: If CONSTOP only includes those bits, this can be
5312 a logical shift, which may allow simplifications. If CONSTOP
5313 is a single-bit field not within those bits, we are requesting
5314 a copy of the sign bit and hence can shift the sign bit to
5315 the appropriate location. */
5316 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
5317 && INTVAL (XEXP (varop
, 1)) >= 0
5318 && INTVAL (XEXP (varop
, 1)) < HOST_BITS_PER_WIDE_INT
)
5322 significant
= GET_MODE_MASK (GET_MODE (varop
));
5323 significant
>>= INTVAL (XEXP (varop
, 1));
5325 if ((constop
& ~significant
) == 0
5326 || (i
= exact_log2 (constop
)) >= 0)
5328 varop
= simplify_shift_const
5329 (varop
, LSHIFTRT
, GET_MODE (varop
), XEXP (varop
, 0),
5330 i
< 0 ? INTVAL (XEXP (varop
, 1))
5331 : GET_MODE_BITSIZE (GET_MODE (varop
)) - 1 - i
);
5332 if (GET_CODE (varop
) != ASHIFTRT
)
5337 /* If our mask is 1, convert this to a LSHIFTRT. This can be done
5338 even if the shift count isn't a constant. */
5340 varop
= gen_rtx_combine (LSHIFTRT
, GET_MODE (varop
),
5341 XEXP (varop
, 0), XEXP (varop
, 1));
5345 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is
5346 included in STORE_FLAG_VALUE and FOO has no significant bits
5348 if ((constop
& ~ STORE_FLAG_VALUE
) == 0
5349 && XEXP (varop
, 0) == const0_rtx
5350 && (significant_bits (XEXP (varop
, 0), mode
) & ~ constop
) == 0)
5352 varop
= XEXP (varop
, 0);
5358 /* In (and (plus FOO C1) M), if M is a mask that just turns off
5359 low-order bits (as in an alignment operation) and FOO is already
5360 aligned to that boundary, we can convert remove this AND
5361 and possibly the PLUS if it is now adding zero. */
5362 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
5363 && exact_log2 (-constop
) >= 0
5364 && (significant_bits (XEXP (varop
, 0), mode
) & ~ constop
) == 0)
5366 varop
= plus_constant (XEXP (varop
, 0),
5367 INTVAL (XEXP (varop
, 1)) & constop
);
5372 /* ... fall through ... */
5375 /* In (and (plus (and FOO M1) BAR) M2), if M1 and M2 are one
5376 less than powers of two and M2 is narrower than M1, we can
5377 eliminate the inner AND. This occurs when incrementing
5380 if (GET_CODE (XEXP (varop
, 0)) == ZERO_EXTRACT
5381 || GET_CODE (XEXP (varop
, 0)) == ZERO_EXTEND
)
5382 SUBST (XEXP (varop
, 0),
5383 expand_compound_operation (XEXP (varop
, 0)));
5385 if (GET_CODE (XEXP (varop
, 0)) == AND
5386 && GET_CODE (XEXP (XEXP (varop
, 0), 1)) == CONST_INT
5387 && exact_log2 (constop
+ 1) >= 0
5388 && exact_log2 (INTVAL (XEXP (XEXP (varop
, 0), 1)) + 1) >= 0
5389 && (~ INTVAL (XEXP (XEXP (varop
, 0), 1)) & constop
) == 0)
5390 SUBST (XEXP (varop
, 0), XEXP (XEXP (varop
, 0), 0));
5397 /* If we have reached a constant, this whole thing is constant. */
5398 if (GET_CODE (varop
) == CONST_INT
)
5399 return GEN_INT (constop
& INTVAL (varop
));
5401 /* See what bits are significant in VAROP. */
5402 significant
= significant_bits (varop
, mode
);
5404 /* Turn off all bits in the constant that are known to already be zero.
5405 Thus, if the AND isn't needed at all, we will have CONSTOP == SIGNIFICANT
5406 which is tested below. */
5408 constop
&= significant
;
5410 /* If we don't have any bits left, return zero. */
5414 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
5415 if we already had one (just check for the simplest cases). */
5416 if (x
&& GET_CODE (XEXP (x
, 0)) == SUBREG
5417 && GET_MODE (XEXP (x
, 0)) == mode
5418 && SUBREG_REG (XEXP (x
, 0)) == varop
)
5419 varop
= XEXP (x
, 0);
5421 varop
= gen_lowpart_for_combine (mode
, varop
);
5423 /* If we can't make the SUBREG, try to return what we were given. */
5424 if (GET_CODE (varop
) == CLOBBER
)
5425 return x
? x
: varop
;
5427 /* If we are only masking insignificant bits, return VAROP. */
5428 if (constop
== significant
)
5431 /* Otherwise, return an AND. See how much, if any, of X we can use. */
5432 else if (x
== 0 || GET_CODE (x
) != AND
|| GET_MODE (x
) != mode
)
5433 x
= gen_rtx_combine (AND
, mode
, varop
, GEN_INT (constop
));
5437 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
5438 || INTVAL (XEXP (x
, 1)) != constop
)
5439 SUBST (XEXP (x
, 1), GEN_INT (constop
));
5441 SUBST (XEXP (x
, 0), varop
);
5447 /* Given an expression, X, compute which bits in X can be non-zero.
5448 We don't care about bits outside of those defined in MODE.
5450 For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
5451 a shift, AND, or zero_extract, we can do better. */
5453 static unsigned HOST_WIDE_INT
5454 significant_bits (x
, mode
)
5456 enum machine_mode mode
;
5458 unsigned HOST_WIDE_INT significant
= GET_MODE_MASK (mode
);
5459 unsigned HOST_WIDE_INT inner_sig
;
5461 int mode_width
= GET_MODE_BITSIZE (mode
);
5464 /* If X is wider than MODE, use its mode instead. */
5465 if (GET_MODE_BITSIZE (GET_MODE (x
)) > mode_width
)
5467 mode
= GET_MODE (x
);
5468 significant
= GET_MODE_MASK (mode
);
5469 mode_width
= GET_MODE_BITSIZE (mode
);
5472 if (mode_width
> HOST_BITS_PER_WIDE_INT
)
5473 /* Our only callers in this case look for single bit values. So
5474 just return the mode mask. Those tests will then be false. */
5477 code
= GET_CODE (x
);
5481 #ifdef STACK_BOUNDARY
5482 /* If this is the stack pointer, we may know something about its
5483 alignment. If PUSH_ROUNDING is defined, it is possible for the
5484 stack to be momentarily aligned only to that amount, so we pick
5485 the least alignment. */
5487 if (x
== stack_pointer_rtx
)
5489 int sp_alignment
= STACK_BOUNDARY
/ BITS_PER_UNIT
;
5491 #ifdef PUSH_ROUNDING
5492 sp_alignment
= MIN (PUSH_ROUNDING (1), sp_alignment
);
5495 return significant
& ~ (sp_alignment
- 1);
5499 /* If X is a register whose value we can find, use that value.
5500 Otherwise, use the previously-computed significant bits for this
5503 tem
= get_last_value (x
);
5505 return significant_bits (tem
, mode
);
5506 else if (significant_valid
&& reg_significant
[REGNO (x
)])
5507 return reg_significant
[REGNO (x
)] & significant
;
5514 #ifdef BYTE_LOADS_ZERO_EXTEND
5516 /* In many, if not most, RISC machines, reading a byte from memory
5517 zeros the rest of the register. Noticing that fact saves a lot
5518 of extra zero-extends. */
5519 significant
&= GET_MODE_MASK (GET_MODE (x
));
5523 #if STORE_FLAG_VALUE == 1
5530 if (GET_MODE_CLASS (mode
) == MODE_INT
)
5533 /* A comparison operation only sets the bits given by its mode. The
5534 rest are set undefined. */
5535 if (GET_MODE_SIZE (GET_MODE (x
)) < mode_width
)
5536 significant
|= (GET_MODE_MASK (mode
) & ~ GET_MODE_MASK (GET_MODE (x
)));
5541 if (num_sign_bit_copies (XEXP (x
, 0), GET_MODE (x
))
5542 == GET_MODE_BITSIZE (GET_MODE (x
)))
5545 if (GET_MODE_SIZE (GET_MODE (x
)) < mode_width
)
5546 significant
|= (GET_MODE_MASK (mode
) & ~ GET_MODE_MASK (GET_MODE (x
)));
5550 if (num_sign_bit_copies (XEXP (x
, 0), GET_MODE (x
))
5551 == GET_MODE_BITSIZE (GET_MODE (x
)))
5556 significant
&= (significant_bits (XEXP (x
, 0), mode
)
5557 & GET_MODE_MASK (mode
));
5561 significant
&= significant_bits (XEXP (x
, 0), mode
);
5562 if (GET_MODE (XEXP (x
, 0)) != VOIDmode
)
5563 significant
&= GET_MODE_MASK (GET_MODE (XEXP (x
, 0)));
5567 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
5568 Otherwise, show all the bits in the outer mode but not the inner
5570 inner_sig
= significant_bits (XEXP (x
, 0), mode
);
5571 if (GET_MODE (XEXP (x
, 0)) != VOIDmode
)
5573 inner_sig
&= GET_MODE_MASK (GET_MODE (XEXP (x
, 0)));
5576 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0))) - 1))))
5577 inner_sig
|= (GET_MODE_MASK (mode
)
5578 & ~ GET_MODE_MASK (GET_MODE (XEXP (x
, 0))));
5581 significant
&= inner_sig
;
5585 significant
&= (significant_bits (XEXP (x
, 0), mode
)
5586 & significant_bits (XEXP (x
, 1), mode
));
5590 case UMIN
: case UMAX
: case SMIN
: case SMAX
:
5591 significant
&= (significant_bits (XEXP (x
, 0), mode
)
5592 | significant_bits (XEXP (x
, 1), mode
));
5595 case PLUS
: case MINUS
:
5597 case DIV
: case UDIV
:
5598 case MOD
: case UMOD
:
5599 /* We can apply the rules of arithmetic to compute the number of
5600 high- and low-order zero bits of these operations. We start by
5601 computing the width (position of the highest-order non-zero bit)
5602 and the number of low-order zero bits for each value. */
5604 unsigned HOST_WIDE_INT sig0
= significant_bits (XEXP (x
, 0), mode
);
5605 unsigned HOST_WIDE_INT sig1
= significant_bits (XEXP (x
, 1), mode
);
5606 int width0
= floor_log2 (sig0
) + 1;
5607 int width1
= floor_log2 (sig1
) + 1;
5608 int low0
= floor_log2 (sig0
& -sig0
);
5609 int low1
= floor_log2 (sig1
& -sig1
);
5610 int op0_maybe_minusp
= (sig0
& (1 << (mode_width
- 1)));
5611 int op1_maybe_minusp
= (sig1
& (1 << (mode_width
- 1)));
5612 int result_width
= mode_width
;
5618 result_width
= MAX (width0
, width1
) + 1;
5619 result_low
= MIN (low0
, low1
);
5622 result_low
= MIN (low0
, low1
);
5625 result_width
= width0
+ width1
;
5626 result_low
= low0
+ low1
;
5629 if (! op0_maybe_minusp
&& ! op1_maybe_minusp
)
5630 result_width
= width0
;
5633 result_width
= width0
;
5636 if (! op0_maybe_minusp
&& ! op1_maybe_minusp
)
5637 result_width
= MIN (width0
, width1
);
5638 result_low
= MIN (low0
, low1
);
5641 result_width
= MIN (width0
, width1
);
5642 result_low
= MIN (low0
, low1
);
5646 if (result_width
< mode_width
)
5647 significant
&= ((HOST_WIDE_INT
) 1 << result_width
) - 1;
5650 significant
&= ~ (((HOST_WIDE_INT
) 1 << result_low
) - 1);
5655 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
5656 && INTVAL (XEXP (x
, 1)) < HOST_BITS_PER_WIDE_INT
)
5657 significant
&= ((HOST_WIDE_INT
) 1 << INTVAL (XEXP (x
, 1))) - 1;
5661 /* If the inner mode is a single word for both the host and target
5662 machines, we can compute this from which bits of the inner
5663 object are known significant. */
5664 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
))) <= BITS_PER_WORD
5665 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
)))
5666 <= HOST_BITS_PER_WIDE_INT
))
5668 significant
&= significant_bits (SUBREG_REG (x
), mode
);
5669 #ifndef BYTE_LOADS_ZERO_EXTEND
5670 /* On many CISC machines, accessing an object in a wider mode
5671 causes the high-order bits to become undefined. So they are
5672 not known to be zero. */
5673 if (GET_MODE_SIZE (GET_MODE (x
))
5674 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
5675 significant
|= (GET_MODE_MASK (GET_MODE (x
))
5676 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x
))));
5686 /* The significant bits are in two classes: any bits within MODE
5687 that aren't in GET_MODE (x) are always significant. The rest of the
5688 significant bits are those that are significant in the operand of
5689 the shift when shifted the appropriate number of bits. This
5690 shows that high-order bits are cleared by the right shift and
5691 low-order bits by left shifts. */
5692 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
5693 && INTVAL (XEXP (x
, 1)) >= 0
5694 && INTVAL (XEXP (x
, 1)) < HOST_BITS_PER_WIDE_INT
)
5696 enum machine_mode inner_mode
= GET_MODE (x
);
5697 int width
= GET_MODE_BITSIZE (inner_mode
);
5698 int count
= INTVAL (XEXP (x
, 1));
5699 unsigned HOST_WIDE_INT mode_mask
= GET_MODE_MASK (inner_mode
);
5700 unsigned HOST_WIDE_INT op_significant
5701 = significant_bits (XEXP (x
, 0), mode
);
5702 unsigned HOST_WIDE_INT inner
= op_significant
& mode_mask
;
5703 unsigned HOST_WIDE_INT outer
= 0;
5705 if (mode_width
> width
)
5706 outer
= (op_significant
& significant
& ~ mode_mask
);
5708 if (code
== LSHIFTRT
)
5710 else if (code
== ASHIFTRT
)
5714 /* If the sign bit was significant at before the shift, we
5715 need to mark all the places it could have been copied to
5716 by the shift significant. */
5717 if (inner
& ((HOST_WIDE_INT
) 1 << (width
- 1 - count
)))
5718 inner
|= (((HOST_WIDE_INT
) 1 << count
) - 1) << (width
- count
);
5720 else if (code
== LSHIFT
|| code
== ASHIFT
)
5723 inner
= ((inner
<< (count
% width
)
5724 | (inner
>> (width
- (count
% width
)))) & mode_mask
);
5726 significant
&= (outer
| inner
);
5731 /* This is at most the number of bits in the mode. */
5732 significant
= ((HOST_WIDE_INT
) 1 << (floor_log2 (mode_width
) + 1)) - 1;
5736 significant
&= (significant_bits (XEXP (x
, 1), mode
)
5737 | significant_bits (XEXP (x
, 2), mode
));
5744 /* Return the number of bits at the high-order end of X that are known to
5745 be equal to the sign bit. This number will always be between 1 and
5746 the number of bits in the mode of X. MODE is the mode to be used
5747 if X is VOIDmode. */
5750 num_sign_bit_copies (x
, mode
)
5752 enum machine_mode mode
;
5754 enum rtx_code code
= GET_CODE (x
);
5756 int num0
, num1
, result
;
5757 unsigned HOST_WIDE_INT sig
;
5760 /* If we weren't given a mode, use the mode of X. If the mode is still
5761 VOIDmode, we don't know anything. */
5763 if (mode
== VOIDmode
)
5764 mode
= GET_MODE (x
);
5766 if (mode
== VOIDmode
)
5769 bitwidth
= GET_MODE_BITSIZE (mode
);
5774 if (significant_valid
&& reg_sign_bit_copies
[REGNO (x
)] != 0)
5775 return reg_sign_bit_copies
[REGNO (x
)];
5777 tem
= get_last_value (x
);
5779 return num_sign_bit_copies (tem
, mode
);
5783 /* If the constant is negative, take its 1's complement and remask.
5784 Then see how many zero bits we have. */
5785 sig
= INTVAL (x
) & GET_MODE_MASK (mode
);
5786 if (sig
& ((HOST_WIDE_INT
) 1 << (bitwidth
- 1)))
5787 sig
= (~ sig
) & GET_MODE_MASK (mode
);
5789 return (sig
== 0 ? bitwidth
: bitwidth
- floor_log2 (sig
) - 1);
5792 /* For a smaller object, just ignore the high bits. */
5793 if (bitwidth
<= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
))))
5795 num0
= num_sign_bit_copies (SUBREG_REG (x
), VOIDmode
);
5796 return MAX (1, (num0
5797 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
)))
5803 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
5804 return MAX (1, bitwidth
- INTVAL (XEXP (x
, 1)));
5808 return (bitwidth
- GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0)))
5809 + num_sign_bit_copies (XEXP (x
, 0), VOIDmode
));
5812 /* For a smaller object, just ignore the high bits. */
5813 num0
= num_sign_bit_copies (XEXP (x
, 0), VOIDmode
);
5814 return MAX (1, (num0
- (GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0)))
5818 return num_sign_bit_copies (XEXP (x
, 0), mode
);
5820 case ROTATE
: case ROTATERT
:
5821 /* If we are rotating left by a number of bits less than the number
5822 of sign bit copies, we can just subtract that amount from the
5824 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
5825 && INTVAL (XEXP (x
, 1)) >= 0 && INTVAL (XEXP (x
, 1)) < bitwidth
)
5827 num0
= num_sign_bit_copies (XEXP (x
, 0), mode
);
5828 return MAX (1, num0
- (code
== ROTATE
? INTVAL (XEXP (x
, 1))
5829 : bitwidth
- INTVAL (XEXP (x
, 1))));
5834 /* In general, this subtracts one sign bit copy. But if the value
5835 is known to be positive, the number of sign bit copies is the
5836 same as that of the input. Finally, if the input has just one
5837 significant bit, all the bits are copies of the sign bit. */
5838 sig
= significant_bits (XEXP (x
, 0), mode
);
5842 num0
= num_sign_bit_copies (XEXP (x
, 0), mode
);
5844 && (((HOST_WIDE_INT
) 1 << (bitwidth
- 1)) & sig
))
5849 case IOR
: case AND
: case XOR
:
5850 case SMIN
: case SMAX
: case UMIN
: case UMAX
:
5851 /* Logical operations will preserve the number of sign-bit copies.
5852 MIN and MAX operations always return one of the operands. */
5853 num0
= num_sign_bit_copies (XEXP (x
, 0), mode
);
5854 num1
= num_sign_bit_copies (XEXP (x
, 1), mode
);
5855 return MIN (num0
, num1
);
5857 case PLUS
: case MINUS
:
5858 /* For addition and subtraction, we can have a 1-bit carry. However,
5859 if we are subtracting 1 from a positive number, there will not
5860 be such a carry. Furthermore, if the positive number is known to
5861 be 0 or 1, we know the result is either -1 or 0. */
5863 if (code
== PLUS
&& XEXP (x
, 1) == constm1_rtx
)
5865 sig
= significant_bits (XEXP (x
, 0), mode
);
5866 if ((((HOST_WIDE_INT
) 1 << (bitwidth
- 1)) & sig
) == 0)
5867 return (sig
== 1 || sig
== 0 ? bitwidth
5868 : bitwidth
- floor_log2 (sig
));
5871 num0
= num_sign_bit_copies (XEXP (x
, 0), mode
);
5872 num1
= num_sign_bit_copies (XEXP (x
, 1), mode
);
5873 return MAX (1, MIN (num0
, num1
) - 1);
5876 /* The number of bits of the product is the sum of the number of
5877 bits of both terms. However, unless one of the terms if known
5878 to be positive, we must allow for an additional bit since negating
5879 a negative number can remove one sign bit copy. */
5881 num0
= num_sign_bit_copies (XEXP (x
, 0), mode
);
5882 num1
= num_sign_bit_copies (XEXP (x
, 1), mode
);
5884 result
= bitwidth
- (bitwidth
- num0
) - (bitwidth
- num1
);
5886 && ((significant_bits (XEXP (x
, 0), mode
)
5887 & ((HOST_WIDE_INT
) 1 << (bitwidth
- 1))) != 0)
5888 && (significant_bits (XEXP (x
, 1), mode
)
5889 & ((HOST_WIDE_INT
) 1 << (bitwidth
- 1)) != 0))
5892 return MAX (1, result
);
5895 /* The result must be <= the first operand. */
5896 return num_sign_bit_copies (XEXP (x
, 0), mode
);
5899 /* The result must be <= the scond operand. */
5900 return num_sign_bit_copies (XEXP (x
, 1), mode
);
5903 /* Similar to unsigned division, except that we have to worry about
5904 the case where the divisor is negative, in which case we have
5906 result
= num_sign_bit_copies (XEXP (x
, 0), mode
);
5908 && (significant_bits (XEXP (x
, 1), mode
)
5909 & ((HOST_WIDE_INT
) 1 << (bitwidth
- 1))) != 0)
5915 result
= num_sign_bit_copies (XEXP (x
, 1), mode
);
5917 && (significant_bits (XEXP (x
, 1), mode
)
5918 & ((HOST_WIDE_INT
) 1 << (bitwidth
- 1))) != 0)
5924 /* Shifts by a constant add to the number of bits equal to the
5926 num0
= num_sign_bit_copies (XEXP (x
, 0), mode
);
5927 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
5928 && INTVAL (XEXP (x
, 1)) > 0)
5929 num0
= MIN (bitwidth
, num0
+ INTVAL (XEXP (x
, 1)));
5935 /* Left shifts destroy copies. */
5936 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
5937 || INTVAL (XEXP (x
, 1)) < 0
5938 || INTVAL (XEXP (x
, 1)) >= bitwidth
)
5941 num0
= num_sign_bit_copies (XEXP (x
, 0), mode
);
5942 return MAX (1, num0
- INTVAL (XEXP (x
, 1)));
5945 num0
= num_sign_bit_copies (XEXP (x
, 1), mode
);
5946 num1
= num_sign_bit_copies (XEXP (x
, 2), mode
);
5947 return MIN (num0
, num1
);
5949 #if STORE_FLAG_VALUE == -1
5950 case EQ
: case NE
: case GE
: case GT
: case LE
: case LT
:
5951 case GEU
: case GTU
: case LEU
: case LTU
:
5956 /* If we haven't been able to figure it out by one of the above rules,
5957 see if some of the high-order bits are known to be zero. If so,
5958 count those bits and return one less than that amount. */
5960 sig
= significant_bits (x
, mode
);
5961 return sig
== GET_MODE_MASK (mode
) ? 1 : bitwidth
- floor_log2 (sig
) - 1;
5964 /* This function is called from `simplify_shift_const' to merge two
5965 outer operations. Specifically, we have already found that we need
5966 to perform operation *POP0 with constant *PCONST0 at the outermost
5967 position. We would now like to also perform OP1 with constant CONST1
5968 (with *POP0 being done last).
5970 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
5971 the resulting operation. *PCOMP_P is set to 1 if we would need to
5972 complement the innermost operand, otherwise it is unchanged.
5974 MODE is the mode in which the operation will be done. No bits outside
5975 the width of this mode matter. It is assumed that the width of this mode
5976 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
5978 If *POP0 or OP1 are NIL, it means no operation is required. Only NEG, PLUS,
5979 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
5980 result is simply *PCONST0.
5982 If the resulting operation cannot be expressed as one operation, we
5983 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
/* NOTE(review): this file is a line-shredded extraction; the numbers at the
   start of many lines (5986, 5987, ...) are remnants of the original line
   numbering, and several original statements appear to have been elided
   (the numbering skips).  Tokens are left byte-identical; only comments
   were added.  TODO: recover the missing lines from the original file.  */
/* Fold the "outer" operation *POP0 applied with constant *PCONST0 together
   with a second operation OP1/CONST1 into a single operation, updating
   *POP0/*PCONST0 in place; *PCOMP_P is set when the innermost operand must
   be complemented (see the block comment above this function).  Only NEG,
   PLUS, IOR, XOR, AND are handled; NIL means "no operation".  */
5986 merge_outer_ops (pop0
, pconst0
, op1
, const1
, mode
, pcomp_p
)
5987 enum rtx_code
*pop0
;
5988 HOST_WIDE_INT
*pconst0
;
5990 HOST_WIDE_INT const1
;
5991 enum machine_mode mode
;
5994 enum rtx_code op0
= *pop0
;
5995 HOST_WIDE_INT const0
= *pconst0
;
/* Mask both constants down to the width of MODE up front; the function
   contract assumes the mode fits in HOST_BITS_PER_WIDE_INT.  */
5997 const0
&= GET_MODE_MASK (mode
);
5998 const1
&= GET_MODE_MASK (mode
);
6000 /* If OP0 is an AND, clear unimportant bits in CONST1. */
6004 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
6007 if (op1
== NIL
|| op0
== SET
)
6010 else if (op0
== NIL
)
6011 op0
= op1
, const0
= const1
;
6013 else if (op0
== op1
)
6035 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
6036 else if (op0
== PLUS
|| op1
== PLUS
|| op0
== NEG
|| op1
== NEG
)
6039 /* If the two constants aren't the same, we can't do anything. The
6040 remaining six cases can all be done. */
6041 else if (const0
!= const1
)
6049 /* (a & b) | b == b */
6051 else /* op1 == XOR */
6052 /* (a ^ b) | b == a | b */
6058 /* (a & b) ^ b == (~a) & b */
6059 op0
= AND
, *pcomp_p
= 1;
6060 else /* op1 == IOR */
6061 /* (a | b) ^ b == a & ~b */
6062 op0
= AND
, *pconst0
= ~ const0
;
6067 /* (a | b) & b == b */
6069 else /* op1 == XOR */
6070 /* (a ^ b) & b == (~a) & b */
6075 /* Check for NO-OP cases. */
6076 const0
&= GET_MODE_MASK (mode
);
/* NOTE(review): the no-op detection below appears truncated in this
   fragment; a leading condition on CONST0 seems to be missing before
   the "&&" line.  */
6078 && (op0
== IOR
|| op0
== XOR
|| op0
== PLUS
))
6080 else if (const0
== 0 && op0
== AND
)
6082 else if (const0
== GET_MODE_MASK (mode
) && op0
== AND
)
6091 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
6092 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
6093 that we started with.
6095 The shift is normally computed in the widest mode we find in VAROP, as
6096 long as it isn't a different number of words than RESULT_MODE. Exceptions
6097 are ASHIFTRT and ROTATE, which are always done in their original mode. */
/* NOTE(review): line-shredded fragment with many elided statements (the
   embedded original line numbers skip); tokens kept byte-identical, only
   comments added.  TODO: restore from the original file.  */
/* Simplify a shift of VAROP by COUNT bits.  CODE is the shift kind
   (ASHIFT/LSHIFTRT/ASHIFTRT/ROTATE/...), RESULT_MODE the mode of the
   result, and X, if non-zero, the expression we started with (pieces of
   it may be reused when building the result).  The loop below repeatedly
   strips or merges operations from VAROP, accumulating an optional outer
   operation (OUTER_OP OUTER_CONST) applied via merge_outer_ops.  */
6100 simplify_shift_const (x
, code
, result_mode
, varop
, count
)
6103 enum machine_mode result_mode
;
6107 enum rtx_code orig_code
= code
;
6108 int orig_count
= count
;
6109 enum machine_mode mode
= result_mode
;
6110 enum machine_mode shift_mode
, tmode
;
/* NOTE(review): the declarator for the word-count variable initialized
   below appears to be elided in this fragment.  */
6112 = (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
6113 /* We form (outer_op (code varop count) (outer_const)). */
6114 enum rtx_code outer_op
= NIL
;
6115 HOST_WIDE_INT outer_const
;
6117 int complement_p
= 0;
6120 /* If we were given an invalid count, don't do anything except exactly
6121 what was requested. */
6123 if (count
< 0 || count
> GET_MODE_BITSIZE (mode
))
6128 return gen_rtx (code
, mode
, varop
, GEN_INT (count
));
6131 /* Unless one of the branches of the `if' in this loop does a `continue',
6132 we will `break' the loop after the `if'. */
6136 /* If we have an operand of (clobber (const_int 0)), just return that
6138 if (GET_CODE (varop
) == CLOBBER
)
6141 /* If we discovered we had to complement VAROP, leave. Making a NOT
6142 here would cause an infinite loop. */
6146 /* Convert ROTATETRT to ROTATE. */
6147 if (code
== ROTATERT
)
6148 code
= ROTATE
, count
= GET_MODE_BITSIZE (result_mode
) - count
;
6150 /* Canonicalize LSHIFT to ASHIFT. */
6154 /* We need to determine what mode we will do the shift in. If the
6155 shift is a ASHIFTRT or ROTATE, we must always do it in the mode it
6156 was originally done in. Otherwise, we can do it in MODE, the widest
6157 mode encountered. */
6158 shift_mode
= (code
== ASHIFTRT
|| code
== ROTATE
? result_mode
: mode
);
6160 /* Handle cases where the count is greater than the size of the mode
6161 minus 1. For ASHIFT, use the size minus one as the count (this can
6162 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
6163 take the count modulo the size. For other shifts, the result is
6166 Since these shifts are being produced by the compiler by combining
6167 multiple operations, each of which are defined, we know what the
6168 result is supposed to be. */
6170 if (count
> GET_MODE_BITSIZE (shift_mode
) - 1)
6172 if (code
== ASHIFTRT
)
6173 count
= GET_MODE_BITSIZE (shift_mode
) - 1;
6174 else if (code
== ROTATE
|| code
== ROTATERT
)
6175 count
%= GET_MODE_BITSIZE (shift_mode
);
6178 /* We can't simply return zero because there may be an
6186 /* Negative counts are invalid and should not have been made (a
6187 programmer-specified negative count should have been handled
6192 /* An arithmetic right shift of a quantity known to be -1 or 0
6194 if (code
== ASHIFTRT
6195 && (num_sign_bit_copies (varop
, shift_mode
)
6196 == GET_MODE_BITSIZE (shift_mode
)))
6202 /* We simplify the tests below and elsewhere by converting
6203 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
6204 `make_compound_operation' will convert it to a ASHIFTRT for
6205 those machines (such as Vax) that don't have a LSHIFTRT. */
6206 if (GET_MODE_BITSIZE (shift_mode
) <= HOST_BITS_PER_WIDE_INT
6208 && ((significant_bits (varop
, shift_mode
)
6209 & ((HOST_WIDE_INT
) 1 << (GET_MODE_BITSIZE (shift_mode
) - 1)))
/* NOTE(review): dispatch on the rtx code of VAROP; many case labels and
   their closing statements appear elided in this fragment.  */
6213 switch (GET_CODE (varop
))
6219 new = expand_compound_operation (varop
);
6228 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
6229 minus the width of a smaller mode, we can do this with a
6230 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
6231 if ((code
== ASHIFTRT
|| code
== LSHIFTRT
)
6232 && ! mode_dependent_address_p (XEXP (varop
, 0))
6233 && ! MEM_VOLATILE_P (varop
)
6234 && (tmode
= mode_for_size (GET_MODE_BITSIZE (mode
) - count
,
6235 MODE_INT
, 1)) != BLKmode
)
6237 #if BYTES_BIG_ENDIAN
6238 new = gen_rtx (MEM
, tmode
, XEXP (varop
, 0));
6240 new = gen_rtx (MEM
, tmode
,
6241 plus_constant (XEXP (varop
, 0),
6242 count
/ BITS_PER_UNIT
));
6243 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop
);
6244 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop
);
6245 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop
);
6247 varop
= gen_rtx_combine (code
== ASHIFTRT
? SIGN_EXTEND
6248 : ZERO_EXTEND
, mode
, new);
6255 /* Similar to the case above, except that we can only do this if
6256 the resulting mode is the same as that of the underlying
6257 MEM and adjust the address depending on the *bits* endianness
6258 because of the way that bit-field extract insns are defined. */
6259 if ((code
== ASHIFTRT
|| code
== LSHIFTRT
)
6260 && (tmode
= mode_for_size (GET_MODE_BITSIZE (mode
) - count
,
6261 MODE_INT
, 1)) != BLKmode
6262 && tmode
== GET_MODE (XEXP (varop
, 0)))
6265 new = XEXP (varop
, 0);
6267 new = copy_rtx (XEXP (varop
, 0));
6268 SUBST (XEXP (new, 0),
6269 plus_constant (XEXP (new, 0),
6270 count
/ BITS_PER_UNIT
));
6273 varop
= gen_rtx_combine (code
== ASHIFTRT
? SIGN_EXTEND
6274 : ZERO_EXTEND
, mode
, new);
6281 /* If VAROP is a SUBREG, strip it as long as the inner operand has
6282 the same number of words as what we've seen so far. Then store
6283 the widest mode in MODE. */
6284 if (SUBREG_WORD (varop
) == 0
6285 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop
)))
6286 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)
6289 varop
= SUBREG_REG (varop
);
6290 if (GET_MODE_SIZE (GET_MODE (varop
)) > GET_MODE_SIZE (mode
))
6291 mode
= GET_MODE (varop
);
6297 /* Some machines use MULT instead of ASHIFT because MULT
6298 is cheaper. But it is still better on those machines to
6299 merge two shifts into one. */
6300 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
6301 && exact_log2 (INTVAL (XEXP (varop
, 1))) >= 0)
6303 varop
= gen_binary (ASHIFT
, GET_MODE (varop
), XEXP (varop
, 0),
6304 GEN_INT (exact_log2 (INTVAL (XEXP (varop
, 1)))));;
6310 /* Similar, for when divides are cheaper. */
6311 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
6312 && exact_log2 (INTVAL (XEXP (varop
, 1))) >= 0)
6314 varop
= gen_binary (LSHIFTRT
, GET_MODE (varop
), XEXP (varop
, 0),
6315 GEN_INT (exact_log2 (INTVAL (XEXP (varop
, 1)))));
6321 /* If we are extracting just the sign bit of an arithmetic right
6322 shift, that shift is not needed. */
6323 if (code
== LSHIFTRT
&& count
== GET_MODE_BITSIZE (result_mode
) - 1)
6325 varop
= XEXP (varop
, 0);
6329 /* ... fall through ... */
6335 /* Here we have two nested shifts. The result is usually the
6336 AND of a new shift with a mask. We compute the result below. */
6337 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
6338 && INTVAL (XEXP (varop
, 1)) >= 0
6339 && INTVAL (XEXP (varop
, 1)) < GET_MODE_BITSIZE (GET_MODE (varop
))
6340 && GET_MODE_BITSIZE (result_mode
) <= HOST_BITS_PER_WIDE_INT
6341 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
6343 enum rtx_code first_code
= GET_CODE (varop
);
6344 int first_count
= INTVAL (XEXP (varop
, 1));
6345 unsigned HOST_WIDE_INT mask
;
6349 if (first_code
== LSHIFT
)
6350 first_code
= ASHIFT
;
6352 /* We have one common special case. We can't do any merging if
6353 the inner code is an ASHIFTRT of a smaller mode. However, if
6354 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
6355 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
6356 we can convert it to
6357 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1).
6358 This simplifies certain SIGN_EXTEND operations. */
6359 if (code
== ASHIFT
&& first_code
== ASHIFTRT
6360 && (GET_MODE_BITSIZE (result_mode
)
6361 - GET_MODE_BITSIZE (GET_MODE (varop
))) == count
)
6363 /* C3 has the low-order C1 bits zero. */
6365 mask
= (GET_MODE_MASK (mode
)
6366 & ~ (((HOST_WIDE_INT
) 1 << first_count
) - 1));
6368 varop
= simplify_and_const_int (NULL_RTX
, result_mode
,
6369 XEXP (varop
, 0), mask
);
6370 varop
= simplify_shift_const (NULL_RTX
, ASHIFT
, result_mode
,
6372 count
= first_count
;
6377 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
6378 than C1 high-order bits equal to the sign bit, we can convert
6379 this to either an ASHIFT or a ASHIFTRT depending on the
6382 We cannot do this if VAROP's mode is not SHIFT_MODE. */
6384 if (code
== ASHIFTRT
&& first_code
== ASHIFT
6385 && GET_MODE (varop
) == shift_mode
6386 && (num_sign_bit_copies (XEXP (varop
, 0), shift_mode
)
6389 count
-= first_count
;
6391 count
= - count
, code
= ASHIFT
;
6392 varop
= XEXP (varop
, 0);
6396 /* There are some cases we can't do. If CODE is ASHIFTRT,
6397 we can only do this if FIRST_CODE is also ASHIFTRT.
6399 We can't do the case when CODE is ROTATE and FIRST_CODE is
6402 If the mode of this shift is not the mode of the outer shift,
6403 we can't do this if either shift is ASHIFTRT or ROTATE.
6405 Finally, we can't do any of these if the mode is too wide
6406 unless the codes are the same.
6408 Handle the case where the shift codes are the same
6411 if (code
== first_code
)
6413 if (GET_MODE (varop
) != result_mode
6414 && (code
== ASHIFTRT
|| code
== ROTATE
))
6417 count
+= first_count
;
6418 varop
= XEXP (varop
, 0);
6422 if (code
== ASHIFTRT
6423 || (code
== ROTATE
&& first_code
== ASHIFTRT
)
6424 || GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
6425 || (GET_MODE (varop
) != result_mode
6426 && (first_code
== ASHIFTRT
|| first_code
== ROTATE
6427 || code
== ROTATE
)))
6430 /* To compute the mask to apply after the shift, shift the
6431 significant bits of the inner shift the same way the
6432 outer shift will. */
6434 mask_rtx
= GEN_INT (significant_bits (varop
, GET_MODE (varop
)));
6437 = simplify_binary_operation (code
, result_mode
, mask_rtx
,
6440 /* Give up if we can't compute an outer operation to use. */
6442 || GET_CODE (mask_rtx
) != CONST_INT
6443 || ! merge_outer_ops (&outer_op
, &outer_const
, AND
,
6445 result_mode
, &complement_p
))
6448 /* If the shifts are in the same direction, we add the
6449 counts. Otherwise, we subtract them. */
6450 if ((code
== ASHIFTRT
|| code
== LSHIFTRT
)
6451 == (first_code
== ASHIFTRT
|| first_code
== LSHIFTRT
))
6452 count
+= first_count
;
6454 count
-= first_count
;
6456 /* If COUNT is positive, the new shift is usually CODE,
6457 except for the two exceptions below, in which case it is
6458 FIRST_CODE. If the count is negative, FIRST_CODE should
6461 && ((first_code
== ROTATE
&& code
== ASHIFT
)
6462 || (first_code
== ASHIFTRT
&& code
== LSHIFTRT
)))
6465 code
= first_code
, count
= - count
;
6467 varop
= XEXP (varop
, 0);
6471 /* If we have (A << B << C) for any shift, we can convert this to
6472 (A << C << B). This wins if A is a constant. Only try this if
6473 B is not a constant. */
6475 else if (GET_CODE (varop
) == code
6476 && GET_CODE (XEXP (varop
, 1)) != CONST_INT
6478 = simplify_binary_operation (code
, mode
,
6482 varop
= gen_rtx_combine (code
, mode
, new, XEXP (varop
, 1));
6489 /* Make this fit the case below. */
6490 varop
= gen_rtx_combine (XOR
, mode
, XEXP (varop
, 0),
6491 GEN_INT (GET_MODE_MASK (mode
)));
6497 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
6498 with C the size of VAROP - 1 and the shift is logical if
6499 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
6500 we have an (le X 0) operation. If we have an arithmetic shift
6501 and STORE_FLAG_VALUE is 1 or we have a logical shift with
6502 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
6504 if (GET_CODE (varop
) == IOR
&& GET_CODE (XEXP (varop
, 0)) == PLUS
6505 && XEXP (XEXP (varop
, 0), 1) == constm1_rtx
6506 && (STORE_FLAG_VALUE
== 1 || STORE_FLAG_VALUE
== -1)
6507 && (code
== LSHIFTRT
|| code
== ASHIFTRT
)
6508 && count
== GET_MODE_BITSIZE (GET_MODE (varop
)) - 1
6509 && rtx_equal_p (XEXP (XEXP (varop
, 0), 0), XEXP (varop
, 1)))
6512 varop
= gen_rtx_combine (LE
, GET_MODE (varop
), XEXP (varop
, 1),
6515 if (STORE_FLAG_VALUE
== 1 ? code
== ASHIFTRT
: code
== LSHIFTRT
)
6516 varop
= gen_rtx_combine (NEG
, GET_MODE (varop
), varop
);
6521 /* If we have (shift (logical)), move the logical to the outside
6522 to allow it to possibly combine with another logical and the
6523 shift to combine with another shift. This also canonicalizes to
6524 what a ZERO_EXTRACT looks like. Also, some machines have
6525 (and (shift)) insns. */
6527 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
6528 && (new = simplify_binary_operation (code
, result_mode
,
6530 GEN_INT (count
))) != 0
6531 && merge_outer_ops (&outer_op
, &outer_const
, GET_CODE (varop
),
6532 INTVAL (new), result_mode
, &complement_p
))
6534 varop
= XEXP (varop
, 0);
6538 /* If we can't do that, try to simplify the shift in each arm of the
6539 logical expression, make a new logical expression, and apply
6540 the inverse distributive law. */
6542 rtx lhs
= simplify_shift_const (NULL_RTX
, code
, result_mode
,
6543 XEXP (varop
, 0), count
);
6544 rtx rhs
= simplify_shift_const (NULL_RTX
, code
, result_mode
,
6545 XEXP (varop
, 1), count
);
6547 varop
= gen_binary (GET_CODE (varop
), result_mode
, lhs
, rhs
);
6548 varop
= apply_distributive_law (varop
);
6555 /* convert (lshift (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
6556 says that the sign bit can be tested, FOO has mode MODE, C is
6557 GET_MODE_BITSIZE (MODE) - 1, and FOO has only the low-order bit
6560 && XEXP (varop
, 1) == const0_rtx
6561 && GET_MODE (XEXP (varop
, 0)) == result_mode
6562 && count
== GET_MODE_BITSIZE (result_mode
) - 1
6563 && GET_MODE_BITSIZE (result_mode
) <= HOST_BITS_PER_WIDE_INT
6564 && ((STORE_FLAG_VALUE
6565 & ((HOST_WIDE_INT
) 1 << (GET_MODE_BITSIZE (result_mode
) - 1))))
6566 && significant_bits (XEXP (varop
, 0), result_mode
) == 1
6567 && merge_outer_ops (&outer_op
, &outer_const
, XOR
,
6568 (HOST_WIDE_INT
) 1, result_mode
,
6571 varop
= XEXP (varop
, 0);
6578 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
6579 than the number of bits in the mode is equivalent to A. */
6580 if (code
== LSHIFTRT
&& count
== GET_MODE_BITSIZE (result_mode
) - 1
6581 && significant_bits (XEXP (varop
, 0), result_mode
) == 1)
6583 varop
= XEXP (varop
, 0);
6588 /* NEG commutes with ASHIFT since it is multiplication. Move the
6589 NEG outside to allow shifts to combine. */
6591 && merge_outer_ops (&outer_op
, &outer_const
, NEG
,
6592 (HOST_WIDE_INT
) 0, result_mode
,
6595 varop
= XEXP (varop
, 0);
6601 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
6602 is one less than the number of bits in the mode is
6603 equivalent to (xor A 1). */
6604 if (code
== LSHIFTRT
&& count
== GET_MODE_BITSIZE (result_mode
) - 1
6605 && XEXP (varop
, 1) == constm1_rtx
6606 && significant_bits (XEXP (varop
, 0), result_mode
) == 1
6607 && merge_outer_ops (&outer_op
, &outer_const
, XOR
,
6608 (HOST_WIDE_INT
) 1, result_mode
,
6612 varop
= XEXP (varop
, 0);
6616 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
6617 significant in BAR are those being shifted out and those
6618 bits are known zero in FOO, we can replace the PLUS with FOO.
6619 Similarly in the other operand order. This code occurs when
6620 we are computing the size of a variable-size array. */
6622 if ((code
== ASHIFTRT
|| code
== LSHIFTRT
)
6623 && count
< HOST_BITS_PER_WIDE_INT
6624 && significant_bits (XEXP (varop
, 1), result_mode
) >> count
== 0
6625 && (significant_bits (XEXP (varop
, 1), result_mode
)
6626 & significant_bits (XEXP (varop
, 0), result_mode
)) == 0)
6628 varop
= XEXP (varop
, 0);
6631 else if ((code
== ASHIFTRT
|| code
== LSHIFTRT
)
6632 && count
< HOST_BITS_PER_WIDE_INT
6633 && 0 == (significant_bits (XEXP (varop
, 0), result_mode
)
6635 && 0 == (significant_bits (XEXP (varop
, 0), result_mode
)
6636 & significant_bits (XEXP (varop
, 1),
6639 varop
= XEXP (varop
, 1);
6643 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
6645 && GET_CODE (XEXP (varop
, 1)) == CONST_INT
6646 && (new = simplify_binary_operation (ASHIFT
, result_mode
,
6648 GEN_INT (count
))) != 0
6649 && merge_outer_ops (&outer_op
, &outer_const
, PLUS
,
6650 INTVAL (new), result_mode
, &complement_p
))
6652 varop
= XEXP (varop
, 0);
6658 /* If we have (xshiftrt (minus (ashiftrt X C)) X) C)
6659 with C the size of VAROP - 1 and the shift is logical if
6660 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
6661 we have a (gt X 0) operation. If the shift is arithmetic with
6662 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
6663 we have a (neg (gt X 0)) operation. */
6665 if (GET_CODE (XEXP (varop
, 0)) == ASHIFTRT
6666 && count
== GET_MODE_BITSIZE (GET_MODE (varop
)) - 1
6667 && (STORE_FLAG_VALUE
== 1 || STORE_FLAG_VALUE
== -1)
6668 && (code
== LSHIFTRT
|| code
== ASHIFTRT
)
6669 && GET_CODE (XEXP (XEXP (varop
, 0), 1)) == CONST_INT
6670 && INTVAL (XEXP (XEXP (varop
, 0), 1)) == count
6671 && rtx_equal_p (XEXP (XEXP (varop
, 0), 0), XEXP (varop
, 1)))
6674 varop
= gen_rtx_combine (GT
, GET_MODE (varop
), XEXP (varop
, 1),
6677 if (STORE_FLAG_VALUE
== 1 ? code
== ASHIFTRT
: code
== LSHIFTRT
)
6678 varop
= gen_rtx_combine (NEG
, GET_MODE (varop
), varop
);
/* NOTE(review): end of the analysis loop; below, the final shift rtx is
   built and the accumulated outer operation is applied.  */
6688 /* We need to determine what mode to do the shift in. If the shift is
6689 a ASHIFTRT or ROTATE, we must always do it in the mode it was originally
6690 done in. Otherwise, we can do it in MODE, the widest mode encountered.
6691 The code we care about is that of the shift that will actually be done,
6692 not the shift that was originally requested. */
6693 shift_mode
= (code
== ASHIFTRT
|| code
== ROTATE
? result_mode
: mode
);
6695 /* We have now finished analyzing the shift. The result should be
6696 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
6697 OUTER_OP is non-NIL, it is an operation that needs to be applied
6698 to the result of the shift. OUTER_CONST is the relevant constant,
6699 but we must turn off all bits turned off in the shift.
6701 If we were passed a value for X, see if we can use any pieces of
6702 it. If not, make new rtx. */
6704 if (x
&& GET_RTX_CLASS (GET_CODE (x
)) == '2'
6705 && GET_CODE (XEXP (x
, 1)) == CONST_INT
6706 && INTVAL (XEXP (x
, 1)) == count
)
6707 const_rtx
= XEXP (x
, 1);
6709 const_rtx
= GEN_INT (count
);
6711 if (x
&& GET_CODE (XEXP (x
, 0)) == SUBREG
6712 && GET_MODE (XEXP (x
, 0)) == shift_mode
6713 && SUBREG_REG (XEXP (x
, 0)) == varop
)
6714 varop
= XEXP (x
, 0);
6715 else if (GET_MODE (varop
) != shift_mode
)
6716 varop
= gen_lowpart_for_combine (shift_mode
, varop
);
6718 /* If we can't make the SUBREG, try to return what we were given. */
6719 if (GET_CODE (varop
) == CLOBBER
)
6720 return x
? x
: varop
;
6722 new = simplify_binary_operation (code
, shift_mode
, varop
, const_rtx
);
6727 if (x
== 0 || GET_CODE (x
) != code
|| GET_MODE (x
) != shift_mode
)
6728 x
= gen_rtx_combine (code
, shift_mode
, varop
, const_rtx
);
6730 SUBST (XEXP (x
, 0), varop
);
6731 SUBST (XEXP (x
, 1), const_rtx
);
6734 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
6735 turn off all the bits that the shift would have turned off. */
6736 if (orig_code
== LSHIFTRT
&& result_mode
!= shift_mode
)
6737 x
= simplify_and_const_int (NULL_RTX
, shift_mode
, x
,
6738 GET_MODE_MASK (result_mode
) >> orig_count
);
6740 /* Do the remainder of the processing in RESULT_MODE. */
6741 x
= gen_lowpart_for_combine (result_mode
, x
);
6743 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
6746 x
= gen_unary (NOT
, result_mode
, x
);
6748 if (outer_op
!= NIL
)
6750 if (GET_MODE_BITSIZE (result_mode
) < HOST_BITS_PER_WIDE_INT
)
6751 outer_const
&= GET_MODE_MASK (result_mode
);
6753 if (outer_op
== AND
)
6754 x
= simplify_and_const_int (NULL_RTX
, result_mode
, x
, outer_const
);
6755 else if (outer_op
== SET
)
6756 /* This means that we have determined that the result is
6757 equivalent to a constant. This should be rare. */
6758 x
= GEN_INT (outer_const
);
6759 else if (GET_RTX_CLASS (outer_op
) == '1')
6760 x
= gen_unary (outer_op
, result_mode
, x
);
6762 x
= gen_binary (outer_op
, result_mode
, x
, GEN_INT (outer_const
));
6768 /* Like recog, but we receive the address of a pointer to a new pattern.
6769 We try to match the rtx that the pointer points to.
6770 If that fails, we may try to modify or replace the pattern,
6771 storing the replacement into the same pointer object.
6773 Modifications include deletion or addition of CLOBBERs.
6775 PNOTES is a pointer to a location where any REG_UNUSED notes added for
6776 the CLOBBERs are placed.
6778 The value is the final insn code from the pattern ultimately matched,
/* NOTE(review): line-shredded fragment; tokens kept byte-identical, only
   comments added.  */
/* Like recog, but takes the address of a pointer to a new pattern
   (PNEWPAT); may modify or replace the pattern (adding or deleting
   CLOBBERs), storing the replacement back through the pointer.  PNOTES
   receives any REG_UNUSED notes added for the CLOBBERs.  Returns the
   final insn code from the pattern ultimately matched.  */
6782 recog_for_combine (pnewpat
, insn
, pnotes
)
6787 register rtx pat
= *pnewpat
;
6788 int insn_code_number
;
6789 int num_clobbers_to_add
= 0;
6793 /* Is the result of combination a valid instruction? */
6794 insn_code_number
= recog (pat
, insn
, &num_clobbers_to_add
);
6796 /* If it isn't, there is the possibility that we previously had an insn
6797 that clobbered some register as a side effect, but the combined
6798 insn doesn't need to do that. So try once more without the clobbers
6799 unless this represents an ASM insn. */
6801 if (insn_code_number
< 0 && ! check_asm_operands (pat
)
6802 && GET_CODE (pat
) == PARALLEL
)
/* Compact the PARALLEL in place, keeping only the non-CLOBBER elements
   (POS tracks the new length).  */
6806 for (pos
= 0, i
= 0; i
< XVECLEN (pat
, 0); i
++)
6807 if (GET_CODE (XVECEXP (pat
, 0, i
)) != CLOBBER
)
6810 SUBST (XVECEXP (pat
, 0, pos
), XVECEXP (pat
, 0, i
));
6814 SUBST_INT (XVECLEN (pat
, 0), pos
);
6817 pat
= XVECEXP (pat
, 0, 0);
6819 insn_code_number
= recog (pat
, insn
, &num_clobbers_to_add
);
6822 /* If we had any clobbers to add, make a new pattern than contains
6823 them. Then check to make sure that all of them are dead. */
6824 if (num_clobbers_to_add
)
6826 rtx newpat
= gen_rtx (PARALLEL
, VOIDmode
,
6827 gen_rtvec (GET_CODE (pat
) == PARALLEL
6828 ? XVECLEN (pat
, 0) + num_clobbers_to_add
6829 : num_clobbers_to_add
+ 1));
6831 if (GET_CODE (pat
) == PARALLEL
)
6832 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
6833 XVECEXP (newpat
, 0, i
) = XVECEXP (pat
, 0, i
);
6835 XVECEXP (newpat
, 0, 0) = pat
;
6837 add_clobbers (newpat
, insn_code_number
);
/* For each clobber just appended, if it clobbers a REG that is not dead
   here, record a REG_UNUSED note (returned via PNOTES).  */
6839 for (i
= XVECLEN (newpat
, 0) - num_clobbers_to_add
;
6840 i
< XVECLEN (newpat
, 0); i
++)
6842 if (GET_CODE (XEXP (XVECEXP (newpat
, 0, i
), 0)) == REG
6843 && ! reg_dead_at_p (XEXP (XVECEXP (newpat
, 0, i
), 0), insn
))
6845 notes
= gen_rtx (EXPR_LIST
, REG_UNUSED
,
6846 XEXP (XVECEXP (newpat
, 0, i
), 0), notes
);
6854 return insn_code_number
;
6857 /* Like gen_lowpart but for use by combine. In combine it is not possible
6858 to create any new pseudoregs. However, it is safe to create
6859 invalid memory addresses, because combine will try to recognize
6860 them and all they will do is make the combine attempt fail.
6862 If for some reason this cannot do its job, an rtx
6863 (clobber (const_int 0)) is returned.
6864 An insn containing that will not be recognized. */
/* NOTE(review): line-shredded fragment; tokens kept byte-identical, only
   comments added.  */
/* Like gen_lowpart but for use by combine: never creates new pseudoregs,
   and on failure returns (clobber (const_int 0)) -- an insn containing
   that will not be recognized, making the combine attempt fail cleanly
   (see the block comment above this function).  */
6869 gen_lowpart_for_combine (mode
, x
)
6870 enum machine_mode mode
;
6875 if (GET_MODE (x
) == mode
)
/* Refuse multi-word results outright -- return the failure CLOBBER.  */
6878 if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
6879 return gen_rtx (CLOBBER
, GET_MODE (x
), const0_rtx
);
6881 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
6882 won't know what to do. So we will strip off the SUBREG here and
6883 process normally. */
6884 if (GET_CODE (x
) == SUBREG
&& GET_CODE (SUBREG_REG (x
)) == MEM
)
6887 if (GET_MODE (x
) == mode
)
6891 result
= gen_lowpart_common (mode
, x
);
6895 if (GET_CODE (x
) == MEM
)
6897 register int offset
= 0;
6900 /* Refuse to work on a volatile memory ref or one with a mode-dependent
6902 if (MEM_VOLATILE_P (x
) || mode_dependent_address_p (XEXP (x
, 0)))
6903 return gen_rtx (CLOBBER
, GET_MODE (x
), const0_rtx
);
6905 /* If we want to refer to something bigger than the original memref,
6906 generate a perverse subreg instead. That will force a reload
6907 of the original memref X. */
6908 if (GET_MODE_SIZE (GET_MODE (x
)) < GET_MODE_SIZE (mode
))
6909 return gen_rtx (SUBREG
, mode
, x
, 0);
/* Compute the byte offset of the low part, adjusting for word and byte
   endianness of the target.  */
6911 #if WORDS_BIG_ENDIAN
6912 offset
= (MAX (GET_MODE_SIZE (GET_MODE (x
)), UNITS_PER_WORD
)
6913 - MAX (GET_MODE_SIZE (mode
), UNITS_PER_WORD
));
6915 #if BYTES_BIG_ENDIAN
6916 /* Adjust the address so that the address-after-the-data
6918 offset
-= (MIN (UNITS_PER_WORD
, GET_MODE_SIZE (mode
))
6919 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (GET_MODE (x
))));
6921 new = gen_rtx (MEM
, mode
, plus_constant (XEXP (x
, 0), offset
));
6922 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x
);
6923 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x
);
6924 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x
);
6928 /* If X is a comparison operator, rewrite it in a new mode. This
6929 probably won't match, but may allow further simplifications. */
6930 else if (GET_RTX_CLASS (GET_CODE (x
)) == '<')
6931 return gen_rtx_combine (GET_CODE (x
), mode
, XEXP (x
, 0), XEXP (x
, 1));
6933 /* If we couldn't simplify X any other way, just enclose it in a
6934 SUBREG. Normally, this SUBREG won't match, but some patterns may
6935 include an explicit SUBREG or we may simplify it further in combine. */
6940 if (WORDS_BIG_ENDIAN
&& GET_MODE_SIZE (GET_MODE (x
)) > UNITS_PER_WORD
)
6941 word
= ((GET_MODE_SIZE (GET_MODE (x
))
6942 - MAX (GET_MODE_SIZE (mode
), UNITS_PER_WORD
))
6944 return gen_rtx (SUBREG
, mode
, x
, word
);
6948 /* Make an rtx expression. This is a subset of gen_rtx and only supports
6949 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
6951 If the identical expression was previously in the insn (in the undobuf),
6952 it will be returned. Only if it is not found will a new expression
/* NOTE(review): line-shredded fragment; several declarations (p, n_args,
   fmt, args, i, j, rt) appear elided.  Tokens kept byte-identical, only
   comments added.  */
/* Varargs (old-style va_alist) constructor for rtx expressions of 1-3
   operands: gen_rtx_combine (code, mode, op...).  Reuses an identical
   expression recorded in UNDOBUF for this insn when one exists (see the
   block comment above this function); otherwise allocates a new rtx via
   rtx_alloc, which is noted as faster than gen_rtx.  */
6957 gen_rtx_combine (va_alist
)
6962 enum machine_mode mode
;
6970 code
= va_arg (p
, enum rtx_code
);
6971 mode
= va_arg (p
, enum machine_mode
);
6972 n_args
= GET_RTX_LENGTH (code
);
6973 fmt
= GET_RTX_FORMAT (code
);
6975 if (n_args
== 0 || n_args
> 3)
6978 /* Get each arg and verify that it is supposed to be an expression. */
6979 for (j
= 0; j
< n_args
; j
++)
6984 args
[j
] = va_arg (p
, rtx
);
6987 /* See if this is in undobuf. Be sure we don't use objects that came
6988 from another insn; this could produce circular rtl structures. */
6990 for (i
= previous_num_undos
; i
< undobuf
.num_undo
; i
++)
6991 if (!undobuf
.undo
[i
].is_int
6992 && GET_CODE (undobuf
.undo
[i
].old_contents
) == code
6993 && GET_MODE (undobuf
.undo
[i
].old_contents
) == mode
)
/* Candidate found: all operands must match too before it is reused.  */
6995 for (j
= 0; j
< n_args
; j
++)
6996 if (XEXP (undobuf
.undo
[i
].old_contents
, j
) != args
[j
])
7000 return undobuf
.undo
[i
].old_contents
;
7003 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
7004 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
7005 rt
= rtx_alloc (code
);
7006 PUT_MODE (rt
, mode
);
7007 XEXP (rt
, 0) = args
[0];
7010 XEXP (rt
, 1) = args
[1];
7012 XEXP (rt
, 2) = args
[2];
7017 /* These routines make binary and unary operations by first seeing if they
7018 fold; if not, a new expression is allocated. */
/* NOTE(review): line-shredded fragment; tokens kept byte-identical, only
   comments added.  */
/* Build a binary operation (CODE MODE OP0 OP1), first trying to fold it
   via simplify_relational_operation (for comparison codes) or
   simplify_binary_operation; only if folding fails is a new expression
   allocated via gen_rtx_combine.  For commutative codes the operands are
   canonicalized so complex operands come first and constants second.  */
7021 gen_binary (code
, mode
, op0
, op1
)
7023 enum machine_mode mode
;
7028 if (GET_RTX_CLASS (code
) == '<')
/* Comparison: pick the operands' mode (either may be VOIDmode, e.g. a
   constant), then try relational folding.  */
7030 enum machine_mode op_mode
= GET_MODE (op0
);
7031 if (op_mode
== VOIDmode
)
7032 op_mode
= GET_MODE (op1
);
7033 result
= simplify_relational_operation (code
, op_mode
, op0
, op1
);
7036 result
= simplify_binary_operation (code
, mode
, op0
, op1
);
7041 /* Put complex operands first and constants second. */
7042 if (GET_RTX_CLASS (code
) == 'c'
7043 && ((CONSTANT_P (op0
) && GET_CODE (op1
) != CONST_INT
)
7044 || (GET_RTX_CLASS (GET_CODE (op0
)) == 'o'
7045 && GET_RTX_CLASS (GET_CODE (op1
)) != 'o')
7046 || (GET_CODE (op0
) == SUBREG
7047 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0
))) == 'o'
7048 && GET_RTX_CLASS (GET_CODE (op1
)) != 'o')))
7049 return gen_rtx_combine (code
, mode
, op1
, op0
);
7051 return gen_rtx_combine (code
, mode
, op0
, op1
);
/* NOTE(review): line-shredded fragment; tokens kept byte-identical, only
   comments added.  */
/* Build a unary operation (CODE MODE OP0), first trying constant folding
   via simplify_unary_operation; if that fails, allocate a fresh
   expression via gen_rtx_combine.  */
7055 gen_unary (code
, mode
, op0
)
7057 enum machine_mode mode
;
7060 rtx result
= simplify_unary_operation (code
, mode
, op0
, mode
);
7065 return gen_rtx_combine (code
, mode
, op0
);
7068 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
7069 comparison code that will be tested.
7071 The result is a possibly different comparison code to use. *POP0 and
7072 *POP1 may be updated.
7074 It is possible that we might detect that a comparison is either always
7075 true or always false. However, we do not perform general constant
7076 folding in combine, so this knowledge isn't useful. Such tautologies
7077 should have been detected earlier. Hence we ignore all such cases. */
7079 static enum rtx_code
7080 simplify_comparison (code
, pop0
, pop1
)
7089 enum machine_mode mode
, tmode
;
7091 /* Try a few ways of applying the same transformation to both operands. */
7094 /* If both operands are the same constant shift, see if we can ignore the
7095 shift. We can if the shift is a rotate or if the bits shifted out of
7096 this shift are not significant for either input and if the type of
7097 comparison is compatible with the shift. */
7098 if (GET_CODE (op0
) == GET_CODE (op1
)
7099 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
7100 && ((GET_CODE (op0
) == ROTATE
&& (code
== NE
|| code
== EQ
))
7101 || ((GET_CODE (op0
) == LSHIFTRT
7102 || GET_CODE (op0
) == ASHIFT
|| GET_CODE (op0
) == LSHIFT
)
7103 && (code
!= GT
&& code
!= LT
&& code
!= GE
&& code
!= LE
))
7104 || (GET_CODE (op0
) == ASHIFTRT
7105 && (code
!= GTU
&& code
!= LTU
7106 && code
!= GEU
&& code
!= GEU
)))
7107 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
7108 && INTVAL (XEXP (op0
, 1)) >= 0
7109 && INTVAL (XEXP (op0
, 1)) < HOST_BITS_PER_WIDE_INT
7110 && XEXP (op0
, 1) == XEXP (op1
, 1))
7112 enum machine_mode mode
= GET_MODE (op0
);
7113 unsigned HOST_WIDE_INT mask
= GET_MODE_MASK (mode
);
7114 int shift_count
= INTVAL (XEXP (op0
, 1));
7116 if (GET_CODE (op0
) == LSHIFTRT
|| GET_CODE (op0
) == ASHIFTRT
)
7117 mask
&= (mask
>> shift_count
) << shift_count
;
7118 else if (GET_CODE (op0
) == ASHIFT
|| GET_CODE (op0
) == LSHIFT
)
7119 mask
= (mask
& (mask
<< shift_count
)) >> shift_count
;
7121 if ((significant_bits (XEXP (op0
, 0), mode
) & ~ mask
) == 0
7122 && (significant_bits (XEXP (op1
, 0), mode
) & ~ mask
) == 0)
7123 op0
= XEXP (op0
, 0), op1
= XEXP (op1
, 0);
7128 /* If both operands are AND's of a paradoxical SUBREG by constant, the
7129 SUBREGs are of the same mode, and, in both cases, the AND would
7130 be redundant if the comparison was done in the narrower mode,
7131 do the comparison in the narrower mode (e.g., we are AND'ing with 1
7132 and the operand's significant bits are 0xffffff01; in that case if
7133 we only care about QImode, we don't need the AND). This case occurs
7134 if the output mode of an scc insn is not SImode and
7135 STORE_FLAG_VALUE == 1 (e.g., the 386). */
7137 else if (GET_CODE (op0
) == AND
&& GET_CODE (op1
) == AND
7138 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
7139 && GET_CODE (XEXP (op1
, 1)) == CONST_INT
7140 && GET_CODE (XEXP (op0
, 0)) == SUBREG
7141 && GET_CODE (XEXP (op1
, 0)) == SUBREG
7142 && (GET_MODE_SIZE (GET_MODE (XEXP (op0
, 0)))
7143 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0
, 0)))))
7144 && (GET_MODE (SUBREG_REG (XEXP (op0
, 0)))
7145 == GET_MODE (SUBREG_REG (XEXP (op1
, 0))))
7146 && (significant_bits (SUBREG_REG (XEXP (op0
, 0)),
7147 GET_MODE (SUBREG_REG (XEXP (op0
, 0))))
7148 & ~ INTVAL (XEXP (op0
, 1))) == 0
7149 && (significant_bits (SUBREG_REG (XEXP (op1
, 0)),
7150 GET_MODE (SUBREG_REG (XEXP (op1
, 0))))
7151 & ~ INTVAL (XEXP (op1
, 1))) == 0)
7153 op0
= SUBREG_REG (XEXP (op0
, 0));
7154 op1
= SUBREG_REG (XEXP (op1
, 0));
7156 /* the resulting comparison is always unsigned since we masked off
7157 the original sign bit. */
7158 code
= unsigned_condition (code
);
7164 /* If the first operand is a constant, swap the operands and adjust the
7165 comparison code appropriately. */
7166 if (CONSTANT_P (op0
))
7168 tem
= op0
, op0
= op1
, op1
= tem
;
7169 code
= swap_condition (code
);
7172 /* We now enter a loop during which we will try to simplify the comparison.
7173 For the most part, we only are concerned with comparisons with zero,
7174 but some things may really be comparisons with zero but not start
7175 out looking that way. */
7177 while (GET_CODE (op1
) == CONST_INT
)
7179 enum machine_mode mode
= GET_MODE (op0
);
7180 int mode_width
= GET_MODE_BITSIZE (mode
);
7181 unsigned HOST_WIDE_INT mask
= GET_MODE_MASK (mode
);
7182 int equality_comparison_p
;
7183 int sign_bit_comparison_p
;
7184 int unsigned_comparison_p
;
7185 HOST_WIDE_INT const_op
;
7187 /* We only want to handle integral modes. This catches VOIDmode,
7188 CCmode, and the floating-point modes. An exception is that we
7189 can handle VOIDmode if OP0 is a COMPARE or a comparison
7192 if (GET_MODE_CLASS (mode
) != MODE_INT
7193 && ! (mode
== VOIDmode
7194 && (GET_CODE (op0
) == COMPARE
7195 || GET_RTX_CLASS (GET_CODE (op0
)) == '<')))
7198 /* Get the constant we are comparing against and turn off all bits
7199 not on in our mode. */
7200 const_op
= INTVAL (op1
);
7201 if (mode_width
<= HOST_BITS_PER_WIDE_INT
)
7204 /* If we are comparing against a constant power of two and the value
7205 being compared has only that single significant bit (e.g., it was
7206 `and'ed with that bit), we can replace this with a comparison
7209 && (code
== EQ
|| code
== NE
|| code
== GE
|| code
== GEU
7210 || code
== LT
|| code
== LTU
)
7211 && mode_width
<= HOST_BITS_PER_WIDE_INT
7212 && exact_log2 (const_op
) >= 0
7213 && significant_bits (op0
, mode
) == const_op
)
7215 code
= (code
== EQ
|| code
== GE
|| code
== GEU
? NE
: EQ
);
7216 op1
= const0_rtx
, const_op
= 0;
7219 /* Similarly, if we are comparing a value known to be either -1 or
7220 0 with -1, change it to the opposite comparison against zero. */
7223 && (code
== EQ
|| code
== NE
|| code
== GT
|| code
== LE
7224 || code
== GEU
|| code
== LTU
)
7225 && num_sign_bit_copies (op0
, mode
) == mode_width
)
7227 code
= (code
== EQ
|| code
== LE
|| code
== GEU
? NE
: EQ
);
7228 op1
= const0_rtx
, const_op
= 0;
7231 /* Do some canonicalizations based on the comparison code. We prefer
7232 comparisons against zero and then prefer equality comparisons.
7233 If we can reduce the size of a constant, we will do that too. */
7238 /* < C is equivalent to <= (C - 1) */
7242 op1
= GEN_INT (const_op
);
7244 /* ... fall through to LE case below. */
7250 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
7254 op1
= GEN_INT (const_op
);
7258 /* If we are doing a <= 0 comparison on a value known to have
7259 a zero sign bit, we can replace this with == 0. */
7260 else if (const_op
== 0
7261 && mode_width
<= HOST_BITS_PER_WIDE_INT
7262 && (significant_bits (op0
, mode
)
7263 & ((HOST_WIDE_INT
) 1 << (mode_width
- 1))) == 0)
7268 /* >= C is equivalent to > (C - 1). */
7272 op1
= GEN_INT (const_op
);
7274 /* ... fall through to GT below. */
7280 /* > C is equivalent to >= (C + 1); we do this for C < 0*/
7284 op1
= GEN_INT (const_op
);
7288 /* If we are doing a > 0 comparison on a value known to have
7289 a zero sign bit, we can replace this with != 0. */
7290 else if (const_op
== 0
7291 && mode_width
<= HOST_BITS_PER_WIDE_INT
7292 && (significant_bits (op0
, mode
)
7293 & ((HOST_WIDE_INT
) 1 << (mode_width
- 1))) == 0)
7298 /* < C is equivalent to <= (C - 1). */
7302 op1
= GEN_INT (const_op
);
7304 /* ... fall through ... */
7307 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
7308 else if (const_op
== (HOST_WIDE_INT
) 1 << (mode_width
- 1))
7310 const_op
= 0, op1
= const0_rtx
;
7318 /* unsigned <= 0 is equivalent to == 0 */
7322 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
7323 else if (const_op
== ((HOST_WIDE_INT
) 1 << (mode_width
- 1)) - 1)
7325 const_op
= 0, op1
= const0_rtx
;
7331 /* >= C is equivalent to < (C - 1). */
7335 op1
= GEN_INT (const_op
);
7337 /* ... fall through ... */
7340 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
7341 else if (const_op
== (HOST_WIDE_INT
) 1 << (mode_width
- 1))
7343 const_op
= 0, op1
= const0_rtx
;
7350 /* unsigned > 0 is equivalent to != 0 */
7354 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
7355 else if (const_op
== ((HOST_WIDE_INT
) 1 << (mode_width
- 1)) - 1)
7357 const_op
= 0, op1
= const0_rtx
;
7363 /* Compute some predicates to simplify code below. */
7365 equality_comparison_p
= (code
== EQ
|| code
== NE
);
7366 sign_bit_comparison_p
= ((code
== LT
|| code
== GE
) && const_op
== 0);
7367 unsigned_comparison_p
= (code
== LTU
|| code
== LEU
|| code
== GTU
7370 /* Now try cases based on the opcode of OP0. If none of the cases
7371 does a "continue", we exit this loop immediately after the
7374 switch (GET_CODE (op0
))
7377 /* If we are extracting a single bit from a variable position in
7378 a constant that has only a single bit set and are comparing it
7379 with zero, we can convert this into an equality comparison
7380 between the position and the location of the single bit. We can't
7381 do this if bit endian and we don't have an extzv since we then
7382 can't know what mode to use for the endianness adjustment. */
7384 #if ! BITS_BIG_ENDIAN || defined (HAVE_extzv)
7385 if (GET_CODE (XEXP (op0
, 0)) == CONST_INT
7386 && XEXP (op0
, 1) == const1_rtx
7387 && equality_comparison_p
&& const_op
== 0
7388 && (i
= exact_log2 (INTVAL (XEXP (op0
, 0)))) >= 0)
7391 i
= (GET_MODE_BITSIZE
7392 (insn_operand_mode
[(int) CODE_FOR_extzv
][1]) - 1 - i
);
7395 op0
= XEXP (op0
, 2);
7399 /* Result is nonzero iff shift count is equal to I. */
7400 code
= reverse_condition (code
);
7405 /* ... fall through ... */
7408 tem
= expand_compound_operation (op0
);
7417 /* If testing for equality, we can take the NOT of the constant. */
7418 if (equality_comparison_p
7419 && (tem
= simplify_unary_operation (NOT
, mode
, op1
, mode
)) != 0)
7421 op0
= XEXP (op0
, 0);
7426 /* If just looking at the sign bit, reverse the sense of the
7428 if (sign_bit_comparison_p
)
7430 op0
= XEXP (op0
, 0);
7431 code
= (code
== GE
? LT
: GE
);
7437 /* If testing for equality, we can take the NEG of the constant. */
7438 if (equality_comparison_p
7439 && (tem
= simplify_unary_operation (NEG
, mode
, op1
, mode
)) != 0)
7441 op0
= XEXP (op0
, 0);
7446 /* The remaining cases only apply to comparisons with zero. */
7450 /* When X is ABS or is known positive,
7451 (neg X) is < 0 if and only if X != 0. */
7453 if (sign_bit_comparison_p
7454 && (GET_CODE (XEXP (op0
, 0)) == ABS
7455 || (mode_width
<= HOST_BITS_PER_WIDE_INT
7456 && (significant_bits (XEXP (op0
, 0), mode
)
7457 & ((HOST_WIDE_INT
) 1 << (mode_width
- 1))) == 0)))
7459 op0
= XEXP (op0
, 0);
7460 code
= (code
== LT
? NE
: EQ
);
7464 /* If we have NEG of something that is the result of a
7465 SIGN_EXTEND, SIGN_EXTRACT, or ASHIFTRT, we know that the
7466 two high-order bits must be the same and hence that
7467 "(-a) < 0" is equivalent to "a > 0". Otherwise, we can't
7469 if (GET_CODE (XEXP (op0
, 0)) == SIGN_EXTEND
7470 || (GET_CODE (XEXP (op0
, 0)) == SIGN_EXTRACT
7471 && GET_CODE (XEXP (XEXP (op0
, 0), 1)) == CONST_INT
7472 && (INTVAL (XEXP (XEXP (op0
, 0), 1))
7473 < GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (op0
, 0), 0)))))
7474 || (GET_CODE (XEXP (op0
, 0)) == ASHIFTRT
7475 && GET_CODE (XEXP (XEXP (op0
, 0), 1)) == CONST_INT
7476 && XEXP (XEXP (op0
, 0), 1) != const0_rtx
)
7477 || ((tem
= get_last_value (XEXP (op0
, 0))) != 0
7478 && (GET_CODE (tem
) == SIGN_EXTEND
7479 || (GET_CODE (tem
) == SIGN_EXTRACT
7480 && GET_CODE (XEXP (tem
, 1)) == CONST_INT
7481 && (INTVAL (XEXP (tem
, 1))
7482 < GET_MODE_BITSIZE (GET_MODE (XEXP (tem
, 0)))))
7483 || (GET_CODE (tem
) == ASHIFTRT
7484 && GET_CODE (XEXP (tem
, 1)) == CONST_INT
7485 && XEXP (tem
, 1) != const0_rtx
))))
7487 op0
= XEXP (op0
, 0);
7488 code
= swap_condition (code
);
7494 /* If we are testing equality and our count is a constant, we
7495 can perform the inverse operation on our RHS. */
7496 if (equality_comparison_p
&& GET_CODE (XEXP (op0
, 1)) == CONST_INT
7497 && (tem
= simplify_binary_operation (ROTATERT
, mode
,
7498 op1
, XEXP (op0
, 1))) != 0)
7500 op0
= XEXP (op0
, 0);
7505 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
7506 a particular bit. Convert it to an AND of a constant of that
7507 bit. This will be converted into a ZERO_EXTRACT. */
7508 if (const_op
== 0 && sign_bit_comparison_p
7509 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
7510 && mode_width
<= HOST_BITS_PER_WIDE_INT
)
7512 op0
= simplify_and_const_int (NULL_RTX
, mode
, XEXP (op0
, 0),
7515 - INTVAL (XEXP (op0
, 1)))));
7516 code
= (code
== LT
? NE
: EQ
);
7520 /* ... fall through ... */
7523 /* ABS is ignorable inside an equality comparison with zero. */
7524 if (const_op
== 0 && equality_comparison_p
)
7526 op0
= XEXP (op0
, 0);
7533 /* Can simplify (compare (zero/sign_extend FOO) CONST)
7534 to (compare FOO CONST) if CONST fits in FOO's mode and we
7535 are either testing inequality or have an unsigned comparison
7536 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
7537 if (! unsigned_comparison_p
7538 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
7539 <= HOST_BITS_PER_WIDE_INT
)
7540 && ((unsigned HOST_WIDE_INT
) const_op
7541 < (((HOST_WIDE_INT
) 1
7542 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0))) - 1)))))
7544 op0
= XEXP (op0
, 0);
7550 /* If the inner mode is smaller and we are extracting the low
7551 part, we can treat the SUBREG as if it were a ZERO_EXTEND. */
7552 if (! subreg_lowpart_p (op0
)
7553 || GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0
))) >= mode_width
)
7556 /* ... fall through ... */
7559 if ((unsigned_comparison_p
|| equality_comparison_p
)
7560 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
7561 <= HOST_BITS_PER_WIDE_INT
)
7562 && ((unsigned HOST_WIDE_INT
) const_op
7563 < GET_MODE_MASK (GET_MODE (XEXP (op0
, 0)))))
7565 op0
= XEXP (op0
, 0);
7571 /* (eq (plus X C1) C2) -> (eq X (minus C2 C1)). We can only do
7572 this for equality comparisons due to pathological cases involving
7574 if (equality_comparison_p
&& GET_CODE (XEXP (op0
, 1)) == CONST_INT
7575 && (tem
= simplify_binary_operation (MINUS
, mode
, op1
,
7576 XEXP (op0
, 1))) != 0)
7578 op0
= XEXP (op0
, 0);
7583 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
7584 if (const_op
== 0 && XEXP (op0
, 1) == constm1_rtx
7585 && GET_CODE (XEXP (op0
, 0)) == ABS
&& sign_bit_comparison_p
)
7587 op0
= XEXP (XEXP (op0
, 0), 0);
7588 code
= (code
== LT
? EQ
: NE
);
7594 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
7595 of bits in X minus 1, is one iff X > 0. */
7596 if (sign_bit_comparison_p
&& GET_CODE (XEXP (op0
, 0)) == ASHIFTRT
7597 && GET_CODE (XEXP (XEXP (op0
, 0), 1)) == CONST_INT
7598 && INTVAL (XEXP (XEXP (op0
, 0), 1)) == mode_width
- 1
7599 && rtx_equal_p (XEXP (XEXP (op0
, 0), 0), XEXP (op0
, 1)))
7601 op0
= XEXP (op0
, 1);
7602 code
= (code
== GE
? LE
: GT
);
7608 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
7609 if C is zero or B is a constant. */
7610 if (equality_comparison_p
7611 && 0 != (tem
= simplify_binary_operation (XOR
, mode
,
7612 XEXP (op0
, 1), op1
)))
7614 op0
= XEXP (op0
, 0);
7621 case LT
: case LTU
: case LE
: case LEU
:
7622 case GT
: case GTU
: case GE
: case GEU
:
7623 /* We can't do anything if OP0 is a condition code value, rather
7624 than an actual data value. */
7627 || XEXP (op0
, 0) == cc0_rtx
7629 || GET_MODE_CLASS (GET_MODE (XEXP (op0
, 0))) == MODE_CC
)
7632 /* Get the two operands being compared. */
7633 if (GET_CODE (XEXP (op0
, 0)) == COMPARE
)
7634 tem
= XEXP (XEXP (op0
, 0), 0), tem1
= XEXP (XEXP (op0
, 0), 1);
7636 tem
= XEXP (op0
, 0), tem1
= XEXP (op0
, 1);
7638 /* Check for the cases where we simply want the result of the
7639 earlier test or the opposite of that result. */
7641 || (code
== EQ
&& reversible_comparison_p (op0
))
7642 || (GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
7643 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7644 && (STORE_FLAG_VALUE
7645 & (((HOST_WIDE_INT
) 1
7646 << (GET_MODE_BITSIZE (GET_MODE (op0
)) - 1))))
7648 || (code
== GE
&& reversible_comparison_p (op0
)))))
7650 code
= (code
== LT
|| code
== NE
7651 ? GET_CODE (op0
) : reverse_condition (GET_CODE (op0
)));
7652 op0
= tem
, op1
= tem1
;
7658 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
7660 if (sign_bit_comparison_p
&& GET_CODE (XEXP (op0
, 0)) == PLUS
7661 && XEXP (XEXP (op0
, 0), 1) == constm1_rtx
7662 && rtx_equal_p (XEXP (XEXP (op0
, 0), 0), XEXP (op0
, 1)))
7664 op0
= XEXP (op0
, 1);
7665 code
= (code
== GE
? GT
: LE
);
7671 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
7672 will be converted to a ZERO_EXTRACT later. */
7673 if (const_op
== 0 && equality_comparison_p
7674 && (GET_CODE (XEXP (op0
, 0)) == ASHIFT
7675 || GET_CODE (XEXP (op0
, 0)) == LSHIFT
)
7676 && XEXP (XEXP (op0
, 0), 0) == const1_rtx
)
7678 op0
= simplify_and_const_int
7679 (op0
, mode
, gen_rtx_combine (LSHIFTRT
, mode
,
7681 XEXP (XEXP (op0
, 0), 1)),
7686 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
7687 zero and X is a comparison and C1 and C2 describe only bits set
7688 in STORE_FLAG_VALUE, we can compare with X. */
7689 if (const_op
== 0 && equality_comparison_p
7690 && mode_width
<= HOST_BITS_PER_WIDE_INT
7691 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
7692 && GET_CODE (XEXP (op0
, 0)) == LSHIFTRT
7693 && GET_CODE (XEXP (XEXP (op0
, 0), 1)) == CONST_INT
7694 && INTVAL (XEXP (XEXP (op0
, 0), 1)) >= 0
7695 && INTVAL (XEXP (XEXP (op0
, 0), 1)) < HOST_BITS_PER_WIDE_INT
)
7697 mask
= ((INTVAL (XEXP (op0
, 1)) & GET_MODE_MASK (mode
))
7698 << INTVAL (XEXP (XEXP (op0
, 0), 1)));
7699 if ((~ STORE_FLAG_VALUE
& mask
) == 0
7700 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0
, 0), 0))) == '<'
7701 || ((tem
= get_last_value (XEXP (XEXP (op0
, 0), 0))) != 0
7702 && GET_RTX_CLASS (GET_CODE (tem
)) == '<')))
7704 op0
= XEXP (XEXP (op0
, 0), 0);
7709 /* If we are doing an equality comparison of an AND of a bit equal
7710 to the sign bit, replace this with a LT or GE comparison of
7711 the underlying value. */
7712 if (equality_comparison_p
7714 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
7715 && mode_width
<= HOST_BITS_PER_WIDE_INT
7716 && ((INTVAL (XEXP (op0
, 1)) & GET_MODE_MASK (mode
))
7717 == (HOST_WIDE_INT
) 1 << (mode_width
- 1)))
7719 op0
= XEXP (op0
, 0);
7720 code
= (code
== EQ
? GE
: LT
);
7724 /* If this AND operation is really a ZERO_EXTEND from a narrower
7725 mode, the constant fits within that mode, and this is either an
7726 equality or unsigned comparison, try to do this comparison in
7727 the narrower mode. */
7728 if ((equality_comparison_p
|| unsigned_comparison_p
)
7729 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
7730 && (i
= exact_log2 ((INTVAL (XEXP (op0
, 1))
7731 & GET_MODE_MASK (mode
))
7733 && const_op
>> i
== 0
7734 && (tmode
= mode_for_size (i
, MODE_INT
, 1)) != BLKmode
)
7736 op0
= gen_lowpart_for_combine (tmode
, XEXP (op0
, 0));
7743 /* If we have (compare (xshift FOO N) (const_int C)) and
7744 the high order N bits of FOO (N+1 if an inequality comparison)
7745 are not significant, we can do this by comparing FOO with C
7746 shifted right N bits so long as the low-order N bits of C are
7748 if (GET_CODE (XEXP (op0
, 1)) == CONST_INT
7749 && INTVAL (XEXP (op0
, 1)) >= 0
7750 && ((INTVAL (XEXP (op0
, 1)) + ! equality_comparison_p
)
7751 < HOST_BITS_PER_WIDE_INT
)
7753 & ~ (((HOST_WIDE_INT
) 1
7754 << INTVAL (XEXP (op0
, 1))) - 1)) == 0)
7755 && mode_width
<= HOST_BITS_PER_WIDE_INT
7756 && (significant_bits (XEXP (op0
, 0), mode
)
7757 & ~ (mask
>> (INTVAL (XEXP (op0
, 1))
7758 + ! equality_comparison_p
))) == 0)
7760 const_op
>>= INTVAL (XEXP (op0
, 1));
7761 op1
= GEN_INT (const_op
);
7762 op0
= XEXP (op0
, 0);
7766 /* If we are doing a sign bit comparison, it means we are testing
7767 a particular bit. Convert it to the appropriate AND. */
7768 if (sign_bit_comparison_p
&& GET_CODE (XEXP (op0
, 1)) == CONST_INT
7769 && mode_width
<= HOST_BITS_PER_WIDE_INT
)
7771 op0
= simplify_and_const_int (NULL_RTX
, mode
, XEXP (op0
, 0),
7774 - INTVAL (XEXP (op0
, 1)))));
7775 code
= (code
== LT
? NE
: EQ
);
7779 /* If this an equality comparison with zero and we are shifting
7780 the low bit to the sign bit, we can convert this to an AND of the
7782 if (const_op
== 0 && equality_comparison_p
7783 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
7784 && INTVAL (XEXP (op0
, 1)) == mode_width
- 1)
7786 op0
= simplify_and_const_int (NULL_RTX
, mode
, XEXP (op0
, 0),
7793 /* If this is an equality comparison with zero, we can do this
7794 as a logical shift, which might be much simpler. */
7795 if (equality_comparison_p
&& const_op
== 0
7796 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
)
7798 op0
= simplify_shift_const (NULL_RTX
, LSHIFTRT
, mode
,
7800 INTVAL (XEXP (op0
, 1)));
7804 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
7805 do the comparison in a narrower mode. */
7806 if (! unsigned_comparison_p
7807 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
7808 && GET_CODE (XEXP (op0
, 0)) == ASHIFT
7809 && XEXP (op0
, 1) == XEXP (XEXP (op0
, 0), 1)
7810 && (tmode
= mode_for_size (mode_width
- INTVAL (XEXP (op0
, 1)),
7811 MODE_INT
, 1)) != VOIDmode
7812 && ((unsigned HOST_WIDE_INT
) const_op
<= GET_MODE_MASK (tmode
)
7813 || ((unsigned HOST_WIDE_INT
) - const_op
7814 <= GET_MODE_MASK (tmode
))))
7816 op0
= gen_lowpart_for_combine (tmode
, XEXP (XEXP (op0
, 0), 0));
7820 /* ... fall through ... */
7822 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
7823 the low order N bits of FOO are not significant, we can do this
7824 by comparing FOO with C shifted left N bits so long as no
7826 if (GET_CODE (XEXP (op0
, 1)) == CONST_INT
7827 && INTVAL (XEXP (op0
, 1)) >= 0
7828 && INTVAL (XEXP (op0
, 1)) < HOST_BITS_PER_WIDE_INT
7829 && mode_width
<= HOST_BITS_PER_WIDE_INT
7830 && (significant_bits (XEXP (op0
, 0), mode
)
7831 & (((HOST_WIDE_INT
) 1 << INTVAL (XEXP (op0
, 1))) - 1)) == 0
7833 || (floor_log2 (const_op
) + INTVAL (XEXP (op0
, 1))
7836 const_op
<<= INTVAL (XEXP (op0
, 1));
7837 op1
= GEN_INT (const_op
);
7838 op0
= XEXP (op0
, 0);
7842 /* If we are using this shift to extract just the sign bit, we
7843 can replace this with an LT or GE comparison. */
7845 && (equality_comparison_p
|| sign_bit_comparison_p
)
7846 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
7847 && INTVAL (XEXP (op0
, 1)) == mode_width
- 1)
7849 op0
= XEXP (op0
, 0);
7850 code
= (code
== NE
|| code
== GT
? LT
: GE
);
7859 /* Now make any compound operations involved in this comparison. Then,
7860 check for an outmost SUBREG on OP0 that isn't doing anything or is
7861 paradoxical. The latter case can only occur when it is known that the
7862 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
7863 We can never remove a SUBREG for a non-equality comparison because the
7864 sign bit is in a different place in the underlying object. */
7866 op0
= make_compound_operation (op0
, op1
== const0_rtx
? COMPARE
: SET
);
7867 op1
= make_compound_operation (op1
, SET
);
7869 if (GET_CODE (op0
) == SUBREG
&& subreg_lowpart_p (op0
)
7870 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7871 && (code
== NE
|| code
== EQ
)
7872 && ((GET_MODE_SIZE (GET_MODE (op0
))
7873 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0
))))))
7875 op0
= SUBREG_REG (op0
);
7876 op1
= gen_lowpart_for_combine (GET_MODE (op0
), op1
);
7879 else if (GET_CODE (op0
) == SUBREG
&& subreg_lowpart_p (op0
)
7880 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7881 && (code
== NE
|| code
== EQ
)
7882 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
7883 && (significant_bits (SUBREG_REG (op0
), GET_MODE (SUBREG_REG (op0
)))
7884 & ~ GET_MODE_MASK (GET_MODE (op0
))) == 0
7885 && (tem
= gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0
)),
7887 (significant_bits (tem
, GET_MODE (SUBREG_REG (op0
)))
7888 & ~ GET_MODE_MASK (GET_MODE (op0
))) == 0))
7889 op0
= SUBREG_REG (op0
), op1
= tem
;
7891 /* We now do the opposite procedure: Some machines don't have compare
7892 insns in all modes. If OP0's mode is an integer mode smaller than a
7893 word and we can't do a compare in that mode, see if there is a larger
7894 mode for which we can do the compare and where the only significant
7895 bits in OP0 and OP1 are those in the narrower mode. We can do
7896 this if this is an equality comparison, in which case we can
7897 merely widen the operation, or if we are testing the sign bit, in
7898 which case we can explicitly put in the test. */
7900 mode
= GET_MODE (op0
);
7901 if (mode
!= VOIDmode
&& GET_MODE_CLASS (mode
) == MODE_INT
7902 && GET_MODE_SIZE (mode
) < UNITS_PER_WORD
7903 && cmp_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
7904 for (tmode
= GET_MODE_WIDER_MODE (mode
);
7906 && GET_MODE_BITSIZE (tmode
) <= HOST_BITS_PER_WIDE_INT
);
7907 tmode
= GET_MODE_WIDER_MODE (tmode
))
7908 if (cmp_optab
->handlers
[(int) tmode
].insn_code
!= CODE_FOR_nothing
7909 && (significant_bits (op0
, tmode
) & ~ GET_MODE_MASK (mode
)) == 0
7910 && (significant_bits (op1
, tmode
) & ~ GET_MODE_MASK (mode
)) == 0
7911 && (code
== EQ
|| code
== NE
7912 || (op1
== const0_rtx
&& (code
== LT
|| code
== GE
)
7913 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)))
7915 op0
= gen_lowpart_for_combine (tmode
, op0
);
7916 op1
= gen_lowpart_for_combine (tmode
, op1
);
7918 if (code
== LT
|| code
== GE
)
7920 op0
= gen_binary (AND
, tmode
, op0
,
7921 GEN_INT ((HOST_WIDE_INT
) 1
7922 << (GET_MODE_BITSIZE (mode
) - 1)));
7923 code
= (code
== LT
) ? NE
: EQ
;
7935 /* Return 1 if we know that X, a comparison operation, is not operating
7936 on a floating-point value or is EQ or NE, meaning that we can safely
7940 reversible_comparison_p (x
)
7943 if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
7944 || GET_CODE (x
) == NE
|| GET_CODE (x
) == EQ
)
7947 switch (GET_MODE_CLASS (GET_MODE (XEXP (x
, 0))))
7953 x
= get_last_value (XEXP (x
, 0));
7954 return (x
&& GET_CODE (x
) == COMPARE
7955 && GET_MODE_CLASS (GET_MODE (XEXP (x
, 0))) == MODE_INT
);
7961 /* Utility function for following routine. Called when X is part of a value
7962 being stored into reg_last_set_value. Sets reg_last_set_table_tick
7963 for each register mentioned. Similar to mention_regs in cse.c */
7966 update_table_tick (x
)
7969 register enum rtx_code code
= GET_CODE (x
);
7970 register char *fmt
= GET_RTX_FORMAT (code
);
7975 int regno
= REGNO (x
);
7976 int endregno
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
7977 ? HARD_REGNO_NREGS (regno
, GET_MODE (x
)) : 1);
7979 for (i
= regno
; i
< endregno
; i
++)
7980 reg_last_set_table_tick
[i
] = label_tick
;
7985 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
7986 /* Note that we can't have an "E" in values stored; see
7987 get_last_value_validate. */
7989 update_table_tick (XEXP (x
, i
));
7992 /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
7993 are saying that the register is clobbered and we no longer know its
7994 value. If INSN is zero, don't update reg_last_set; this call is normally
7995 done with VALUE also zero to invalidate the register. */
7998 record_value_for_reg (reg
, insn
, value
)
8003 int regno
= REGNO (reg
);
8004 int endregno
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
8005 ? HARD_REGNO_NREGS (regno
, GET_MODE (reg
)) : 1);
8008 /* If VALUE contains REG and we have a previous value for REG, substitute
8009 the previous value. */
8010 if (value
&& insn
&& reg_overlap_mentioned_p (reg
, value
))
8014 /* Set things up so get_last_value is allowed to see anything set up to
8016 subst_low_cuid
= INSN_CUID (insn
);
8017 tem
= get_last_value (reg
);
8020 value
= replace_rtx (copy_rtx (value
), reg
, tem
);
8023 /* For each register modified, show we don't know its value, that
8024 its value has been updated, and that we don't know the location of
8025 the death of the register. */
8026 for (i
= regno
; i
< endregno
; i
++)
8029 reg_last_set
[i
] = insn
;
8030 reg_last_set_value
[i
] = 0;
8031 reg_last_death
[i
] = 0;
8034 /* Mark registers that are being referenced in this value. */
8036 update_table_tick (value
);
8038 /* Now update the status of each register being set.
8039 If someone is using this register in this block, set this register
8040 to invalid since we will get confused between the two lives in this
8041 basic block. This makes using this register always invalid. In cse, we
8042 scan the table to invalidate all entries using this register, but this
8043 is too much work for us. */
8045 for (i
= regno
; i
< endregno
; i
++)
8047 reg_last_set_label
[i
] = label_tick
;
8048 if (value
&& reg_last_set_table_tick
[i
] == label_tick
)
8049 reg_last_set_invalid
[i
] = 1;
8051 reg_last_set_invalid
[i
] = 0;
8054 /* The value being assigned might refer to X (like in "x++;"). In that
8055 case, we must replace it with (clobber (const_int 0)) to prevent
8057 if (value
&& ! get_last_value_validate (&value
,
8058 reg_last_set_label
[regno
], 0))
8060 value
= copy_rtx (value
);
8061 if (! get_last_value_validate (&value
, reg_last_set_label
[regno
], 1))
8065 /* For the main register being modified, update the value. */
8066 reg_last_set_value
[regno
] = value
;
8070 /* Used for communication between the following two routines. */
8071 static rtx record_dead_insn
;
8073 /* Called via note_stores from record_dead_and_set_regs to handle one
8074 SET or CLOBBER in an insn. */
8077 record_dead_and_set_regs_1 (dest
, setter
)
8080 if (GET_CODE (dest
) == REG
)
8082 /* If we are setting the whole register, we know its value. Otherwise
8083 show that we don't know the value. We can handle SUBREG in
8085 if (GET_CODE (setter
) == SET
&& dest
== SET_DEST (setter
))
8086 record_value_for_reg (dest
, record_dead_insn
, SET_SRC (setter
));
8087 else if (GET_CODE (setter
) == SET
8088 && GET_CODE (SET_DEST (setter
)) == SUBREG
8089 && SUBREG_REG (SET_DEST (setter
)) == dest
8090 && subreg_lowpart_p (SET_DEST (setter
)))
8091 record_value_for_reg (dest
, record_dead_insn
,
8092 gen_lowpart_for_combine (GET_MODE (dest
),
8095 record_value_for_reg (dest
, record_dead_insn
, NULL_RTX
);
8097 else if (GET_CODE (dest
) == MEM
8098 /* Ignore pushes, they clobber nothing. */
8099 && ! push_operand (dest
, GET_MODE (dest
)))
8100 mem_last_set
= INSN_CUID (record_dead_insn
);
8103 /* Update the records of when each REG was most recently set or killed
8104 for the things done by INSN. This is the last thing done in processing
8105 INSN in the combiner loop.
8107 We update reg_last_set, reg_last_set_value, reg_last_death, and also the
8108 similar information mem_last_set (which insn most recently modified memory)
8109 and last_call_cuid (which insn was the most recent subroutine call). */
8112 record_dead_and_set_regs (insn
)
8116 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
8118 if (REG_NOTE_KIND (link
) == REG_DEAD
)
8119 reg_last_death
[REGNO (XEXP (link
, 0))] = insn
;
8120 else if (REG_NOTE_KIND (link
) == REG_INC
)
8121 record_value_for_reg (XEXP (link
, 0), insn
, NULL_RTX
);
8124 if (GET_CODE (insn
) == CALL_INSN
)
8125 last_call_cuid
= mem_last_set
= INSN_CUID (insn
);
8127 record_dead_insn
= insn
;
8128 note_stores (PATTERN (insn
), record_dead_and_set_regs_1
);
8131 /* Utility routine for the following function. Verify that all the registers
8132 mentioned in *LOC are valid when *LOC was part of a value set when
8133 label_tick == TICK. Return 0 if some are not.
8135 If REPLACE is non-zero, replace the invalid reference with
8136 (clobber (const_int 0)) and return 1. This replacement is useful because
8137 we often can get useful information about the form of a value (e.g., if
8138 it was produced by a shift that always produces -1 or 0) even though
8139 we don't know exactly what registers it was produced from. */
8142 get_last_value_validate (loc
, tick
, replace
)
8148 char *fmt
= GET_RTX_FORMAT (GET_CODE (x
));
8149 int len
= GET_RTX_LENGTH (GET_CODE (x
));
8152 if (GET_CODE (x
) == REG
)
8154 int regno
= REGNO (x
);
8155 int endregno
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
8156 ? HARD_REGNO_NREGS (regno
, GET_MODE (x
)) : 1);
8159 for (j
= regno
; j
< endregno
; j
++)
8160 if (reg_last_set_invalid
[j
]
8161 /* If this is a pseudo-register that was only set once, it is
8163 || (! (regno
>= FIRST_PSEUDO_REGISTER
&& reg_n_sets
[regno
] == 1)
8164 && reg_last_set_label
[j
] > tick
))
8167 *loc
= gen_rtx (CLOBBER
, GET_MODE (x
), const0_rtx
);
8174 for (i
= 0; i
< len
; i
++)
8176 && get_last_value_validate (&XEXP (x
, i
), tick
, replace
) == 0)
8177 /* Don't bother with these. They shouldn't occur anyway. */
8181 /* If we haven't found a reason for it to be invalid, it is valid. */
8185 /* Get the last value assigned to X, if known. Some registers
8186 in the value may be replaced with (clobber (const_int 0)) if their value
8187 is known longer known reliably. */
8196 /* If this is a non-paradoxical SUBREG, get the value of its operand and
8197 then convert it to the desired mode. If this is a paradoxical SUBREG,
8198 we cannot predict what values the "extra" bits might have. */
8199 if (GET_CODE (x
) == SUBREG
8200 && subreg_lowpart_p (x
)
8201 && (GET_MODE_SIZE (GET_MODE (x
))
8202 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
8203 && (value
= get_last_value (SUBREG_REG (x
))) != 0)
8204 return gen_lowpart_for_combine (GET_MODE (x
), value
);
8206 if (GET_CODE (x
) != REG
)
8210 value
= reg_last_set_value
[regno
];
8212 /* If we don't have a value or if it isn't for this basic block, return 0. */
8215 || (reg_n_sets
[regno
] != 1
8216 && (reg_last_set_label
[regno
] != label_tick
)))
8219 /* If the value was set in a later insn that the ones we are processing,
8220 we can't use it, but make a quick check to see if the previous insn
8221 set it to something. This is commonly the case when the same pseudo
8222 is used by repeated insns. */
8224 if (reg_n_sets
[regno
] != 1
8225 && INSN_CUID (reg_last_set
[regno
]) >= subst_low_cuid
)
8229 for (insn
= prev_nonnote_insn (subst_insn
);
8230 insn
&& INSN_CUID (insn
) >= subst_low_cuid
;
8231 insn
= prev_nonnote_insn (insn
))
8235 && (set
= single_set (insn
)) != 0
8236 && rtx_equal_p (SET_DEST (set
), x
))
8238 value
= SET_SRC (set
);
8240 /* Make sure that VALUE doesn't reference X. Replace any
8241 expliit references with a CLOBBER. If there are any remaining
8242 references (rare), don't use the value. */
8244 if (reg_mentioned_p (x
, value
))
8245 value
= replace_rtx (copy_rtx (value
), x
,
8246 gen_rtx (CLOBBER
, GET_MODE (x
), const0_rtx
));
8248 if (reg_overlap_mentioned_p (x
, value
))
8255 /* If the value has all its registers valid, return it. */
8256 if (get_last_value_validate (&value
, reg_last_set_label
[regno
], 0))
8259 /* Otherwise, make a copy and replace any invalid register with
8260 (clobber (const_int 0)). If that fails for some reason, return 0. */
8262 value
= copy_rtx (value
);
8263 if (get_last_value_validate (&value
, reg_last_set_label
[regno
], 1))
8269 /* Return nonzero if expression X refers to a REG or to memory
8270 that is set in an instruction more recent than FROM_CUID. */
8273 use_crosses_set_p (x
, from_cuid
)
8279 register enum rtx_code code
= GET_CODE (x
);
8283 register int regno
= REGNO (x
);
8284 #ifdef PUSH_ROUNDING
8285 /* Don't allow uses of the stack pointer to be moved,
8286 because we don't know whether the move crosses a push insn. */
8287 if (regno
== STACK_POINTER_REGNUM
)
8290 return (reg_last_set
[regno
]
8291 && INSN_CUID (reg_last_set
[regno
]) > from_cuid
);
8294 if (code
== MEM
&& mem_last_set
> from_cuid
)
8297 fmt
= GET_RTX_FORMAT (code
);
8299 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
8304 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
8305 if (use_crosses_set_p (XVECEXP (x
, i
, j
), from_cuid
))
8308 else if (fmt
[i
] == 'e'
8309 && use_crosses_set_p (XEXP (x
, i
), from_cuid
))
8315 /* Define three variables used for communication between the following
8318 static int reg_dead_regno
, reg_dead_endregno
;
8319 static int reg_dead_flag
;
8321 /* Function called via note_stores from reg_dead_at_p.
8323 If DEST is within [reg_dead_rengno, reg_dead_endregno), set
8324 reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET. */
8327 reg_dead_at_p_1 (dest
, x
)
8331 int regno
, endregno
;
8333 if (GET_CODE (dest
) != REG
)
8336 regno
= REGNO (dest
);
8337 endregno
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
8338 ? HARD_REGNO_NREGS (regno
, GET_MODE (dest
)) : 1);
8340 if (reg_dead_endregno
> regno
&& reg_dead_regno
< endregno
)
8341 reg_dead_flag
= (GET_CODE (x
) == CLOBBER
) ? 1 : -1;
8344 /* Return non-zero if REG is known to be dead at INSN.
8346 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
8347 referencing REG, it is dead. If we hit a SET referencing REG, it is
8348 live. Otherwise, see if it is live or dead at the start of the basic
8352 reg_dead_at_p (reg
, insn
)
8358 /* Set variables for reg_dead_at_p_1. */
8359 reg_dead_regno
= REGNO (reg
);
8360 reg_dead_endregno
= reg_dead_regno
+ (reg_dead_regno
< FIRST_PSEUDO_REGISTER
8361 ? HARD_REGNO_NREGS (reg_dead_regno
,
8367 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
8368 beginning of function. */
8369 for (; insn
&& GET_CODE (insn
) != CODE_LABEL
;
8370 insn
= prev_nonnote_insn (insn
))
8372 note_stores (PATTERN (insn
), reg_dead_at_p_1
);
8374 return reg_dead_flag
== 1 ? 1 : 0;
8376 if (find_regno_note (insn
, REG_DEAD
, reg_dead_regno
))
8380 /* Get the basic block number that we were in. */
8385 for (block
= 0; block
< n_basic_blocks
; block
++)
8386 if (insn
== basic_block_head
[block
])
8389 if (block
== n_basic_blocks
)
8393 for (i
= reg_dead_regno
; i
< reg_dead_endregno
; i
++)
8394 if (basic_block_live_at_start
[block
][i
/ REGSET_ELT_BITS
]
8395 & ((REGSET_ELT_TYPE
) 1 << (i
% REGSET_ELT_BITS
)))
8401 /* Remove register number REGNO from the dead registers list of INSN.
8403 Return the note used to record the death, if there was one. */
8406 remove_death (regno
, insn
)
8410 register rtx note
= find_regno_note (insn
, REG_DEAD
, regno
);
8413 remove_note (insn
, note
);
8418 /* For each register (hardware or pseudo) used within expression X, if its
8419 death is in an instruction with cuid between FROM_CUID (inclusive) and
8420 TO_INSN (exclusive), put a REG_DEAD note for that register in the
8421 list headed by PNOTES.
8423 This is done when X is being merged by combination into TO_INSN. These
8424 notes will then be distributed as needed. */
8427 move_deaths (x
, from_cuid
, to_insn
, pnotes
)
8434 register int len
, i
;
8435 register enum rtx_code code
= GET_CODE (x
);
8439 register int regno
= REGNO (x
);
8440 register rtx where_dead
= reg_last_death
[regno
];
8442 if (where_dead
&& INSN_CUID (where_dead
) >= from_cuid
8443 && INSN_CUID (where_dead
) < INSN_CUID (to_insn
))
8445 rtx note
= remove_death (regno
, reg_last_death
[regno
]);
8447 /* It is possible for the call above to return 0. This can occur
8448 when reg_last_death points to I2 or I1 that we combined with.
8449 In that case make a new note. */
8453 XEXP (note
, 1) = *pnotes
;
8457 *pnotes
= gen_rtx (EXPR_LIST
, REG_DEAD
, x
, *pnotes
);
8463 else if (GET_CODE (x
) == SET
)
8465 rtx dest
= SET_DEST (x
);
8467 move_deaths (SET_SRC (x
), from_cuid
, to_insn
, pnotes
);
8469 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
8470 that accesses one word of a multi-word item, some
8471 piece of everything register in the expression is used by
8472 this insn, so remove any old death. */
8474 if (GET_CODE (dest
) == ZERO_EXTRACT
8475 || GET_CODE (dest
) == STRICT_LOW_PART
8476 || (GET_CODE (dest
) == SUBREG
8477 && (((GET_MODE_SIZE (GET_MODE (dest
))
8478 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)
8479 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
)))
8480 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
))))
8482 move_deaths (dest
, from_cuid
, to_insn
, pnotes
);
8486 /* If this is some other SUBREG, we know it replaces the entire
8487 value, so use that as the destination. */
8488 if (GET_CODE (dest
) == SUBREG
)
8489 dest
= SUBREG_REG (dest
);
8491 /* If this is a MEM, adjust deaths of anything used in the address.
8492 For a REG (the only other possibility), the entire value is
8493 being replaced so the old value is not used in this insn. */
8495 if (GET_CODE (dest
) == MEM
)
8496 move_deaths (XEXP (dest
, 0), from_cuid
, to_insn
, pnotes
);
8500 else if (GET_CODE (x
) == CLOBBER
)
8503 len
= GET_RTX_LENGTH (code
);
8504 fmt
= GET_RTX_FORMAT (code
);
8506 for (i
= 0; i
< len
; i
++)
8511 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
8512 move_deaths (XVECEXP (x
, i
, j
), from_cuid
, to_insn
, pnotes
);
8514 else if (fmt
[i
] == 'e')
8515 move_deaths (XEXP (x
, i
), from_cuid
, to_insn
, pnotes
);
8519 /* Return 1 if X is the target of a bit-field assignment in BODY, the
8520 pattern of an insn. X must be a REG. */
8523 reg_bitfield_target_p (x
, body
)
8529 if (GET_CODE (body
) == SET
)
8531 rtx dest
= SET_DEST (body
);
8533 int regno
, tregno
, endregno
, endtregno
;
8535 if (GET_CODE (dest
) == ZERO_EXTRACT
)
8536 target
= XEXP (dest
, 0);
8537 else if (GET_CODE (dest
) == STRICT_LOW_PART
)
8538 target
= SUBREG_REG (XEXP (dest
, 0));
8542 if (GET_CODE (target
) == SUBREG
)
8543 target
= SUBREG_REG (target
);
8545 if (GET_CODE (target
) != REG
)
8548 tregno
= REGNO (target
), regno
= REGNO (x
);
8549 if (tregno
>= FIRST_PSEUDO_REGISTER
|| regno
>= FIRST_PSEUDO_REGISTER
)
8552 endtregno
= tregno
+ HARD_REGNO_NREGS (tregno
, GET_MODE (target
));
8553 endregno
= regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (x
));
8555 return endregno
> tregno
&& regno
< endtregno
;
8558 else if (GET_CODE (body
) == PARALLEL
)
8559 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
8560 if (reg_bitfield_target_p (x
, XVECEXP (body
, 0, i
)))
8566 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
8567 as appropriate. I3 and I2 are the insns resulting from the combination
8568 insns including FROM (I2 may be zero).
8570 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
8571 not need REG_DEAD notes because they are being substituted for. This
8572 saves searching in the most common cases.
8574 Each note in the list is either ignored or placed on some insns, depending
8575 on the type of note. */
8578 distribute_notes (notes
, from_insn
, i3
, i2
, elim_i2
, elim_i1
)
8582 rtx elim_i2
, elim_i1
;
8584 rtx note
, next_note
;
8587 for (note
= notes
; note
; note
= next_note
)
8589 rtx place
= 0, place2
= 0;
8591 /* If this NOTE references a pseudo register, ensure it references
8592 the latest copy of that register. */
8593 if (XEXP (note
, 0) && GET_CODE (XEXP (note
, 0)) == REG
8594 && REGNO (XEXP (note
, 0)) >= FIRST_PSEUDO_REGISTER
)
8595 XEXP (note
, 0) = regno_reg_rtx
[REGNO (XEXP (note
, 0))];
8597 next_note
= XEXP (note
, 1);
8598 switch (REG_NOTE_KIND (note
))
8601 /* If this register is set or clobbered in I3, put the note there
8602 unless there is one already. */
8603 if (reg_set_p (XEXP (note
, 0), PATTERN (i3
)))
8605 if (! (GET_CODE (XEXP (note
, 0)) == REG
8606 ? find_regno_note (i3
, REG_UNUSED
, REGNO (XEXP (note
, 0)))
8607 : find_reg_note (i3
, REG_UNUSED
, XEXP (note
, 0))))
8610 /* Otherwise, if this register is used by I3, then this register
8611 now dies here, so we must put a REG_DEAD note here unless there
8613 else if (reg_referenced_p (XEXP (note
, 0), PATTERN (i3
))
8614 && ! (GET_CODE (XEXP (note
, 0)) == REG
8615 ? find_regno_note (i3
, REG_DEAD
, REGNO (XEXP (note
, 0)))
8616 : find_reg_note (i3
, REG_DEAD
, XEXP (note
, 0))))
8618 PUT_REG_NOTE_KIND (note
, REG_DEAD
);
8626 /* These notes say something about results of an insn. We can
8627 only support them if they used to be on I3 in which case they
8628 remain on I3. Otherwise they are ignored. */
8629 if (from_insn
== i3
)
8634 case REG_NO_CONFLICT
:
8636 /* These notes say something about how a register is used. They must
8637 be present on any use of the register in I2 or I3. */
8638 if (reg_mentioned_p (XEXP (note
, 0), PATTERN (i3
)))
8641 if (i2
&& reg_mentioned_p (XEXP (note
, 0), PATTERN (i2
)))
8651 /* It is too much trouble to try to see if this note is still
8652 correct in all situations. It is better to simply delete it. */
8656 /* If the insn previously containing this note still exists,
8657 put it back where it was. Otherwise move it to the previous
8658 insn. Adjust the corresponding REG_LIBCALL note. */
8659 if (GET_CODE (from_insn
) != NOTE
)
8663 tem
= find_reg_note (XEXP (note
, 0), REG_LIBCALL
, NULL_RTX
);
8664 place
= prev_real_insn (from_insn
);
8666 XEXP (tem
, 0) = place
;
8671 /* This is handled similarly to REG_RETVAL. */
8672 if (GET_CODE (from_insn
) != NOTE
)
8676 tem
= find_reg_note (XEXP (note
, 0), REG_RETVAL
, NULL_RTX
);
8677 place
= next_real_insn (from_insn
);
8679 XEXP (tem
, 0) = place
;
8684 /* If the register is used as an input in I3, it dies there.
8685 Similarly for I2, if it is non-zero and adjacent to I3.
8687 If the register is not used as an input in either I3 or I2
8688 and it is not one of the registers we were supposed to eliminate,
8689 there are two possibilities. We might have a non-adjacent I2
8690 or we might have somehow eliminated an additional register
8691 from a computation. For example, we might have had A & B where
8692 we discover that B will always be zero. In this case we will
8693 eliminate the reference to A.
8695 In both cases, we must search to see if we can find a previous
8696 use of A and put the death note there. */
8698 if (reg_referenced_p (XEXP (note
, 0), PATTERN (i3
)))
8700 else if (i2
!= 0 && next_nonnote_insn (i2
) == i3
8701 && reg_referenced_p (XEXP (note
, 0), PATTERN (i2
)))
8704 if (XEXP (note
, 0) == elim_i2
|| XEXP (note
, 0) == elim_i1
)
8707 /* If the register is used in both I2 and I3 and it dies in I3,
8708 we might have added another reference to it. If reg_n_refs
8709 was 2, bump it to 3. This has to be correct since the
8710 register must have been set somewhere. The reason this is
8711 done is because local-alloc.c treats 2 references as a
8714 if (place
== i3
&& i2
!= 0 && GET_CODE (XEXP (note
, 0)) == REG
8715 && reg_n_refs
[REGNO (XEXP (note
, 0))]== 2
8716 && reg_referenced_p (XEXP (note
, 0), PATTERN (i2
)))
8717 reg_n_refs
[REGNO (XEXP (note
, 0))] = 3;
8720 for (tem
= prev_nonnote_insn (i3
);
8721 tem
&& (GET_CODE (tem
) == INSN
8722 || GET_CODE (tem
) == CALL_INSN
);
8723 tem
= prev_nonnote_insn (tem
))
8725 /* If the register is being set at TEM, see if that is all
8726 TEM is doing. If so, delete TEM. Otherwise, make this
8727 into a REG_UNUSED note instead. */
8728 if (reg_set_p (XEXP (note
, 0), PATTERN (tem
)))
8730 rtx set
= single_set (tem
);
8732 /* Verify that it was the set, and not a clobber that
8733 modified the register. */
8735 if (set
!= 0 && ! side_effects_p (SET_SRC (set
))
8736 && rtx_equal_p (XEXP (note
, 0), SET_DEST (set
)))
8738 /* Move the notes and links of TEM elsewhere.
8739 This might delete other dead insns recursively.
8740 First set the pattern to something that won't use
8743 PATTERN (tem
) = pc_rtx
;
8745 distribute_notes (REG_NOTES (tem
), tem
, tem
,
8746 NULL_RTX
, NULL_RTX
, NULL_RTX
);
8747 distribute_links (LOG_LINKS (tem
));
8749 PUT_CODE (tem
, NOTE
);
8750 NOTE_LINE_NUMBER (tem
) = NOTE_INSN_DELETED
;
8751 NOTE_SOURCE_FILE (tem
) = 0;
8755 PUT_REG_NOTE_KIND (note
, REG_UNUSED
);
8757 /* If there isn't already a REG_UNUSED note, put one
8759 if (! find_regno_note (tem
, REG_UNUSED
,
8760 REGNO (XEXP (note
, 0))))
8765 else if (reg_referenced_p (XEXP (note
, 0), PATTERN (tem
)))
8772 /* If the register is set or already dead at PLACE, we needn't do
8773 anything with this note if it is still a REG_DEAD note.
8775 Note that we cannot use just `dead_or_set_p' here since we can
8776 convert an assignment to a register into a bit-field assignment.
8777 Therefore, we must also omit the note if the register is the
8778 target of a bitfield assignment. */
8780 if (place
&& REG_NOTE_KIND (note
) == REG_DEAD
)
8782 int regno
= REGNO (XEXP (note
, 0));
8784 if (dead_or_set_p (place
, XEXP (note
, 0))
8785 || reg_bitfield_target_p (XEXP (note
, 0), PATTERN (place
)))
8787 /* Unless the register previously died in PLACE, clear
8788 reg_last_death. [I no longer understand why this is
8790 if (reg_last_death
[regno
] != place
)
8791 reg_last_death
[regno
] = 0;
8795 reg_last_death
[regno
] = place
;
8797 /* If this is a death note for a hard reg that is occupying
8798 multiple registers, ensure that we are still using all
8799 parts of the object. If we find a piece of the object
8800 that is unused, we must add a USE for that piece before
8801 PLACE and put the appropriate REG_DEAD note on it.
8803 An alternative would be to put a REG_UNUSED for the pieces
8804 on the insn that set the register, but that can't be done if
8805 it is not in the same block. It is simpler, though less
8806 efficient, to add the USE insns. */
8808 if (place
&& regno
< FIRST_PSEUDO_REGISTER
8809 && HARD_REGNO_NREGS (regno
, GET_MODE (XEXP (note
, 0))) > 1)
8812 = regno
+ HARD_REGNO_NREGS (regno
,
8813 GET_MODE (XEXP (note
, 0)));
8817 for (i
= regno
; i
< endregno
; i
++)
8818 if (! refers_to_regno_p (i
, i
+ 1, PATTERN (place
), 0))
8820 rtx piece
= gen_rtx (REG
, word_mode
, i
);
8823 /* See if we already placed a USE note for this
8824 register in front of PLACE. */
8826 GET_CODE (PREV_INSN (p
)) == INSN
8827 && GET_CODE (PATTERN (PREV_INSN (p
))) == USE
;
8829 if (rtx_equal_p (piece
,
8830 XEXP (PATTERN (PREV_INSN (p
)), 0)))
8839 = emit_insn_before (gen_rtx (USE
, VOIDmode
,
8842 REG_NOTES (use_insn
)
8843 = gen_rtx (EXPR_LIST
, REG_DEAD
, piece
,
8844 REG_NOTES (use_insn
));
8852 /* Put only REG_DEAD notes for pieces that are
8853 still used and that are not already dead or set. */
8855 for (i
= regno
; i
< endregno
; i
++)
8857 rtx piece
= gen_rtx (REG
, word_mode
, i
);
8859 if (reg_referenced_p (piece
, PATTERN (place
))
8860 && ! dead_or_set_p (place
, piece
)
8861 && ! reg_bitfield_target_p (piece
,
8863 REG_NOTES (place
) = gen_rtx (EXPR_LIST
, REG_DEAD
,
8875 /* Any other notes should not be present at this point in the
8882 XEXP (note
, 1) = REG_NOTES (place
);
8883 REG_NOTES (place
) = note
;
8887 REG_NOTES (place2
) = gen_rtx (GET_CODE (note
), REG_NOTE_KIND (note
),
8888 XEXP (note
, 0), REG_NOTES (place2
));
8892 /* Similarly to above, distribute the LOG_LINKS that used to be present on
8893 I3, I2, and I1 to new locations. This is also called in one case to
8894 add a link pointing at I3 when I3's destination is changed. */
8897 distribute_links (links
)
8900 rtx link
, next_link
;
8902 for (link
= links
; link
; link
= next_link
)
8908 next_link
= XEXP (link
, 1);
8910 /* If the insn that this link points to is a NOTE or isn't a single
8911 set, ignore it. In the latter case, it isn't clear what we
8912 can do other than ignore the link, since we can't tell which
8913 register it was for. Such links wouldn't be used by combine
8916 It is not possible for the destination of the target of the link to
8917 have been changed by combine. The only potential of this is if we
8918 replace I3, I2, and I1 by I3 and I2. But in that case the
8919 destination of I2 also remains unchanged. */
8921 if (GET_CODE (XEXP (link
, 0)) == NOTE
8922 || (set
= single_set (XEXP (link
, 0))) == 0)
8925 reg
= SET_DEST (set
);
8926 while (GET_CODE (reg
) == SUBREG
|| GET_CODE (reg
) == ZERO_EXTRACT
8927 || GET_CODE (reg
) == SIGN_EXTRACT
8928 || GET_CODE (reg
) == STRICT_LOW_PART
)
8929 reg
= XEXP (reg
, 0);
8931 /* A LOG_LINK is defined as being placed on the first insn that uses
8932 a register and points to the insn that sets the register. Start
8933 searching at the next insn after the target of the link and stop
8934 when we reach a set of the register or the end of the basic block.
8936 Note that this correctly handles the link that used to point from
8937 I3 to I2. Also note that not much searching is typically done here
8938 since most links don't point very far away. */
8940 for (insn
= NEXT_INSN (XEXP (link
, 0));
8941 (insn
&& GET_CODE (insn
) != CODE_LABEL
8942 && GET_CODE (PREV_INSN (insn
)) != JUMP_INSN
);
8943 insn
= NEXT_INSN (insn
))
8944 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i'
8945 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
8947 if (reg_referenced_p (reg
, PATTERN (insn
)))
8952 /* If we found a place to put the link, place it there unless there
8953 is already a link to the same insn as LINK at that point. */
8959 for (link2
= LOG_LINKS (place
); link2
; link2
= XEXP (link2
, 1))
8960 if (XEXP (link2
, 0) == XEXP (link
, 0))
8965 XEXP (link
, 1) = LOG_LINKS (place
);
8966 LOG_LINKS (place
) = link
;
8973 dump_combine_stats (file
)
8978 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
8979 combine_attempts
, combine_merges
, combine_extras
, combine_successes
);
8983 dump_combine_total_stats (file
)
8988 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
8989 total_attempts
, total_merges
, total_extras
, total_successes
);