1 /* Optimize by combining instructions for GNU compiler.
2 Copyright (C) 1987, 1988, 1992 Free Software Foundation, Inc.
4 This file is part of GNU CC.
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
21 /* This module is essentially the "combiner" phase of the U. of Arizona
22 Portable Optimizer, but redone to work on our list-structured
23 representation for RTL instead of their string representation.
25 The LOG_LINKS of each insn identify the most recent assignment
26 to each REG used in the insn. It is a list of previous insns,
27 each of which contains a SET for a REG that is used in this insn
28 and not used or set in between. LOG_LINKs never cross basic blocks.
29 They were set up by the preceding pass (lifetime analysis).
31 We try to combine each pair of insns joined by a logical link.
32 We also try to combine triples of insns A, B and C when
33 C has a link back to B and B has a link back to A.
35 LOG_LINKS does not have links for use of the CC0. They don't
36 need to, because the insn that sets the CC0 is always immediately
37 before the insn that tests it. So we always regard a branch
38 insn as having a logical link to the preceding insn. The same is true
39 for an insn explicitly using CC0.
41 We check (with use_crosses_set_p) to avoid combining in such a way
42 as to move a computation to a place where its value would be different.
44 Combination is done by mathematically substituting the previous
45 insn(s) values for the regs they set into the expressions in
46 the later insns that refer to these regs. If the result is a valid insn
47 for our target machine, according to the machine description,
48 we install it, delete the earlier insns, and update the data flow
49 information (LOG_LINKS and REG_NOTES) for what we did.
51 There are a few exceptions where the dataflow information created by
52 flow.c aren't completely updated:
54 - reg_live_length is not updated
55 - reg_n_refs is not adjusted in the rare case when a register is
56 no longer required in a computation
57 - there are extremely rare cases (see distribute_regnotes) when a
59 - a LOG_LINKS entry that refers to an insn with multiple SETs may be
60 removed because there is no way to know which register it was
63 To simplify substitution, we combine only when the earlier insn(s)
64 consist of only a single assignment. To simplify updating afterward,
65 we never combine when a subroutine call appears in the middle.
67 Since we do not represent assignments to CC0 explicitly except when that
68 is all an insn does, there is no LOG_LINKS entry in an insn that uses
69 the condition code for the insn that set the condition code.
70 Fortunately, these two insns must be consecutive.
71 Therefore, every JUMP_INSN is taken to have an implicit logical link
72 to the preceding insn. This is not quite right, since non-jumps can
73 also use the condition code; but in practice such insns would not
84 #include "basic-block.h"
85 #include "insn-config.h"
86 #include "insn-flags.h"
87 #include "insn-codes.h"
88 #include "insn-attr.h"
92 /* It is not safe to use ordinary gen_lowpart in combine.
93 Use gen_lowpart_for_combine instead. See comments there. */
94 #define gen_lowpart dont_use_gen_lowpart_you_dummy
/* NOTE(review): this extraction has dropped lines -- the embedded original
   line numbers below are non-contiguous, and several definitions (e.g. the
   `struct undo'/`struct undo_int'/`struct undobuf' bodies around original
   lines 270-300, and the tails of the SUBST/SUBST_INT macros) are missing.
   The surviving text is kept byte-identical; only comments are touched.  */
96 /* Number of attempts to combine instructions in this function. */
98 static int combine_attempts
;
100 /* Number of attempts that got as far as substitution in this function. */
102 static int combine_merges
;
104 /* Number of instructions combined with added SETs in this function. */
106 static int combine_extras
;
108 /* Number of instructions combined in this function. */
110 static int combine_successes
;
112 /* Totals over entire compilation. */
114 static int total_attempts
, total_merges
, total_extras
, total_successes
;
116 /* Vector mapping INSN_UIDs to cuids.
117 The cuids are like uids but increase monotonically always.
118 Combine always uses cuids so that it can compare them.
119 But actually renumbering the uids, which we used to do,
120 proves to be a bad idea because it makes it hard to compare
121 the dumps produced by earlier passes with those from later passes. */
123 static int *uid_cuid
;
125 /* Get the cuid of an insn. */
127 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
129 /* Maximum register number, which is the size of the tables below. */
131 static int combine_max_regno
;
133 /* Record last point of death of (hard or pseudo) register n. */
135 static rtx
*reg_last_death
;
137 /* Record last point of modification of (hard or pseudo) register n. */
139 static rtx
*reg_last_set
;
141 /* Record the cuid of the last insn that invalidated memory
142 (anything that writes memory, and subroutine calls, but not pushes). */
144 static int mem_last_set
;
146 /* Record the cuid of the last CALL_INSN
147 so we can tell whether a potential combination crosses any calls. */
149 static int last_call_cuid
;
151 /* When `subst' is called, this is the insn that is being modified
152 (by combining in a previous insn). The PATTERN of this insn
153 is still the old pattern partially modified and it should not be
154 looked at, but this may be used to examine the successors of the insn
155 to judge whether a simplification is valid. */
157 static rtx subst_insn
;
159 /* This is the lowest CUID that `subst' is currently dealing with.
160 get_last_value will not return a value if the register was set at or
161 after this CUID. If not for this mechanism, we could get confused if
162 I2 or I1 in try_combine were an insn that used the old value of a register
163 to obtain a new value. In that case, we might erroneously get the
164 new value of the register when we wanted the old one. */
166 static int subst_low_cuid
;
168 /* This is the value of undobuf.num_undo when we started processing this
169 substitution. This will prevent gen_rtx_combine from re-using a piece
170 from the previous expression. Doing so can produce circular rtl
173 static int previous_num_undos
;
175 /* The next group of arrays allows the recording of the last value assigned
176 to (hard or pseudo) register n. We use this information to see if an
177 operation being processed is redundant given a prior operation performed
178 on the register. For example, an `and' with a constant is redundant if
179 all the zero bits are already known to be turned off.
181 We use an approach similar to that used by cse, but change it in the
184 (1) We do not want to reinitialize at each label.
185 (2) It is useful, but not critical, to know the actual value assigned
186 to a register. Often just its form is helpful.
188 Therefore, we maintain the following arrays:
190 reg_last_set_value the last value assigned
191 reg_last_set_label records the value of label_tick when the
192 register was assigned
193 reg_last_set_table_tick records the value of label_tick when a
194 value using the register is assigned
195 reg_last_set_invalid set to non-zero when it is not valid
196 to use the value of this register in some
199 To understand the usage of these tables, it is important to understand
200 the distinction between the value in reg_last_set_value being valid
201 and the register being validly contained in some other expression in the
204 Entry I in reg_last_set_value is valid if it is non-zero, and either
205 reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.
207 Register I may validly appear in any expression returned for the value
208 of another register if reg_n_sets[i] is 1. It may also appear in the
209 value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
210 reg_last_set_invalid[j] is zero.
212 If an expression is found in the table containing a register which may
213 not validly appear in an expression, the register is replaced by
214 something that won't match, (clobber (const_int 0)).
216 reg_last_set_invalid[i] is set non-zero when register I is being assigned
217 to and reg_last_set_table_tick[i] == label_tick. */
219 /* Record last value assigned to (hard or pseudo) register n. */
221 static rtx
*reg_last_set_value
;
223 /* Record the value of label_tick when the value for register n is placed in
224 reg_last_set_value[n]. */
226 static short *reg_last_set_label
;
228 /* Record the value of label_tick when an expression involving register n
229 is placed in reg_last_set_value. */
231 static short *reg_last_set_table_tick
;
233 /* Set non-zero if references to register n in expressions should not be
236 static char *reg_last_set_invalid
;
238 /* Incremented for each label. */
240 static short label_tick
;
242 /* Some registers that are set more than once and used in more than one
243 basic block are nevertheless always set in similar ways. For example,
244 a QImode register may be loaded from memory in two places on a machine
245 where byte loads zero extend.
247 We record in the following array what we know about the significant
248 bits of a register, specifically which bits are known to be zero.
250 If an entry is zero, it means that we don't know anything special. */
252 static HOST_WIDE_INT
*reg_significant
;
254 /* Mode used to compute significance in reg_significant. It is the largest
255 integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
257 static enum machine_mode significant_mode
;
259 /* Nonzero when reg_significant can be safely used. It is zero while
260 computing reg_significant. This prevents propagating values based
261 on previously set values, which can be incorrect if a variable
262 is modified in a loop. */
264 static int significant_valid
;
266 /* Record one modification to rtl structure
267 to be undone by storing old_contents into *where.
268 is_int is 1 if the contents are an int. */
284 /* Record a bunch of changes to be undone, up to MAX_UNDO of them.
285 num_undo says how many are currently recorded.
287 storage is nonzero if we must undo the allocation of new storage.
288 The value of storage is what to pass to obfree.
290 other_insn is nonzero if we have modified some other insn in the process
291 of working on subst_insn. It must be verified too. */
299 struct undo undo
[MAX_UNDO
];
303 static struct undobuf undobuf
;
305 /* Substitute NEWVAL, an rtx expression, into INTO, a place in some
306 insn. The substitution can be undone by undo_all. If INTO is already
307 set to NEWVAL, do not record this change. Because computing NEWVAL might
308 also call SUBST, we have to compute it before we put anything into
311 #define SUBST(INTO, NEWVAL) \
312 do { rtx _new = (NEWVAL); \
313 if (undobuf.num_undo < MAX_UNDO) \
315 undobuf.undo[undobuf.num_undo].where = &INTO; \
316 undobuf.undo[undobuf.num_undo].old_contents = INTO; \
317 undobuf.undo[undobuf.num_undo].is_int = 0; \
319 if (undobuf.undo[undobuf.num_undo].old_contents != INTO) \
320 undobuf.num_undo++; \
324 /* Similar to SUBST, but NEWVAL is an int. INTO will normally be an XINT
326 Note that substitution for the value of a CONST_INT is not safe. */
328 #define SUBST_INT(INTO, NEWVAL) \
329 do { if (undobuf.num_undo < MAX_UNDO) \
332 = (struct undo_int *)&undobuf.undo[undobuf.num_undo]; \
333 u->where = (int *) &INTO; \
334 u->old_contents = INTO; \
337 if (u->old_contents != INTO) \
338 undobuf.num_undo++; \
342 /* Number of times the pseudo being substituted for
343 was found and replaced. */
345 static int n_occurrences
;
/* Forward declarations of combine's internal helpers.  These are
   old-style (pre-ANSI) declarations with unspecified parameter lists,
   matching the K&R definitions later in the file.  */
347 static void set_significant ();
348 static void move_deaths ();
350 static void record_value_for_reg ();
351 static void record_dead_and_set_regs ();
352 static int use_crosses_set_p ();
353 static rtx
try_combine ();
354 static rtx
*find_split_point ();
356 static void undo_all ();
357 static int reg_dead_at_p ();
358 static rtx
expand_compound_operation ();
359 static rtx
expand_field_assignment ();
360 static rtx
make_extraction ();
361 static int get_pos_from_mask ();
362 static rtx
force_to_mode ();
363 static rtx
make_field_assignment ();
364 static rtx
make_compound_operation ();
365 static rtx
apply_distributive_law ();
366 static rtx
simplify_and_const_int ();
367 static unsigned HOST_WIDE_INT
significant_bits ();
368 static int merge_outer_ops ();
369 static rtx
simplify_shift_const ();
370 static int recog_for_combine ();
371 static rtx
gen_lowpart_for_combine ();
372 static rtx
gen_rtx_combine ();
373 static rtx
gen_binary ();
374 static rtx
gen_unary ();
375 static enum rtx_code
simplify_comparison ();
376 static int reversible_comparison_p ();
377 static int get_last_value_validate ();
378 static rtx
get_last_value ();
379 static void distribute_notes ();
380 static void distribute_links ();
382 /* Main entry point for combiner. F is the first insn of the function.
383 NREGS is the first unused pseudo-reg number. */
386 combine_instructions (f
, nregs
)
/* NOTE(review): this extraction dropped lines from this function (the
   embedded original line numbers jump, e.g. 386->390, 394->397, 444->447);
   parameter declarations, braces, and some statements are missing.  The
   surviving text is kept byte-identical below.  */
390 register rtx insn
, next
, prev
;
392 register rtx links
, nextlinks
;
394 combine_attempts
= 0;
397 combine_successes
= 0;
399 combine_max_regno
= nregs
;
/* Per-register tracking tables, one entry per (hard or pseudo) register.
   They live on the stack for the duration of this pass (alloca).  */
401 reg_last_death
= (rtx
*) alloca (nregs
* sizeof (rtx
));
402 reg_last_set
= (rtx
*) alloca (nregs
* sizeof (rtx
));
403 reg_last_set_value
= (rtx
*) alloca (nregs
* sizeof (rtx
));
404 reg_last_set_table_tick
= (short *) alloca (nregs
* sizeof (short));
405 reg_last_set_label
= (short *) alloca (nregs
* sizeof (short));
406 reg_last_set_invalid
= (char *) alloca (nregs
* sizeof (char));
407 reg_significant
= (HOST_WIDE_INT
*) alloca (nregs
* sizeof (HOST_WIDE_INT
));
409 bzero (reg_last_death
, nregs
* sizeof (rtx
));
410 bzero (reg_last_set
, nregs
* sizeof (rtx
));
411 bzero (reg_last_set_value
, nregs
* sizeof (rtx
));
412 bzero (reg_last_set_table_tick
, nregs
* sizeof (short));
413 bzero (reg_last_set_invalid
, nregs
* sizeof (char));
414 bzero (reg_significant
, nregs
* sizeof (HOST_WIDE_INT
));
416 init_recog_no_volatile ();
418 /* Compute maximum uid value so uid_cuid can be allocated. */
420 for (insn
= f
, i
= 0; insn
; insn
= NEXT_INSN (insn
))
421 if (INSN_UID (insn
) > i
)
424 uid_cuid
= (int *) alloca ((i
+ 1) * sizeof (int));
426 significant_mode
= mode_for_size (HOST_BITS_PER_WIDE_INT
, MODE_INT
, 0);
428 /* Don't use reg_significant when computing it. This can cause problems
429 when, for example, we have j <<= 1 in a loop. */
431 significant_valid
= 0;
433 /* Compute the mapping from uids to cuids.
434 Cuids are numbers assigned to insns, like uids,
435 except that cuids increase monotonically through the code.
437 Scan all SETs and see if we can deduce anything about what
438 bits are significant for some registers. */
440 for (insn
= f
, i
= 0; insn
; insn
= NEXT_INSN (insn
))
442 INSN_CUID (insn
) = ++i
;
443 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
444 note_stores (PATTERN (insn
), set_significant
);
447 significant_valid
= 1;
449 /* Now scan all the insns in forward order. */
/* When a try_combine call succeeds, it returns the insn at which to
   resume scanning (see try_combine's header comment), which the loop
   picks up through `next'.  */
455 for (insn
= f
; insn
; insn
= next
? next
: NEXT_INSN (insn
))
459 if (GET_CODE (insn
) == CODE_LABEL
)
462 else if (GET_CODE (insn
) == INSN
463 || GET_CODE (insn
) == CALL_INSN
464 || GET_CODE (insn
) == JUMP_INSN
)
466 /* Try this insn with each insn it links back to. */
468 for (links
= LOG_LINKS (insn
); links
; links
= XEXP (links
, 1))
469 if ((next
= try_combine (insn
, XEXP (links
, 0), NULL_RTX
)) != 0)
472 /* Try each sequence of three linked insns ending with this one. */
474 for (links
= LOG_LINKS (insn
); links
; links
= XEXP (links
, 1))
475 for (nextlinks
= LOG_LINKS (XEXP (links
, 0)); nextlinks
;
476 nextlinks
= XEXP (nextlinks
, 1))
477 if ((next
= try_combine (insn
, XEXP (links
, 0),
478 XEXP (nextlinks
, 0))) != 0)
482 /* Try to combine a jump insn that uses CC0
483 with a preceding insn that sets CC0, and maybe with its
484 logical predecessor as well.
485 This is how we make decrement-and-branch insns.
486 We need this special code because data flow connections
487 via CC0 do not get entered in LOG_LINKS. */
489 if (GET_CODE (insn
) == JUMP_INSN
490 && (prev
= prev_nonnote_insn (insn
)) != 0
491 && GET_CODE (prev
) == INSN
492 && sets_cc0_p (PATTERN (prev
)))
494 if ((next
= try_combine (insn
, prev
, NULL_RTX
)) != 0)
497 for (nextlinks
= LOG_LINKS (prev
); nextlinks
;
498 nextlinks
= XEXP (nextlinks
, 1))
499 if ((next
= try_combine (insn
, prev
,
500 XEXP (nextlinks
, 0))) != 0)
504 /* Do the same for an insn that explicitly references CC0. */
505 if (GET_CODE (insn
) == INSN
506 && (prev
= prev_nonnote_insn (insn
)) != 0
507 && GET_CODE (prev
) == INSN
508 && sets_cc0_p (PATTERN (prev
))
509 && GET_CODE (PATTERN (insn
)) == SET
510 && reg_mentioned_p (cc0_rtx
, SET_SRC (PATTERN (insn
))))
512 if ((next
= try_combine (insn
, prev
, NULL_RTX
)) != 0)
515 for (nextlinks
= LOG_LINKS (prev
); nextlinks
;
516 nextlinks
= XEXP (nextlinks
, 1))
517 if ((next
= try_combine (insn
, prev
,
518 XEXP (nextlinks
, 0))) != 0)
522 /* Finally, see if any of the insns that this insn links to
523 explicitly references CC0. If so, try this insn, that insn,
524 and its predecessor if it sets CC0. */
525 for (links
= LOG_LINKS (insn
); links
; links
= XEXP (links
, 1))
526 if (GET_CODE (XEXP (links
, 0)) == INSN
527 && GET_CODE (PATTERN (XEXP (links
, 0))) == SET
528 && reg_mentioned_p (cc0_rtx
, SET_SRC (PATTERN (XEXP (links
, 0))))
529 && (prev
= prev_nonnote_insn (XEXP (links
, 0))) != 0
530 && GET_CODE (prev
) == INSN
531 && sets_cc0_p (PATTERN (prev
))
532 && (next
= try_combine (insn
, XEXP (links
, 0), prev
)) != 0)
536 /* Try combining an insn with two different insns whose results it
538 for (links
= LOG_LINKS (insn
); links
; links
= XEXP (links
, 1))
539 for (nextlinks
= XEXP (links
, 1); nextlinks
;
540 nextlinks
= XEXP (nextlinks
, 1))
541 if ((next
= try_combine (insn
, XEXP (links
, 0),
542 XEXP (nextlinks
, 0))) != 0)
545 if (GET_CODE (insn
) != NOTE
)
546 record_dead_and_set_regs (insn
);
/* Accumulate this function's statistics into the whole-compilation totals. */
553 total_attempts
+= combine_attempts
;
554 total_merges
+= combine_merges
;
555 total_extras
+= combine_extras
;
556 total_successes
+= combine_successes
;
559 /* Called via note_stores. If X is a pseudo that is used in more than
560 one basic block, is narrower than HOST_BITS_PER_WIDE_INT, and is being
561 set, record what bits are significant. If we are clobbering X,
562 ignore this "set" because the clobbered value won't be used.
564 If we are setting only a portion of X and we can't figure out what
565 portion, assume all bits will be used since we don't know what will
569 set_significant (x
, set
)
/* NOTE(review): parameter declarations and braces are missing from this
   extraction (original line numbers jump 569->573).  Code kept byte-identical. */
573 if (GET_CODE (x
) == REG
574 && REGNO (x
) >= FIRST_PSEUDO_REGISTER
/* A negative reg_basic_block entry marks a register used in more than one
   basic block (per the header comment above this function). */
575 && reg_n_sets
[REGNO (x
)] > 1
576 && reg_basic_block
[REGNO (x
)] < 0
577 && GET_MODE_BITSIZE (GET_MODE (x
)) <= HOST_BITS_PER_WIDE_INT
)
579 if (GET_CODE (set
) == CLOBBER
)
582 /* If this is a complex assignment, see if we can convert it into a
583 simple assignment. */
584 set
= expand_field_assignment (set
);
585 if (SET_DEST (set
) == x
)
586 reg_significant
[REGNO (x
)]
587 |= significant_bits (SET_SRC (set
), significant_mode
);
/* Otherwise only part of X is set (or we can't tell which part), so assume
   every bit may be used. */
589 reg_significant
[REGNO (x
)] = GET_MODE_MASK (GET_MODE (x
));
593 /* See if INSN can be combined into I3. PRED and SUCC are optionally
594 insns that were previously combined into I3 or that will be combined
595 into the merger of INSN and I3.
597 Return 0 if the combination is not allowed for any reason.
599 If the combination is allowed, *PDEST will be set to the single
600 destination of INSN and *PSRC to the single source, and this function
604 can_combine_p (insn
, i3
, pred
, succ
, pdest
, psrc
)
/* NOTE(review): parameter declarations, braces, and several statements are
   missing from this extraction (original line numbers are non-contiguous).
   Surviving code kept byte-identical; only comments are touched. */
611 rtx set
= 0, src
, dest
;
/* Nonzero if INSN, (SUCC,) and I3 are consecutive active insns; several of
   the safety tests below can be relaxed in that case. */
613 int all_adjacent
= (succ
? (next_active_insn (insn
) == succ
614 && next_active_insn (succ
) == i3
)
615 : next_active_insn (insn
) == i3
);
617 /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0.
618 or a PARALLEL consisting of such a SET and CLOBBERs.
620 If INSN has CLOBBER parallel parts, ignore them for our processing.
621 By definition, these happen during the execution of the insn. When it
622 is merged with another insn, all bets are off. If they are, in fact,
623 needed and aren't also supplied in I3, they may be added by
624 recog_for_combine. Otherwise, it won't match.
626 We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
629 Get the source and destination of INSN. If more than one, can't
632 if (GET_CODE (PATTERN (insn
)) == SET
)
633 set
= PATTERN (insn
);
634 else if (GET_CODE (PATTERN (insn
)) == PARALLEL
635 && GET_CODE (XVECEXP (PATTERN (insn
), 0, 0)) == SET
)
637 for (i
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
639 rtx elt
= XVECEXP (PATTERN (insn
), 0, i
);
641 switch (GET_CODE (elt
))
643 /* We can ignore CLOBBERs. */
648 /* Ignore SETs whose result isn't used but not those that
649 have side-effects. */
650 if (find_reg_note (insn
, REG_UNUSED
, SET_DEST (elt
))
651 && ! side_effects_p (elt
))
654 /* If we have already found a SET, this is a second one and
655 so we cannot combine with this insn. */
663 /* Anything else means we can't combine. */
669 /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
670 so don't do anything with it. */
671 || GET_CODE (SET_SRC (set
)) == ASM_OPERANDS
)
680 set
= expand_field_assignment (set
);
681 src
= SET_SRC (set
), dest
= SET_DEST (set
);
683 /* Don't eliminate a store in the stack pointer. */
684 if (dest
== stack_pointer_rtx
685 /* Don't install a subreg involving two modes not tieable.
686 It can worsen register allocation, and can even make invalid reload
687 insns, since the reg inside may need to be copied from in the
688 outside mode, and that may be invalid if it is an fp reg copied in
689 integer mode. As a special exception, we can allow this if
690 I3 is simply copying DEST, a REG, to CC0. */
691 || (GET_CODE (src
) == SUBREG
692 && ! MODES_TIEABLE_P (GET_MODE (src
), GET_MODE (SUBREG_REG (src
)))
694 && ! (GET_CODE (i3
) == INSN
&& GET_CODE (PATTERN (i3
)) == SET
695 && SET_DEST (PATTERN (i3
)) == cc0_rtx
696 && GET_CODE (dest
) == REG
&& dest
== SET_SRC (PATTERN (i3
)))
699 /* If we couldn't eliminate a field assignment, we can't combine. */
700 || GET_CODE (dest
) == ZERO_EXTRACT
|| GET_CODE (dest
) == STRICT_LOW_PART
701 /* Don't combine with an insn that sets a register to itself if it has
702 a REG_EQUAL note. This may be part of a REG_NO_CONFLICT sequence. */
703 || (rtx_equal_p (src
, dest
) && find_reg_note (insn
, REG_EQUAL
, NULL_RTX
))
704 /* Can't merge a function call. */
705 || GET_CODE (src
) == CALL
706 /* Don't substitute into an incremented register. */
707 || FIND_REG_INC_NOTE (i3
, dest
)
708 || (succ
&& FIND_REG_INC_NOTE (succ
, dest
))
709 /* Don't combine the end of a libcall into anything. */
710 || find_reg_note (insn
, REG_RETVAL
, NULL_RTX
)
711 /* Make sure that DEST is not used after SUCC but before I3. */
712 || (succ
&& ! all_adjacent
713 && reg_used_between_p (dest
, succ
, i3
))
714 /* Make sure that the value that is to be substituted for the register
715 does not use any registers whose values alter in between. However,
716 if the insns are adjacent, a use can't cross a set even though we
717 think it might (this can happen for a sequence of insns each setting
718 the same destination; reg_last_set of that register might point to
719 a NOTE). Also, don't move a volatile asm across any other insns. */
721 && (use_crosses_set_p (src
, INSN_CUID (insn
))
722 || (GET_CODE (src
) == ASM_OPERANDS
&& MEM_VOLATILE_P (src
))))
723 /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
724 better register allocation by not doing the combine. */
725 || find_reg_note (i3
, REG_NO_CONFLICT
, dest
)
726 || (succ
&& find_reg_note (succ
, REG_NO_CONFLICT
, dest
))
727 /* Don't combine across a CALL_INSN, because that would possibly
728 change whether the life span of some REGs crosses calls or not,
729 and it is a pain to update that information.
730 Exception: if source is a constant, moving it later can't hurt.
731 Accept that special case, because it helps -fforce-addr a lot. */
732 || (INSN_CUID (insn
) < last_call_cuid
&& ! CONSTANT_P (src
)))
735 /* DEST must either be a REG or CC0. */
736 if (GET_CODE (dest
) == REG
)
738 /* If register alignment is being enforced for multi-word items in all
739 cases except for parameters, it is possible to have a register copy
740 insn referencing a hard register that is not allowed to contain the
741 mode being copied and which would not be valid as an operand of most
742 insns. Eliminate this problem by not combining with such an insn.
744 Also, on some machines we don't want to extend the life of a hard
747 if (GET_CODE (src
) == REG
748 && ((REGNO (dest
) < FIRST_PSEUDO_REGISTER
749 && ! HARD_REGNO_MODE_OK (REGNO (dest
), GET_MODE (dest
)))
750 #ifdef SMALL_REGISTER_CLASSES
751 /* Don't extend the life of a hard register. */
752 || REGNO (src
) < FIRST_PSEUDO_REGISTER
754 || (REGNO (src
) < FIRST_PSEUDO_REGISTER
755 && ! HARD_REGNO_MODE_OK (REGNO (src
), GET_MODE (src
)))
760 else if (GET_CODE (dest
) != CC0
)
763 /* Don't substitute for a register intended as a clobberable operand. */
764 if (GET_CODE (PATTERN (i3
)) == PARALLEL
)
765 for (i
= XVECLEN (PATTERN (i3
), 0) - 1; i
>= 0; i
--)
766 if (GET_CODE (XVECEXP (PATTERN (i3
), 0, i
)) == CLOBBER
767 && rtx_equal_p (XEXP (XVECEXP (PATTERN (i3
), 0, i
), 0), dest
))
770 /* If INSN contains anything volatile, or is an `asm' (whether volatile
771 or not), reject, unless nothing volatile comes between it and I3,
772 with the exception of SUCC. */
774 if (GET_CODE (src
) == ASM_OPERANDS
|| volatile_refs_p (src
))
775 for (p
= NEXT_INSN (insn
); p
!= i3
; p
= NEXT_INSN (p
))
776 if (GET_RTX_CLASS (GET_CODE (p
)) == 'i'
777 && p
!= succ
&& volatile_refs_p (PATTERN (p
)))
780 /* If INSN or I2 contains an autoincrement or autodecrement,
781 make sure that register is not used between there and I3,
782 and not already used in I3 either.
783 Also insist that I3 not be a jump; if it were one
784 and the incremented register were spilled, we would lose. */
787 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
788 if (REG_NOTE_KIND (link
) == REG_INC
789 && (GET_CODE (i3
) == JUMP_INSN
790 || reg_used_between_p (XEXP (link
, 0), insn
, i3
)
791 || reg_overlap_mentioned_p (XEXP (link
, 0), PATTERN (i3
))))
796 /* Don't combine an insn that follows a CC0-setting insn.
797 An insn that uses CC0 must not be separated from the one that sets it.
798 We do, however, allow I2 to follow a CC0-setting insn if that insn
799 is passed as I1; in that case it will be deleted also.
800 We also allow combining in this case if all the insns are adjacent
801 because that would leave the two CC0 insns adjacent as well.
802 It would be more logical to test whether CC0 occurs inside I1 or I2,
803 but that would be much slower, and this ought to be equivalent. */
805 p
= prev_nonnote_insn (insn
);
806 if (p
&& p
!= pred
&& GET_CODE (p
) == INSN
&& sets_cc0_p (PATTERN (p
))
811 /* If we get here, we have passed all the tests and the combination is
820 /* LOC is the location within I3 that contains its pattern or the component
821 of a PARALLEL of the pattern. We validate that it is valid for combining.
823 One problem is if I3 modifies its output, as opposed to replacing it
824 entirely, we can't allow the output to contain I2DEST or I1DEST as doing
825 so would produce an insn that is not equivalent to the original insns.
829 (set (reg:DI 101) (reg:DI 100))
830 (set (subreg:SI (reg:DI 101) 0) <foo>)
832 This is NOT equivalent to:
834 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
835 (set (reg:DI 101) (reg:DI 100))])
837 Not only does this modify 100 (in which case it might still be valid
838 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
840 We can also run into a problem if I2 sets a register that I1
841 uses and I1 gets directly substituted into I3 (not via I2). In that
842 case, we would be getting the wrong value of I2DEST into I3, so we
843 must reject the combination. This case occurs when I2 and I1 both
844 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
845 If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
846 of a SET must prevent combination from occurring.
848 On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
849 if the destination of a SET is a hard register.
851 Before doing the above check, we first try to expand a field assignment
852 into a set of logical operations.
854 If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
855 we place a register that is both set and used within I3. If more than one
856 such register is detected, we fail.
858 Return 1 if the combination is valid, zero otherwise. */
861 combinable_i3pat (i3
, loc
, i2dest
, i1dest
, i1_not_in_src
, pi3dest_killed
)
/* NOTE(review): parameter declarations, braces, the binding of `x' to *LOC,
   and the return statements are missing from this extraction (original line
   numbers jump 861->871).  Surviving code kept byte-identical. */
871 if (GET_CODE (x
) == SET
)
873 rtx set
= expand_field_assignment (x
);
874 rtx dest
= SET_DEST (set
);
875 rtx src
= SET_SRC (set
);
876 rtx inner_dest
= dest
, inner_src
= src
;
/* Strip wrappers to reach the register actually being stored into. */
880 while (GET_CODE (inner_dest
) == STRICT_LOW_PART
881 || GET_CODE (inner_dest
) == SUBREG
882 || GET_CODE (inner_dest
) == ZERO_EXTRACT
)
883 inner_dest
= XEXP (inner_dest
, 0);
885 /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
888 while (GET_CODE (inner_src
) == STRICT_LOW_PART
889 || GET_CODE (inner_src
) == SUBREG
890 || GET_CODE (inner_src
) == ZERO_EXTRACT
)
891 inner_src
= XEXP (inner_src
, 0);
893 /* If it is better that two different modes keep two different pseudos,
894 avoid combining them. This avoids producing the following pattern
896 (set (subreg:SI (reg/v:QI 21) 0)
897 (lshiftrt:SI (reg/v:SI 20)
899 If that were made, reload could not handle the pair of
900 reg 20/21, since it would try to get any GENERAL_REGS
901 but some of them don't handle QImode. */
903 if (rtx_equal_p (inner_src
, i2dest
)
904 && GET_CODE (inner_dest
) == REG
905 && ! MODES_TIEABLE_P (GET_MODE (i2dest
), GET_MODE (inner_dest
)))
909 /* Check for the case where I3 modifies its output, as
911 if ((inner_dest
!= dest
912 && (reg_overlap_mentioned_p (i2dest
, inner_dest
)
913 || (i1dest
&& reg_overlap_mentioned_p (i1dest
, inner_dest
))))
914 /* This is the same test done in can_combine_p except that we
915 allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
917 || (GET_CODE (inner_dest
) == REG
918 && REGNO (inner_dest
) < FIRST_PSEUDO_REGISTER
919 #ifdef SMALL_REGISTER_CLASSES
920 && GET_CODE (src
) != CALL
922 && ! HARD_REGNO_MODE_OK (REGNO (inner_dest
),
923 GET_MODE (inner_dest
))
927 || (i1_not_in_src
&& reg_overlap_mentioned_p (i1dest
, src
)))
930 /* If DEST is used in I3, it is being killed in this insn,
931 so record that for later. */
932 if (pi3dest_killed
&& GET_CODE (dest
) == REG
933 && reg_referenced_p (dest
, PATTERN (i3
)))
938 *pi3dest_killed
= dest
;
942 else if (GET_CODE (x
) == PARALLEL
)
/* A PARALLEL is combinable only if each of its elements is. */
946 for (i
= 0; i
< XVECLEN (x
, 0); i
++)
947 if (! combinable_i3pat (i3
, &XVECEXP (x
, 0, i
), i2dest
, i1dest
,
948 i1_not_in_src
, pi3dest_killed
))
955 /* Try to combine the insns I1 and I2 into I3.
956 Here I1 and I2 appear earlier than I3.
957 I1 can be zero; then we combine just I2 into I3.
959 If we are combining three insns and the resulting insn is not recognized,
960 try splitting it into two insns. If that happens, I2 and I3 are retained
961 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
964 If we created two insns, return I2; otherwise return I3.
965 Return 0 if the combination does not work. Then nothing is changed. */
968 try_combine (i3
, i2
, i1
)
969 register rtx i3
, i2
, i1
;
971 /* New patterns for I3 and I2, respectively. */
972 rtx newpat
, newi2pat
= 0;
973 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
974 int added_sets_1
, added_sets_2
;
975 /* Total number of SETs to put into I3. */
977 /* Nonzero is I2's body now appears in I3. */
979 /* INSN_CODEs for new I3, new I2, and user of condition code. */
980 int insn_code_number
, i2_code_number
, other_code_number
;
981 /* Contains I3 if the destination of I3 is used in its source, which means
982 that the old life of I3 is being killed. If that usage is placed into
983 I2 and not in I3, a REG_DEAD note must be made. */
984 rtx i3dest_killed
= 0;
985 /* SET_DEST and SET_SRC of I2 and I1. */
986 rtx i2dest
, i2src
, i1dest
= 0, i1src
= 0;
987 /* PATTERN (I2), or a copy of it in certain cases. */
989 /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */
990 int i2dest_in_i2src
, i1dest_in_i1src
= 0, i2dest_in_i1src
= 0;
992 /* Notes that must be added to REG_NOTES in I3 and I2. */
993 rtx new_i3_notes
, new_i2_notes
;
1000 /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
1001 This can occur when flow deletes an insn that it has merged into an
1002 auto-increment address. We also can't do anything if I3 has a
1003 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1006 if (GET_RTX_CLASS (GET_CODE (i3
)) != 'i'
1007 || GET_RTX_CLASS (GET_CODE (i2
)) != 'i'
1008 || (i1
&& GET_RTX_CLASS (GET_CODE (i1
)) != 'i')
1009 || find_reg_note (i3
, REG_LIBCALL
, NULL_RTX
))
1014 undobuf
.num_undo
= previous_num_undos
= 0;
1015 undobuf
.other_insn
= 0;
1017 /* Save the current high-water-mark so we can free storage if we didn't
1018 accept this combination. */
1019 undobuf
.storage
= (char *) oballoc (0);
1021 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1022 code below, set I1 to be the earlier of the two insns. */
1023 if (i1
&& INSN_CUID (i1
) > INSN_CUID (i2
))
1024 temp
= i1
, i1
= i2
, i2
= temp
;
1026 /* First check for one important special-case that the code below will
1027 not handle. Namely, the case where I1 is zero, I2 has multiple sets,
1028 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1029 we may be able to replace that destination with the destination of I3.
1030 This occurs in the common code where we compute both a quotient and
1031 remainder into a structure, in which case we want to do the computation
1032 directly into the structure to avoid register-register copies.
1034 We make very conservative checks below and only try to handle the
1035 most common cases of this. For example, we only handle the case
1036 where I2 and I3 are adjacent to avoid making difficult register
1039 if (i1
== 0 && GET_CODE (i3
) == INSN
&& GET_CODE (PATTERN (i3
)) == SET
1040 && GET_CODE (SET_SRC (PATTERN (i3
))) == REG
1041 && REGNO (SET_SRC (PATTERN (i3
))) >= FIRST_PSEUDO_REGISTER
1042 #ifdef SMALL_REGISTER_CLASSES
1043 && (GET_CODE (SET_DEST (PATTERN (i3
))) != REG
1044 || REGNO (SET_DEST (PATTERN (i3
))) >= FIRST_PSEUDO_REGISTER
)
1046 && find_reg_note (i3
, REG_DEAD
, SET_SRC (PATTERN (i3
)))
1047 && GET_CODE (PATTERN (i2
)) == PARALLEL
1048 && ! side_effects_p (SET_DEST (PATTERN (i3
)))
1049 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1050 below would need to check what is inside (and reg_overlap_mentioned_p
1051 doesn't support those codes anyway). Don't allow those destinations;
1052 the resulting insn isn't likely to be recognized anyway. */
1053 && GET_CODE (SET_DEST (PATTERN (i3
))) != ZERO_EXTRACT
1054 && GET_CODE (SET_DEST (PATTERN (i3
))) != STRICT_LOW_PART
1055 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3
)),
1056 SET_DEST (PATTERN (i3
)))
1057 && next_real_insn (i2
) == i3
)
1059 rtx p2
= PATTERN (i2
);
1061 /* Make sure that the destination of I3,
1062 which we are going to substitute into one output of I2,
1063 is not used within another output of I2. We must avoid making this:
1064 (parallel [(set (mem (reg 69)) ...)
1065 (set (reg 69) ...)])
1066 which is not well-defined as to order of actions.
1067 (Besides, reload can't handle output reloads for this.)
1069 The problem can also happen if the dest of I3 is a memory ref,
1070 if another dest in I2 is an indirect memory ref. */
1071 for (i
= 0; i
< XVECLEN (p2
, 0); i
++)
1072 if (GET_CODE (XVECEXP (p2
, 0, i
)) == SET
1073 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3
)),
1074 SET_DEST (XVECEXP (p2
, 0, i
))))
1077 if (i
== XVECLEN (p2
, 0))
1078 for (i
= 0; i
< XVECLEN (p2
, 0); i
++)
1079 if (SET_DEST (XVECEXP (p2
, 0, i
)) == SET_SRC (PATTERN (i3
)))
1084 subst_low_cuid
= INSN_CUID (i2
);
1087 i2dest
= SET_SRC (PATTERN (i3
));
1089 /* Replace the dest in I2 with our dest and make the resulting
1090 insn the new pattern for I3. Then skip to where we
1091 validate the pattern. Everything was set up above. */
1092 SUBST (SET_DEST (XVECEXP (p2
, 0, i
)),
1093 SET_DEST (PATTERN (i3
)));
1096 goto validate_replacement
;
1101 /* If we have no I1 and I2 looks like:
1102 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1104 make up a dummy I1 that is
1107 (set (reg:CC X) (compare:CC Y (const_int 0)))
1109 (We can ignore any trailing CLOBBERs.)
1111 This undoes a previous combination and allows us to match a branch-and-
1114 if (i1
== 0 && GET_CODE (PATTERN (i2
)) == PARALLEL
1115 && XVECLEN (PATTERN (i2
), 0) >= 2
1116 && GET_CODE (XVECEXP (PATTERN (i2
), 0, 0)) == SET
1117 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2
), 0, 0))))
1119 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2
), 0, 0))) == COMPARE
1120 && XEXP (SET_SRC (XVECEXP (PATTERN (i2
), 0, 0)), 1) == const0_rtx
1121 && GET_CODE (XVECEXP (PATTERN (i2
), 0, 1)) == SET
1122 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2
), 0, 1))) == REG
1123 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2
), 0, 0)), 0),
1124 SET_SRC (XVECEXP (PATTERN (i2
), 0, 1))))
1126 for (i
= XVECLEN (PATTERN (i2
), 0) - 1; i
>= 2; i
--)
1127 if (GET_CODE (XVECEXP (PATTERN (i2
), 0, i
)) != CLOBBER
)
1132 /* We make I1 with the same INSN_UID as I2. This gives it
1133 the same INSN_CUID for value tracking. Our fake I1 will
1134 never appear in the insn stream so giving it the same INSN_UID
1135 as I2 will not cause a problem. */
1137 i1
= gen_rtx (INSN
, VOIDmode
, INSN_UID (i2
), 0, i2
,
1138 XVECEXP (PATTERN (i2
), 0, 1), -1, 0, 0);
1140 SUBST (PATTERN (i2
), XVECEXP (PATTERN (i2
), 0, 0));
1141 SUBST (XEXP (SET_SRC (PATTERN (i2
)), 0),
1142 SET_DEST (PATTERN (i1
)));
1147 /* Verify that I2 and I1 are valid for combining. */
1148 if (! can_combine_p (i2
, i3
, i1
, NULL_RTX
, &i2dest
, &i2src
)
1149 || (i1
&& ! can_combine_p (i1
, i3
, NULL_RTX
, i2
, &i1dest
, &i1src
)))
1155 /* Record whether I2DEST is used in I2SRC and similarly for the other
1156 cases. Knowing this will help in register status updating below. */
1157 i2dest_in_i2src
= reg_overlap_mentioned_p (i2dest
, i2src
);
1158 i1dest_in_i1src
= i1
&& reg_overlap_mentioned_p (i1dest
, i1src
);
1159 i2dest_in_i1src
= i1
&& reg_overlap_mentioned_p (i2dest
, i1src
);
1161 /* See if I1 directly feeds into I3. It does if I1DEST is not used
1163 i1_feeds_i3
= i1
&& ! reg_overlap_mentioned_p (i1dest
, i2src
);
1165 /* Ensure that I3's pattern can be the destination of combines. */
1166 if (! combinable_i3pat (i3
, &PATTERN (i3
), i2dest
, i1dest
,
1167 i1
&& i2dest_in_i1src
&& i1_feeds_i3
,
1174 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1175 We used to do this EXCEPT in one case: I3 has a post-inc in an
1176 output operand. However, that exception can give rise to insns like
1178 which is a famous insn on the PDP-11 where the value of r3 used as the
1179 source was model-dependent. Avoid this sort of thing. */
1182 if (!(GET_CODE (PATTERN (i3
)) == SET
1183 && GET_CODE (SET_SRC (PATTERN (i3
))) == REG
1184 && GET_CODE (SET_DEST (PATTERN (i3
))) == MEM
1185 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3
)), 0)) == POST_INC
1186 || GET_CODE (XEXP (SET_DEST (PATTERN (i3
)), 0)) == POST_DEC
)))
1187 /* It's not the exception. */
1190 for (link
= REG_NOTES (i3
); link
; link
= XEXP (link
, 1))
1191 if (REG_NOTE_KIND (link
) == REG_INC
1192 && (reg_overlap_mentioned_p (XEXP (link
, 0), PATTERN (i2
))
1194 && reg_overlap_mentioned_p (XEXP (link
, 0), PATTERN (i1
)))))
1201 /* See if the SETs in I1 or I2 need to be kept around in the merged
1202 instruction: whenever the value set there is still needed past I3.
1203 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1205 For the SET in I1, we have two cases: If I1 and I2 independently
1206 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1207 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1208 in I1 needs to be kept around unless I1DEST dies or is set in either
1209 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1210 I1DEST. If so, we know I1 feeds into I2. */
1212 added_sets_2
= ! dead_or_set_p (i3
, i2dest
);
1215 = i1
&& ! (i1_feeds_i3
? dead_or_set_p (i3
, i1dest
)
1216 : (dead_or_set_p (i3
, i1dest
) || dead_or_set_p (i2
, i1dest
)));
1218 /* If the set in I2 needs to be kept around, we must make a copy of
1219 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1220 PATTERN (I2), we are only substituting for the original I1DEST, not into
1221 an already-substituted copy. This also prevents making self-referential
1222 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1225 i2pat
= (GET_CODE (PATTERN (i2
)) == PARALLEL
1226 ? gen_rtx (SET
, VOIDmode
, i2dest
, i2src
)
1230 i2pat
= copy_rtx (i2pat
);
1234 /* Substitute in the latest insn for the regs set by the earlier ones. */
1236 maxreg
= max_reg_num ();
1239 subst_low_cuid
= i1
? INSN_CUID (i1
) : INSN_CUID (i2
);
1241 /* It is possible that the source of I2 or I1 may be performing an
1242 unneeded operation, such as a ZERO_EXTEND of something that is known
1243 to have the high part zero. Handle that case by letting subst look at
1244 the innermost one of them.
1246 Another way to do this would be to have a function that tries to
1247 simplify a single insn instead of merging two or more insns. We don't
1248 do this because of the potential of infinite loops and because
1249 of the potential extra memory required. However, doing it the way
1250 we are is a bit of a kludge and doesn't catch all cases.
1252 But only do this if -fexpensive-optimizations since it slows things down
1253 and doesn't usually win. */
1255 if (flag_expensive_optimizations
)
1257 /* Pass pc_rtx so no substitutions are done, just simplifications.
1258 The cases that we are interested in here do not involve the few
1259 cases were is_replaced is checked. */
1261 i1src
= subst (i1src
, pc_rtx
, pc_rtx
, 0, 0);
1263 i2src
= subst (i2src
, pc_rtx
, pc_rtx
, 0, 0);
1265 previous_num_undos
= undobuf
.num_undo
;
1269 /* Many machines that don't use CC0 have insns that can both perform an
1270 arithmetic operation and set the condition code. These operations will
1271 be represented as a PARALLEL with the first element of the vector
1272 being a COMPARE of an arithmetic operation with the constant zero.
1273 The second element of the vector will set some pseudo to the result
1274 of the same arithmetic operation. If we simplify the COMPARE, we won't
1275 match such a pattern and so will generate an extra insn. Here we test
1276 for this case, where both the comparison and the operation result are
1277 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1278 I2SRC. Later we will make the PARALLEL that contains I2. */
1280 if (i1
== 0 && added_sets_2
&& GET_CODE (PATTERN (i3
)) == SET
1281 && GET_CODE (SET_SRC (PATTERN (i3
))) == COMPARE
1282 && XEXP (SET_SRC (PATTERN (i3
)), 1) == const0_rtx
1283 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3
)), 0), i2dest
))
1286 enum machine_mode compare_mode
;
1288 newpat
= PATTERN (i3
);
1289 SUBST (XEXP (SET_SRC (newpat
), 0), i2src
);
1293 #ifdef EXTRA_CC_MODES
1294 /* See if a COMPARE with the operand we substituted in should be done
1295 with the mode that is currently being used. If not, do the same
1296 processing we do in `subst' for a SET; namely, if the destination
1297 is used only once, try to replace it with a register of the proper
1298 mode and also replace the COMPARE. */
1299 if (undobuf
.other_insn
== 0
1300 && (cc_use
= find_single_use (SET_DEST (newpat
), i3
,
1301 &undobuf
.other_insn
))
1302 && ((compare_mode
= SELECT_CC_MODE (GET_CODE (*cc_use
),
1304 != GET_MODE (SET_DEST (newpat
))))
1306 int regno
= REGNO (SET_DEST (newpat
));
1307 rtx new_dest
= gen_rtx (REG
, compare_mode
, regno
);
1309 if (regno
< FIRST_PSEUDO_REGISTER
1310 || (reg_n_sets
[regno
] == 1 && ! added_sets_2
1311 && ! REG_USERVAR_P (SET_DEST (newpat
))))
1313 if (regno
>= FIRST_PSEUDO_REGISTER
)
1314 SUBST (regno_reg_rtx
[regno
], new_dest
);
1316 SUBST (SET_DEST (newpat
), new_dest
);
1317 SUBST (XEXP (*cc_use
, 0), new_dest
);
1318 SUBST (SET_SRC (newpat
),
1319 gen_rtx_combine (COMPARE
, compare_mode
,
1320 i2src
, const0_rtx
));
1323 undobuf
.other_insn
= 0;
1330 n_occurrences
= 0; /* `subst' counts here */
1332 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1333 need to make a unique copy of I2SRC each time we substitute it
1334 to avoid self-referential rtl. */
1336 newpat
= subst (PATTERN (i3
), i2dest
, i2src
, 0,
1337 ! i1_feeds_i3
&& i1dest_in_i1src
);
1338 previous_num_undos
= undobuf
.num_undo
;
1340 /* Record whether i2's body now appears within i3's body. */
1341 i2_is_used
= n_occurrences
;
1344 /* If we already got a failure, don't try to do more. Otherwise,
1345 try to substitute in I1 if we have it. */
1347 if (i1
&& GET_CODE (newpat
) != CLOBBER
)
1349 /* Before we can do this substitution, we must redo the test done
1350 above (see detailed comments there) that ensures that I1DEST
1351 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1353 if (! combinable_i3pat (NULL_RTX
, &newpat
, i1dest
, NULL_RTX
,
1361 newpat
= subst (newpat
, i1dest
, i1src
, 0, 0);
1362 previous_num_undos
= undobuf
.num_undo
;
1365 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1366 to count all the ways that I2SRC and I1SRC can be used. */
1367 if ((FIND_REG_INC_NOTE (i2
, NULL_RTX
) != 0
1368 && i2_is_used
+ added_sets_2
> 1)
1369 || (i1
!= 0 && FIND_REG_INC_NOTE (i1
, NULL_RTX
) != 0
1370 && (n_occurrences
+ added_sets_1
+ (added_sets_2
&& ! i1_feeds_i3
)
1372 /* Fail if we tried to make a new register (we used to abort, but there's
1373 really no reason to). */
1374 || max_reg_num () != maxreg
1375 /* Fail if we couldn't do something and have a CLOBBER. */
1376 || GET_CODE (newpat
) == CLOBBER
)
1382 /* If the actions of the earlier insns must be kept
1383 in addition to substituting them into the latest one,
1384 we must make a new PARALLEL for the latest insn
1385 to hold additional the SETs. */
1387 if (added_sets_1
|| added_sets_2
)
1391 if (GET_CODE (newpat
) == PARALLEL
)
1393 rtvec old
= XVEC (newpat
, 0);
1394 total_sets
= XVECLEN (newpat
, 0) + added_sets_1
+ added_sets_2
;
1395 newpat
= gen_rtx (PARALLEL
, VOIDmode
, rtvec_alloc (total_sets
));
1396 bcopy (&old
->elem
[0], &XVECEXP (newpat
, 0, 0),
1397 sizeof (old
->elem
[0]) * old
->num_elem
);
1402 total_sets
= 1 + added_sets_1
+ added_sets_2
;
1403 newpat
= gen_rtx (PARALLEL
, VOIDmode
, rtvec_alloc (total_sets
));
1404 XVECEXP (newpat
, 0, 0) = old
;
1408 XVECEXP (newpat
, 0, --total_sets
)
1409 = (GET_CODE (PATTERN (i1
)) == PARALLEL
1410 ? gen_rtx (SET
, VOIDmode
, i1dest
, i1src
) : PATTERN (i1
));
1414 /* If there is no I1, use I2's body as is. We used to also not do
1415 the subst call below if I2 was substituted into I3,
1416 but that could lose a simplification. */
1418 XVECEXP (newpat
, 0, --total_sets
) = i2pat
;
1420 /* See comment where i2pat is assigned. */
1421 XVECEXP (newpat
, 0, --total_sets
)
1422 = subst (i2pat
, i1dest
, i1src
, 0, 0);
1426 /* We come here when we are replacing a destination in I2 with the
1427 destination of I3. */
1428 validate_replacement
:
1430 /* Is the result of combination a valid instruction? */
1431 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
1433 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1434 the second SET's destination is a register that is unused. In that case,
1435 we just need the first SET. This can occur when simplifying a divmod
1436 insn. We *must* test for this case here because the code below that
1437 splits two independent SETs doesn't handle this case correctly when it
1438 updates the register status. Also check the case where the first
1439 SET's destination is unused. That would not cause incorrect code, but
1440 does cause an unneeded insn to remain. */
1442 if (insn_code_number
< 0 && GET_CODE (newpat
) == PARALLEL
1443 && XVECLEN (newpat
, 0) == 2
1444 && GET_CODE (XVECEXP (newpat
, 0, 0)) == SET
1445 && GET_CODE (XVECEXP (newpat
, 0, 1)) == SET
1446 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 1))) == REG
1447 && find_reg_note (i3
, REG_UNUSED
, SET_DEST (XVECEXP (newpat
, 0, 1)))
1448 && ! side_effects_p (SET_SRC (XVECEXP (newpat
, 0, 1)))
1449 && asm_noperands (newpat
) < 0)
1451 newpat
= XVECEXP (newpat
, 0, 0);
1452 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
1455 else if (insn_code_number
< 0 && GET_CODE (newpat
) == PARALLEL
1456 && XVECLEN (newpat
, 0) == 2
1457 && GET_CODE (XVECEXP (newpat
, 0, 0)) == SET
1458 && GET_CODE (XVECEXP (newpat
, 0, 1)) == SET
1459 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 0))) == REG
1460 && find_reg_note (i3
, REG_UNUSED
, SET_DEST (XVECEXP (newpat
, 0, 0)))
1461 && ! side_effects_p (SET_SRC (XVECEXP (newpat
, 0, 0)))
1462 && asm_noperands (newpat
) < 0)
1464 newpat
= XVECEXP (newpat
, 0, 1);
1465 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
1468 /* If we were combining three insns and the result is a simple SET
1469 with no ASM_OPERANDS that wasn't recognized, try to split it into two
1470 insns. There are two ways to do this. It can be split using a
1471 machine-specific method (like when you have an addition of a large
1472 constant) or by combine in the function find_split_point. */
1474 if (i1
&& insn_code_number
< 0 && GET_CODE (newpat
) == SET
1475 && asm_noperands (newpat
) < 0)
1477 rtx m_split
, *split
;
1478 rtx ni2dest
= i2dest
;
1480 /* See if the MD file can split NEWPAT. If it can't, see if letting it
1481 use I2DEST as a scratch register will help. In the latter case,
1482 convert I2DEST to the mode of the source of NEWPAT if we can. */
1484 m_split
= split_insns (newpat
, i3
);
1487 /* If I2DEST is a hard register or the only use of a pseudo,
1488 we can change its mode. */
1489 if (GET_MODE (SET_DEST (newpat
)) != GET_MODE (i2dest
)
1490 && GET_MODE (SET_DEST (newpat
)) != VOIDmode
1491 && GET_CODE (i2dest
) == REG
1492 && (REGNO (i2dest
) < FIRST_PSEUDO_REGISTER
1493 || (reg_n_sets
[REGNO (i2dest
)] == 1 && ! added_sets_2
1494 && ! REG_USERVAR_P (i2dest
))))
1495 ni2dest
= gen_rtx (REG
, GET_MODE (SET_DEST (newpat
)),
1498 m_split
= split_insns (gen_rtx (PARALLEL
, VOIDmode
,
1499 gen_rtvec (2, newpat
,
1506 if (m_split
&& GET_CODE (m_split
) == SEQUENCE
1507 && XVECLEN (m_split
, 0) == 2
1508 && (next_real_insn (i2
) == i3
1509 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split
, 0, 0)),
1512 newi2pat
= PATTERN (XVECEXP (m_split
, 0, 0));
1513 newpat
= PATTERN (XVECEXP (m_split
, 0, 1));
1515 /* In case we changed the mode of I2DEST, replace it in the
1516 pseudo-register table here. We can't do it above in case this
1517 code doesn't get executed and we do a split the other way. */
1519 if (REGNO (i2dest
) >= FIRST_PSEUDO_REGISTER
)
1520 SUBST (regno_reg_rtx
[REGNO (i2dest
)], ni2dest
);
1522 i2_code_number
= recog_for_combine (&newi2pat
, i2
, &new_i2_notes
);
1523 if (i2_code_number
>= 0)
1524 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
1526 /* It is possible that both insns now set the destination of I3.
1527 If so, we must show an extra use of it and update
1530 if (insn_code_number
>= 0 && GET_CODE (SET_DEST (newpat
)) == REG
1531 && GET_CODE (SET_DEST (newi2pat
)) == REG
1532 && REGNO (SET_DEST (newpat
)) == REGNO (SET_DEST (newi2pat
)))
1534 reg_n_sets
[REGNO (SET_DEST (newpat
))]++;
1535 set_significant (SET_DEST (newi2pat
), newi2pat
);
1536 set_significant (SET_DEST (newpat
), newpat
);
1540 /* If we can split it and use I2DEST, go ahead and see if that
1541 helps things be recognized. Verify that none of the registers
1542 are set between I2 and I3. */
1543 else if ((split
= find_split_point (&newpat
)) != 0
1545 && GET_CODE (i2dest
) == REG
1547 /* We need I2DEST in the proper mode. If it is a hard register
1548 or the only use of a pseudo, we can change its mode. */
1549 && (GET_MODE (*split
) == GET_MODE (i2dest
)
1550 || GET_MODE (*split
) == VOIDmode
1551 || REGNO (i2dest
) < FIRST_PSEUDO_REGISTER
1552 || (reg_n_sets
[REGNO (i2dest
)] == 1 && ! added_sets_2
1553 && ! REG_USERVAR_P (i2dest
)))
1554 && (next_real_insn (i2
) == i3
1555 || ! use_crosses_set_p (*split
, INSN_CUID (i2
)))
1556 /* We can't overwrite I2DEST if its value is still used by
1558 && ! reg_referenced_p (i2dest
, newpat
))
1560 rtx newdest
= i2dest
;
1562 /* Get NEWDEST as a register in the proper mode. We have already
1563 validated that we can do this. */
1564 if (GET_MODE (i2dest
) != GET_MODE (*split
)
1565 && GET_MODE (*split
) != VOIDmode
)
1567 newdest
= gen_rtx (REG
, GET_MODE (*split
), REGNO (i2dest
));
1569 if (REGNO (i2dest
) >= FIRST_PSEUDO_REGISTER
)
1570 SUBST (regno_reg_rtx
[REGNO (i2dest
)], newdest
);
1573 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1574 an ASHIFT. This can occur if it was inside a PLUS and hence
1575 appeared to be a memory address. This is a kludge. */
1576 if (GET_CODE (*split
) == MULT
1577 && GET_CODE (XEXP (*split
, 1)) == CONST_INT
1578 && (i
= exact_log2 (INTVAL (XEXP (*split
, 1)))) >= 0)
1579 SUBST (*split
, gen_rtx_combine (ASHIFT
, GET_MODE (*split
),
1580 XEXP (*split
, 0), GEN_INT (i
)));
1582 #ifdef INSN_SCHEDULING
1583 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1584 be written as a ZERO_EXTEND. */
1585 if (GET_CODE (*split
) == SUBREG
1586 && GET_CODE (SUBREG_REG (*split
)) == MEM
)
1587 SUBST (*split
, gen_rtx_combine (ZERO_EXTEND
, GET_MODE (*split
),
1591 newi2pat
= gen_rtx_combine (SET
, VOIDmode
, newdest
, *split
);
1592 SUBST (*split
, newdest
);
1593 i2_code_number
= recog_for_combine (&newi2pat
, i2
, &new_i2_notes
);
1594 if (i2_code_number
>= 0)
1595 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
1599 /* Check for a case where we loaded from memory in a narrow mode and
1600 then sign extended it, but we need both registers. In that case,
1601 we have a PARALLEL with both loads from the same memory location.
1602 We can split this into a load from memory followed by a register-register
1603 copy. This saves at least one insn, more if register allocation can
1604 eliminate the copy. */
1606 else if (i1
&& insn_code_number
< 0 && asm_noperands (newpat
) < 0
1607 && GET_CODE (newpat
) == PARALLEL
1608 && XVECLEN (newpat
, 0) == 2
1609 && GET_CODE (XVECEXP (newpat
, 0, 0)) == SET
1610 && GET_CODE (SET_SRC (XVECEXP (newpat
, 0, 0))) == SIGN_EXTEND
1611 && GET_CODE (XVECEXP (newpat
, 0, 1)) == SET
1612 && rtx_equal_p (SET_SRC (XVECEXP (newpat
, 0, 1)),
1613 XEXP (SET_SRC (XVECEXP (newpat
, 0, 0)), 0))
1614 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat
, 0, 1)),
1616 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 1))) != ZERO_EXTRACT
1617 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 1))) != STRICT_LOW_PART
1618 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat
, 0, 1)),
1619 SET_SRC (XVECEXP (newpat
, 0, 1)))
1620 && ! find_reg_note (i3
, REG_UNUSED
,
1621 SET_DEST (XVECEXP (newpat
, 0, 0))))
1623 newi2pat
= XVECEXP (newpat
, 0, 0);
1624 newpat
= XVECEXP (newpat
, 0, 1);
1625 SUBST (SET_SRC (newpat
),
1626 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat
)),
1627 SET_DEST (newi2pat
)));
1628 i2_code_number
= recog_for_combine (&newi2pat
, i2
, &new_i2_notes
);
1629 if (i2_code_number
>= 0)
1630 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
1632 if (insn_code_number
>= 0)
1637 /* If we will be able to accept this, we have made a change to the
1638 destination of I3. This can invalidate a LOG_LINKS pointing
1639 to I3. No other part of combine.c makes such a transformation.
1641 The new I3 will have a destination that was previously the
1642 destination of I1 or I2 and which was used in i2 or I3. Call
1643 distribute_links to make a LOG_LINK from the next use of
1644 that destination. */
1646 PATTERN (i3
) = newpat
;
1647 distribute_links (gen_rtx (INSN_LIST
, VOIDmode
, i3
, NULL_RTX
));
1649 /* I3 now uses what used to be its destination and which is
1650 now I2's destination. That means we need a LOG_LINK from
1651 I3 to I2. But we used to have one, so we still will.
1653 However, some later insn might be using I2's dest and have
1654 a LOG_LINK pointing at I3. We must remove this link.
1655 The simplest way to remove the link is to point it at I1,
1656 which we know will be a NOTE. */
1658 for (insn
= NEXT_INSN (i3
);
1659 insn
&& GET_CODE (insn
) != CODE_LABEL
1660 && GET_CODE (PREV_INSN (insn
)) != JUMP_INSN
;
1661 insn
= NEXT_INSN (insn
))
1663 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i'
1664 && reg_referenced_p (SET_DEST (newi2pat
), PATTERN (insn
)))
1666 for (link
= LOG_LINKS (insn
); link
;
1667 link
= XEXP (link
, 1))
1668 if (XEXP (link
, 0) == i3
)
1669 XEXP (link
, 0) = i1
;
1677 /* Similarly, check for a case where we have a PARALLEL of two independent
1678 SETs but we started with three insns. In this case, we can do the sets
1679 as two separate insns. This case occurs when some SET allows two
1680 other insns to combine, but the destination of that SET is still live. */
1682 else if (i1
&& insn_code_number
< 0 && asm_noperands (newpat
) < 0
1683 && GET_CODE (newpat
) == PARALLEL
1684 && XVECLEN (newpat
, 0) == 2
1685 && GET_CODE (XVECEXP (newpat
, 0, 0)) == SET
1686 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 0))) != ZERO_EXTRACT
1687 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 0))) != STRICT_LOW_PART
1688 && GET_CODE (XVECEXP (newpat
, 0, 1)) == SET
1689 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 1))) != ZERO_EXTRACT
1690 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 1))) != STRICT_LOW_PART
1691 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat
, 0, 1)),
1693 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
1694 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 1))) != USE
1695 && GET_CODE (SET_DEST (XVECEXP (newpat
, 0, 0))) != USE
1696 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat
, 0, 1)),
1697 XVECEXP (newpat
, 0, 0))
1698 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat
, 0, 0)),
1699 XVECEXP (newpat
, 0, 1)))
1701 newi2pat
= XVECEXP (newpat
, 0, 1);
1702 newpat
= XVECEXP (newpat
, 0, 0);
1704 i2_code_number
= recog_for_combine (&newi2pat
, i2
, &new_i2_notes
);
1705 if (i2_code_number
>= 0)
1706 insn_code_number
= recog_for_combine (&newpat
, i3
, &new_i3_notes
);
1709 /* If it still isn't recognized, fail and change things back the way they
1711 if ((insn_code_number
< 0
1712 /* Is the result a reasonable ASM_OPERANDS? */
1713 && (! check_asm_operands (newpat
) || added_sets_1
|| added_sets_2
)))
1719 /* If we had to change another insn, make sure it is valid also. */
1720 if (undobuf
.other_insn
)
1722 rtx other_notes
= REG_NOTES (undobuf
.other_insn
);
1723 rtx other_pat
= PATTERN (undobuf
.other_insn
);
1724 rtx new_other_notes
;
1727 other_code_number
= recog_for_combine (&other_pat
, undobuf
.other_insn
,
1730 if (other_code_number
< 0 && ! check_asm_operands (other_pat
))
1736 PATTERN (undobuf
.other_insn
) = other_pat
;
1738 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
1739 are still valid. Then add any non-duplicate notes added by
1740 recog_for_combine. */
1741 for (note
= REG_NOTES (undobuf
.other_insn
); note
; note
= next
)
1743 next
= XEXP (note
, 1);
1745 if (REG_NOTE_KIND (note
) == REG_UNUSED
1746 && ! reg_set_p (XEXP (note
, 0), PATTERN (undobuf
.other_insn
)))
1747 remove_note (undobuf
.other_insn
, note
);
1750 distribute_notes (new_other_notes
, undobuf
.other_insn
,
1751 undobuf
.other_insn
, NULL_RTX
, NULL_RTX
, NULL_RTX
);
1754 /* We now know that we can do this combination. Merge the insns and
1755 update the status of registers and LOG_LINKS. */
1758 rtx i3notes
, i2notes
, i1notes
= 0;
1759 rtx i3links
, i2links
, i1links
= 0;
1761 int all_adjacent
= (next_real_insn (i2
) == i3
1762 && (i1
== 0 || next_real_insn (i1
) == i2
));
1764 /* Compute which registers we expect to eliminate. */
1765 rtx elim_i2
= (newi2pat
|| i2dest_in_i2src
|| i2dest_in_i1src
1767 rtx elim_i1
= i1
== 0 || i1dest_in_i1src
? 0 : i1dest
;
1769 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
1771 i3notes
= REG_NOTES (i3
), i3links
= LOG_LINKS (i3
);
1772 i2notes
= REG_NOTES (i2
), i2links
= LOG_LINKS (i2
);
1774 i1notes
= REG_NOTES (i1
), i1links
= LOG_LINKS (i1
);
1776 /* Ensure that we do not have something that should not be shared but
1777 occurs multiple times in the new insns. Check this by first
1778 resetting all the `used' flags and then copying anything is shared. */
1780 reset_used_flags (i3notes
);
1781 reset_used_flags (i2notes
);
1782 reset_used_flags (i1notes
);
1783 reset_used_flags (newpat
);
1784 reset_used_flags (newi2pat
);
1785 if (undobuf
.other_insn
)
1786 reset_used_flags (PATTERN (undobuf
.other_insn
));
1788 i3notes
= copy_rtx_if_shared (i3notes
);
1789 i2notes
= copy_rtx_if_shared (i2notes
);
1790 i1notes
= copy_rtx_if_shared (i1notes
);
1791 newpat
= copy_rtx_if_shared (newpat
);
1792 newi2pat
= copy_rtx_if_shared (newi2pat
);
1793 if (undobuf
.other_insn
)
1794 reset_used_flags (PATTERN (undobuf
.other_insn
));
1796 INSN_CODE (i3
) = insn_code_number
;
1797 PATTERN (i3
) = newpat
;
1798 if (undobuf
.other_insn
)
1799 INSN_CODE (undobuf
.other_insn
) = other_code_number
;
1801 /* We had one special case above where I2 had more than one set and
1802 we replaced a destination of one of those sets with the destination
1803 of I3. In that case, we have to update LOG_LINKS of insns later
1804 in this basic block. Note that this (expensive) case is rare. */
1806 if (GET_CODE (PATTERN (i2
)) == PARALLEL
)
1807 for (i
= 0; i
< XVECLEN (PATTERN (i2
), 0); i
++)
1808 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2
), 0, i
))) == REG
1809 && SET_DEST (XVECEXP (PATTERN (i2
), 0, i
)) != i2dest
1810 && ! find_reg_note (i2
, REG_UNUSED
,
1811 SET_DEST (XVECEXP (PATTERN (i2
), 0, i
))))
1815 for (insn
= NEXT_INSN (i2
); insn
; insn
= NEXT_INSN (insn
))
1817 if (insn
!= i3
&& GET_RTX_CLASS (GET_CODE (insn
)) == 'i')
1818 for (link
= LOG_LINKS (insn
); link
; link
= XEXP (link
, 1))
1819 if (XEXP (link
, 0) == i2
)
1820 XEXP (link
, 0) = i3
;
1822 if (GET_CODE (insn
) == CODE_LABEL
1823 || GET_CODE (insn
) == JUMP_INSN
)
1835 INSN_CODE (i2
) = i2_code_number
;
1836 PATTERN (i2
) = newi2pat
;
1840 PUT_CODE (i2
, NOTE
);
1841 NOTE_LINE_NUMBER (i2
) = NOTE_INSN_DELETED
;
1842 NOTE_SOURCE_FILE (i2
) = 0;
1849 PUT_CODE (i1
, NOTE
);
1850 NOTE_LINE_NUMBER (i1
) = NOTE_INSN_DELETED
;
1851 NOTE_SOURCE_FILE (i1
) = 0;
1854 /* Get death notes for everything that is now used in either I3 or
1855 I2 and used to die in a previous insn. */
1857 move_deaths (newpat
, i1
? INSN_CUID (i1
) : INSN_CUID (i2
), i3
, &midnotes
);
1859 move_deaths (newi2pat
, INSN_CUID (i1
), i2
, &midnotes
);
1861 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
1863 distribute_notes (i3notes
, i3
, i3
, newi2pat
? i2
: NULL_RTX
,
1866 distribute_notes (i2notes
, i2
, i3
, newi2pat
? i2
: NULL_RTX
,
1869 distribute_notes (i1notes
, i1
, i3
, newi2pat
? i2
: NULL_RTX
,
1872 distribute_notes (midnotes
, NULL_RTX
, i3
, newi2pat
? i2
: NULL_RTX
,
1875 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
1876 know these are REG_UNUSED and want them to go to the desired insn,
1877 so we always pass it as i3. */
1878 if (newi2pat
&& new_i2_notes
)
1879 distribute_notes (new_i2_notes
, i2
, i2
, NULL_RTX
, NULL_RTX
, NULL_RTX
);
1881 distribute_notes (new_i3_notes
, i3
, i3
, NULL_RTX
, NULL_RTX
, NULL_RTX
);
1883 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
1884 put a REG_DEAD note for it somewhere. Similarly for I2 and I1. */
1886 distribute_notes (gen_rtx (EXPR_LIST
, REG_DEAD
, i3dest_killed
, NULL_RTX
),
1887 NULL_RTX
, i3
, newi2pat
? i2
: NULL_RTX
,
1888 NULL_RTX
, NULL_RTX
);
1889 if (i2dest_in_i2src
)
1890 distribute_notes (gen_rtx (EXPR_LIST
, REG_DEAD
, i2dest
, NULL_RTX
),
1891 NULL_RTX
, i3
, newi2pat
? i2
: NULL_RTX
,
1892 NULL_RTX
, NULL_RTX
);
1893 if (i1dest_in_i1src
)
1894 distribute_notes (gen_rtx (EXPR_LIST
, REG_DEAD
, i1dest
, NULL_RTX
),
1895 NULL_RTX
, i3
, newi2pat
? i2
: NULL_RTX
,
1896 NULL_RTX
, NULL_RTX
);
1898 distribute_links (i3links
);
1899 distribute_links (i2links
);
1900 distribute_links (i1links
);
1902 if (GET_CODE (i2dest
) == REG
)
1904 /* The insn that previously set this register doesn't exist, and
1905 this life of the register may not exist either. Show that
1906 we don't know its value any more. If we don't do this and
1907 I2 set the register to a value that depended on its old
1908 contents, we will get confused. If this insn is used, thing
1909 will be set correctly in combine_instructions. */
1910 record_value_for_reg (i2dest
, NULL_RTX
, NULL_RTX
);
1912 /* If the reg formerly set in I2 died only once and that was in I3,
1913 zero its use count so it won't make `reload' do any work. */
1914 if (! added_sets_2
&& newi2pat
== 0)
1916 regno
= REGNO (i2dest
);
1917 reg_n_sets
[regno
]--;
1918 if (reg_n_sets
[regno
] == 0
1919 && ! (basic_block_live_at_start
[0][regno
/ REGSET_ELT_BITS
]
1920 & ((REGSET_ELT_TYPE
) 1 << (regno
% REGSET_ELT_BITS
))))
1921 reg_n_refs
[regno
] = 0;
1925 if (i1
&& GET_CODE (i1dest
) == REG
)
1927 record_value_for_reg (i1dest
, NULL_RTX
, NULL_RTX
);
1928 regno
= REGNO (i1dest
);
1931 reg_n_sets
[regno
]--;
1932 if (reg_n_sets
[regno
] == 0
1933 && ! (basic_block_live_at_start
[0][regno
/ REGSET_ELT_BITS
]
1934 & ((REGSET_ELT_TYPE
) 1 << (regno
% REGSET_ELT_BITS
))))
1935 reg_n_refs
[regno
] = 0;
1939 /* If I3 is now an unconditional jump, ensure that it has a
1940 BARRIER following it since it may have initially been a
1941 conditional jump. */
1943 if ((GET_CODE (newpat
) == RETURN
|| simplejump_p (i3
))
1944 && GET_CODE (next_nonnote_insn (i3
)) != BARRIER
)
1945 emit_barrier_after (i3
);
1948 combine_successes
++;
1950 return newi2pat
? i2
: i3
;
1953 /* Undo all the modifications recorded in undobuf. */
/* NOTE(review): the function header, braces, and local declarations for
   this routine appear to be missing from this extraction; the statements
   below look like the body of combine's undo_all -- confirm against a
   complete copy of the file.  */
/* More changes may have been attempted than the undo table can record
   (the counter keeps incrementing past MAX_UNDO as a failure signal);
   clamp so we only walk the entries actually stored.  */
1959 if (undobuf
.num_undo
> MAX_UNDO
)
1960 undobuf
.num_undo
= MAX_UNDO
;
/* Restore each saved rtl slot to its old contents, in reverse order so
   nested substitutions unwind last-in first-out.  */
1961 for (i
= undobuf
.num_undo
- 1; i
>= 0; i
--)
1962 *undobuf
.undo
[i
].where
= undobuf
.undo
[i
].old_contents
;
/* Release the obstack storage used while recording and mark the undo
   buffer empty for the next combination attempt.  */
1964 obfree (undobuf
.storage
);
1965 undobuf
.num_undo
= 0;
1968 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
1969 where we have an arithmetic expression and return that point.
1971 try_combine will call this function to see if an insn can be split into
/* Returns a pointer into the rtx (an rtx * suitable for SUBST) at which
   the expression can be split into two machine insns, or presumably zero
   when no split point exists (the final return is missing from this
   extraction).  */
/* NOTE(review): the return type, the declaration of LOC, the binding of
   X (presumably X = *LOC), several braces, and some #if/#endif lines are
   missing from this extraction -- confirm against a complete copy.  */
1975 find_split_point (loc
)
1979 enum rtx_code code
= GET_CODE (x
);
1981 int len
= 0, pos
, unsignedp
;
1984 /* First special-case some codes. */
1988 #ifdef INSN_SCHEDULING
1989 /* If we are making a paradoxical SUBREG invalid, it becomes a split
1991 if (GET_CODE (SUBREG_REG (x
)) == MEM
)
/* Recurse into the inner MEM so the address itself can be split.  */
1994 return find_split_point (&SUBREG_REG (x
));
1998 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
1999 using LO_SUM and HIGH. */
2000 if (GET_CODE (XEXP (x
, 0)) == CONST
2001 || GET_CODE (XEXP (x
, 0)) == SYMBOL_REF
)
2004 gen_rtx_combine (LO_SUM
, Pmode
,
2005 gen_rtx_combine (HIGH
, Pmode
, XEXP (x
, 0)),
2007 return &XEXP (XEXP (x
, 0), 0);
2011 /* If we have a PLUS whose second operand is a constant and the
2012 address is not valid, perhaps we can split it up using
2013 the machine-specific way to split large constants. We use
2014 the first pseudo-reg (one of the virtual regs) as a placeholder;
2015 it will not remain in the result. */
2016 if (GET_CODE (XEXP (x
, 0)) == PLUS
2017 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2018 && ! memory_address_p (GET_MODE (x
), XEXP (x
, 0)))
2020 rtx reg
= regno_reg_rtx
[FIRST_PSEUDO_REGISTER
];
/* Ask the backend's define_split machinery how it would split a SET of
   this address into a placeholder register.  */
2021 rtx seq
= split_insns (gen_rtx (SET
, VOIDmode
, reg
, XEXP (x
, 0)),
2024 /* This should have produced two insns, each of which sets our
2025 placeholder. If the source of the second is a valid address,
2026 we can put both sources together and make a split point
2029 if (seq
&& XVECLEN (seq
, 0) == 2
2030 && GET_CODE (XVECEXP (seq
, 0, 0)) == INSN
2031 && GET_CODE (PATTERN (XVECEXP (seq
, 0, 0))) == SET
2032 && SET_DEST (PATTERN (XVECEXP (seq
, 0, 0))) == reg
2033 && ! reg_mentioned_p (reg
,
2034 SET_SRC (PATTERN (XVECEXP (seq
, 0, 0))))
2035 && GET_CODE (XVECEXP (seq
, 0, 1)) == INSN
2036 && GET_CODE (PATTERN (XVECEXP (seq
, 0, 1))) == SET
2037 && SET_DEST (PATTERN (XVECEXP (seq
, 0, 1))) == reg
2038 && memory_address_p (GET_MODE (x
),
2039 SET_SRC (PATTERN (XVECEXP (seq
, 0, 1)))))
2041 rtx src1
= SET_SRC (PATTERN (XVECEXP (seq
, 0, 0)));
2042 rtx src2
= SET_SRC (PATTERN (XVECEXP (seq
, 0, 1)));
2044 /* Replace the placeholder in SRC2 with SRC1. If we can
2045 find where in SRC2 it was placed, that can become our
2046 split point and we can replace this address with SRC2.
2047 Just try two obvious places. */
2049 src2
= replace_rtx (src2
, reg
, src1
);
/* Case 1: SRC1 ended up as operand 0 of SRC2.  */
2051 if (XEXP (src2
, 0) == src1
)
2052 split
= &XEXP (src2
, 0);
/* Case 2: SRC1 is one level deeper, as operand 0 of operand 0
   (which must itself be an rtx, format 'e').  */
2053 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2
, 0)))[0] == 'e'
2054 && XEXP (XEXP (src2
, 0), 0) == src1
)
2055 split
= &XEXP (XEXP (src2
, 0), 0);
2059 SUBST (XEXP (x
, 0), src2
);
2068 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2069 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2070 we need to put the operand into a register. So split at that
2073 if (SET_DEST (x
) == cc0_rtx
2074 && GET_CODE (SET_SRC (x
)) != COMPARE
2075 && GET_CODE (SET_SRC (x
)) != ZERO_EXTRACT
2076 && GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) != 'o'
2077 && ! (GET_CODE (SET_SRC (x
)) == SUBREG
2078 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x
)))) == 'o'))
2079 return &SET_SRC (x
);
2082 /* See if we can split SET_SRC as it stands. */
2083 split
= find_split_point (&SET_SRC (x
));
2084 if (split
&& split
!= &SET_SRC (x
))
2087 /* See if this is a bitfield assignment with everything constant. If
2088 so, this is an IOR of an AND, so split it into that. */
2089 if (GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
2090 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x
), 0)))
2091 <= HOST_BITS_PER_WIDE_INT
)
2092 && GET_CODE (XEXP (SET_DEST (x
), 1)) == CONST_INT
2093 && GET_CODE (XEXP (SET_DEST (x
), 2)) == CONST_INT
2094 && GET_CODE (SET_SRC (x
)) == CONST_INT
2095 && ((INTVAL (XEXP (SET_DEST (x
), 1))
2096 + INTVAL (XEXP (SET_DEST (x
), 2)))
2097 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x
), 0))))
2098 && ! side_effects_p (XEXP (SET_DEST (x
), 0)))
/* Field position, field width, the constant being stored, and the
   object being stored into.  */
2100 int pos
= INTVAL (XEXP (SET_DEST (x
), 2));
2101 int len
= INTVAL (XEXP (SET_DEST (x
), 1));
2102 int src
= INTVAL (SET_SRC (x
));
2103 rtx dest
= XEXP (SET_DEST (x
), 0);
2104 enum machine_mode mode
= GET_MODE (dest
);
2105 unsigned HOST_WIDE_INT mask
= ((HOST_WIDE_INT
) 1 << len
) - 1;
/* Re-count the bit position from the other end of the word; the
   guarding conditional (bit-numbering convention) appears to be
   missing from this extraction -- TODO confirm.  */
2108 pos
= GET_MODE_BITSIZE (mode
) - len
- pos
;
/* Simple form: just OR the (shifted) constant into DEST.  */
2113 gen_binary (IOR
, mode
, dest
, GEN_INT (src
<< pos
)));
/* General form: clear the field with an AND mask, then OR in the
   shifted constant.  */
2116 gen_binary (IOR
, mode
,
2117 gen_binary (AND
, mode
, dest
,
2118 GEN_INT (~ (mask
<< pos
)
2119 & GET_MODE_MASK (mode
))),
2120 GEN_INT (src
<< pos
)));
2122 SUBST (SET_DEST (x
), dest
);
2124 split
= find_split_point (&SET_SRC (x
));
2125 if (split
&& split
!= &SET_SRC (x
))
2129 /* Otherwise, see if this is an operation that we can split into two.
2130 If so, try to split that. */
2131 code
= GET_CODE (SET_SRC (x
));
2136 inner
= XEXP (SET_SRC (x
), 0);
2138 len
= GET_MODE_BITSIZE (GET_MODE (inner
));
/* Extraction with constant width (operand 1) and constant position
   (operand 2): record them so we can rewrite as a pair of shifts.  */
2144 if (GET_CODE (XEXP (SET_SRC (x
), 1)) == CONST_INT
2145 && GET_CODE (XEXP (SET_SRC (x
), 2)) == CONST_INT
)
2147 inner
= XEXP (SET_SRC (x
), 0);
2148 len
= INTVAL (XEXP (SET_SRC (x
), 1));
2149 pos
= INTVAL (XEXP (SET_SRC (x
), 2));
/* Re-count the bit position from the other end; as above, the
   guarding conditional seems to be missing here -- TODO confirm.  */
2152 pos
= GET_MODE_BITSIZE (GET_MODE (inner
)) - len
- pos
;
/* ZERO_EXTRACT is the unsigned extraction; the other code reaching
   here is treated as signed.  */
2154 unsignedp
= (code
== ZERO_EXTRACT
);
2159 if (len
&& pos
>= 0 && pos
+ len
<= GET_MODE_BITSIZE (GET_MODE (inner
)))
2161 enum machine_mode mode
= GET_MODE (SET_SRC (x
));
/* Unsigned field narrower than a host word: rewrite as a logical
   shift right followed by an AND with the field mask.  */
2163 if (unsignedp
&& len
< HOST_BITS_PER_WIDE_INT
)
2168 gen_rtx_combine (LSHIFTRT
, mode
,
2169 gen_lowpart_for_combine (mode
, inner
),
2171 GEN_INT (((HOST_WIDE_INT
) 1 << len
) - 1)));
2173 split
= find_split_point (&SET_SRC (x
));
2174 if (split
&& split
!= &SET_SRC (x
))
/* Signed case: shift the field up to the top of the word, then back
   down (the shift-right rtx appears to be on lines missing from this
   extraction), so the sign bit is replicated.  */
2182 gen_rtx_combine (ASHIFT
, mode
,
2183 gen_lowpart_for_combine (mode
, inner
),
2184 GEN_INT (GET_MODE_BITSIZE (mode
)
2186 GEN_INT (GET_MODE_BITSIZE (mode
) - len
)));
2188 split
= find_split_point (&SET_SRC (x
));
2189 if (split
&& split
!= &SET_SRC (x
))
2194 /* See if this is a simple operation with a constant as the second
2195 operand. It might be that this constant is out of range and hence
2196 could be used as a split point. */
2197 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) == '2'
2198 || GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) == 'c'
2199 || GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) == '<')
2200 && CONSTANT_P (XEXP (SET_SRC (x
), 1))
2201 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x
), 0))) == 'o'
2202 || (GET_CODE (XEXP (SET_SRC (x
), 0)) == SUBREG
2203 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x
), 0))))
2205 return &XEXP (SET_SRC (x
), 1);
2207 /* Finally, see if this is a simple operation with its first operand
2208 not in a register. The operation might require this operand in a
2209 register, so return it as a split point. We can always do this
2210 because if the first operand were another operation, we would have
2211 already found it as a split point. */
2212 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) == '2'
2213 || GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) == 'c'
2214 || GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) == '<'
2215 || GET_RTX_CLASS (GET_CODE (SET_SRC (x
))) == '1')
2216 && ! register_operand (XEXP (SET_SRC (x
), 0), VOIDmode
))
2217 return &XEXP (SET_SRC (x
), 0);
2223 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2224 it is better to write this as (not (ior A B)) so we can split it.
2225 Similarly for IOR. */
2226 if (GET_CODE (XEXP (x
, 0)) == NOT
&& GET_CODE (XEXP (x
, 1)) == NOT
)
/* De Morgan: (and (not A) (not B)) => (not (ior A B)), and vice
   versa, then retry the split on the rewritten form.  */
2229 gen_rtx_combine (NOT
, GET_MODE (x
),
2230 gen_rtx_combine (code
== IOR
? AND
: IOR
,
2232 XEXP (XEXP (x
, 0), 0),
2233 XEXP (XEXP (x
, 1), 0))));
2234 return find_split_point (loc
);
2237 /* Many RISC machines have a large set of logical insns. If the
2238 second operand is a NOT, put it first so we will try to split the
2239 other operand first. */
2240 if (GET_CODE (XEXP (x
, 1)) == NOT
)
2242 rtx tem
= XEXP (x
, 0);
2243 SUBST (XEXP (x
, 0), XEXP (x
, 1));
2244 SUBST (XEXP (x
, 1), tem
);
2249 /* Otherwise, select our actions depending on our rtx class. */
2250 switch (GET_RTX_CLASS (code
))
2252 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2254 split
= find_split_point (&XEXP (x
, 2));
2257 /* ... fall through ... */
2261 split
= find_split_point (&XEXP (x
, 1));
2264 /* ... fall through ... */
2266 /* Some machines have (and (shift ...) ...) insns. If X is not
2267 an AND, but XEXP (X, 0) is, use it as our split point. */
2268 if (GET_CODE (x
) != AND
&& GET_CODE (XEXP (x
, 0)) == AND
)
2269 return &XEXP (x
, 0);
2271 split
= find_split_point (&XEXP (x
, 0));
2277 /* Otherwise, we don't have a split point. */
2281 /* Throughout X, replace FROM with TO, and return the result.
2282 The result is TO if X is FROM;
2283 otherwise the result is X, but its contents may have been modified.
2284 If they were modified, a record was made in undobuf so that
2285 undo_all will (among other things) return X to its original state.
2287 If the number of changes necessary is too much to record to undo,
2288 the excess changes are not made, so the result is invalid.
2289 The changes already made can still be undone.
2290 undobuf.num_undo is incremented for such changes, so by testing that
2291 the caller can tell whether the result is valid.
2293 `n_occurrences' is incremented each time FROM is replaced.
2295 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2297 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
2298 by copying if `n_occurrences' is non-zero. */
2301 subst (x
, from
, to
, in_dest
, unique_copy
)
2302 register rtx x
, from
, to
;
2307 register int len
, i
;
2308 register enum rtx_code code
= GET_CODE (x
), orig_code
= code
;
2310 enum machine_mode mode
= GET_MODE (x
);
2311 enum machine_mode op0_mode
= VOIDmode
;
2316 /* FAKE_EXTEND_SAFE_P (MODE, FROM) is 1 if (subreg:MODE FROM 0) is a safe
2317 replacement for (zero_extend:MODE FROM) or (sign_extend:MODE FROM).
2318 If it is 0, that cannot be done. We can now do this for any MEM
2319 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be reloaded.
2320 If not for that, MEM's would very rarely be safe. */
2322 /* Reject MODEs bigger than a word, because we might not be able
2323 to reference a two-register group starting with an arbitrary register
2324 (and currently gen_lowpart might crash for a SUBREG). */
2326 #define FAKE_EXTEND_SAFE_P(MODE, FROM) \
2327 (GET_MODE_SIZE (MODE) <= UNITS_PER_WORD)
2329 /* Two expressions are equal if they are identical copies of a shared
2330 RTX or if they are both registers with the same register number
2333 #define COMBINE_RTX_EQUAL_P(X,Y) \
2335 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2336 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2338 if (! in_dest
&& COMBINE_RTX_EQUAL_P (x
, from
))
2341 return (unique_copy
&& n_occurrences
> 1 ? copy_rtx (to
) : to
);
2344 /* If X and FROM are the same register but different modes, they will
2345 not have been seen as equal above. However, flow.c will make a
2346 LOG_LINKS entry for that case. If we do nothing, we will try to
2347 rerecognize our original insn and, when it succeeds, we will
2348 delete the feeding insn, which is incorrect.
2350 So force this insn not to match in this (rare) case. */
2351 if (! in_dest
&& code
== REG
&& GET_CODE (from
) == REG
2352 && REGNO (x
) == REGNO (from
))
2353 return gen_rtx (CLOBBER
, GET_MODE (x
), const0_rtx
);
2355 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2356 of which may contain things that can be combined. */
2357 if (code
!= MEM
&& code
!= LO_SUM
&& GET_RTX_CLASS (code
) == 'o')
2360 /* It is possible to have a subexpression appear twice in the insn.
2361 Suppose that FROM is a register that appears within TO.
2362 Then, after that subexpression has been scanned once by `subst',
2363 the second time it is scanned, TO may be found. If we were
2364 to scan TO here, we would find FROM within it and create a
2365 self-referent rtl structure which is completely wrong. */
2366 if (COMBINE_RTX_EQUAL_P (x
, to
))
2369 len
= GET_RTX_LENGTH (code
);
2370 fmt
= GET_RTX_FORMAT (code
);
2372 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2373 set up to skip this common case. All other cases where we want to
2374 suppress replacing something inside a SET_SRC are handled via the
2377 && (GET_CODE (SET_DEST (x
)) == REG
2378 || GET_CODE (SET_DEST (x
)) == CC0
2379 || GET_CODE (SET_DEST (x
)) == PC
))
2382 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2384 op0_mode
= GET_MODE (XEXP (x
, 0));
2386 for (i
= 0; i
< len
; i
++)
2391 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2394 if (COMBINE_RTX_EQUAL_P (XVECEXP (x
, i
, j
), from
))
2396 new = (unique_copy
&& n_occurrences
? copy_rtx (to
) : to
);
2401 new = subst (XVECEXP (x
, i
, j
), from
, to
, 0, unique_copy
);
2403 /* If this substitution failed, this whole thing fails. */
2404 if (GET_CODE (new) == CLOBBER
&& XEXP (new, 0) == const0_rtx
)
2408 SUBST (XVECEXP (x
, i
, j
), new);
2411 else if (fmt
[i
] == 'e')
2415 if (COMBINE_RTX_EQUAL_P (XEXP (x
, i
), from
))
2417 new = (unique_copy
&& n_occurrences
? copy_rtx (to
) : to
);
2421 /* If we are in a SET_DEST, suppress most cases unless we
2422 have gone inside a MEM, in which case we want to
2423 simplify the address. We assume here that things that
2424 are actually part of the destination have their inner
2425 parts in the first expression. This is true for SUBREG,
2426 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2427 things aside from REG and MEM that should appear in a
2429 new = subst (XEXP (x
, i
), from
, to
,
2431 && (code
== SUBREG
|| code
== STRICT_LOW_PART
2432 || code
== ZERO_EXTRACT
))
2434 && i
== 0), unique_copy
);
2436 /* If we found that we will have to reject this combination,
2437 indicate that by returning the CLOBBER ourselves, rather than
2438 an expression containing it. This will speed things up as
2439 well as prevent accidents where two CLOBBERs are considered
2440 to be equal, thus producing an incorrect simplification. */
2442 if (GET_CODE (new) == CLOBBER
&& XEXP (new, 0) == const0_rtx
)
2445 SUBST (XEXP (x
, i
), new);
2449 /* If this is a commutative operation, put a constant last and a complex
2450 expression first. We don't need to do this for comparisons here. */
2451 if (GET_RTX_CLASS (code
) == 'c'
2452 && ((CONSTANT_P (XEXP (x
, 0)) && GET_CODE (XEXP (x
, 1)) != CONST_INT
)
2453 || (GET_RTX_CLASS (GET_CODE (XEXP (x
, 0))) == 'o'
2454 && GET_RTX_CLASS (GET_CODE (XEXP (x
, 1))) != 'o')
2455 || (GET_CODE (XEXP (x
, 0)) == SUBREG
2456 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x
, 0)))) == 'o'
2457 && GET_RTX_CLASS (GET_CODE (XEXP (x
, 1))) != 'o')))
2460 SUBST (XEXP (x
, 0), XEXP (x
, 1));
2461 SUBST (XEXP (x
, 1), temp
);
2464 /* Try to fold this expression in case we have constants that weren't
2467 switch (GET_RTX_CLASS (code
))
2470 temp
= simplify_unary_operation (code
, mode
, XEXP (x
, 0), op0_mode
);
2473 temp
= simplify_relational_operation (code
, op0_mode
,
2474 XEXP (x
, 0), XEXP (x
, 1));
2475 #ifdef FLOAT_STORE_FLAG_VALUE
2476 if (temp
!= 0 && GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
)
2477 temp
= ((temp
== const0_rtx
) ? CONST0_RTX (GET_MODE (x
))
2478 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE
, GET_MODE (x
)));
2483 temp
= simplify_binary_operation (code
, mode
, XEXP (x
, 0), XEXP (x
, 1));
2487 temp
= simplify_ternary_operation (code
, mode
, op0_mode
, XEXP (x
, 0),
2488 XEXP (x
, 1), XEXP (x
, 2));
2495 /* We come back to here if we have replaced the expression with one of
2496 a different code and it is likely that further simplification will be
2501 /* If we have restarted more than 4 times, we are probably looping, so
2503 if (++n_restarts
> 4)
2506 code
= GET_CODE (x
);
2508 /* First see if we can apply the inverse distributive law. */
2509 if (code
== PLUS
|| code
== MINUS
|| code
== IOR
|| code
== XOR
)
2511 x
= apply_distributive_law (x
);
2512 code
= GET_CODE (x
);
2515 /* If CODE is an associative operation not otherwise handled, see if we
2516 can associate some operands. This can win if they are constants or
2517 if they are logically related (i.e. (a & b) & a. */
2518 if ((code
== PLUS
|| code
== MINUS
2519 || code
== MULT
|| code
== AND
|| code
== IOR
|| code
== XOR
2520 || code
== DIV
|| code
== UDIV
2521 || code
== SMAX
|| code
== SMIN
|| code
== UMAX
|| code
== UMIN
)
2522 && GET_MODE_CLASS (mode
) == MODE_INT
)
2524 if (GET_CODE (XEXP (x
, 0)) == code
)
2526 rtx other
= XEXP (XEXP (x
, 0), 0);
2527 rtx inner_op0
= XEXP (XEXP (x
, 0), 1);
2528 rtx inner_op1
= XEXP (x
, 1);
2531 /* Make sure we pass the constant operand if any as the second
2532 one if this is a commutative operation. */
2533 if (CONSTANT_P (inner_op0
) && GET_RTX_CLASS (code
) == 'c')
2535 rtx tem
= inner_op0
;
2536 inner_op0
= inner_op1
;
2539 inner
= simplify_binary_operation (code
== MINUS
? PLUS
2540 : code
== DIV
? MULT
2541 : code
== UDIV
? MULT
2543 mode
, inner_op0
, inner_op1
);
2545 /* For commutative operations, try the other pair if that one
2547 if (inner
== 0 && GET_RTX_CLASS (code
) == 'c')
2549 other
= XEXP (XEXP (x
, 0), 1);
2550 inner
= simplify_binary_operation (code
, mode
,
2551 XEXP (XEXP (x
, 0), 0),
2557 x
= gen_binary (code
, mode
, other
, inner
);
2564 /* A little bit of algebraic simplification here. */
2568 /* Ensure that our address has any ASHIFTs converted to MULT in case
2569 address-recognizing predicates are called later. */
2570 temp
= make_compound_operation (XEXP (x
, 0), MEM
);
2571 SUBST (XEXP (x
, 0), temp
);
2575 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
2576 is paradoxical. If we can't do that safely, then it becomes
2577 something nonsensical so that this combination won't take place. */
2579 if (GET_CODE (SUBREG_REG (x
)) == MEM
2580 && (GET_MODE_SIZE (mode
)
2581 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
)))))
2583 rtx inner
= SUBREG_REG (x
);
2584 int endian_offset
= 0;
2585 /* Don't change the mode of the MEM
2586 if that would change the meaning of the address. */
2587 if (MEM_VOLATILE_P (SUBREG_REG (x
))
2588 || mode_dependent_address_p (XEXP (inner
, 0)))
2589 return gen_rtx (CLOBBER
, mode
, const0_rtx
);
2591 #if BYTES_BIG_ENDIAN
2592 if (GET_MODE_SIZE (mode
) < UNITS_PER_WORD
)
2593 endian_offset
+= UNITS_PER_WORD
- GET_MODE_SIZE (mode
);
2594 if (GET_MODE_SIZE (GET_MODE (inner
)) < UNITS_PER_WORD
)
2595 endian_offset
-= UNITS_PER_WORD
- GET_MODE_SIZE (GET_MODE (inner
));
2597 /* Note if the plus_constant doesn't make a valid address
2598 then this combination won't be accepted. */
2599 x
= gen_rtx (MEM
, mode
,
2600 plus_constant (XEXP (inner
, 0),
2601 (SUBREG_WORD (x
) * UNITS_PER_WORD
2603 MEM_VOLATILE_P (x
) = MEM_VOLATILE_P (inner
);
2604 RTX_UNCHANGING_P (x
) = RTX_UNCHANGING_P (inner
);
2605 MEM_IN_STRUCT_P (x
) = MEM_IN_STRUCT_P (inner
);
2609 /* If we are in a SET_DEST, these other cases can't apply. */
2613 /* Changing mode twice with SUBREG => just change it once,
2614 or not at all if changing back to starting mode. */
2615 if (GET_CODE (SUBREG_REG (x
)) == SUBREG
)
2617 if (mode
== GET_MODE (SUBREG_REG (SUBREG_REG (x
)))
2618 && SUBREG_WORD (x
) == 0 && SUBREG_WORD (SUBREG_REG (x
)) == 0)
2619 return SUBREG_REG (SUBREG_REG (x
));
2621 SUBST_INT (SUBREG_WORD (x
),
2622 SUBREG_WORD (x
) + SUBREG_WORD (SUBREG_REG (x
)));
2623 SUBST (SUBREG_REG (x
), SUBREG_REG (SUBREG_REG (x
)));
2626 /* SUBREG of a hard register => just change the register number
2627 and/or mode. If the hard register is not valid in that mode,
2628 suppress this combination. */
2630 if (GET_CODE (SUBREG_REG (x
)) == REG
2631 && REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
)
2633 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x
)) + SUBREG_WORD (x
),
2635 return gen_rtx (REG
, mode
,
2636 REGNO (SUBREG_REG (x
)) + SUBREG_WORD (x
));
2638 return gen_rtx (CLOBBER
, mode
, const0_rtx
);
2641 /* For a constant, try to pick up the part we want. Handle a full
2642 word and low-order part. Only do this if we are narrowing
2643 the constant; if it is being widened, we have no idea what
2644 the extra bits will have been set to. */
2646 if (CONSTANT_P (SUBREG_REG (x
)) && op0_mode
!= VOIDmode
2647 && GET_MODE_SIZE (mode
) == UNITS_PER_WORD
2648 && GET_MODE_SIZE (op0_mode
) < UNITS_PER_WORD
2649 && GET_MODE_CLASS (mode
) == MODE_INT
)
2651 temp
= operand_subword (SUBREG_REG (x
), SUBREG_WORD (x
),
2657 if (CONSTANT_P (SUBREG_REG (x
)) && subreg_lowpart_p (x
)
2658 && GET_MODE_SIZE (mode
) < GET_MODE_SIZE (op0_mode
))
2659 return gen_lowpart_for_combine (mode
, SUBREG_REG (x
));
2661 /* If we are narrowing the object, we need to see if we can simplify
2662 the expression for the object knowing that we only need the
2663 low-order bits. We do this by computing an AND of the object
2664 with only the bits we care about. That will produce any needed
2665 simplifications. If the resulting computation is just the
2666 AND with the significant bits, our operand is the first operand
2667 of the AND. Otherwise, it is the resulting expression. */
2668 if (GET_MODE_SIZE (mode
) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
)))
2669 && subreg_lowpart_p (x
)
2670 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
)))
2671 <= HOST_BITS_PER_WIDE_INT
))
2673 temp
= simplify_and_const_int (NULL_RTX
, GET_MODE (SUBREG_REG (x
)),
2674 SUBREG_REG (x
), GET_MODE_MASK (mode
));
2675 if (GET_CODE (temp
) == AND
&& GET_CODE (XEXP (temp
, 1)) == CONST_INT
2676 && INTVAL (XEXP (temp
, 1)) == GET_MODE_MASK (mode
))
2677 temp
= XEXP (temp
, 0);
2678 return gen_lowpart_for_combine (mode
, temp
);
2684 /* (not (plus X -1)) can become (neg X). */
2685 if (GET_CODE (XEXP (x
, 0)) == PLUS
2686 && XEXP (XEXP (x
, 0), 1) == constm1_rtx
)
2688 x
= gen_rtx_combine (NEG
, mode
, XEXP (XEXP (x
, 0), 0));
2692 /* Similarly, (not (neg X)) is (plus X -1). */
2693 if (GET_CODE (XEXP (x
, 0)) == NEG
)
2695 x
= gen_rtx_combine (PLUS
, mode
, XEXP (XEXP (x
, 0), 0), constm1_rtx
);
2699 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
2700 other than 1, but that is not valid. We could do a similar
2701 simplification for (not (lshiftrt C X)) where C is just the sign bit,
2702 but this doesn't seem common enough to bother with. */
2703 if (GET_CODE (XEXP (x
, 0)) == ASHIFT
2704 && XEXP (XEXP (x
, 0), 0) == const1_rtx
)
2706 x
= gen_rtx (ROTATE
, mode
, gen_unary (NOT
, mode
, const1_rtx
),
2707 XEXP (XEXP (x
, 0), 1));
2711 if (GET_CODE (XEXP (x
, 0)) == SUBREG
2712 && subreg_lowpart_p (XEXP (x
, 0))
2713 && (GET_MODE_SIZE (GET_MODE (XEXP (x
, 0)))
2714 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x
, 0)))))
2715 && GET_CODE (SUBREG_REG (XEXP (x
, 0))) == ASHIFT
2716 && XEXP (SUBREG_REG (XEXP (x
, 0)), 0) == const1_rtx
)
2718 enum machine_mode inner_mode
= GET_MODE (SUBREG_REG (XEXP (x
, 0)));
2720 x
= gen_rtx (ROTATE
, inner_mode
,
2721 gen_unary (NOT
, inner_mode
, const1_rtx
),
2722 XEXP (SUBREG_REG (XEXP (x
, 0)), 1));
2723 x
= gen_lowpart_for_combine (mode
, x
);
2727 #if STORE_FLAG_VALUE == -1
2728 /* (not (comparison foo bar)) can be done by reversing the comparison
2730 if (GET_RTX_CLASS (GET_CODE (XEXP (x
, 0))) == '<'
2731 && reversible_comparison_p (XEXP (x
, 0)))
2732 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x
, 0))),
2733 mode
, XEXP (XEXP (x
, 0), 0),
2734 XEXP (XEXP (x
, 0), 1));
2737 /* Apply De Morgan's laws to reduce number of patterns for machines
2738 with negating logical insns (and-not, nand, etc.). If result has
2739 only one NOT, put it first, since that is how the patterns are
2742 if (GET_CODE (XEXP (x
, 0)) == IOR
|| GET_CODE (XEXP (x
, 0)) == AND
)
2744 rtx in1
= XEXP (XEXP (x
, 0), 0), in2
= XEXP (XEXP (x
, 0), 1);
2746 if (GET_CODE (in1
) == NOT
)
2747 in1
= XEXP (in1
, 0);
2749 in1
= gen_rtx_combine (NOT
, GET_MODE (in1
), in1
);
2751 if (GET_CODE (in2
) == NOT
)
2752 in2
= XEXP (in2
, 0);
2753 else if (GET_CODE (in2
) == CONST_INT
2754 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
2755 in2
= GEN_INT (GET_MODE_MASK (mode
) & ~ INTVAL (in2
));
2757 in2
= gen_rtx_combine (NOT
, GET_MODE (in2
), in2
);
2759 if (GET_CODE (in2
) == NOT
)
2762 in2
= in1
; in1
= tem
;
2765 x
= gen_rtx_combine (GET_CODE (XEXP (x
, 0)) == IOR
? AND
: IOR
,
2772 /* (neg (plus X 1)) can become (not X). */
2773 if (GET_CODE (XEXP (x
, 0)) == PLUS
2774 && XEXP (XEXP (x
, 0), 1) == const1_rtx
)
2776 x
= gen_rtx_combine (NOT
, mode
, XEXP (XEXP (x
, 0), 0));
2780 /* Similarly, (neg (not X)) is (plus X 1). */
2781 if (GET_CODE (XEXP (x
, 0)) == NOT
)
2783 x
= gen_rtx_combine (PLUS
, mode
, XEXP (XEXP (x
, 0), 0), const1_rtx
);
2787 /* (neg (abs X)) is X if X is a value known to be either -1 or 0. */
2788 if (GET_CODE (XEXP (x
, 0)) == ABS
2789 && ((GET_CODE (XEXP (XEXP (x
, 0), 0)) == SIGN_EXTRACT
2790 && XEXP (XEXP (XEXP (x
, 0), 0), 1) == const1_rtx
)
2791 || (GET_CODE (XEXP (XEXP (x
, 0), 0)) == ASHIFTRT
2792 && GET_CODE (XEXP (XEXP (XEXP (x
, 0), 0), 1)) == CONST_INT
2793 && (INTVAL (XEXP (XEXP (XEXP (x
, 0), 0), 1))
2794 == GET_MODE_BITSIZE (mode
) - 1))
2795 || ((temp
= get_last_value (XEXP (XEXP (x
, 0), 0))) != 0
2796 && ((GET_CODE (temp
) == SIGN_EXTRACT
2797 && XEXP (temp
, 1) == const1_rtx
)
2798 || (GET_CODE (temp
) == ASHIFTRT
2799 && GET_CODE (XEXP (temp
, 1)) == CONST_INT
2800 && (INTVAL (XEXP (temp
, 1))
2801 == GET_MODE_BITSIZE (mode
) - 1))))))
2802 return XEXP (XEXP (x
, 0), 0);
2804 /* (neg (minus X Y)) can become (minus Y X). */
2805 if (GET_CODE (XEXP (x
, 0)) == MINUS
2806 && (GET_MODE_CLASS (mode
) != MODE_FLOAT
2807 /* x-y != -(y-x) with IEEE floating point. */
2808 || TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
))
2810 x
= gen_binary (MINUS
, mode
, XEXP (XEXP (x
, 0), 1),
2811 XEXP (XEXP (x
, 0), 0));
2815 /* NEG commutes with ASHIFT since it is multiplication. Only do this
2816 if we can then eliminate the NEG (e.g.,
2817 if the operand is a constant). */
2819 if (GET_CODE (XEXP (x
, 0)) == ASHIFT
)
2821 temp
= simplify_unary_operation (NEG
, mode
,
2822 XEXP (XEXP (x
, 0), 0), mode
);
2825 SUBST (XEXP (XEXP (x
, 0), 0), temp
);
2830 temp
= expand_compound_operation (XEXP (x
, 0));
2832 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
2833 replaced by (lshiftrt X C). This will convert
2834 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
2836 if (GET_CODE (temp
) == ASHIFTRT
2837 && GET_CODE (XEXP (temp
, 1)) == CONST_INT
2838 && INTVAL (XEXP (temp
, 1)) == GET_MODE_BITSIZE (mode
) - 1)
2840 x
= simplify_shift_const (temp
, LSHIFTRT
, mode
, XEXP (temp
, 0),
2841 INTVAL (XEXP (temp
, 1)));
2845 /* If X has only a single bit significant, say, bit I, convert
2846 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
2847 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
2848 (sign_extract X 1 Y). But only do this if TEMP isn't a register
2849 or a SUBREG of one since we'd be making the expression more
2850 complex if it was just a register. */
2852 if (GET_CODE (temp
) != REG
2853 && ! (GET_CODE (temp
) == SUBREG
2854 && GET_CODE (SUBREG_REG (temp
)) == REG
)
2855 && (i
= exact_log2 (significant_bits (temp
, mode
))) >= 0)
2857 rtx temp1
= simplify_shift_const
2858 (NULL_RTX
, ASHIFTRT
, mode
,
2859 simplify_shift_const (NULL_RTX
, ASHIFT
, mode
, temp
,
2860 GET_MODE_BITSIZE (mode
) - 1 - i
),
2861 GET_MODE_BITSIZE (mode
) - 1 - i
);
2863 /* If all we did was surround TEMP with the two shifts, we
2864 haven't improved anything, so don't use it. Otherwise,
2865 we are better off with TEMP1. */
2866 if (GET_CODE (temp1
) != ASHIFTRT
2867 || GET_CODE (XEXP (temp1
, 0)) != ASHIFT
2868 || XEXP (XEXP (temp1
, 0), 0) != temp
)
2876 case FLOAT_TRUNCATE
:
2877 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
2878 if (GET_CODE (XEXP (x
, 0)) == FLOAT_EXTEND
2879 && GET_MODE (XEXP (XEXP (x
, 0), 0)) == mode
)
2880 return XEXP (XEXP (x
, 0), 0);
2885 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
2886 using cc0, in which case we want to leave it as a COMPARE
2887 so we can distinguish it from a register-register-copy. */
2888 if (XEXP (x
, 1) == const0_rtx
)
2891 /* In IEEE floating point, x-0 is not the same as x. */
2892 if ((TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
2893 || GET_MODE_CLASS (GET_MODE (XEXP (x
, 0))) == MODE_INT
)
2894 && XEXP (x
, 1) == CONST0_RTX (GET_MODE (XEXP (x
, 0))))
2900 /* (const (const X)) can become (const X). Do it this way rather than
2901 returning the inner CONST since CONST can be shared with a
2903 if (GET_CODE (XEXP (x
, 0)) == CONST
)
2904 SUBST (XEXP (x
, 0), XEXP (XEXP (x
, 0), 0));
2909 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
2910 can add in an offset. find_split_point will split this address up
2911 again if it doesn't match. */
2912 if (GET_CODE (XEXP (x
, 0)) == HIGH
2913 && rtx_equal_p (XEXP (XEXP (x
, 0), 0), XEXP (x
, 1)))
2919 /* If we have (plus (plus (A const) B)), associate it so that CONST is
2920 outermost. That's because that's the way indexed addresses are
2921 supposed to appear. This code used to check many more cases, but
2922 they are now checked elsewhere. */
2923 if (GET_CODE (XEXP (x
, 0)) == PLUS
2924 && CONSTANT_ADDRESS_P (XEXP (XEXP (x
, 0), 1)))
2925 return gen_binary (PLUS
, mode
,
2926 gen_binary (PLUS
, mode
, XEXP (XEXP (x
, 0), 0),
2928 XEXP (XEXP (x
, 0), 1));
2930 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
2931 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
2932 bit-field and can be replaced by either a sign_extend or a
2933 sign_extract. The `and' may be a zero_extend. */
2934 if (GET_CODE (XEXP (x
, 0)) == XOR
2935 && GET_CODE (XEXP (x
, 1)) == CONST_INT
2936 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
2937 && INTVAL (XEXP (x
, 1)) == - INTVAL (XEXP (XEXP (x
, 0), 1))
2938 && (i
= exact_log2 (INTVAL (XEXP (XEXP (x
, 0), 1)))) >= 0
2939 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
2940 && ((GET_CODE (XEXP (XEXP (x
, 0), 0)) == AND
2941 && GET_CODE (XEXP (XEXP (XEXP (x
, 0), 0), 1)) == CONST_INT
2942 && (INTVAL (XEXP (XEXP (XEXP (x
, 0), 0), 1))
2943 == ((HOST_WIDE_INT
) 1 << (i
+ 1)) - 1))
2944 || (GET_CODE (XEXP (XEXP (x
, 0), 0)) == ZERO_EXTEND
2945 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x
, 0), 0), 0)))
2948 x
= simplify_shift_const
2949 (NULL_RTX
, ASHIFTRT
, mode
,
2950 simplify_shift_const (NULL_RTX
, ASHIFT
, mode
,
2951 XEXP (XEXP (XEXP (x
, 0), 0), 0),
2952 GET_MODE_BITSIZE (mode
) - (i
+ 1)),
2953 GET_MODE_BITSIZE (mode
) - (i
+ 1));
2957 /* If only the low-order bit of X is significant, (plus x -1)
2958 can become (ashiftrt (ashift (xor x 1) C) C) where C is
2959 the bitsize of the mode - 1. This allows simplification of
2960 "a = (b & 8) == 0;" */
2961 if (XEXP (x
, 1) == constm1_rtx
2962 && GET_CODE (XEXP (x
, 0)) != REG
2963 && ! (GET_CODE (XEXP (x
,0)) == SUBREG
2964 && GET_CODE (SUBREG_REG (XEXP (x
, 0))) == REG
)
2965 && significant_bits (XEXP (x
, 0), mode
) == 1)
2967 x
= simplify_shift_const
2968 (NULL_RTX
, ASHIFTRT
, mode
,
2969 simplify_shift_const (NULL_RTX
, ASHIFT
, mode
,
2970 gen_rtx_combine (XOR
, mode
,
2971 XEXP (x
, 0), const1_rtx
),
2972 GET_MODE_BITSIZE (mode
) - 1),
2973 GET_MODE_BITSIZE (mode
) - 1);
2977 /* If we are adding two things that have no bits in common, convert
2978 the addition into an IOR. This will often be further simplified,
2979 for example in cases like ((a & 1) + (a & 2)), which can
2982 if ((significant_bits (XEXP (x
, 0), mode
)
2983 & significant_bits (XEXP (x
, 1), mode
)) == 0)
2985 x
= gen_binary (IOR
, mode
, XEXP (x
, 0), XEXP (x
, 1));
2991 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
2992 (and <foo> (const_int pow2-1)) */
2993 if (GET_CODE (XEXP (x
, 1)) == AND
2994 && GET_CODE (XEXP (XEXP (x
, 1), 1)) == CONST_INT
2995 && exact_log2 (- INTVAL (XEXP (XEXP (x
, 1), 1))) >= 0
2996 && rtx_equal_p (XEXP (XEXP (x
, 1), 0), XEXP (x
, 0)))
2998 x
= simplify_and_const_int (NULL_RTX
, mode
, XEXP (x
, 0),
2999 - INTVAL (XEXP (XEXP (x
, 1), 1)) - 1);
3005 /* If we have (mult (plus A B) C), apply the distributive law and then
3006 the inverse distributive law to see if things simplify. This
3007 occurs mostly in addresses, often when unrolling loops. */
3009 if (GET_CODE (XEXP (x
, 0)) == PLUS
)
3011 x
= apply_distributive_law
3012 (gen_binary (PLUS
, mode
,
3013 gen_binary (MULT
, mode
,
3014 XEXP (XEXP (x
, 0), 0), XEXP (x
, 1)),
3015 gen_binary (MULT
, mode
,
3016 XEXP (XEXP (x
, 0), 1), XEXP (x
, 1))));
3018 if (GET_CODE (x
) != MULT
)
3022 /* If this is multiplication by a power of two and its first operand is
3023 a shift, treat the multiply as a shift to allow the shifts to
3024 possibly combine. */
3025 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
3026 && (i
= exact_log2 (INTVAL (XEXP (x
, 1)))) >= 0
3027 && (GET_CODE (XEXP (x
, 0)) == ASHIFT
3028 || GET_CODE (XEXP (x
, 0)) == LSHIFTRT
3029 || GET_CODE (XEXP (x
, 0)) == ASHIFTRT
3030 || GET_CODE (XEXP (x
, 0)) == ROTATE
3031 || GET_CODE (XEXP (x
, 0)) == ROTATERT
))
3033 x
= simplify_shift_const (NULL_RTX
, ASHIFT
, mode
, XEXP (x
, 0), i
);
3037 /* Convert (mult (ashift (const_int 1) A) B) to (ashift B A). */
3038 if (GET_CODE (XEXP (x
, 0)) == ASHIFT
3039 && XEXP (XEXP (x
, 0), 0) == const1_rtx
)
3040 return gen_rtx_combine (ASHIFT
, mode
, XEXP (x
, 1),
3041 XEXP (XEXP (x
, 0), 1));
3045 /* If this is a divide by a power of two, treat it as a shift if
3046 its first operand is a shift. */
3047 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
3048 && (i
= exact_log2 (INTVAL (XEXP (x
, 1)))) >= 0
3049 && (GET_CODE (XEXP (x
, 0)) == ASHIFT
3050 || GET_CODE (XEXP (x
, 0)) == LSHIFTRT
3051 || GET_CODE (XEXP (x
, 0)) == ASHIFTRT
3052 || GET_CODE (XEXP (x
, 0)) == ROTATE
3053 || GET_CODE (XEXP (x
, 0)) == ROTATERT
))
3055 x
= simplify_shift_const (NULL_RTX
, LSHIFTRT
, mode
, XEXP (x
, 0), i
);
3061 case GT
: case GTU
: case GE
: case GEU
:
3062 case LT
: case LTU
: case LE
: case LEU
:
3063 /* If the first operand is a condition code, we can't do anything
3065 if (GET_CODE (XEXP (x
, 0)) == COMPARE
3066 || (GET_MODE_CLASS (GET_MODE (XEXP (x
, 0))) != MODE_CC
3068 && XEXP (x
, 0) != cc0_rtx
3072 rtx op0
= XEXP (x
, 0);
3073 rtx op1
= XEXP (x
, 1);
3074 enum rtx_code new_code
;
3076 if (GET_CODE (op0
) == COMPARE
)
3077 op1
= XEXP (op0
, 1), op0
= XEXP (op0
, 0);
3079 /* Simplify our comparison, if possible. */
3080 new_code
= simplify_comparison (code
, &op0
, &op1
);
3082 #if STORE_FLAG_VALUE == 1
3083 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3084 if only the low-order bit is significant in X (such as when
3085 X is a ZERO_EXTRACT of one bit). Similarly, we can convert
3087 if (new_code
== NE
&& GET_MODE_CLASS (mode
) == MODE_INT
3088 && op1
== const0_rtx
3089 && significant_bits (op0
, GET_MODE (op0
)) == 1)
3090 return gen_lowpart_for_combine (mode
, op0
);
3091 else if (new_code
== EQ
&& GET_MODE_CLASS (mode
) == MODE_INT
3092 && op1
== const0_rtx
3093 && significant_bits (op0
, GET_MODE (op0
)) == 1)
3094 return gen_rtx_combine (XOR
, mode
,
3095 gen_lowpart_for_combine (mode
, op0
),
3099 #if STORE_FLAG_VALUE == -1
3100 /* If STORE_FLAG_VALUE is -1, we can convert (ne x 0)
3101 to (neg x) if only the low-order bit of X is significant.
3102 This converts (ne (zero_extract X 1 Y) 0) to
3103 (sign_extract X 1 Y). */
3104 if (new_code
== NE
&& GET_MODE_CLASS (mode
) == MODE_INT
3105 && op1
== const0_rtx
3106 && significant_bits (op0
, GET_MODE (op0
)) == 1)
3108 x
= gen_rtx_combine (NEG
, mode
,
3109 gen_lowpart_for_combine (mode
, op0
));
3114 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3115 one significant bit, we can convert (ne x 0) to (ashift x c)
3116 where C puts the bit in the sign bit. Remove any AND with
3117 STORE_FLAG_VALUE when we are done, since we are only going to
3118 test the sign bit. */
3119 if (new_code
== NE
&& GET_MODE_CLASS (mode
) == MODE_INT
3120 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
3121 && (STORE_FLAG_VALUE
3122 == (HOST_WIDE_INT
) 1 << (GET_MODE_BITSIZE (mode
) - 1))
3123 && op1
== const0_rtx
3124 && mode
== GET_MODE (op0
)
3125 && (i
= exact_log2 (significant_bits (op0
, GET_MODE (op0
)))) >= 0)
3127 x
= simplify_shift_const (NULL_RTX
, ASHIFT
, mode
, op0
,
3128 GET_MODE_BITSIZE (mode
) - 1 - i
);
3129 if (GET_CODE (x
) == AND
&& XEXP (x
, 1) == const_true_rtx
)
3135 /* If the code changed, return a whole new comparison. */
3136 if (new_code
!= code
)
3137 return gen_rtx_combine (new_code
, mode
, op0
, op1
);
3139 /* Otherwise, keep this operation, but maybe change its operands.
3140 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3141 SUBST (XEXP (x
, 0), op0
);
3142 SUBST (XEXP (x
, 1), op1
);
3147 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3148 reversed, do so to avoid needing two sets of patterns for
3149 subtract-and-branch insns. */
3150 if (XEXP (x
, 1) == pc_rtx
3151 && GET_RTX_CLASS (GET_CODE (XEXP (x
, 0))) == '<'
3152 && reversible_comparison_p (XEXP (x
, 0)))
3155 gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x
, 0))),
3156 GET_MODE (XEXP (x
, 0)),
3157 XEXP (XEXP (x
, 0), 0),
3158 XEXP (XEXP (x
, 0), 1)));
3159 SUBST (XEXP (x
, 1), XEXP (x
, 2));
3160 SUBST (XEXP (x
, 2), pc_rtx
);
3168 /* If we are processing SET_DEST, we are done. */
3172 x
= expand_compound_operation (x
);
3173 if (GET_CODE (x
) != code
)
3178 /* (set (pc) (return)) gets written as (return). */
3179 if (GET_CODE (SET_DEST (x
)) == PC
&& GET_CODE (SET_SRC (x
)) == RETURN
)
3182 /* Convert this into a field assignment operation, if possible. */
3183 x
= make_field_assignment (x
);
3185 /* If we are setting CC0 or if the source is a COMPARE, look for the
3186 use of the comparison result and try to simplify it unless we already
3187 have used undobuf.other_insn. */
3188 if ((GET_CODE (SET_SRC (x
)) == COMPARE
3190 || SET_DEST (x
) == cc0_rtx
3193 && (cc_use
= find_single_use (SET_DEST (x
), subst_insn
,
3195 && (undobuf
.other_insn
== 0 || other_insn
== undobuf
.other_insn
)
3196 && GET_RTX_CLASS (GET_CODE (*cc_use
)) == '<'
3197 && XEXP (*cc_use
, 0) == SET_DEST (x
))
3199 enum rtx_code old_code
= GET_CODE (*cc_use
);
3200 enum rtx_code new_code
;
3202 int other_changed
= 0;
3203 enum machine_mode compare_mode
= GET_MODE (SET_DEST (x
));
3205 if (GET_CODE (SET_SRC (x
)) == COMPARE
)
3206 op0
= XEXP (SET_SRC (x
), 0), op1
= XEXP (SET_SRC (x
), 1);
3208 op0
= SET_SRC (x
), op1
= const0_rtx
;
3210 /* Simplify our comparison, if possible. */
3211 new_code
= simplify_comparison (old_code
, &op0
, &op1
);
3213 #if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
3214 /* If this machine has CC modes other than CCmode, check to see
3215 if we need to use a different CC mode here. */
3216 compare_mode
= SELECT_CC_MODE (new_code
, op0
, op1
);
3218 /* If the mode changed, we have to change SET_DEST, the mode
3219 in the compare, and the mode in the place SET_DEST is used.
3220 If SET_DEST is a hard register, just build new versions with
3221 the proper mode. If it is a pseudo, we lose unless it is only
3222 time we set the pseudo, in which case we can safely change
3224 if (compare_mode
!= GET_MODE (SET_DEST (x
)))
3226 int regno
= REGNO (SET_DEST (x
));
3227 rtx new_dest
= gen_rtx (REG
, compare_mode
, regno
);
3229 if (regno
< FIRST_PSEUDO_REGISTER
3230 || (reg_n_sets
[regno
] == 1
3231 && ! REG_USERVAR_P (SET_DEST (x
))))
3233 if (regno
>= FIRST_PSEUDO_REGISTER
)
3234 SUBST (regno_reg_rtx
[regno
], new_dest
);
3236 SUBST (SET_DEST (x
), new_dest
);
3237 SUBST (XEXP (*cc_use
, 0), new_dest
);
3243 /* If the code changed, we have to build a new comparison
3244 in undobuf.other_insn. */
3245 if (new_code
!= old_code
)
3249 SUBST (*cc_use
, gen_rtx_combine (new_code
, GET_MODE (*cc_use
),
3250 SET_DEST (x
), const0_rtx
));
3252 /* If the only change we made was to change an EQ into an
3253 NE or vice versa, OP0 has only one significant bit,
3254 and OP1 is zero, check if changing the user of the condition
3255 code will produce a valid insn. If it won't, we can keep
3256 the original code in that insn by surrounding our operation
3259 if (((old_code
== NE
&& new_code
== EQ
)
3260 || (old_code
== EQ
&& new_code
== NE
))
3261 && ! other_changed
&& op1
== const0_rtx
3262 && (GET_MODE_BITSIZE (GET_MODE (op0
))
3263 <= HOST_BITS_PER_WIDE_INT
)
3264 && (exact_log2 (mask
= significant_bits (op0
,
3268 rtx pat
= PATTERN (other_insn
), note
= 0;
3270 if ((recog_for_combine (&pat
, undobuf
.other_insn
, ¬e
) < 0
3271 && ! check_asm_operands (pat
)))
3273 PUT_CODE (*cc_use
, old_code
);
3276 op0
= gen_binary (XOR
, GET_MODE (op0
), op0
,
3285 undobuf
.other_insn
= other_insn
;
3288 /* If we are now comparing against zero, change our source if
3289 needed. If we do not use cc0, we always have a COMPARE. */
3290 if (op1
== const0_rtx
&& SET_DEST (x
) == cc0_rtx
)
3291 SUBST (SET_SRC (x
), op0
);
3295 /* Otherwise, if we didn't previously have a COMPARE in the
3296 correct mode, we need one. */
3297 if (GET_CODE (SET_SRC (x
)) != COMPARE
3298 || GET_MODE (SET_SRC (x
)) != compare_mode
)
3299 SUBST (SET_SRC (x
), gen_rtx_combine (COMPARE
, compare_mode
,
3303 /* Otherwise, update the COMPARE if needed. */
3304 SUBST (XEXP (SET_SRC (x
), 0), op0
);
3305 SUBST (XEXP (SET_SRC (x
), 1), op1
);
3310 /* Get SET_SRC in a form where we have placed back any
3311 compound expressions. Then do the checks below. */
3312 temp
= make_compound_operation (SET_SRC (x
), SET
);
3313 SUBST (SET_SRC (x
), temp
);
3316 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some
3317 operation, and X being a REG or (subreg (reg)), we may be able to
3318 convert this to (set (subreg:m2 x) (op)).
3320 We can always do this if M1 is narrower than M2 because that
3321 means that we only care about the low bits of the result.
3323 However, on most machines (those with BYTE_LOADS_ZERO_EXTEND
3324 not defined), we cannot perform a narrower operation than
3325 requested since the high-order bits will be undefined. On
3326 machines where BYTE_LOADS_ZERO_EXTEND is defined, however, this
3327 transformation is safe as long as M1 and M2 have the same number
3330 if (GET_CODE (SET_SRC (x
)) == SUBREG
3331 && subreg_lowpart_p (SET_SRC (x
))
3332 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x
)))) != 'o'
3333 && (((GET_MODE_SIZE (GET_MODE (SET_SRC (x
))) + (UNITS_PER_WORD
- 1))
3335 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x
))))
3336 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
))
3337 #ifndef BYTE_LOADS_ZERO_EXTEND
3338 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x
)))
3339 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x
)))))
3341 && (GET_CODE (SET_DEST (x
)) == REG
3342 || (GET_CODE (SET_DEST (x
)) == SUBREG
3343 && GET_CODE (SUBREG_REG (SET_DEST (x
))) == REG
)))
3345 /* Get the object that will be the SUBREG_REG of the
3346 SUBREG we are making. Note that SUBREG_WORD will always
3347 be zero because this will either be a paradoxical SUBREG
3348 or a SUBREG with the same number of words on the outside and
3350 rtx object
= (GET_CODE (SET_DEST (x
)) == REG
? SET_DEST (x
)
3351 : SUBREG_REG (SET_DEST (x
)));
3353 SUBST (SET_DEST (x
),
3354 gen_rtx (SUBREG
, GET_MODE (SUBREG_REG (SET_SRC (x
))),
3356 SUBST (SET_SRC (x
), SUBREG_REG (SET_SRC (x
)));
3359 #ifdef BYTE_LOADS_ZERO_EXTEND
3360 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with
3361 M wider than N, this would require a paradoxical subreg.
3362 Replace the subreg with a zero_extend to avoid the reload that
3363 would otherwise be required. */
3364 if (GET_CODE (SET_SRC (x
)) == SUBREG
3365 && subreg_lowpart_p (SET_SRC (x
))
3366 && SUBREG_WORD (SET_SRC (x
)) == 0
3367 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x
)))
3368 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x
)))))
3369 && GET_CODE (SUBREG_REG (SET_SRC (x
))) == MEM
)
3370 SUBST (SET_SRC (x
), gen_rtx_combine (ZERO_EXTEND
,
3371 GET_MODE (SET_SRC (x
)),
3372 XEXP (SET_SRC (x
), 0)));
3378 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3380 x
= simplify_and_const_int (x
, mode
, XEXP (x
, 0),
3381 INTVAL (XEXP (x
, 1)));
3383 /* If we have (ior (and (X C1) C2)) and the next restart would be
3384 the last, simplify this by making C1 as small as possible
3386 if (n_restarts
>= 3 && GET_CODE (x
) == IOR
3387 && GET_CODE (XEXP (x
, 0)) == AND
3388 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3389 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3391 temp
= gen_binary (AND
, mode
, XEXP (XEXP (x
, 0), 0),
3392 GEN_INT (INTVAL (XEXP (XEXP (x
, 0), 1))
3393 & ~ INTVAL (XEXP (x
, 1))));
3394 return gen_binary (IOR
, mode
, temp
, XEXP (x
, 1));
3397 if (GET_CODE (x
) != AND
)
3401 /* Convert (A | B) & A to A. */
3402 if (GET_CODE (XEXP (x
, 0)) == IOR
3403 && (rtx_equal_p (XEXP (XEXP (x
, 0), 0), XEXP (x
, 1))
3404 || rtx_equal_p (XEXP (XEXP (x
, 0), 1), XEXP (x
, 1)))
3405 && ! side_effects_p (XEXP (XEXP (x
, 0), 0))
3406 && ! side_effects_p (XEXP (XEXP (x
, 0), 1)))
3409 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
3410 insn (and may simplify more). */
3411 else if (GET_CODE (XEXP (x
, 0)) == XOR
3412 && rtx_equal_p (XEXP (XEXP (x
, 0), 0), XEXP (x
, 1))
3413 && ! side_effects_p (XEXP (x
, 1)))
3415 x
= gen_binary (AND
, mode
,
3416 gen_unary (NOT
, mode
, XEXP (XEXP (x
, 0), 1)),
3420 else if (GET_CODE (XEXP (x
, 0)) == XOR
3421 && rtx_equal_p (XEXP (XEXP (x
, 0), 1), XEXP (x
, 1))
3422 && ! side_effects_p (XEXP (x
, 1)))
3424 x
= gen_binary (AND
, mode
,
3425 gen_unary (NOT
, mode
, XEXP (XEXP (x
, 0), 0)),
3430 /* Similarly for (~ (A ^ B)) & A. */
3431 else if (GET_CODE (XEXP (x
, 0)) == NOT
3432 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == XOR
3433 && rtx_equal_p (XEXP (XEXP (XEXP (x
, 0), 0), 0), XEXP (x
, 1))
3434 && ! side_effects_p (XEXP (x
, 1)))
3436 x
= gen_binary (AND
, mode
, XEXP (XEXP (XEXP (x
, 0), 0), 1),
3440 else if (GET_CODE (XEXP (x
, 0)) == NOT
3441 && GET_CODE (XEXP (XEXP (x
, 0), 0)) == XOR
3442 && rtx_equal_p (XEXP (XEXP (XEXP (x
, 0), 0), 1), XEXP (x
, 1))
3443 && ! side_effects_p (XEXP (x
, 1)))
3445 x
= gen_binary (AND
, mode
, XEXP (XEXP (XEXP (x
, 0), 0), 0),
3450 /* In the follow group of tests (and those in case IOR below),
3451 we start with some combination of logical operations and apply
3452 the distributive law followed by the inverse distributive law.
3453 Most of the time, this results in no change. However, if some of
3454 the operands are the same or inverses of each other, simplifications
3457 For example, (and (ior A B) (not B)) can occur as the result of
3458 expanding a bit field assignment. When we apply the distributive
3459 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
3460 which then simplifies to (and (A (not B))). */
3462 /* If we have (and (ior A B) C), apply the distributive law and then
3463 the inverse distributive law to see if things simplify. */
3465 if (GET_CODE (XEXP (x
, 0)) == IOR
|| GET_CODE (XEXP (x
, 0)) == XOR
)
3467 x
= apply_distributive_law
3468 (gen_binary (GET_CODE (XEXP (x
, 0)), mode
,
3469 gen_binary (AND
, mode
,
3470 XEXP (XEXP (x
, 0), 0), XEXP (x
, 1)),
3471 gen_binary (AND
, mode
,
3472 XEXP (XEXP (x
, 0), 1), XEXP (x
, 1))));
3473 if (GET_CODE (x
) != AND
)
3477 if (GET_CODE (XEXP (x
, 1)) == IOR
|| GET_CODE (XEXP (x
, 1)) == XOR
)
3479 x
= apply_distributive_law
3480 (gen_binary (GET_CODE (XEXP (x
, 1)), mode
,
3481 gen_binary (AND
, mode
,
3482 XEXP (XEXP (x
, 1), 0), XEXP (x
, 0)),
3483 gen_binary (AND
, mode
,
3484 XEXP (XEXP (x
, 1), 1), XEXP (x
, 0))));
3485 if (GET_CODE (x
) != AND
)
3489 /* Similarly, taking advantage of the fact that
3490 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
3492 if (GET_CODE (XEXP (x
, 0)) == NOT
&& GET_CODE (XEXP (x
, 1)) == XOR
)
3494 x
= apply_distributive_law
3495 (gen_binary (XOR
, mode
,
3496 gen_binary (IOR
, mode
, XEXP (XEXP (x
, 0), 0),
3497 XEXP (XEXP (x
, 1), 0)),
3498 gen_binary (IOR
, mode
, XEXP (XEXP (x
, 0), 0),
3499 XEXP (XEXP (x
, 1), 1))));
3500 if (GET_CODE (x
) != AND
)
3504 else if (GET_CODE (XEXP (x
, 1)) == NOT
&& GET_CODE (XEXP (x
, 0)) == XOR
)
3506 x
= apply_distributive_law
3507 (gen_binary (XOR
, mode
,
3508 gen_binary (IOR
, mode
, XEXP (XEXP (x
, 1), 0),
3509 XEXP (XEXP (x
, 0), 0)),
3510 gen_binary (IOR
, mode
, XEXP (XEXP (x
, 1), 0),
3511 XEXP (XEXP (x
, 0), 1))));
3512 if (GET_CODE (x
) != AND
)
3518 /* Convert (A & B) | A to A. */
3519 if (GET_CODE (XEXP (x
, 0)) == AND
3520 && (rtx_equal_p (XEXP (XEXP (x
, 0), 0), XEXP (x
, 1))
3521 || rtx_equal_p (XEXP (XEXP (x
, 0), 1), XEXP (x
, 1)))
3522 && ! side_effects_p (XEXP (XEXP (x
, 0), 0))
3523 && ! side_effects_p (XEXP (XEXP (x
, 0), 1)))
3526 /* If we have (ior (and A B) C), apply the distributive law and then
3527 the inverse distributive law to see if things simplify. */
3529 if (GET_CODE (XEXP (x
, 0)) == AND
)
3531 x
= apply_distributive_law
3532 (gen_binary (AND
, mode
,
3533 gen_binary (IOR
, mode
,
3534 XEXP (XEXP (x
, 0), 0), XEXP (x
, 1)),
3535 gen_binary (IOR
, mode
,
3536 XEXP (XEXP (x
, 0), 1), XEXP (x
, 1))));
3538 if (GET_CODE (x
) != IOR
)
3542 if (GET_CODE (XEXP (x
, 1)) == AND
)
3544 x
= apply_distributive_law
3545 (gen_binary (AND
, mode
,
3546 gen_binary (IOR
, mode
,
3547 XEXP (XEXP (x
, 1), 0), XEXP (x
, 0)),
3548 gen_binary (IOR
, mode
,
3549 XEXP (XEXP (x
, 1), 1), XEXP (x
, 0))));
3551 if (GET_CODE (x
) != IOR
)
3555 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
3556 mode size to (rotate A CX). */
3558 if (((GET_CODE (XEXP (x
, 0)) == ASHIFT
3559 && GET_CODE (XEXP (x
, 1)) == LSHIFTRT
)
3560 || (GET_CODE (XEXP (x
, 1)) == ASHIFT
3561 && GET_CODE (XEXP (x
, 0)) == LSHIFTRT
))
3562 && rtx_equal_p (XEXP (XEXP (x
, 0), 0), XEXP (XEXP (x
, 1), 0))
3563 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3564 && GET_CODE (XEXP (XEXP (x
, 1), 1)) == CONST_INT
3565 && (INTVAL (XEXP (XEXP (x
, 0), 1)) + INTVAL (XEXP (XEXP (x
, 1), 1))
3566 == GET_MODE_BITSIZE (mode
)))
3570 if (GET_CODE (XEXP (x
, 0)) == ASHIFT
)
3571 shift_count
= XEXP (XEXP (x
, 0), 1);
3573 shift_count
= XEXP (XEXP (x
, 1), 1);
3574 x
= gen_rtx (ROTATE
, mode
, XEXP (XEXP (x
, 0), 0), shift_count
);
3580 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
3581 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
3584 int num_negated
= 0;
3585 rtx in1
= XEXP (x
, 0), in2
= XEXP (x
, 1);
3587 if (GET_CODE (in1
) == NOT
)
3588 num_negated
++, in1
= XEXP (in1
, 0);
3589 if (GET_CODE (in2
) == NOT
)
3590 num_negated
++, in2
= XEXP (in2
, 0);
3592 if (num_negated
== 2)
3594 SUBST (XEXP (x
, 0), XEXP (XEXP (x
, 0), 0));
3595 SUBST (XEXP (x
, 1), XEXP (XEXP (x
, 1), 0));
3597 else if (num_negated
== 1)
3598 return gen_rtx_combine (NOT
, mode
,
3599 gen_rtx_combine (XOR
, mode
, in1
, in2
));
3602 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
3603 correspond to a machine insn or result in further simplifications
3604 if B is a constant. */
3606 if (GET_CODE (XEXP (x
, 0)) == AND
3607 && rtx_equal_p (XEXP (XEXP (x
, 0), 1), XEXP (x
, 1))
3608 && ! side_effects_p (XEXP (x
, 1)))
3610 x
= gen_binary (AND
, mode
,
3611 gen_unary (NOT
, mode
, XEXP (XEXP (x
, 0), 0)),
3615 else if (GET_CODE (XEXP (x
, 0)) == AND
3616 && rtx_equal_p (XEXP (XEXP (x
, 0), 0), XEXP (x
, 1))
3617 && ! side_effects_p (XEXP (x
, 1)))
3619 x
= gen_binary (AND
, mode
,
3620 gen_unary (NOT
, mode
, XEXP (XEXP (x
, 0), 1)),
3626 #if STORE_FLAG_VALUE == 1
3627 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
3629 if (XEXP (x
, 1) == const1_rtx
3630 && GET_RTX_CLASS (GET_CODE (XEXP (x
, 0))) == '<'
3631 && reversible_comparison_p (XEXP (x
, 0)))
3632 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x
, 0))),
3633 mode
, XEXP (XEXP (x
, 0), 0),
3634 XEXP (XEXP (x
, 0), 1));
3637 /* (xor (comparison foo bar) (const_int sign-bit))
3638 when STORE_FLAG_VALUE is the sign bit. */
3639 if (GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
3640 && (STORE_FLAG_VALUE
3641 == (HOST_WIDE_INT
) 1 << (GET_MODE_BITSIZE (mode
) - 1))
3642 && XEXP (x
, 1) == const_true_rtx
3643 && GET_RTX_CLASS (GET_CODE (XEXP (x
, 0))) == '<'
3644 && reversible_comparison_p (XEXP (x
, 0)))
3645 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x
, 0))),
3646 mode
, XEXP (XEXP (x
, 0), 0),
3647 XEXP (XEXP (x
, 0), 1));
3651 /* (abs (neg <foo>)) -> (abs <foo>) */
3652 if (GET_CODE (XEXP (x
, 0)) == NEG
)
3653 SUBST (XEXP (x
, 0), XEXP (XEXP (x
, 0), 0));
3655 /* If operand is something known to be positive, ignore the ABS. */
3656 if (GET_CODE (XEXP (x
, 0)) == FFS
|| GET_CODE (XEXP (x
, 0)) == ABS
3657 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0)))
3658 <= HOST_BITS_PER_WIDE_INT
)
3659 && ((significant_bits (XEXP (x
, 0), GET_MODE (XEXP (x
, 0)))
3660 & ((HOST_WIDE_INT
) 1
3661 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0))) - 1)))
3666 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
3667 if ((GET_CODE (XEXP (x
, 0)) == SIGN_EXTRACT
3668 && XEXP (XEXP (x
, 0), 1) == const1_rtx
)
3669 || (GET_CODE (XEXP (x
, 0)) == ASHIFTRT
3670 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
3671 && INTVAL (XEXP (XEXP (x
, 0), 1)) == GET_MODE_BITSIZE (mode
) - 1)
3672 || ((temp
= get_last_value (XEXP (x
, 0))) != 0
3673 && ((GET_CODE (temp
) == SIGN_EXTRACT
3674 && XEXP (temp
, 1) == const1_rtx
)
3675 || (GET_CODE (temp
) == ASHIFTRT
3676 && GET_CODE (XEXP (temp
, 1)) == CONST_INT
3677 && (INTVAL (XEXP (temp
, 1))
3678 == GET_MODE_BITSIZE (mode
) - 1)))))
3680 x
= gen_rtx_combine (NEG
, mode
, XEXP (x
, 0));
3686 /* (ffs (*_extend <X>)) = (ffs <X>) */
3687 if (GET_CODE (XEXP (x
, 0)) == SIGN_EXTEND
3688 || GET_CODE (XEXP (x
, 0)) == ZERO_EXTEND
)
3689 SUBST (XEXP (x
, 0), XEXP (XEXP (x
, 0), 0));
3693 /* (float (sign_extend <X>)) = (float <X>). */
3694 if (GET_CODE (XEXP (x
, 0)) == SIGN_EXTEND
)
3695 SUBST (XEXP (x
, 0), XEXP (XEXP (x
, 0), 0));
3704 /* If this is a shift by a constant amount, simplify it. */
3705 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
3707 x
= simplify_shift_const (x
, code
, mode
, XEXP (x
, 0),
3708 INTVAL (XEXP (x
, 1)));
3709 if (GET_CODE (x
) != code
)
3713 #ifdef SHIFT_COUNT_TRUNCATED
3714 else if (GET_CODE (XEXP (x
, 1)) != REG
)
3716 force_to_mode (XEXP (x
, 1), GET_MODE (x
),
3717 exact_log2 (GET_MODE_BITSIZE (GET_MODE (x
))),
3727 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
3728 operations" because they can be replaced with two more basic operations.
3729 ZERO_EXTEND is also considered "compound" because it can be replaced with
3730 an AND operation, which is simpler, though only one operation.
3732 The function expand_compound_operation is called with an rtx expression
3733 and will convert it to the appropriate shifts and AND operations,
3734 simplifying at each stage.
3736 The function make_compound_operation is called to convert an expression
3737 consisting of shifts and ANDs into the equivalent compound expression.
3738 It is the inverse of this function, loosely speaking. */
3741 expand_compound_operation (x
)
3749 switch (GET_CODE (x
))
3754 /* We can't necessarily use a const_int for a multiword mode;
3755 it depends on implicitly extending the value.
3756 Since we don't know the right way to extend it,
3757 we can't tell whether the implicit way is right.
3759 Even for a mode that is no wider than a const_int,
3760 we can't win, because we need to sign extend one of its bits through
3761 the rest of it, and we don't know which bit. */
3762 if (GET_CODE (XEXP (x
, 0)) == CONST_INT
)
3765 if (! FAKE_EXTEND_SAFE_P (GET_MODE (XEXP (x
, 0)), XEXP (x
, 0)))
3768 len
= GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0)));
3769 /* If the inner object has VOIDmode (the only way this can happen
3770 is if it is a ASM_OPERANDS), we can't do anything since we don't
3771 know how much masking to do. */
3780 /* If the operand is a CLOBBER, just return it. */
3781 if (GET_CODE (XEXP (x
, 0)) == CLOBBER
)
3784 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
3785 || GET_CODE (XEXP (x
, 2)) != CONST_INT
3786 || GET_MODE (XEXP (x
, 0)) == VOIDmode
)
3789 len
= INTVAL (XEXP (x
, 1));
3790 pos
= INTVAL (XEXP (x
, 2));
3792 /* If this goes outside the object being extracted, replace the object
3793 with a (use (mem ...)) construct that only combine understands
3794 and is used only for this purpose. */
3795 if (len
+ pos
> GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0))))
3796 SUBST (XEXP (x
, 0), gen_rtx (USE
, GET_MODE (x
), XEXP (x
, 0)));
3799 pos
= GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0))) - len
- pos
;
3807 /* If we reach here, we want to return a pair of shifts. The inner
3808 shift is a left shift of BITSIZE - POS - LEN bits. The outer
3809 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
3810 logical depending on the value of UNSIGNEDP.
3812 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
3813 converted into an AND of a shift.
3815 We must check for the case where the left shift would have a negative
3816 count. This can happen in a case like (x >> 31) & 255 on machines
3817 that can't shift by a constant. On those machines, we would first
3818 combine the shift with the AND to produce a variable-position
3819 extraction. Then the constant of 31 would be substituted in to produce
3820 such a position. */
3822 modewidth
= GET_MODE_BITSIZE (GET_MODE (x
));
3823 if (modewidth
>= pos
- len
)
3824 tem
= simplify_shift_const (NULL_RTX
, unsignedp
? LSHIFTRT
: ASHIFTRT
,
3826 simplify_shift_const (NULL_RTX
, ASHIFT
,
3829 modewidth
- pos
- len
),
3832 else if (unsignedp
&& len
< HOST_BITS_PER_WIDE_INT
)
3833 tem
= simplify_and_const_int (NULL_RTX
, GET_MODE (x
),
3834 simplify_shift_const (NULL_RTX
, LSHIFTRT
,
3837 ((HOST_WIDE_INT
) 1 << len
) - 1);
3839 /* Any other cases we can't handle. */
3843 /* If we couldn't do this for some reason, return the original
3845 if (GET_CODE (tem
) == CLOBBER
)
3851 /* X is a SET which contains an assignment of one object into
3852 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
3853 or certain SUBREGS). If possible, convert it into a series of
3856 We half-heartedly support variable positions, but do not at all
3857 support variable lengths. */
3860 expand_field_assignment (x
)
3864 rtx pos
; /* Always counts from low bit. */
3867 enum machine_mode compute_mode
;
3869 /* Loop until we find something we can't simplify. */
3872 if (GET_CODE (SET_DEST (x
)) == STRICT_LOW_PART
3873 && GET_CODE (XEXP (SET_DEST (x
), 0)) == SUBREG
)
3875 inner
= SUBREG_REG (XEXP (SET_DEST (x
), 0));
3876 len
= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x
), 0)));
3879 else if (GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
3880 && GET_CODE (XEXP (SET_DEST (x
), 1)) == CONST_INT
)
3882 inner
= XEXP (SET_DEST (x
), 0);
3883 len
= INTVAL (XEXP (SET_DEST (x
), 1));
3884 pos
= XEXP (SET_DEST (x
), 2);
3886 /* If the position is constant and spans the width of INNER,
3887 surround INNER with a USE to indicate this. */
3888 if (GET_CODE (pos
) == CONST_INT
3889 && INTVAL (pos
) + len
> GET_MODE_BITSIZE (GET_MODE (inner
)))
3890 inner
= gen_rtx (USE
, GET_MODE (SET_DEST (x
)), inner
);
3893 if (GET_CODE (pos
) == CONST_INT
)
3894 pos
= GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner
)) - len
3896 else if (GET_CODE (pos
) == MINUS
3897 && GET_CODE (XEXP (pos
, 1)) == CONST_INT
3898 && (INTVAL (XEXP (pos
, 1))
3899 == GET_MODE_BITSIZE (GET_MODE (inner
)) - len
))
3900 /* If position is ADJUST - X, new position is X. */
3901 pos
= XEXP (pos
, 0);
3903 pos
= gen_binary (MINUS
, GET_MODE (pos
),
3904 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner
))
3910 /* A SUBREG between two modes that occupy the same numbers of words
3911 can be done by moving the SUBREG to the source. */
3912 else if (GET_CODE (SET_DEST (x
)) == SUBREG
3913 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x
)))
3914 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)
3915 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x
))))
3916 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)))
3918 x
= gen_rtx (SET
, VOIDmode
, SUBREG_REG (SET_DEST (x
)),
3919 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x
))),
3926 while (GET_CODE (inner
) == SUBREG
&& subreg_lowpart_p (inner
))
3927 inner
= SUBREG_REG (inner
);
3929 compute_mode
= GET_MODE (inner
);
3931 /* Compute a mask of LEN bits, if we can do this on the host machine. */
3932 if (len
< HOST_BITS_PER_WIDE_INT
)
3933 mask
= GEN_INT (((HOST_WIDE_INT
) 1 << len
) - 1);
3937 /* Now compute the equivalent expression. Make a copy of INNER
3938 for the SET_DEST in case it is a MEM into which we will substitute;
3939 we don't want shared RTL in that case. */
3940 x
= gen_rtx (SET
, VOIDmode
, copy_rtx (inner
),
3941 gen_binary (IOR
, compute_mode
,
3942 gen_binary (AND
, compute_mode
,
3943 gen_unary (NOT
, compute_mode
,
3948 gen_binary (ASHIFT
, compute_mode
,
3949 gen_binary (AND
, compute_mode
,
3950 gen_lowpart_for_combine
3960 /* Return an RTX for a reference to LEN bits of INNER. POS is the starting
3961 bit position (counted from the LSB) if >= 0; otherwise POS_RTX represents
3962 the starting bit position.
3964 INNER may be a USE. This will occur when we started with a bitfield
3965 that went outside the boundary of the object in memory, which is
3966 allowed on most machines. To isolate this case, we produce a USE
3967 whose mode is wide enough and surround the MEM with it. The only
3968 code that understands the USE is this routine. If it is not removed,
3969 it will cause the resulting insn not to match.
3971 UNSIGNEDP is non-zero for an unsigned reference and zero for a
3974 IN_DEST is non-zero if this is a reference in the destination of a
3975 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
3976 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
3979 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
3980 ZERO_EXTRACT should be built even for bits starting at bit 0.
3982 MODE is the desired mode of the result (if IN_DEST == 0). */
3985 make_extraction (mode
, inner
, pos
, pos_rtx
, len
,
3986 unsignedp
, in_dest
, in_compare
)
3987 enum machine_mode mode
;
3993 int in_dest
, in_compare
;
3995 enum machine_mode is_mode
= GET_MODE (inner
);
3996 enum machine_mode inner_mode
;
3997 enum machine_mode wanted_mem_mode
= byte_mode
;
3998 enum machine_mode pos_mode
= word_mode
;
3999 enum machine_mode extraction_mode
= word_mode
;
4000 enum machine_mode tmode
= mode_for_size (len
, MODE_INT
, 1);
4004 /* Get some information about INNER and get the innermost object. */
4005 if (GET_CODE (inner
) == USE
)
4006 /* We don't need to adjust the position because we set up the USE
4007 to pretend that it was a full-word object. */
4008 spans_byte
= 1, inner
= XEXP (inner
, 0);
4009 else if (GET_CODE (inner
) == SUBREG
&& subreg_lowpart_p (inner
))
4010 inner
= SUBREG_REG (inner
);
4012 inner_mode
= GET_MODE (inner
);
4014 if (pos_rtx
&& GET_CODE (pos_rtx
) == CONST_INT
)
4015 pos
= INTVAL (pos_rtx
);
4017 /* See if this can be done without an extraction. We never can if the
4018 width of the field is not the same as that of some integer mode. For
4019 registers, we can only avoid the extraction if the position is at the
4020 low-order bit and this is either not in the destination or we have the
4021 appropriate STRICT_LOW_PART operation available.
4023 For MEM, we can avoid an extract if the field starts on an appropriate
4024 boundary and we can change the mode of the memory reference. However,
4025 we cannot directly access the MEM if we have a USE and the underlying
4026 MEM is not TMODE. This combination means that MEM was being used in a
4027 context where bits outside its mode were being referenced; that is only
4028 valid in bit-field insns. */
4030 if (tmode
!= BLKmode
4031 && ! (spans_byte
&& inner_mode
!= tmode
)
4032 && ((pos
== 0 && GET_CODE (inner
) != MEM
4034 || (GET_CODE (inner
) == REG
4035 && (movstrict_optab
->handlers
[(int) tmode
].insn_code
4036 != CODE_FOR_nothing
))))
4037 || (GET_CODE (inner
) == MEM
&& pos
>= 0
4039 % (STRICT_ALIGNMENT
? GET_MODE_ALIGNMENT (tmode
)
4040 : BITS_PER_UNIT
)) == 0
4041 /* We can't do this if we are widening INNER_MODE (it
4042 may not be aligned, for one thing). */
4043 && GET_MODE_BITSIZE (inner_mode
) >= GET_MODE_BITSIZE (tmode
)
4044 && (inner_mode
== tmode
4045 || (! mode_dependent_address_p (XEXP (inner
, 0))
4046 && ! MEM_VOLATILE_P (inner
))))))
4048 int offset
= pos
/ BITS_PER_UNIT
;
4050 /* If INNER is a MEM, make a new MEM that encompasses just the desired
4051 field. If the original and current mode are the same, we need not
4052 adjust the offset. Otherwise, we do if bytes big endian.
4054 If INNER is not a MEM, get a piece consisting of the just the field
4055 of interest (in this case POS must be 0). */
4057 if (GET_CODE (inner
) == MEM
)
4059 #if BYTES_BIG_ENDIAN
4060 if (inner_mode
!= tmode
)
4061 offset
= (GET_MODE_SIZE (inner_mode
)
4062 - GET_MODE_SIZE (tmode
) - offset
);
4065 new = gen_rtx (MEM
, tmode
, plus_constant (XEXP (inner
, 0), offset
));
4066 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner
);
4067 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner
);
4068 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner
);
4070 else if (GET_CODE (inner
) == REG
)
4071 /* We can't call gen_lowpart_for_combine here since we always want
4072 a SUBREG and it would sometimes return a new hard register. */
4073 new = gen_rtx (SUBREG
, tmode
, inner
,
4075 && GET_MODE_SIZE (is_mode
) > UNITS_PER_WORD
)
4076 ? ((GET_MODE_SIZE (is_mode
) - GET_MODE_SIZE (tmode
)
4080 new = gen_lowpart_for_combine (tmode
, inner
);
4082 /* If this extraction is going into the destination of a SET,
4083 make a STRICT_LOW_PART unless we made a MEM. */
4086 return (GET_CODE (new) == MEM
? new
4087 : (GET_CODE (new) != SUBREG
4088 ? gen_rtx (CLOBBER
, tmode
, const0_rtx
)
4089 : gen_rtx_combine (STRICT_LOW_PART
, VOIDmode
, new)));
4091 /* Otherwise, sign- or zero-extend unless we already are in the
4094 return (mode
== tmode
? new
4095 : gen_rtx_combine (unsignedp
? ZERO_EXTEND
: SIGN_EXTEND
,
4099 /* Unless this is in a COMPARE or we have a funny memory reference,
4100 don't do anything with field extracts starting at the low-order
4101 bit since they are simple AND operations. */
4102 if (pos
== 0 && ! in_dest
&& ! in_compare
&& ! spans_byte
)
4105 /* Get the mode to use should INNER be a MEM, the mode for the position,
4106 and the mode for the result. */
4110 wanted_mem_mode
= insn_operand_mode
[(int) CODE_FOR_insv
][0];
4111 pos_mode
= insn_operand_mode
[(int) CODE_FOR_insv
][2];
4112 extraction_mode
= insn_operand_mode
[(int) CODE_FOR_insv
][3];
4117 if (! in_dest
&& unsignedp
)
4119 wanted_mem_mode
= insn_operand_mode
[(int) CODE_FOR_extzv
][1];
4120 pos_mode
= insn_operand_mode
[(int) CODE_FOR_extzv
][3];
4121 extraction_mode
= insn_operand_mode
[(int) CODE_FOR_extzv
][0];
4126 if (! in_dest
&& ! unsignedp
)
4128 wanted_mem_mode
= insn_operand_mode
[(int) CODE_FOR_extv
][1];
4129 pos_mode
= insn_operand_mode
[(int) CODE_FOR_extv
][3];
4130 extraction_mode
= insn_operand_mode
[(int) CODE_FOR_extv
][0];
4134 /* Never narrow an object, since that might not be safe. */
4136 if (mode
!= VOIDmode
4137 && GET_MODE_SIZE (extraction_mode
) < GET_MODE_SIZE (mode
))
4138 extraction_mode
= mode
;
4140 if (pos_rtx
&& GET_MODE (pos_rtx
) != VOIDmode
4141 && GET_MODE_SIZE (pos_mode
) < GET_MODE_SIZE (GET_MODE (pos_rtx
)))
4142 pos_mode
= GET_MODE (pos_rtx
);
4144 /* If this is not from memory or we have to change the mode of memory and
4145 cannot, the desired mode is EXTRACTION_MODE. */
4146 if (GET_CODE (inner
) != MEM
4147 || (inner_mode
!= wanted_mem_mode
4148 && (mode_dependent_address_p (XEXP (inner
, 0))
4149 || MEM_VOLATILE_P (inner
))))
4150 wanted_mem_mode
= extraction_mode
;
4153 /* If position is constant, compute new position. Otherwise, build
4156 pos
= (MAX (GET_MODE_BITSIZE (is_mode
), GET_MODE_BITSIZE (wanted_mem_mode
))
4160 = gen_rtx_combine (MINUS
, GET_MODE (pos_rtx
),
4161 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode
),
4162 GET_MODE_BITSIZE (wanted_mem_mode
))
4167 /* If INNER has a wider mode, make it smaller. If this is a constant
4168 extract, try to adjust the byte to point to the byte containing
4170 if (wanted_mem_mode
!= VOIDmode
4171 && GET_MODE_SIZE (wanted_mem_mode
) < GET_MODE_SIZE (is_mode
)
4172 && ((GET_CODE (inner
) == MEM
4173 && (inner_mode
== wanted_mem_mode
4174 || (! mode_dependent_address_p (XEXP (inner
, 0))
4175 && ! MEM_VOLATILE_P (inner
))))))
4179 /* The computations below will be correct if the machine is big
4180 endian in both bits and bytes or little endian in bits and bytes.
4181 If it is mixed, we must adjust. */
4183 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
4184 if (! spans_byte
&& is_mode
!= wanted_mem_mode
)
4185 offset
= (GET_MODE_SIZE (is_mode
)
4186 - GET_MODE_SIZE (wanted_mem_mode
) - offset
);
4189 /* If bytes are big endian and we had a paradoxical SUBREG, we must
4190 adjust OFFSET to compensate. */
4191 #if BYTES_BIG_ENDIAN
4193 && GET_MODE_SIZE (inner_mode
) < GET_MODE_SIZE (is_mode
))
4194 offset
-= GET_MODE_SIZE (is_mode
) - GET_MODE_SIZE (inner_mode
);
4197 /* If this is a constant position, we can move to the desired byte. */
4200 offset
+= pos
/ BITS_PER_UNIT
;
4201 pos
%= GET_MODE_BITSIZE (wanted_mem_mode
);
4204 if (offset
!= 0 || inner_mode
!= wanted_mem_mode
)
4206 rtx newmem
= gen_rtx (MEM
, wanted_mem_mode
,
4207 plus_constant (XEXP (inner
, 0), offset
));
4208 RTX_UNCHANGING_P (newmem
) = RTX_UNCHANGING_P (inner
);
4209 MEM_VOLATILE_P (newmem
) = MEM_VOLATILE_P (inner
);
4210 MEM_IN_STRUCT_P (newmem
) = MEM_IN_STRUCT_P (inner
);
4215 /* If INNER is not memory, we can always get it into the proper mode. */
4216 else if (GET_CODE (inner
) != MEM
)
4217 inner
= gen_lowpart_for_combine (extraction_mode
, inner
);
4219 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
4220 have to zero extend. Otherwise, we can just use a SUBREG. */
4222 && GET_MODE_SIZE (pos_mode
) > GET_MODE_SIZE (GET_MODE (pos_rtx
)))
4223 pos_rtx
= gen_rtx_combine (ZERO_EXTEND
, pos_mode
, pos_rtx
);
4225 && GET_MODE_SIZE (pos_mode
) < GET_MODE_SIZE (GET_MODE (pos_rtx
)))
4226 pos_rtx
= gen_lowpart_for_combine (pos_mode
, pos_rtx
);
4228 /* Make POS_RTX unless we already have it and it is correct. */
4229 if (pos_rtx
== 0 || (pos
>= 0 && INTVAL (pos_rtx
) != pos
))
4230 pos_rtx
= GEN_INT (pos
);
4232 /* Make the required operation. See if we can use existing rtx. */
4233 new = gen_rtx_combine (unsignedp
? ZERO_EXTRACT
: SIGN_EXTRACT
,
4234 extraction_mode
, inner
, GEN_INT (len
), pos_rtx
);
4236 new = gen_lowpart_for_combine (mode
, new);
/* NOTE(review): damaged mechanical extraction -- original lines are split
   mid-token, the leading "NNNN " numbers are fused-in original line
   numbers, and number gaps (e.g. 4282->4288, 4337->4349) show missing
   lines.  Code text preserved byte-for-byte; only comments added.  */
/* Purpose (from the surviving header comment): walk the expression X,
   rewriting shift/AND idioms into compound ZERO_/SIGN_EXTRACT and
   ZERO_/SIGN_EXTEND forms, recursing into operands with NEXT_CODE.  */
4241 /* Look at the expression rooted at X. Look for expressions
4242 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
4243 Form these expressions.
4245 Return the new rtx, usually just X.
4247 Also, for machines like the Vax that don't have logical shift insns,
4248 try to convert logical to arithmetic shift operations in cases where
4249 they are equivalent. This undoes the canonicalizations to logical
4250 shifts done elsewhere.
4252 We try, as much as possible, to re-use rtl expressions to save memory.
4254 IN_CODE says what kind of expression we are processing. Normally, it is
4255 SET. In a memory address (inside a MEM, PLUS or minus, the latter two
4256 being kludges), it is MEM. When processing the arguments of a comparison
4257 or a COMPARE against zero, it is COMPARE. */
4260 make_compound_operation (x
, in_code
)
4262 enum rtx_code in_code
;
4264 enum rtx_code code
= GET_CODE (x
);
4265 enum machine_mode mode
= GET_MODE (x
);
4266 int mode_width
= GET_MODE_BITSIZE (mode
);
4267 enum rtx_code next_code
;
4272 /* Select the code to be used in recursive calls. Once we are inside an
4273 address, we stay there. If we have a comparison, set to COMPARE,
4274 but once inside, go back to our default of SET. */
4276 next_code
= (code
== MEM
|| code
== PLUS
|| code
== MINUS
? MEM
4277 : ((code
== COMPARE
|| GET_RTX_CLASS (code
) == '<')
4278 && XEXP (x
, 1) == const0_rtx
) ? COMPARE
4279 : in_code
== COMPARE
? SET
: in_code
);
4281 /* Process depending on the code of this operation. If NEW is set
4282 non-zero, it will be returned. */
/* NOTE(review): the switch (code) statement and its first case labels
   (original lines ~4284-4287) are missing from this extraction.  */
4288 /* Convert shifts by constants into multiplications if inside
4290 if (in_code
== MEM
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
4291 && INTVAL (XEXP (x
, 1)) < HOST_BITS_PER_WIDE_INT
4292 && INTVAL (XEXP (x
, 1)) >= 0)
4293 new = gen_rtx_combine (MULT
, mode
, XEXP (x
, 0),
4294 GEN_INT ((HOST_WIDE_INT
) 1
4295 << INTVAL (XEXP (x
, 1))));
4299 /* If the second operand is not a constant, we can't do anything
4301 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
)
4304 /* If the constant is a power of two minus one and the first operand
4305 is a logical right shift, make an extraction. */
4306 if (GET_CODE (XEXP (x
, 0)) == LSHIFTRT
4307 && (i
= exact_log2 (INTVAL (XEXP (x
, 1)) + 1)) >= 0)
4308 new = make_extraction (mode
, XEXP (XEXP (x
, 0), 0), -1,
4309 XEXP (XEXP (x
, 0), 1), i
, 1,
4310 0, in_code
== COMPARE
);
4312 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
4313 else if (GET_CODE (XEXP (x
, 0)) == SUBREG
4314 && subreg_lowpart_p (XEXP (x
, 0))
4315 && GET_CODE (SUBREG_REG (XEXP (x
, 0))) == LSHIFTRT
4316 && (i
= exact_log2 (INTVAL (XEXP (x
, 1)) + 1)) >= 0)
4317 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x
, 0))),
4318 XEXP (SUBREG_REG (XEXP (x
, 0)), 0), -1,
4319 XEXP (SUBREG_REG (XEXP (x
, 0)), 1), i
, 1,
4320 0, in_code
== COMPARE
);
4323 /* If we are have (and (rotate X C) M) and C is larger than the number
4324 of bits in M, this is an extraction. */
4326 else if (GET_CODE (XEXP (x
, 0)) == ROTATE
4327 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
4328 && (i
= exact_log2 (INTVAL (XEXP (x
, 1)) + 1)) >= 0
4329 && i
<= INTVAL (XEXP (XEXP (x
, 0), 1)))
4330 new = make_extraction (mode
, XEXP (XEXP (x
, 0), 0),
4331 (GET_MODE_BITSIZE (mode
)
4332 - INTVAL (XEXP (XEXP (x
, 0), 1))),
4333 NULL_RTX
, i
, 1, 0, in_code
== COMPARE
);
4335 /* On machines without logical shifts, if the operand of the AND is
4336 a logical shift and our mask turns off all the propagated sign
4337 bits, we can replace the logical shift with an arithmetic shift. */
4349 && GET_CODE (XEXP (x
, 0)) == LSHIFTRT
4350 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
4351 && INTVAL (XEXP (XEXP (x
, 0), 1)) >= 0
4352 && INTVAL (XEXP (XEXP (x
, 0), 1)) < HOST_BITS_PER_WIDE_INT
4353 && mode_width
<= HOST_BITS_PER_WIDE_INT
)
4355 unsigned HOST_WIDE_INT mask
= GET_MODE_MASK (mode
);
4357 mask
>>= INTVAL (XEXP (XEXP (x
, 0), 1));
4358 if ((INTVAL (XEXP (x
, 1)) & ~mask
) == 0)
4360 gen_rtx_combine (ASHIFTRT
, mode
, XEXP (XEXP (x
, 0), 0),
4361 XEXP (XEXP (x
, 0), 1)));
4364 /* If the constant is one less than a power of two, this might be
4365 representable by an extraction even if no shift is present.
4366 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
4367 we are in a COMPARE. */
4368 else if ((i
= exact_log2 (INTVAL (XEXP (x
, 1)) + 1)) >= 0)
4369 new = make_extraction (mode
, XEXP (x
, 0), 0, NULL_RTX
, i
, 1,
4370 0, in_code
== COMPARE
);
4372 /* If we are in a comparison and this is an AND with a power of two,
4373 convert this into the appropriate bit extract. */
4374 else if (in_code
== COMPARE
4375 && (i
= exact_log2 (INTVAL (XEXP (x
, 1)))) >= 0)
4376 new = make_extraction (mode
, XEXP (x
, 0), i
, NULL_RTX
, 1, 1, 0, 1);
4381 /* If the sign bit is known to be zero, replace this with an
4382 arithmetic shift. */
4394 && mode_width
<= HOST_BITS_PER_WIDE_INT
4395 && (significant_bits (XEXP (x
, 0), mode
)
4396 & (1 << (mode_width
- 1))) == 0)
4398 new = gen_rtx_combine (ASHIFTRT
, mode
, XEXP (x
, 0), XEXP (x
, 1));
4402 /* ... fall through ... */
4405 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
4406 this is a SIGN_EXTRACT. */
4407 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
4408 && GET_CODE (XEXP (x
, 0)) == ASHIFT
4409 && GET_CODE (XEXP (XEXP (x
, 0), 1)) == CONST_INT
4410 && INTVAL (XEXP (x
, 1)) >= INTVAL (XEXP (XEXP (x
, 0), 1)))
4411 new = make_extraction (mode
, XEXP (XEXP (x
, 0), 0),
4412 (INTVAL (XEXP (x
, 1))
4413 - INTVAL (XEXP (XEXP (x
, 0), 1))),
4414 NULL_RTX
, mode_width
- INTVAL (XEXP (x
, 1)),
4415 code
== LSHIFTRT
, 0, in_code
== COMPARE
);
4421 x
= gen_lowpart_for_combine (mode
, new);
4422 code
= GET_CODE (x
);
4425 /* Now recursively process each operand of this operation. */
4426 fmt
= GET_RTX_FORMAT (code
);
4427 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
4430 new = make_compound_operation (XEXP (x
, i
), next_code
);
4431 SUBST (XEXP (x
, i
), new);
/* NOTE(review): the loop's format check, the final "return x;" and the
   closing brace (original lines ~4428-4434) are missing here.  */
/* NOTE(review): damaged mechanical extraction -- lines split mid-token,
   "NNNN " prefixes are fused-in original line numbers.  Missing from
   this view: the "int *plen;" declaration, the pos < 0 early return,
   the *plen <= 0 check, and the final "return pos;" (original lines
   4446-4462 gaps).  Code text preserved byte-for-byte.  */
4437 /* Given M see if it is a value that would select a field of bits
4438 within an item, but not the entire word. Return -1 if not.
4439 Otherwise, return the starting position of the field, where 0 is the
4442 *PLEN is set to the length of the field. */
4445 get_pos_from_mask (m
, plen
)
4446 unsigned HOST_WIDE_INT m
;
4449 /* Get the bit number of the first 1 bit from the right, -1 if none. */
4450 int pos
= exact_log2 (m
& - m
);
4455 /* Now shift off the low-order zero bits and see if we have a power of
4457 *plen
= exact_log2 ((m
>> pos
) + 1);
/* NOTE(review): damaged mechanical extraction -- original lines split
   mid-token, "NNNN " prefixes are fused-in original line numbers, and
   number gaps (4486->4494, 4544->4549, 4585->4591) show missing lines,
   including the switch labels this function's cases belong to.  Code
   text preserved byte-for-byte; only comments added.  */
/* Purpose (from the surviving header comment): rewrite X as an
   expression in MODE, caring only about the low-order BITS bits, by
   recursing through AND/shift/unary/binary operations.  */
4465 /* Rewrite X so that it is an expression in MODE. We only care about the
4466 low-order BITS bits so we can ignore AND operations that just clear
4469 Also, if REG is non-zero and X is a register equal in value to REG,
4470 replace X with REG. */
4473 force_to_mode (x
, mode
, bits
, reg
)
4475 enum machine_mode mode
;
4479 enum rtx_code code
= GET_CODE (x
);
4481 /* If X is narrower than MODE or if BITS is larger than the size of MODE,
4482 just get X in the proper mode. */
4484 if (GET_MODE_SIZE (GET_MODE (x
)) < GET_MODE_SIZE (mode
)
4485 || bits
> GET_MODE_BITSIZE (mode
))
4486 return gen_lowpart_for_combine (mode
, x
);
4494 x
= expand_compound_operation (x
);
4495 if (GET_CODE (x
) != code
)
4496 return force_to_mode (x
, mode
, bits
, reg
);
4500 if (reg
!= 0 && (rtx_equal_p (get_last_value (reg
), x
)
4501 || rtx_equal_p (reg
, get_last_value (x
))))
4506 if (bits
< HOST_BITS_PER_WIDE_INT
)
4507 x
= GEN_INT (INTVAL (x
) & (((HOST_WIDE_INT
) 1 << bits
) - 1));
4511 /* Ignore low-order SUBREGs. */
4512 if (subreg_lowpart_p (x
))
4513 return force_to_mode (SUBREG_REG (x
), mode
, bits
, reg
);
4517 /* If this is an AND with a constant. Otherwise, we fall through to
4518 do the general binary case. */
4520 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
4522 HOST_WIDE_INT mask
= INTVAL (XEXP (x
, 1));
4523 int len
= exact_log2 (mask
+ 1);
4524 rtx op
= XEXP (x
, 0);
4526 /* If this is masking some low-order bits, we may be able to
4527 impose a stricter constraint on what bits of the operand are
4530 op
= force_to_mode (op
, mode
, len
> 0 ? MIN (len
, bits
) : bits
,
4533 if (bits
< HOST_BITS_PER_WIDE_INT
)
4534 mask
&= ((HOST_WIDE_INT
) 1 << bits
) - 1;
4536 x
= simplify_and_const_int (x
, mode
, op
, mask
);
4538 /* If X is still an AND, see if it is an AND with a mask that
4539 is just some low-order bits. If so, and it is BITS wide (it
4540 can't be wider), we don't need it. */
4542 if (GET_CODE (x
) == AND
&& GET_CODE (XEXP (x
, 1)) == CONST_INT
4543 && bits
< HOST_BITS_PER_WIDE_INT
4544 && INTVAL (XEXP (x
, 1)) == ((HOST_WIDE_INT
) 1 << bits
) - 1)
4549 /* ... fall through ... */
4556 /* For most binary operations, just propagate into the operation and
4559 return gen_binary (code
, mode
,
4560 force_to_mode (XEXP (x
, 0), mode
, bits
, reg
),
4561 force_to_mode (XEXP (x
, 1), mode
, bits
, reg
));
4565 /* For left shifts, do the same, but just for the first operand.
4566 If the shift count is a constant, we need even fewer bits of the
4569 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
&& INTVAL (XEXP (x
, 1)) < bits
)
4570 bits
-= INTVAL (XEXP (x
, 1));
4572 return gen_binary (code
, mode
,
4573 force_to_mode (XEXP (x
, 0), mode
, bits
, reg
),
4577 /* Here we can only do something if the shift count is a constant and
4578 the count plus BITS is no larger than the width of MODE, we can do
4579 the shift in MODE. */
4581 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
4582 && INTVAL (XEXP (x
, 1)) + bits
<= GET_MODE_BITSIZE (mode
))
4583 return gen_binary (LSHIFTRT
, mode
,
4584 force_to_mode (XEXP (x
, 0), mode
,
4585 bits
+ INTVAL (XEXP (x
, 1)), reg
),
4591 /* Handle these similarly to the way we handle most binary operations. */
4592 return gen_unary (code
, mode
,
4593 force_to_mode (XEXP (x
, 0), mode
, bits
, reg
));
4596 /* Otherwise, just do the operation canonically. */
4597 return gen_lowpart_for_combine (mode
, x
);
/* NOTE(review): damaged mechanical extraction -- original lines split
   mid-token, "NNNN " prefixes are fused-in original line numbers, and
   number gaps (4610->4616, 4630->4632, 4684->4688) show missing lines
   (several local declarations, extraction arguments, and an "else
   return x;" arm are absent).  Code text preserved byte-for-byte;
   only comments added.  */
4600 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
4601 Return that assignment if so.
4603 We only handle the most common cases. */
4606 make_field_assignment (x
)
4609 rtx dest
= SET_DEST (x
);
4610 rtx src
= SET_SRC (x
);
4616 enum machine_mode mode
;
4618 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
4619 a clear of a one-bit field. We will have changed it to
4620 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
4623 if (GET_CODE (src
) == AND
&& GET_CODE (XEXP (src
, 0)) == ROTATE
4624 && GET_CODE (XEXP (XEXP (src
, 0), 0)) == CONST_INT
4625 && INTVAL (XEXP (XEXP (src
, 0), 0)) == -2
4626 && (rtx_equal_p (dest
, XEXP (src
, 1))
4627 || rtx_equal_p (dest
, get_last_value (XEXP (src
, 1)))
4628 || rtx_equal_p (get_last_value (dest
), XEXP (src
, 1))))
4630 assign
= make_extraction (VOIDmode
, dest
, -1, XEXP (XEXP (src
, 0), 1),
4632 return gen_rtx (SET
, VOIDmode
, assign
, const0_rtx
);
4635 else if (GET_CODE (src
) == AND
&& GET_CODE (XEXP (src
, 0)) == SUBREG
4636 && subreg_lowpart_p (XEXP (src
, 0))
4637 && (GET_MODE_SIZE (GET_MODE (XEXP (src
, 0)))
4638 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src
, 0)))))
4639 && GET_CODE (SUBREG_REG (XEXP (src
, 0))) == ROTATE
4640 && INTVAL (XEXP (SUBREG_REG (XEXP (src
, 0)), 0)) == -2
4641 && (rtx_equal_p (dest
, XEXP (src
, 1))
4642 || rtx_equal_p (dest
, get_last_value (XEXP (src
, 1)))
4643 || rtx_equal_p (get_last_value (dest
), XEXP (src
, 1))))
4645 assign
= make_extraction (VOIDmode
, dest
, -1,
4646 XEXP (SUBREG_REG (XEXP (src
, 0)), 1),
4648 return gen_rtx (SET
, VOIDmode
, assign
, const0_rtx
);
4651 /* If SRC is (ior (ashift (const_int 1) POS DEST)), this is a set of a
4653 else if (GET_CODE (src
) == IOR
&& GET_CODE (XEXP (src
, 0)) == ASHIFT
4654 && XEXP (XEXP (src
, 0), 0) == const1_rtx
4655 && (rtx_equal_p (dest
, XEXP (src
, 1))
4656 || rtx_equal_p (dest
, get_last_value (XEXP (src
, 1)))
4657 || rtx_equal_p (get_last_value (dest
), XEXP (src
, 1))))
4659 assign
= make_extraction (VOIDmode
, dest
, -1, XEXP (XEXP (src
, 0), 1),
4661 return gen_rtx (SET
, VOIDmode
, assign
, const1_rtx
);
4664 /* The other case we handle is assignments into a constant-position
4665 field. They look like (ior (and DEST C1) OTHER). If C1 represents
4666 a mask that has all one bits except for a group of zero bits and
4667 OTHER is known to have zeros where C1 has ones, this is such an
4668 assignment. Compute the position and length from C1. Shift OTHER
4669 to the appropriate position, force it to the required mode, and
4670 make the extraction. Check for the AND in both operands. */
4672 if (GET_CODE (src
) == IOR
&& GET_CODE (XEXP (src
, 0)) == AND
4673 && GET_CODE (XEXP (XEXP (src
, 0), 1)) == CONST_INT
4674 && (rtx_equal_p (XEXP (XEXP (src
, 0), 0), dest
)
4675 || rtx_equal_p (XEXP (XEXP (src
, 0), 0), get_last_value (dest
))
/* NOTE(review): the third disjunct below tests operand 1 (the CONST_INT)
   where the parallel arm at original line 4682 tests operand 0 -- looks
   like a typo for XEXP (XEXP (src, 0), 0); confirm against the original
   file before relying on this check.  */
4676 || rtx_equal_p (get_last_value (XEXP (XEXP (src
, 0), 1)), dest
)))
4677 c1
= INTVAL (XEXP (XEXP (src
, 0), 1)), other
= XEXP (src
, 1);
4678 else if (GET_CODE (src
) == IOR
&& GET_CODE (XEXP (src
, 1)) == AND
4679 && GET_CODE (XEXP (XEXP (src
, 1), 1)) == CONST_INT
4680 && (rtx_equal_p (XEXP (XEXP (src
, 1), 0), dest
)
4681 || rtx_equal_p (XEXP (XEXP (src
, 1), 0), get_last_value (dest
))
4682 || rtx_equal_p (get_last_value (XEXP (XEXP (src
, 1), 0)),
4684 c1
= INTVAL (XEXP (XEXP (src
, 1), 1)), other
= XEXP (src
, 0);
4688 pos
= get_pos_from_mask (~c1
, &len
);
4689 if (pos
< 0 || pos
+ len
> GET_MODE_BITSIZE (GET_MODE (dest
))
4690 || (c1
& significant_bits (other
, GET_MODE (other
))) != 0)
4693 assign
= make_extraction (VOIDmode
, dest
, pos
, NULL_RTX
, len
, 1, 1, 0);
4695 /* The mode to use for the source is the mode of the assignment, or of
4696 what is inside a possible STRICT_LOW_PART. */
4697 mode
= (GET_CODE (assign
) == STRICT_LOW_PART
4698 ? GET_MODE (XEXP (assign
, 0)) : GET_MODE (assign
));
4700 /* Shift OTHER right POS places and make it the source, restricting it
4701 to the proper length and mode. */
4703 src
= force_to_mode (simplify_shift_const (NULL_RTX
, LSHIFTRT
,
4704 GET_MODE (src
), other
, pos
),
4707 return gen_rtx_combine (SET
, VOIDmode
, assign
, src
);
/* NOTE(review): damaged mechanical extraction -- original lines split
   mid-token, "NNNN " prefixes are fused-in original line numbers, and
   number gaps (4740->4747, 4805->4810) show missing lines, including the
   switch on INNER_CODE whose cases appear below.  Code text preserved
   byte-for-byte; only comments added.  */
/* Purpose (from the surviving comments): factor a common operand out of
   (OP (INNER a c) (INNER b c)) into (INNER (OP a b) c) when the outer
   and inner rtx codes distribute over each other.  */
4710 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
4714 apply_distributive_law (x
)
4717 enum rtx_code code
= GET_CODE (x
);
4718 rtx lhs
, rhs
, other
;
4720 enum rtx_code inner_code
;
4722 /* The outer operation can only be one of the following: */
4723 if (code
!= IOR
&& code
!= AND
&& code
!= XOR
4724 && code
!= PLUS
&& code
!= MINUS
)
4727 lhs
= XEXP (x
, 0), rhs
= XEXP (x
, 1);
4729 /* If either operand is a primitive we can't do anything, so get out fast. */
4730 if (GET_RTX_CLASS (GET_CODE (lhs
)) == 'o'
4731 || GET_RTX_CLASS (GET_CODE (rhs
)) == 'o')
4734 lhs
= expand_compound_operation (lhs
);
4735 rhs
= expand_compound_operation (rhs
);
4736 inner_code
= GET_CODE (lhs
);
4737 if (inner_code
!= GET_CODE (rhs
))
4740 /* See if the inner and outer operations distribute. */
4747 /* These all distribute except over PLUS. */
4748 if (code
== PLUS
|| code
== MINUS
)
4753 if (code
!= PLUS
&& code
!= MINUS
)
4759 /* These are also multiplies, so they distribute over everything. */
4763 /* Non-paradoxical SUBREGs distributes over all operations, provided
4764 the inner modes and word numbers are the same, this is an extraction
4765 of a low-order part, we don't convert an fp operation to int or
4766 vice versa, and we would not be converting a single-word
4767 operation into a multi-word operation. The latter test is not
4768 required, but it prevents generating unneeded multi-word operations.
4769 Some of the previous tests are redundant given the latter test, but
4770 are retained because they are required for correctness.
4772 We produce the result slightly differently in this case. */
4774 if (GET_MODE (SUBREG_REG (lhs
)) != GET_MODE (SUBREG_REG (rhs
))
4775 || SUBREG_WORD (lhs
) != SUBREG_WORD (rhs
)
4776 || ! subreg_lowpart_p (lhs
)
4777 || (GET_MODE_CLASS (GET_MODE (lhs
))
4778 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs
))))
4779 || (GET_MODE_SIZE (GET_MODE (lhs
))
4780 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs
))))
4781 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs
))) > UNITS_PER_WORD
)
4784 tem
= gen_binary (code
, GET_MODE (SUBREG_REG (lhs
)),
4785 SUBREG_REG (lhs
), SUBREG_REG (rhs
));
4786 return gen_lowpart_for_combine (GET_MODE (x
), tem
);
4792 /* Set LHS and RHS to the inner operands (A and B in the example
4793 above) and set OTHER to the common operand (C in the example).
4794 These is only one way to do this unless the inner operation is
4796 if (GET_RTX_CLASS (inner_code
) == 'c'
4797 && rtx_equal_p (XEXP (lhs
, 0), XEXP (rhs
, 0)))
4798 other
= XEXP (lhs
, 0), lhs
= XEXP (lhs
, 1), rhs
= XEXP (rhs
, 1);
4799 else if (GET_RTX_CLASS (inner_code
) == 'c'
4800 && rtx_equal_p (XEXP (lhs
, 0), XEXP (rhs
, 1)))
4801 other
= XEXP (lhs
, 0), lhs
= XEXP (lhs
, 1), rhs
= XEXP (rhs
, 0);
4802 else if (GET_RTX_CLASS (inner_code
) == 'c'
4803 && rtx_equal_p (XEXP (lhs
, 1), XEXP (rhs
, 0)))
4804 other
= XEXP (lhs
, 1), lhs
= XEXP (lhs
, 0), rhs
= XEXP (rhs
, 1);
4805 else if (rtx_equal_p (XEXP (lhs
, 1), XEXP (rhs
, 1)))
4806 other
= XEXP (lhs
, 1), lhs
= XEXP (lhs
, 0), rhs
= XEXP (rhs
, 0);
4810 /* Form the new inner operation, seeing if it simplifies first. */
4811 tem
= gen_binary (code
, GET_MODE (x
), lhs
, rhs
);
4813 /* There is one exception to the general way of distributing:
4814 (a ^ b) | (a ^ c) -> (~a) & (b ^ c) */
4815 if (code
== XOR
&& inner_code
== IOR
)
4818 other
= gen_unary (NOT
, GET_MODE (x
), other
);
4821 /* We may be able to continuing distributing the result, so call
4822 ourselves recursively on the inner operation before forming the
4823 outer operation, which we return. */
4824 return gen_binary (inner_code
, GET_MODE (x
),
4825 apply_distributive_law (tem
), other
);
4828 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
4831 Return an equivalent form, if different from X. Otherwise, return X. If
4832 X is zero, we are to always construct the equivalent form. */
4835 simplify_and_const_int (x
, mode
, varop
, constop
)
4837 enum machine_mode mode
;
4839 unsigned HOST_WIDE_INT constop
;
4841 register enum machine_mode tmode
;
4843 unsigned HOST_WIDE_INT significant
;
4845 /* There is a large class of optimizations based on the principle that
4846 some operations produce results where certain bits are known to be zero,
4847 and hence are not significant to the AND. For example, if we have just
4848 done a left shift of one bit, the low-order bit is known to be zero and
4849 hence an AND with a mask of ~1 would not do anything.
4851 At the end of the following loop, we set:
4853 VAROP to be the item to be AND'ed with;
4854 CONSTOP to the constant value to AND it with. */
4858 /* If we ever encounter a mode wider than the host machine's widest
4859 integer size, we can't compute the masks accurately, so give up. */
4860 if (GET_MODE_BITSIZE (GET_MODE (varop
)) > HOST_BITS_PER_WIDE_INT
)
4863 /* Unless one of the cases below does a `continue',
4864 a `break' will be executed to exit the loop. */
4866 switch (GET_CODE (varop
))
4869 /* If VAROP is a (clobber (const_int)), return it since we know
4870 we are generating something that won't match. */
4873 #if ! BITS_BIG_ENDIAN
4875 /* VAROP is a (use (mem ..)) that was made from a bit-field
4876 extraction that spanned the boundary of the MEM. If we are
4877 now masking so it is within that boundary, we don't need the
4879 if ((constop
& ~ GET_MODE_MASK (GET_MODE (XEXP (varop
, 0)))) == 0)
4881 varop
= XEXP (varop
, 0);
4888 if (subreg_lowpart_p (varop
)
4889 /* We can ignore the effect this SUBREG if it narrows the mode
4890 or, on machines where byte operations zero extend, if the
4891 constant masks to zero all the bits the mode doesn't have. */
4892 && ((GET_MODE_SIZE (GET_MODE (varop
))
4893 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop
))))
4894 #ifdef BYTE_LOADS_ZERO_EXTEND
4896 & GET_MODE_MASK (GET_MODE (varop
))
4897 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (varop
)))))
4901 varop
= SUBREG_REG (varop
);
4910 /* Try to expand these into a series of shifts and then work
4911 with that result. If we can't, for example, if the extract
4912 isn't at a fixed position, give up. */
4913 temp
= expand_compound_operation (varop
);
4922 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
)
4924 constop
&= INTVAL (XEXP (varop
, 1));
4925 varop
= XEXP (varop
, 0);
4932 /* If VAROP is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
4933 LSHIFT so we end up with an (and (lshiftrt (ior ...) ...) ...)
4934 operation which may be a bitfield extraction. */
4936 if (GET_CODE (XEXP (varop
, 0)) == LSHIFTRT
4937 && GET_CODE (XEXP (XEXP (varop
, 0), 1)) == CONST_INT
4938 && INTVAL (XEXP (XEXP (varop
, 0), 1)) >= 0
4939 && INTVAL (XEXP (XEXP (varop
, 0), 1)) < HOST_BITS_PER_WIDE_INT
4940 && GET_CODE (XEXP (varop
, 1)) == CONST_INT
4941 && (INTVAL (XEXP (varop
, 1))
4942 & ~ significant_bits (XEXP (varop
, 0),
4943 GET_MODE (varop
)) == 0))
4945 temp
= GEN_INT ((INTVAL (XEXP (varop
, 1)) & constop
)
4946 << INTVAL (XEXP (XEXP (varop
, 0), 1)));
4947 temp
= gen_binary (GET_CODE (varop
), GET_MODE (varop
),
4948 XEXP (XEXP (varop
, 0), 0), temp
);
4949 varop
= gen_rtx_combine (LSHIFTRT
, GET_MODE (varop
),
4950 temp
, XEXP (varop
, 1));
4954 /* Apply the AND to both branches of the IOR or XOR, then try to
4955 apply the distributive law. This may eliminate operations
4956 if either branch can be simplified because of the AND.
4957 It may also make some cases more complex, but those cases
4958 probably won't match a pattern either with or without this. */
4960 gen_lowpart_for_combine
4961 (mode
, apply_distributive_law
4963 (GET_CODE (varop
), GET_MODE (varop
),
4964 simplify_and_const_int (NULL_RTX
, GET_MODE (varop
),
4965 XEXP (varop
, 0), constop
),
4966 simplify_and_const_int (NULL_RTX
, GET_MODE (varop
),
4967 XEXP (varop
, 1), constop
))));
4970 /* (and (not FOO)) is (and (xor FOO CONST_OP)) so if FOO is an
4971 LSHIFTRT we can do the same as above. */
4973 if (GET_CODE (XEXP (varop
, 0)) == LSHIFTRT
4974 && GET_CODE (XEXP (XEXP (varop
, 0), 1)) == CONST_INT
4975 && INTVAL (XEXP (XEXP (varop
, 0), 1)) >= 0
4976 && INTVAL (XEXP (XEXP (varop
, 0), 1)) < HOST_BITS_PER_WIDE_INT
)
4978 temp
= GEN_INT (constop
<< INTVAL (XEXP (XEXP (varop
, 0), 1)));
4979 temp
= gen_binary (XOR
, GET_MODE (varop
),
4980 XEXP (XEXP (varop
, 0), 0), temp
);
4981 varop
= gen_rtx_combine (LSHIFTRT
, GET_MODE (varop
),
4982 temp
, XEXP (XEXP (varop
, 0), 1));
4988 /* If we are just looking for the sign bit, we don't need this
4989 shift at all, even if it has a variable count. */
4990 if (constop
== ((HOST_WIDE_INT
) 1
4991 << (GET_MODE_BITSIZE (GET_MODE (varop
)) - 1)))
4993 varop
= XEXP (varop
, 0);
4997 /* If this is a shift by a constant, get a mask that contains
4998 those bits that are not copies of the sign bit. We then have
4999 two cases: If CONSTOP only includes those bits, this can be
5000 a logical shift, which may allow simplifications. If CONSTOP
5001 is a single-bit field not within those bits, we are requesting
5002 a copy of the sign bit and hence can shift the sign bit to
5003 the appropriate location. */
5004 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
5005 && INTVAL (XEXP (varop
, 1)) >= 0
5006 && INTVAL (XEXP (varop
, 1)) < HOST_BITS_PER_WIDE_INT
)
5010 significant
= GET_MODE_MASK (GET_MODE (varop
));
5011 significant
>>= INTVAL (XEXP (varop
, 1));
5013 if ((constop
& ~significant
) == 0
5014 || (i
= exact_log2 (constop
)) >= 0)
5016 varop
= simplify_shift_const
5017 (varop
, LSHIFTRT
, GET_MODE (varop
), XEXP (varop
, 0),
5018 i
< 0 ? INTVAL (XEXP (varop
, 1))
5019 : GET_MODE_BITSIZE (GET_MODE (varop
)) - 1 - i
);
5020 if (GET_CODE (varop
) != ASHIFTRT
)
5025 /* If our mask is 1, convert this to a LSHIFTRT. This can be done
5026 even if the shift count isn't a constant. */
5028 varop
= gen_rtx_combine (LSHIFTRT
, GET_MODE (varop
),
5029 XEXP (varop
, 0), XEXP (varop
, 1));
5033 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is
5034 included in STORE_FLAG_VALUE and FOO has no significant bits
5036 if ((constop
& ~ STORE_FLAG_VALUE
) == 0
5037 && XEXP (varop
, 0) == const0_rtx
5038 && (significant_bits (XEXP (varop
, 0), mode
) & ~ constop
) == 0)
5040 varop
= XEXP (varop
, 0);
5046 /* In (and (plus FOO C1) M), if M is a mask that just turns off
5047 low-order bits (as in an alignment operation) and FOO is already
5048 aligned to that boundary, we can convert remove this AND
5049 and possibly the PLUS if it is now adding zero. */
5050 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
5051 && exact_log2 (-constop
) >= 0
5052 && (significant_bits (XEXP (varop
, 0), mode
) & ~ constop
) == 0)
5054 varop
= plus_constant (XEXP (varop
, 0),
5055 INTVAL (XEXP (varop
, 1)) & constop
);
5060 /* ... fall through ... */
5063 /* In (and (plus (and FOO M1) BAR) M2), if M1 and M2 are one
5064 less than powers of two and M2 is narrower than M1, we can
5065 eliminate the inner AND. This occurs when incrementing
5068 if (GET_CODE (XEXP (varop
, 0)) == ZERO_EXTRACT
5069 || GET_CODE (XEXP (varop
, 0)) == ZERO_EXTEND
)
5070 SUBST (XEXP (varop
, 0),
5071 expand_compound_operation (XEXP (varop
, 0)));
5073 if (GET_CODE (XEXP (varop
, 0)) == AND
5074 && GET_CODE (XEXP (XEXP (varop
, 0), 1)) == CONST_INT
5075 && exact_log2 (constop
+ 1) >= 0
5076 && exact_log2 (INTVAL (XEXP (XEXP (varop
, 0), 1)) + 1) >= 0
5077 && (~ INTVAL (XEXP (XEXP (varop
, 0), 1)) & constop
) == 0)
5078 SUBST (XEXP (varop
, 0), XEXP (XEXP (varop
, 0), 0));
5085 /* If we have reached a constant, this whole thing is constant. */
5086 if (GET_CODE (varop
) == CONST_INT
)
5087 return GEN_INT (constop
& INTVAL (varop
));
5089 /* See what bits are significant in VAROP. */
5090 significant
= significant_bits (varop
, mode
);
5092 /* Turn off all bits in the constant that are known to already be zero.
5093 Thus, if the AND isn't needed at all, we will have CONSTOP == SIGNIFICANT
5094 which is tested below. */
5096 constop
&= significant
;
5098 /* If we don't have any bits left, return zero. */
5102 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
5103 if we already had one (just check for the simplest cases). */
5104 if (x
&& GET_CODE (XEXP (x
, 0)) == SUBREG
5105 && GET_MODE (XEXP (x
, 0)) == mode
5106 && SUBREG_REG (XEXP (x
, 0)) == varop
)
5107 varop
= XEXP (x
, 0);
5109 varop
= gen_lowpart_for_combine (mode
, varop
);
5111 /* If we can't make the SUBREG, try to return what we were given. */
5112 if (GET_CODE (varop
) == CLOBBER
)
5113 return x
? x
: varop
;
5115 /* If we are only masking insignificant bits, return VAROP. */
5116 if (constop
== significant
)
5119 /* Otherwise, return an AND. See how much, if any, of X we can use. */
5120 else if (x
== 0 || GET_CODE (x
) != AND
|| GET_MODE (x
) != mode
)
5121 x
= gen_rtx_combine (AND
, mode
, varop
, GEN_INT (constop
));
5125 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
5126 || INTVAL (XEXP (x
, 1)) != constop
)
5127 SUBST (XEXP (x
, 1), GEN_INT (constop
));
5129 SUBST (XEXP (x
, 0), varop
);
5135 /* Given an expression, X, compute which bits in X can be non-zero.
5136 We don't care about bits outside of those defined in MODE.
5138 For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
5139 a shift, AND, or zero_extract, we can do better. */
5141 static unsigned HOST_WIDE_INT
5142 significant_bits (x
, mode
)
5144 enum machine_mode mode
;
5146 unsigned HOST_WIDE_INT significant
= GET_MODE_MASK (mode
);
5147 unsigned HOST_WIDE_INT inner_sig
;
5149 int mode_width
= GET_MODE_BITSIZE (mode
);
5152 /* If X is wider than MODE, use its mode instead. */
5153 if (GET_MODE_BITSIZE (GET_MODE (x
)) > mode_width
)
5155 mode
= GET_MODE (x
);
5156 significant
= GET_MODE_MASK (mode
);
5157 mode_width
= GET_MODE_BITSIZE (mode
);
5160 if (mode_width
> HOST_BITS_PER_WIDE_INT
)
5161 /* Our only callers in this case look for single bit values. So
5162 just return the mode mask. Those tests will then be false. */
5165 code
= GET_CODE (x
);
5169 #ifdef STACK_BOUNDARY
5170 /* If this is the stack pointer, we may know something about its
5171 alignment. If PUSH_ROUNDING is defined, it is possible for the
5172 stack to be momentarily aligned only to that amount, so we pick
5173 the least alignment. */
5175 if (x
== stack_pointer_rtx
)
5177 int sp_alignment
= STACK_BOUNDARY
/ BITS_PER_UNIT
;
5179 #ifdef PUSH_ROUNDING
5180 sp_alignment
= MIN (PUSH_ROUNDING (1), sp_alignment
);
5183 return significant
& ~ (sp_alignment
- 1);
5187 /* If X is a register whose value we can find, use that value.
5188 Otherwise, use the previously-computed significant bits for this
5191 tem
= get_last_value (x
);
5193 return significant_bits (tem
, mode
);
5194 else if (significant_valid
&& reg_significant
[REGNO (x
)])
5195 return reg_significant
[REGNO (x
)] & significant
;
5202 #ifdef BYTE_LOADS_ZERO_EXTEND
5204 /* In many, if not most, RISC machines, reading a byte from memory
5205 zeros the rest of the register. Noticing that fact saves a lot
5206 of extra zero-extends. */
5207 significant
&= GET_MODE_MASK (GET_MODE (x
));
5211 #if STORE_FLAG_VALUE == 1
5218 if (GET_MODE_CLASS (mode
) == MODE_INT
)
5221 /* A comparison operation only sets the bits given by its mode. The
5222 rest are set undefined. */
5223 if (GET_MODE_SIZE (GET_MODE (x
)) < mode_width
)
5224 significant
|= (GET_MODE_MASK (mode
) & ~ GET_MODE_MASK (GET_MODE (x
)));
5228 #if STORE_FLAG_VALUE == -1
5230 if (GET_RTX_CLASS (GET_CODE (XEXP (x
, 0))) == '<'
5231 || ((tem
= get_last_value (XEXP (x
, 0))) != 0
5232 && GET_RTX_CLASS (GET_CODE (tem
)) == '<'))
5235 if (GET_MODE_SIZE (GET_MODE (x
)) < mode_width
)
5236 significant
|= (GET_MODE_MASK (mode
) & ~ GET_MODE_MASK (GET_MODE (x
)));
5241 significant
&= (significant_bits (XEXP (x
, 0), mode
)
5242 & GET_MODE_MASK (mode
));
5246 significant
&= significant_bits (XEXP (x
, 0), mode
);
5247 if (GET_MODE (XEXP (x
, 0)) != VOIDmode
)
5248 significant
&= GET_MODE_MASK (GET_MODE (XEXP (x
, 0)));
5252 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
5253 Otherwise, show all the bits in the outer mode but not the inner
5255 inner_sig
= significant_bits (XEXP (x
, 0), mode
);
5256 if (GET_MODE (XEXP (x
, 0)) != VOIDmode
)
5258 inner_sig
&= GET_MODE_MASK (GET_MODE (XEXP (x
, 0)));
5261 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0))) - 1))))
5262 inner_sig
|= (GET_MODE_MASK (mode
)
5263 & ~ GET_MODE_MASK (GET_MODE (XEXP (x
, 0))));
5266 significant
&= inner_sig
;
5270 significant
&= (significant_bits (XEXP (x
, 0), mode
)
5271 & significant_bits (XEXP (x
, 1), mode
));
5276 significant
&= (significant_bits (XEXP (x
, 0), mode
)
5277 | significant_bits (XEXP (x
, 1), mode
));
5280 case PLUS
: case MINUS
:
5282 case DIV
: case UDIV
:
5283 case MOD
: case UMOD
:
5284 /* We can apply the rules of arithmetic to compute the number of
5285 high- and low-order zero bits of these operations. We start by
5286 computing the width (position of the highest-order non-zero bit)
5287 and the number of low-order zero bits for each value. */
5289 unsigned HOST_WIDE_INT sig0
= significant_bits (XEXP (x
, 0), mode
);
5290 unsigned HOST_WIDE_INT sig1
= significant_bits (XEXP (x
, 1), mode
);
5291 int width0
= floor_log2 (sig0
) + 1;
5292 int width1
= floor_log2 (sig1
) + 1;
5293 int low0
= floor_log2 (sig0
& -sig0
);
5294 int low1
= floor_log2 (sig1
& -sig1
);
5295 int op0_maybe_minusp
= (sig0
& (1 << (mode_width
- 1)));
5296 int op1_maybe_minusp
= (sig1
& (1 << (mode_width
- 1)));
5297 int result_width
= mode_width
;
5303 result_width
= MAX (width0
, width1
) + 1;
5304 result_low
= MIN (low0
, low1
);
5307 result_low
= MIN (low0
, low1
);
5310 result_width
= width0
+ width1
;
5311 result_low
= low0
+ low1
;
5314 if (! op0_maybe_minusp
&& ! op1_maybe_minusp
)
5315 result_width
= width0
;
5318 result_width
= width0
;
5321 if (! op0_maybe_minusp
&& ! op1_maybe_minusp
)
5322 result_width
= MIN (width0
, width1
);
5323 result_low
= MIN (low0
, low1
);
5326 result_width
= MIN (width0
, width1
);
5327 result_low
= MIN (low0
, low1
);
5331 if (result_width
< mode_width
)
5332 significant
&= ((HOST_WIDE_INT
) 1 << result_width
) - 1;
5335 significant
&= ~ (((HOST_WIDE_INT
) 1 << result_low
) - 1);
5340 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
5341 && INTVAL (XEXP (x
, 1)) < HOST_BITS_PER_WIDE_INT
)
5342 significant
&= ((HOST_WIDE_INT
) 1 << INTVAL (XEXP (x
, 1))) - 1;
5346 /* If the inner mode is a single word for both the host and target
5347 machines, we can compute this from which bits of the inner
5348 object are known significant. */
5349 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
))) <= BITS_PER_WORD
5350 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
)))
5351 <= HOST_BITS_PER_WIDE_INT
))
5353 significant
&= significant_bits (SUBREG_REG (x
), mode
);
5354 #ifndef BYTE_LOADS_ZERO_EXTEND
5355 /* On many CISC machines, accessing an object in a wider mode
5356 causes the high-order bits to become undefined. So they are
5357 not known to be zero. */
5358 if (GET_MODE_SIZE (GET_MODE (x
))
5359 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
5360 significant
|= (GET_MODE_MASK (GET_MODE (x
))
5361 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x
))));
5371 /* The significant bits are in two classes: any bits within MODE
5372 that aren't in GET_MODE (x) are always significant. The rest of the
5373 significant bits are those that are significant in the operand of
5374 the shift when shifted the appropriate number of bits. This
5375 shows that high-order bits are cleared by the right shift and
5376 low-order bits by left shifts. */
5377 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
5378 && INTVAL (XEXP (x
, 1)) >= 0
5379 && INTVAL (XEXP (x
, 1)) < HOST_BITS_PER_WIDE_INT
)
5381 enum machine_mode inner_mode
= GET_MODE (x
);
5382 int width
= GET_MODE_BITSIZE (inner_mode
);
5383 int count
= INTVAL (XEXP (x
, 1));
5384 unsigned HOST_WIDE_INT mode_mask
= GET_MODE_MASK (inner_mode
);
5385 unsigned HOST_WIDE_INT op_significant
5386 = significant_bits (XEXP (x
, 0), mode
);
5387 unsigned HOST_WIDE_INT inner
= op_significant
& mode_mask
;
5388 unsigned HOST_WIDE_INT outer
= 0;
5390 if (mode_width
> width
)
5391 outer
= (op_significant
& significant
& ~ mode_mask
);
5393 if (code
== LSHIFTRT
)
5395 else if (code
== ASHIFTRT
)
5399 /* If the sign bit was significant at before the shift, we
5400 need to mark all the places it could have been copied to
5401 by the shift significant. */
5402 if (inner
& ((HOST_WIDE_INT
) 1 << (width
- 1 - count
)))
5403 inner
|= (((HOST_WIDE_INT
) 1 << count
) - 1) << (width
- count
);
5405 else if (code
== LSHIFT
|| code
== ASHIFT
)
5408 inner
= ((inner
<< (count
% width
)
5409 | (inner
>> (width
- (count
% width
)))) & mode_mask
);
5411 significant
&= (outer
| inner
);
5416 /* This is at most the number of bits in the mode. */
5417 significant
= ((HOST_WIDE_INT
) 1 << (floor_log2 (mode_width
) + 1)) - 1;
5424 /* This function is called from `simplify_shift_const' to merge two
5425 outer operations. Specifically, we have already found that we need
5426 to perform operation *POP0 with constant *PCONST0 at the outermost
5427 position. We would now like to also perform OP1 with constant CONST1
5428 (with *POP0 being done last).
5430 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
5431 the resulting operation. *PCOMP_P is set to 1 if we would need to
5432 complement the innermost operand, otherwise it is unchanged.
5434 MODE is the mode in which the operation will be done. No bits outside
5435 the width of this mode matter. It is assumed that the width of this mode
5436 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
5438 If *POP0 or OP1 are NIL, it means no operation is required. Only NEG, PLUS,
5439 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
5440 result is simply *PCONST0.
5442 If the resulting operation cannot be expressed as one operation, we
5443 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
5446 merge_outer_ops (pop0
, pconst0
, op1
, const1
, mode
, pcomp_p
)
5447 enum rtx_code
*pop0
;
5448 HOST_WIDE_INT
*pconst0
;
5450 HOST_WIDE_INT const1
;
5451 enum machine_mode mode
;
5454 enum rtx_code op0
= *pop0
;
5455 HOST_WIDE_INT const0
= *pconst0
;
5457 const0
&= GET_MODE_MASK (mode
);
5458 const1
&= GET_MODE_MASK (mode
);
5460 /* If OP0 is an AND, clear unimportant bits in CONST1. */
5464 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
5467 if (op1
== NIL
|| op0
== SET
)
5470 else if (op0
== NIL
)
5471 op0
= op1
, const0
= const1
;
5473 else if (op0
== op1
)
5495 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
5496 else if (op0
== PLUS
|| op1
== PLUS
|| op0
== NEG
|| op1
== NEG
)
5499 /* If the two constants aren't the same, we can't do anything. The
5500 remaining six cases can all be done. */
5501 else if (const0
!= const1
)
5509 /* (a & b) | b == b */
5511 else /* op1 == XOR */
5512 /* (a ^ b) | b == a | b */
5518 /* (a & b) ^ b == (~a) & b */
5519 op0
= AND
, *pcomp_p
= 1;
5520 else /* op1 == IOR */
5521 /* (a | b) ^ b == a & ~b */
5522 op0
= AND
, *pconst0
= ~ const0
;
5527 /* (a | b) & b == b */
5529 else /* op1 == XOR */
5530 /* (a ^ b) & b) == (~a) & b */
5535 /* Check for NO-OP cases. */
5536 const0
&= GET_MODE_MASK (mode
);
5538 && (op0
== IOR
|| op0
== XOR
|| op0
== PLUS
))
5540 else if (const0
== 0 && op0
== AND
)
5542 else if (const0
== GET_MODE_MASK (mode
) && op0
== AND
)
5551 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
5552 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
5553 that we started with.
5555 The shift is normally computed in the widest mode we find in VAROP, as
5556 long as it isn't a different number of words than RESULT_MODE. Exceptions
5557 are ASHIFTRT and ROTATE, which are always done in their original mode, */
5560 simplify_shift_const (x
, code
, result_mode
, varop
, count
)
5563 enum machine_mode result_mode
;
5567 enum rtx_code orig_code
= code
;
5568 int orig_count
= count
;
5569 enum machine_mode mode
= result_mode
;
5570 enum machine_mode shift_mode
, tmode
;
5572 = (GET_MODE_SIZE (mode
) + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
;
5573 /* We form (outer_op (code varop count) (outer_const)). */
5574 enum rtx_code outer_op
= NIL
;
5575 HOST_WIDE_INT outer_const
;
5577 int complement_p
= 0;
5580 /* If we were given an invalid count, don't do anything except exactly
5581 what was requested. */
5583 if (count
< 0 || count
> GET_MODE_BITSIZE (mode
))
5588 return gen_rtx (code
, mode
, varop
, GEN_INT (count
));
5591 /* Unless one of the branches of the `if' in this loop does a `continue',
5592 we will `break' the loop after the `if'. */
5596 /* If we have an operand of (clobber (const_int 0)), just return that
5598 if (GET_CODE (varop
) == CLOBBER
)
5601 /* If we discovered we had to complement VAROP, leave. Making a NOT
5602 here would cause an infinite loop. */
5606 /* Convert ROTATETRT to ROTATE. */
5607 if (code
== ROTATERT
)
5608 code
= ROTATE
, count
= GET_MODE_BITSIZE (result_mode
) - count
;
5610 /* Canonicalize LSHIFT to ASHIFT. */
5614 /* We need to determine what mode we will do the shift in. If the
5615 shift is a ASHIFTRT or ROTATE, we must always do it in the mode it
5616 was originally done in. Otherwise, we can do it in MODE, the widest
5617 mode encountered. */
5618 shift_mode
= (code
== ASHIFTRT
|| code
== ROTATE
? result_mode
: mode
);
5620 /* Handle cases where the count is greater than the size of the mode
5621 minus 1. For ASHIFT, use the size minus one as the count (this can
5622 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
5623 take the count modulo the size. For other shifts, the result is
5626 Since these shifts are being produced by the compiler by combining
5627 multiple operations, each of which are defined, we know what the
5628 result is supposed to be. */
5630 if (count
> GET_MODE_BITSIZE (shift_mode
) - 1)
5632 if (code
== ASHIFTRT
)
5633 count
= GET_MODE_BITSIZE (shift_mode
) - 1;
5634 else if (code
== ROTATE
|| code
== ROTATERT
)
5635 count
%= GET_MODE_BITSIZE (shift_mode
);
5638 /* We can't simply return zero because there may be an
5646 /* Negative counts are invalid and should not have been made (a
5647 programmer-specified negative count should have been handled
5652 /* We simplify the tests below and elsewhere by converting
5653 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
5654 `make_compound_operation' will convert it to a ASHIFTRT for
5655 those machines (such as Vax) that don't have a LSHIFTRT. */
5656 if (GET_MODE_BITSIZE (shift_mode
) <= HOST_BITS_PER_WIDE_INT
5658 && ((significant_bits (varop
, shift_mode
)
5659 & ((HOST_WIDE_INT
) 1 << (GET_MODE_BITSIZE (shift_mode
) - 1)))
5663 switch (GET_CODE (varop
))
5669 new = expand_compound_operation (varop
);
5678 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
5679 minus the width of a smaller mode, we can do this with a
5680 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
5681 if ((code
== ASHIFTRT
|| code
== LSHIFTRT
)
5682 && ! mode_dependent_address_p (XEXP (varop
, 0))
5683 && ! MEM_VOLATILE_P (varop
)
5684 && (tmode
= mode_for_size (GET_MODE_BITSIZE (mode
) - count
,
5685 MODE_INT
, 1)) != BLKmode
)
5687 #if BYTES_BIG_ENDIAN
5688 new = gen_rtx (MEM
, tmode
, XEXP (varop
, 0));
5690 new = gen_rtx (MEM
, tmode
,
5691 plus_constant (XEXP (varop
, 0),
5692 count
/ BITS_PER_UNIT
));
5693 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop
);
5694 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop
);
5695 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop
);
5697 varop
= gen_rtx_combine (code
== ASHIFTRT
? SIGN_EXTEND
5698 : ZERO_EXTEND
, mode
, new);
5705 /* Similar to the case above, except that we can only do this if
5706 the resulting mode is the same as that of the underlying
5707 MEM and adjust the address depending on the *bits* endianness
5708 because of the way that bit-field extract insns are defined. */
5709 if ((code
== ASHIFTRT
|| code
== LSHIFTRT
)
5710 && (tmode
= mode_for_size (GET_MODE_BITSIZE (mode
) - count
,
5711 MODE_INT
, 1)) != BLKmode
5712 && tmode
== GET_MODE (XEXP (varop
, 0)))
5715 new = XEXP (varop
, 0);
5717 new = copy_rtx (XEXP (varop
, 0));
5718 SUBST (XEXP (new, 0),
5719 plus_constant (XEXP (new, 0),
5720 count
/ BITS_PER_UNIT
));
5723 varop
= gen_rtx_combine (code
== ASHIFTRT
? SIGN_EXTEND
5724 : ZERO_EXTEND
, mode
, new);
5731 /* If VAROP is a SUBREG, strip it as long as the inner operand has
5732 the same number of words as what we've seen so far. Then store
5733 the widest mode in MODE. */
5734 if (SUBREG_WORD (varop
) == 0
5735 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop
)))
5736 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)
5739 varop
= SUBREG_REG (varop
);
5740 if (GET_MODE_SIZE (GET_MODE (varop
)) > GET_MODE_SIZE (mode
))
5741 mode
= GET_MODE (varop
);
5747 /* Some machines use MULT instead of ASHIFT because MULT
5748 is cheaper. But it is still better on those machines to
5749 merge two shifts into one. */
5750 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
5751 && exact_log2 (INTVAL (XEXP (varop
, 1))) >= 0)
5753 varop
= gen_binary (ASHIFT
, GET_MODE (varop
), XEXP (varop
, 0),
5754 GEN_INT (exact_log2 (INTVAL (XEXP (varop
, 1)))));;
5760 /* Similar, for when divides are cheaper. */
5761 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
5762 && exact_log2 (INTVAL (XEXP (varop
, 1))) >= 0)
5764 varop
= gen_binary (LSHIFTRT
, GET_MODE (varop
), XEXP (varop
, 0),
5765 GEN_INT (exact_log2 (INTVAL (XEXP (varop
, 1)))));
5771 /* If we are extracting just the sign bit of an arithmetic right
5772 shift, that shift is not needed. */
5773 if (code
== LSHIFTRT
&& count
== GET_MODE_BITSIZE (result_mode
) - 1)
5775 varop
= XEXP (varop
, 0);
5779 /* ... fall through ... */
5785 /* Here we have two nested shifts. The result is usually the
5786 AND of a new shift with a mask. We compute the result below. */
5787 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
5788 && INTVAL (XEXP (varop
, 1)) >= 0
5789 && INTVAL (XEXP (varop
, 1)) < GET_MODE_BITSIZE (GET_MODE (varop
))
5790 && GET_MODE_BITSIZE (result_mode
) <= HOST_BITS_PER_WIDE_INT
5791 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)
5793 enum rtx_code first_code
= GET_CODE (varop
);
5794 int first_count
= INTVAL (XEXP (varop
, 1));
5795 unsigned HOST_WIDE_INT mask
;
5799 if (first_code
== LSHIFT
)
5800 first_code
= ASHIFT
;
5802 /* We have one common special case. We can't do any merging if
5803 the inner code is an ASHIFTRT of a smaller mode. However, if
5804 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
5805 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
5806 we can convert it to
5807 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1).
5808 This simplifies certain SIGN_EXTEND operations. */
5809 if (code
== ASHIFT
&& first_code
== ASHIFTRT
5810 && (GET_MODE_BITSIZE (result_mode
)
5811 - GET_MODE_BITSIZE (GET_MODE (varop
))) == count
)
5813 /* C3 has the low-order C1 bits zero. */
5815 mask
= (GET_MODE_MASK (mode
)
5816 & ~ (((HOST_WIDE_INT
) 1 << first_count
) - 1));
5818 varop
= simplify_and_const_int (NULL_RTX
, result_mode
,
5819 XEXP (varop
, 0), mask
);
5820 varop
= simplify_shift_const (NULL_RTX
, ASHIFT
, result_mode
,
5822 count
= first_count
;
5827 /* If this was (ashiftrt (ashift foo C1) C2) and we know
5828 something about FOO's previous value, we may be able to
5829 optimize this even though the code below can't handle this
5832 If FOO has J high-order bits equal to the sign bit with
5833 J > C1, then we can convert this to either an ASHIFT or
5834 a ASHIFTRT depending on the two counts.
5836 We cannot do this if VAROP's mode is not SHIFT_MODE. */
5838 if (code
== ASHIFTRT
&& first_code
== ASHIFT
5839 && GET_MODE (varop
) == shift_mode
5840 && (inner
= get_last_value (XEXP (varop
, 0))) != 0)
5842 if ((GET_CODE (inner
) == CONST_INT
5843 && (((INTVAL (inner
)
5844 >> (HOST_BITS_PER_WIDE_INT
- (first_count
+ 1)))
5847 >> (HOST_BITS_PER_WIDE_INT
- (first_count
+ 1)))
5849 || (GET_CODE (inner
) == SIGN_EXTEND
5850 && ((GET_MODE_BITSIZE (GET_MODE (inner
))
5851 - GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner
))))
5853 || (GET_CODE (inner
) == ASHIFTRT
5854 && GET_CODE (XEXP (inner
, 1)) == CONST_INT
5855 && INTVAL (XEXP (inner
, 1)) >= first_count
))
5857 count
-= first_count
;
5859 count
= - count
, code
= ASHIFT
;
5860 varop
= XEXP (varop
, 0);
5865 /* There are some cases we can't do. If CODE is ASHIFTRT,
5866 we can only do this if FIRST_CODE is also ASHIFTRT.
5868 We can't do the case when CODE is ROTATE and FIRST_CODE is
5871 If the mode of this shift is not the mode of the outer shift,
5872 we can't do this if either shift is ASHIFTRT or ROTATE.
5874 Finally, we can't do any of these if the mode is too wide
5875 unless the codes are the same.
5877 Handle the case where the shift codes are the same
5880 if (code
== first_code
)
5882 if (GET_MODE (varop
) != result_mode
5883 && (code
== ASHIFTRT
|| code
== ROTATE
))
5886 count
+= first_count
;
5887 varop
= XEXP (varop
, 0);
5891 if (code
== ASHIFTRT
5892 || (code
== ROTATE
&& first_code
== ASHIFTRT
)
5893 || GET_MODE_BITSIZE (mode
) > HOST_BITS_PER_WIDE_INT
5894 || (GET_MODE (varop
) != result_mode
5895 && (first_code
== ASHIFTRT
|| first_code
== ROTATE
5896 || code
== ROTATE
)))
5899 /* To compute the mask to apply after the shift, shift the
5900 significant bits of the inner shift the same way the
5901 outer shift will. */
5903 mask_rtx
= GEN_INT (significant_bits (varop
, GET_MODE (varop
)));
5906 = simplify_binary_operation (code
, result_mode
, mask_rtx
,
5909 /* Give up if we can't compute an outer operation to use. */
5911 || GET_CODE (mask_rtx
) != CONST_INT
5912 || ! merge_outer_ops (&outer_op
, &outer_const
, AND
,
5914 result_mode
, &complement_p
))
5917 /* If the shifts are in the same direction, we add the
5918 counts. Otherwise, we subtract them. */
5919 if ((code
== ASHIFTRT
|| code
== LSHIFTRT
)
5920 == (first_code
== ASHIFTRT
|| first_code
== LSHIFTRT
))
5921 count
+= first_count
;
5923 count
-= first_count
;
5925 /* If COUNT is positive, the new shift is usually CODE,
5926 except for the two exceptions below, in which case it is
5927 FIRST_CODE. If the count is negative, FIRST_CODE should
5930 && ((first_code
== ROTATE
&& code
== ASHIFT
)
5931 || (first_code
== ASHIFTRT
&& code
== LSHIFTRT
)))
5934 code
= first_code
, count
= - count
;
5936 varop
= XEXP (varop
, 0);
5940 /* If we have (A << B << C) for any shift, we can convert this to
5941 (A << C << B). This wins if A is a constant. Only try this if
5942 B is not a constant. */
5944 else if (GET_CODE (varop
) == code
5945 && GET_CODE (XEXP (varop
, 1)) != CONST_INT
5947 = simplify_binary_operation (code
, mode
,
5951 varop
= gen_rtx_combine (code
, mode
, new, XEXP (varop
, 1));
5958 /* Make this fit the case below. */
5959 varop
= gen_rtx_combine (XOR
, mode
, XEXP (varop
, 0),
5960 GEN_INT (GET_MODE_MASK (mode
)));
5966 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
5967 with C the size of VAROP - 1 and the shift is logical if
5968 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
5969 we have an (le X 0) operation. If we have an arithmetic shift
5970 and STORE_FLAG_VALUE is 1 or we have a logical shift with
5971 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
5973 if (GET_CODE (varop
) == IOR
&& GET_CODE (XEXP (varop
, 0)) == PLUS
5974 && XEXP (XEXP (varop
, 0), 1) == constm1_rtx
5975 && (STORE_FLAG_VALUE
== 1 || STORE_FLAG_VALUE
== -1)
5976 && (code
== LSHIFTRT
|| code
== ASHIFTRT
)
5977 && count
== GET_MODE_BITSIZE (GET_MODE (varop
)) - 1
5978 && rtx_equal_p (XEXP (XEXP (varop
, 0), 0), XEXP (varop
, 1)))
5981 varop
= gen_rtx_combine (LE
, GET_MODE (varop
), XEXP (varop
, 1),
5984 if (STORE_FLAG_VALUE
== 1 ? code
== ASHIFTRT
: code
== LSHIFTRT
)
5985 varop
= gen_rtx_combine (NEG
, GET_MODE (varop
), varop
);
5990 /* If we have (shift (logical)), move the logical to the outside
5991 to allow it to possibly combine with another logical and the
5992 shift to combine with another shift. This also canonicalizes to
5993 what a ZERO_EXTRACT looks like. Also, some machines have
5994 (and (shift)) insns. */
5996 if (GET_CODE (XEXP (varop
, 1)) == CONST_INT
5997 && (new = simplify_binary_operation (code
, result_mode
,
5999 GEN_INT (count
))) != 0
6000 && merge_outer_ops (&outer_op
, &outer_const
, GET_CODE (varop
),
6001 INTVAL (new), result_mode
, &complement_p
))
6003 varop
= XEXP (varop
, 0);
6007 /* If we can't do that, try to simplify the shift in each arm of the
6008 logical expression, make a new logical expression, and apply
6009 the inverse distributive law. */
6011 rtx lhs
= simplify_shift_const (NULL_RTX
, code
, result_mode
,
6012 XEXP (varop
, 0), count
);
6013 rtx rhs
= simplify_shift_const (NULL_RTX
, code
, result_mode
,
6014 XEXP (varop
, 1), count
);
6016 varop
= gen_binary (GET_CODE (varop
), result_mode
, lhs
, rhs
);
6017 varop
= apply_distributive_law (varop
);
6024 /* convert (lshift (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
6025 says that the sign bit can be tested, FOO has mode MODE, C is
6026 GET_MODE_BITSIZE (MODE) - 1, and FOO has only the low-order bit
6029 && XEXP (varop
, 1) == const0_rtx
6030 && GET_MODE (XEXP (varop
, 0)) == result_mode
6031 && count
== GET_MODE_BITSIZE (result_mode
) - 1
6032 && GET_MODE_BITSIZE (result_mode
) <= HOST_BITS_PER_WIDE_INT
6033 && ((STORE_FLAG_VALUE
6034 & ((HOST_WIDE_INT
) 1 << (GET_MODE_BITSIZE (result_mode
) - 1))))
6035 && significant_bits (XEXP (varop
, 0), result_mode
) == 1
6036 && merge_outer_ops (&outer_op
, &outer_const
, XOR
,
6037 (HOST_WIDE_INT
) 1, result_mode
,
6040 varop
= XEXP (varop
, 0);
6047 /* If we are doing an arithmetic right shift of something known
6048 to be -1 or 0, we don't need the shift. */
6049 if (code
== ASHIFTRT
6050 && significant_bits (XEXP (varop
, 0), result_mode
) == 1)
6056 /* NEG commutes with ASHIFT since it is multiplication. Move the
6057 NEG outside to allow shifts to combine. */
6059 && merge_outer_ops (&outer_op
, &outer_const
, NEG
,
6060 (HOST_WIDE_INT
) 0, result_mode
,
6063 varop
= XEXP (varop
, 0);
6069 /* Similar to case above. If X is 0 or 1 then X - 1 is -1 or 0. */
6070 if (XEXP (varop
, 1) == constm1_rtx
&& code
== ASHIFTRT
6071 && significant_bits (XEXP (varop
, 0), result_mode
) == 1)
6077 /* If we have the same operands as above but we are shifting the
6078 sign bit into the low-order bit, we are exclusive-or'ing
6079 the operand of the PLUS with a one. */
6080 if (code
== LSHIFTRT
&& count
== GET_MODE_BITSIZE (result_mode
) - 1
6081 && XEXP (varop
, 1) == constm1_rtx
6082 && significant_bits (XEXP (varop
, 0), result_mode
) == 1
6083 && merge_outer_ops (&outer_op
, &outer_const
, XOR
,
6084 (HOST_WIDE_INT
) 1, result_mode
,
6088 varop
= XEXP (varop
, 0);
6092 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
6093 significant in BAR are those being shifted out and those
6094 bits are known zero in FOO, we can replace the PLUS with FOO.
6095 Similarly in the other operand order. This code occurs when
6096 we are computing the size of a variable-size array. */
6098 if ((code
== ASHIFTRT
|| code
== LSHIFTRT
)
6099 && count
< HOST_BITS_PER_WIDE_INT
6100 && significant_bits (XEXP (varop
, 1), result_mode
) >> count
== 0
6101 && (significant_bits (XEXP (varop
, 1), result_mode
)
6102 & significant_bits (XEXP (varop
, 0), result_mode
)) == 0)
6104 varop
= XEXP (varop
, 0);
6107 else if ((code
== ASHIFTRT
|| code
== LSHIFTRT
)
6108 && count
< HOST_BITS_PER_WIDE_INT
6109 && 0 == (significant_bits (XEXP (varop
, 0), result_mode
)
6111 && 0 == (significant_bits (XEXP (varop
, 0), result_mode
)
6112 & significant_bits (XEXP (varop
, 1),
6115 varop
= XEXP (varop
, 1);
6119 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
6121 && GET_CODE (XEXP (varop
, 1)) == CONST_INT
6122 && (new = simplify_binary_operation (ASHIFT
, result_mode
,
6124 GEN_INT (count
))) != 0
6125 && merge_outer_ops (&outer_op
, &outer_const
, PLUS
,
6126 INTVAL (new), result_mode
, &complement_p
))
6128 varop
= XEXP (varop
, 0);
6134 /* If we have (xshiftrt (minus (ashiftrt X C)) X) C)
6135 with C the size of VAROP - 1 and the shift is logical if
6136 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
6137 we have a (gt X 0) operation. If the shift is arithmetic with
6138 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
6139 we have a (neg (gt X 0)) operation. */
6141 if (GET_CODE (XEXP (varop
, 0)) == ASHIFTRT
6142 && count
== GET_MODE_BITSIZE (GET_MODE (varop
)) - 1
6143 && (STORE_FLAG_VALUE
== 1 || STORE_FLAG_VALUE
== -1)
6144 && (code
== LSHIFTRT
|| code
== ASHIFTRT
)
6145 && GET_CODE (XEXP (XEXP (varop
, 0), 1)) == CONST_INT
6146 && INTVAL (XEXP (XEXP (varop
, 0), 1)) == count
6147 && rtx_equal_p (XEXP (XEXP (varop
, 0), 0), XEXP (varop
, 1)))
6150 varop
= gen_rtx_combine (GT
, GET_MODE (varop
), XEXP (varop
, 1),
6153 if (STORE_FLAG_VALUE
== 1 ? code
== ASHIFTRT
: code
== LSHIFTRT
)
6154 varop
= gen_rtx_combine (NEG
, GET_MODE (varop
), varop
);
6164 /* We need to determine what mode to do the shift in. If the shift is
6165 a ASHIFTRT or ROTATE, we must always do it in the mode it was originally
6166 done in. Otherwise, we can do it in MODE, the widest mode encountered.
6167 The code we care about is that of the shift that will actually be done,
6168 not the shift that was originally requested. */
6169 shift_mode
= (code
== ASHIFTRT
|| code
== ROTATE
? result_mode
: mode
);
6171 /* We have now finished analyzing the shift. The result should be
6172 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
6173 OUTER_OP is non-NIL, it is an operation that needs to be applied
6174 to the result of the shift. OUTER_CONST is the relevant constant,
6175 but we must turn off all bits turned off in the shift.
6177 If we were passed a value for X, see if we can use any pieces of
6178 it. If not, make new rtx. */
6180 if (x
&& GET_RTX_CLASS (GET_CODE (x
)) == '2'
6181 && GET_CODE (XEXP (x
, 1)) == CONST_INT
6182 && INTVAL (XEXP (x
, 1)) == count
)
6183 const_rtx
= XEXP (x
, 1);
6185 const_rtx
= GEN_INT (count
);
6187 if (x
&& GET_CODE (XEXP (x
, 0)) == SUBREG
6188 && GET_MODE (XEXP (x
, 0)) == shift_mode
6189 && SUBREG_REG (XEXP (x
, 0)) == varop
)
6190 varop
= XEXP (x
, 0);
6191 else if (GET_MODE (varop
) != shift_mode
)
6192 varop
= gen_lowpart_for_combine (shift_mode
, varop
);
6194 /* If we can't make the SUBREG, try to return what we were given. */
6195 if (GET_CODE (varop
) == CLOBBER
)
6196 return x
? x
: varop
;
6198 new = simplify_binary_operation (code
, shift_mode
, varop
, const_rtx
);
6203 if (x
== 0 || GET_CODE (x
) != code
|| GET_MODE (x
) != shift_mode
)
6204 x
= gen_rtx_combine (code
, shift_mode
, varop
, const_rtx
);
6206 SUBST (XEXP (x
, 0), varop
);
6207 SUBST (XEXP (x
, 1), const_rtx
);
6210 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
6211 turn off all the bits that the shift would have turned off. */
6212 if (orig_code
== LSHIFTRT
&& result_mode
!= shift_mode
)
6213 x
= simplify_and_const_int (NULL_RTX
, shift_mode
, x
,
6214 GET_MODE_MASK (result_mode
) >> orig_count
);
6216 /* Do the remainder of the processing in RESULT_MODE. */
6217 x
= gen_lowpart_for_combine (result_mode
, x
);
6219 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
6222 x
= gen_unary (NOT
, result_mode
, x
);
6224 if (outer_op
!= NIL
)
6226 if (GET_MODE_BITSIZE (result_mode
) < HOST_BITS_PER_WIDE_INT
)
6227 outer_const
&= GET_MODE_MASK (result_mode
);
6229 if (outer_op
== AND
)
6230 x
= simplify_and_const_int (NULL_RTX
, result_mode
, x
, outer_const
);
6231 else if (outer_op
== SET
)
6232 /* This means that we have determined that the result is
6233 equivalent to a constant. This should be rare. */
6234 x
= GEN_INT (outer_const
);
6235 else if (GET_RTX_CLASS (outer_op
) == '1')
6236 x
= gen_unary (outer_op
, result_mode
, x
);
6238 x
= gen_binary (outer_op
, result_mode
, x
, GEN_INT (outer_const
));
6244 /* Like recog, but we receive the address of a pointer to a new pattern.
6245 We try to match the rtx that the pointer points to.
6246 If that fails, we may try to modify or replace the pattern,
6247 storing the replacement into the same pointer object.
6249 Modifications include deletion or addition of CLOBBERs.
6251 PNOTES is a pointer to a location where any REG_UNUSED notes added for
6252 the CLOBBERs are placed.
6254 The value is the final insn code from the pattern ultimately matched,
6258 recog_for_combine (pnewpat
, insn
, pnotes
)
/* NOTE(review): this chunk was extracted with the original file's line
   numbers fused into the code and with several lines dropped outright
   (the fused numbers jump, e.g. 6254 -> 6258), so the parameter
   declarations, braces and some statements are not visible here.  The
   comments below annotate only what can be seen.  */
6263 register rtx pat
= *pnewpat
;
/* INSN_CODE_NUMBER is the value we return: the code recog assigns to
   the matched pattern, presumably negative on failure -- TODO confirm
   against recog's contract.  */
6264 int insn_code_number
;
6265 int num_clobbers_to_add
= 0;
6269 /* Is the result of combination a valid instruction? */
6270 insn_code_number
= recog (pat
, insn
, &num_clobbers_to_add
);
6272 /* If it isn't, there is the possibility that we previously had an insn
6273 that clobbered some register as a side effect, but the combined
6274 insn doesn't need to do that. So try once more without the clobbers
6275 unless this represents an ASM insn. */
6277 if (insn_code_number
< 0 && ! check_asm_operands (pat
)
6278 && GET_CODE (pat
) == PARALLEL
)
/* Compact the PARALLEL in place: slide each non-CLOBBER element down
   over the CLOBBERs.  SUBST/SUBST_INT record the change in the undo
   buffer so it can be reverted if this attempt fails as well.  */
6282 for (pos
= 0, i
= 0; i
< XVECLEN (pat
, 0); i
++)
6283 if (GET_CODE (XVECEXP (pat
, 0, i
)) != CLOBBER
)
6286 SUBST (XVECEXP (pat
, 0, pos
), XVECEXP (pat
, 0, i
));
/* NOTE(review): the increment of POS for each kept element is among
   the dropped lines -- verify against the original source.  */
6290 SUBST_INT (XVECLEN (pat
, 0), pos
);
/* A PARALLEL reduced to a single element can be matched as that
   element alone.  NOTE(review): the guarding test (presumably
   pos == 1) is not visible in this extraction.  */
6293 pat
= XVECEXP (pat
, 0, 0);
6295 insn_code_number
= recog (pat
, insn
, &num_clobbers_to_add
);
6298 /* If we had any clobbers to add, make a new pattern that contains
6299 them. Then check to make sure that all of them are dead. */
6300 if (num_clobbers_to_add
)
/* Build a PARALLEL large enough for the existing element(s) plus the
   extra CLOBBERs that recog asked for.  */
6302 rtx newpat
= gen_rtx (PARALLEL
, VOIDmode
,
6303 gen_rtvec (GET_CODE (pat
) == PARALLEL
6304 ? XVECLEN (pat
, 0) + num_clobbers_to_add
6305 : num_clobbers_to_add
+ 1));
6307 if (GET_CODE (pat
) == PARALLEL
)
6308 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
6309 XVECEXP (newpat
, 0, i
) = XVECEXP (pat
, 0, i
);
6311 XVECEXP (newpat
, 0, 0) = pat
;
/* Let the machine description add the CLOBBER expressions this insn
   code requires.  */
6313 add_clobbers (newpat
, insn_code_number
);
/* Walk the newly added CLOBBERs (they occupy the tail of NEWPAT) and
   accumulate a REG_UNUSED note for each clobbered register, to be
   handed back through PNOTES.  NOTE(review): the failure path taken
   when a clobbered register is NOT dead at INSN is among the dropped
   lines -- confirm against the original.  */
6315 for (i
= XVECLEN (newpat
, 0) - num_clobbers_to_add
;
6316 i
< XVECLEN (newpat
, 0); i
++)
6318 if (GET_CODE (XEXP (XVECEXP (newpat
, 0, i
), 0)) == REG
6319 && ! reg_dead_at_p (XEXP (XVECEXP (newpat
, 0, i
), 0), insn
))
6321 notes
= gen_rtx (EXPR_LIST
, REG_UNUSED
,
6322 XEXP (XVECEXP (newpat
, 0, i
), 0), notes
);
/* NOTE(review): the stores of the final pattern and note list back
   through PNEWPAT and PNOTES are among the dropped lines.  */
6330 return insn_code_number
;
6333 /* Like gen_lowpart but for use by combine. In combine it is not possible
6334 to create any new pseudoregs. However, it is safe to create
6335 invalid memory addresses, because combine will try to recognize
6336 them and all they will do is make the combine attempt fail.
6338 If for some reason this cannot do its job, an rtx
6339 (clobber (const_int 0)) is returned.
6340 An insn containing that will not be recognized. */
6345 gen_lowpart_for_combine (mode
, x
)
6346 enum machine_mode mode
;
/* NOTE(review): the extraction fused the original file's line numbers
   into the code and dropped lines (numbering jumps, e.g. 6346 -> 6351),
   so the declaration of X, the local variables (result, new, word) and
   most braces/early returns are not visible.  Comments annotate only
   what can be seen.  */
6351 if (GET_MODE (x
) == mode
)
/* Combine never builds a lowpart wider than a word; give up with the
   (clobber (const_int 0)) sentinel described above.  */
6354 if (GET_MODE_SIZE (mode
) > UNITS_PER_WORD
)
6355 return gen_rtx (CLOBBER
, GET_MODE (x
), const0_rtx
);
6357 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
6358 won't know what to do. So we will strip off the SUBREG here and
6359 process normally. */
6360 if (GET_CODE (x
) == SUBREG
&& GET_CODE (SUBREG_REG (x
)) == MEM
)
/* NOTE(review): the stripping assignment (presumably x = SUBREG_REG (x))
   is among the dropped lines.  */
6363 if (GET_MODE (x
) == mode
)
/* First try the generic shared helper.  NOTE(review): the early return
   of RESULT when it succeeds is not visible here.  */
6367 result
= gen_lowpart_common (mode
, x
);
6371 if (GET_CODE (x
) == MEM
)
6373 register int offset
= 0;
6376 /* Refuse to work on a volatile memory ref or one with a mode-dependent
6378 if (MEM_VOLATILE_P (x
) || mode_dependent_address_p (XEXP (x
, 0)))
6379 return gen_rtx (CLOBBER
, GET_MODE (x
), const0_rtx
);
6381 /* If we want to refer to something bigger than the original memref,
6382 generate a perverse subreg instead. That will force a reload
6383 of the original memref X. */
6384 if (GET_MODE_SIZE (GET_MODE (x
)) < GET_MODE_SIZE (mode
))
6385 return gen_rtx (SUBREG
, mode
, x
, 0);
/* Compute the byte offset of the low-order part; on big-endian word
   order it sits at the high-addressed end of the object.  */
6387 #if WORDS_BIG_ENDIAN
6388 offset
= (MAX (GET_MODE_SIZE (GET_MODE (x
)), UNITS_PER_WORD
)
6389 - MAX (GET_MODE_SIZE (mode
), UNITS_PER_WORD
));
6391 #if BYTES_BIG_ENDIAN
6392 /* Adjust the address so that the address-after-the-data
6394 offset
-= (MIN (UNITS_PER_WORD
, GET_MODE_SIZE (mode
))
6395 - MIN (UNITS_PER_WORD
, GET_MODE_SIZE (GET_MODE (x
))));
/* Build the narrowed MEM at the adjusted address and carry over the
   attribute bits of the original memory reference.  */
6397 new = gen_rtx (MEM
, mode
, plus_constant (XEXP (x
, 0), offset
));
6398 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x
);
6399 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x
);
6400 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x
);
6404 /* If X is a comparison operator, rewrite it in a new mode. This
6405 probably won't match, but may allow further simplifications. */
6406 else if (GET_RTX_CLASS (GET_CODE (x
)) == '<')
6407 return gen_rtx_combine (GET_CODE (x
), mode
, XEXP (x
, 0), XEXP (x
, 1));
6409 /* If we couldn't simplify X any other way, just enclose it in a
6410 SUBREG. Normally, this SUBREG won't match, but some patterns may
6411 include an explicit SUBREG or we may simplify it further in combine. */
/* WORD selects which word of X holds the requested lowpart when word
   order is big-endian.  NOTE(review): the default initialization of
   WORD (presumably to 0) is among the dropped lines.  */
6416 if (WORDS_BIG_ENDIAN
&& GET_MODE_SIZE (GET_MODE (x
)) > UNITS_PER_WORD
)
6417 word
= ((GET_MODE_SIZE (GET_MODE (x
))
6418 - MAX (GET_MODE_SIZE (mode
), UNITS_PER_WORD
))
6420 return gen_rtx (SUBREG
, mode
, x
, word
);
6424 /* Make an rtx expression. This is a subset of gen_rtx and only supports
6425 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
6427 If the identical expression was previously in the insn (in the undobuf),
6428 it will be returned. Only if it is not found will a new expression
6433 gen_rtx_combine (va_alist
)
/* NOTE(review): this is the pre-ANSI varargs calling style
   (va_alist / va_arg).  The extraction dropped the va_dcl line, the
   local declarations (p, code, n_args, fmt, args, i, j, rt) and
   several statements (the fused line numbers jump, e.g. 6438 -> 6446),
   so only the skeleton is visible below.  */
6438 enum machine_mode mode
;
/* Pull the rtx code and machine mode off the argument list, then look
   up the operand count and format string for that code.  */
6446 code
= va_arg (p
, enum rtx_code
);
6447 mode
= va_arg (p
, enum machine_mode
);
6448 n_args
= GET_RTX_LENGTH (code
);
6449 fmt
= GET_RTX_FORMAT (code
);
/* Only codes with 1, 2 or 3 operands are supported.  NOTE(review): the
   handling of the out-of-range case is among the dropped lines.  */
6451 if (n_args
== 0 || n_args
> 3)
6454 /* Get each arg and verify that it is supposed to be an expression. */
6455 for (j
= 0; j
< n_args
; j
++)
6460 args
[j
] = va_arg (p
, rtx
);
6463 /* See if this is in undobuf. Be sure we don't use objects that came
6464 from another insn; this could produce circular rtl structures. */
6466 for (i
= previous_num_undos
; i
< undobuf
.num_undo
; i
++)
6467 if (!undobuf
.undo
[i
].is_int
6468 && GET_CODE (undobuf
.undo
[i
].old_contents
) == code
6469 && GET_MODE (undobuf
.undo
[i
].old_contents
) == mode
)
/* Every operand must also match before the old rtx can be reused.  */
6471 for (j
= 0; j
< n_args
; j
++)
6472 if (XEXP (undobuf
.undo
[i
].old_contents
, j
) != args
[j
])
/* NOTE(review): the break/guard that ties this return to a full
   operand match is among the dropped lines.  */
6476 return undobuf
.undo
[i
].old_contents
;
6479 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
6480 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
6481 rt
= rtx_alloc (code
);
6482 PUT_MODE (rt
, mode
);
6483 XEXP (rt
, 0) = args
[0];
/* NOTE(review): the n_args >= 2 / n_args >= 3 guards around the next
   two stores, and the final return of RT, were dropped by the
   extraction.  */
6486 XEXP (rt
, 1) = args
[1];
6488 XEXP (rt
, 2) = args
[2];
6493 /* These routines make binary and unary operations by first seeing if they
6494 fold; if not, a new expression is allocated. */
6497 gen_binary (code
, mode
, op0
, op1
)
6499 enum machine_mode mode
;
/* NOTE(review): the extraction dropped the declarations of CODE, OP0
   and OP1, the RESULT local, braces, and the "return RESULT if it
   folded" statements (the fused line numbers jump, e.g. 6509 -> 6512).
   Comments annotate only what is visible.  */
/* A comparison rtx carries the mode of its operands rather than MODE,
   so fold comparisons with the operand mode, taken from OP1 when OP0
   has VOIDmode (e.g. a constant).  */
6504 if (GET_RTX_CLASS (code
) == '<')
6506 enum machine_mode op_mode
= GET_MODE (op0
);
6507 if (op_mode
== VOIDmode
)
6508 op_mode
= GET_MODE (op1
);
6509 result
= simplify_relational_operation (code
, op_mode
, op0
, op1
);
6512 result
= simplify_binary_operation (code
, mode
, op0
, op1
);
6517 /* Put complex operands first and constants second. */
/* For commutative codes, canonicalize the operand order before
   building the rtx so equivalent expressions take an identical form
   (complex/object operands first, constants last).  */
6518 if (GET_RTX_CLASS (code
) == 'c'
6519 && ((CONSTANT_P (op0
) && GET_CODE (op1
) != CONST_INT
)
6520 || (GET_RTX_CLASS (GET_CODE (op0
)) == 'o'
6521 && GET_RTX_CLASS (GET_CODE (op1
)) != 'o')
6522 || (GET_CODE (op0
) == SUBREG
6523 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0
))) == 'o'
6524 && GET_RTX_CLASS (GET_CODE (op1
)) != 'o')))
6525 return gen_rtx_combine (code
, mode
, op1
, op0
);
6527 return gen_rtx_combine (code
, mode
, op0
, op1
);
6531 gen_unary (code
, mode
, op0
)
6533 enum machine_mode mode
;
/* NOTE(review): the extraction dropped the return type, the
   declarations of CODE and OP0, the braces, and the "return RESULT if
   it folded" lines (the fused numbers jump, 6536 -> 6541).  */
/* Try constant folding first; the trailing MODE argument is passed to
   simplify_unary_operation as the mode of OP0.  Otherwise allocate a
   fresh (or reused, via gen_rtx_combine) unary rtx.  */
6536 rtx result
= simplify_unary_operation (code
, mode
, op0
, mode
);
6541 return gen_rtx_combine (code
, mode
, op0
);
6544 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
6545 comparison code that will be tested.
6547 The result is a possibly different comparison code to use. *POP0 and
6548 *POP1 may be updated.
6550 It is possible that we might detect that a comparison is either always
6551 true or always false. However, we do not perform general constant
6552 folding in combine, so this knowledge isn't useful. Such tautologies
6553 should have been detected earlier. Hence we ignore all such cases. */
6555 static enum rtx_code
6556 simplify_comparison (code
, pop0
, pop1
)
6565 enum machine_mode mode
, tmode
;
6567 /* Try a few ways of applying the same transformation to both operands. */
6570 /* If both operands are the same constant shift, see if we can ignore the
6571 shift. We can if the shift is a rotate or if the bits shifted out of
6572 this shift are not significant for either input and if the type of
6573 comparison is compatible with the shift. */
6574 if (GET_CODE (op0
) == GET_CODE (op1
)
6575 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
6576 && ((GET_CODE (op0
) == ROTATE
&& (code
== NE
|| code
== EQ
))
6577 || ((GET_CODE (op0
) == LSHIFTRT
6578 || GET_CODE (op0
) == ASHIFT
|| GET_CODE (op0
) == LSHIFT
)
6579 && (code
!= GT
&& code
!= LT
&& code
!= GE
&& code
!= LE
))
6580 || (GET_CODE (op0
) == ASHIFTRT
6581 && (code
!= GTU
&& code
!= LTU
6582 && code
!= GEU
&& code
!= GEU
)))
6583 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
6584 && INTVAL (XEXP (op0
, 1)) >= 0
6585 && INTVAL (XEXP (op0
, 1)) < HOST_BITS_PER_WIDE_INT
6586 && XEXP (op0
, 1) == XEXP (op1
, 1))
6588 enum machine_mode mode
= GET_MODE (op0
);
6589 unsigned HOST_WIDE_INT mask
= GET_MODE_MASK (mode
);
6590 int shift_count
= INTVAL (XEXP (op0
, 1));
6592 if (GET_CODE (op0
) == LSHIFTRT
|| GET_CODE (op0
) == ASHIFTRT
)
6593 mask
&= (mask
>> shift_count
) << shift_count
;
6594 else if (GET_CODE (op0
) == ASHIFT
|| GET_CODE (op0
) == LSHIFT
)
6595 mask
= (mask
& (mask
<< shift_count
)) >> shift_count
;
6597 if ((significant_bits (XEXP (op0
, 0), mode
) & ~ mask
) == 0
6598 && (significant_bits (XEXP (op1
, 0), mode
) & ~ mask
) == 0)
6599 op0
= XEXP (op0
, 0), op1
= XEXP (op1
, 0);
6604 /* If both operands are AND's of a paradoxical SUBREG by constant, the
6605 SUBREGs are of the same mode, and, in both cases, the AND would
6606 be redundant if the comparison was done in the narrower mode,
6607 do the comparison in the narrower mode (e.g., we are AND'ing with 1
6608 and the operand's significant bits are 0xffffff01; in that case if
6609 we only care about QImode, we don't need the AND). This case occurs
6610 if the output mode of an scc insn is not SImode and
6611 STORE_FLAG_VALUE == 1 (e.g., the 386). */
6613 else if (GET_CODE (op0
) == AND
&& GET_CODE (op1
) == AND
6614 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
6615 && GET_CODE (XEXP (op1
, 1)) == CONST_INT
6616 && GET_CODE (XEXP (op0
, 0)) == SUBREG
6617 && GET_CODE (XEXP (op1
, 0)) == SUBREG
6618 && (GET_MODE_SIZE (GET_MODE (XEXP (op0
, 0)))
6619 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0
, 0)))))
6620 && (GET_MODE (SUBREG_REG (XEXP (op0
, 0)))
6621 == GET_MODE (SUBREG_REG (XEXP (op1
, 0))))
6622 && (significant_bits (SUBREG_REG (XEXP (op0
, 0)),
6623 GET_MODE (SUBREG_REG (XEXP (op0
, 0))))
6624 & ~ INTVAL (XEXP (op0
, 1))) == 0
6625 && (significant_bits (SUBREG_REG (XEXP (op1
, 0)),
6626 GET_MODE (SUBREG_REG (XEXP (op1
, 0))))
6627 & ~ INTVAL (XEXP (op1
, 1))) == 0)
6629 op0
= SUBREG_REG (XEXP (op0
, 0));
6630 op1
= SUBREG_REG (XEXP (op1
, 0));
6632 /* the resulting comparison is always unsigned since we masked off
6633 the original sign bit. */
6634 code
= unsigned_condition (code
);
6640 /* If the first operand is a constant, swap the operands and adjust the
6641 comparison code appropriately. */
6642 if (CONSTANT_P (op0
))
6644 tem
= op0
, op0
= op1
, op1
= tem
;
6645 code
= swap_condition (code
);
6648 /* We now enter a loop during which we will try to simplify the comparison.
6649 For the most part, we only are concerned with comparisons with zero,
6650 but some things may really be comparisons with zero but not start
6651 out looking that way. */
6653 while (GET_CODE (op1
) == CONST_INT
)
6655 enum machine_mode mode
= GET_MODE (op0
);
6656 int mode_width
= GET_MODE_BITSIZE (mode
);
6657 unsigned HOST_WIDE_INT mask
= GET_MODE_MASK (mode
);
6658 int equality_comparison_p
;
6659 int sign_bit_comparison_p
;
6660 int unsigned_comparison_p
;
6661 HOST_WIDE_INT const_op
;
6663 /* We only want to handle integral modes. This catches VOIDmode,
6664 CCmode, and the floating-point modes. An exception is that we
6665 can handle VOIDmode if OP0 is a COMPARE or a comparison
6668 if (GET_MODE_CLASS (mode
) != MODE_INT
6669 && ! (mode
== VOIDmode
6670 && (GET_CODE (op0
) == COMPARE
6671 || GET_RTX_CLASS (GET_CODE (op0
)) == '<')))
6674 /* Get the constant we are comparing against and turn off all bits
6675 not on in our mode. */
6676 const_op
= INTVAL (op1
);
6677 if (mode_width
<= HOST_BITS_PER_WIDE_INT
)
6680 /* If we are comparing against a constant power of two and the value
6681 being compared has only that single significant bit (e.g., it was
6682 `and'ed with that bit), we can replace this with a comparison
6685 && (code
== EQ
|| code
== NE
|| code
== GE
|| code
== GEU
6686 || code
== LT
|| code
== LTU
)
6687 && mode_width
<= HOST_BITS_PER_WIDE_INT
6688 && exact_log2 (const_op
) >= 0
6689 && significant_bits (op0
, mode
) == const_op
)
6691 code
= (code
== EQ
|| code
== GE
|| code
== GEU
? NE
: EQ
);
6692 op1
= const0_rtx
, const_op
= 0;
6695 /* Do some canonicalizations based on the comparison code. We prefer
6696 comparisons against zero and then prefer equality comparisons.
6697 If we can reduce the size of a constant, we will do that too. */
6702 /* < C is equivalent to <= (C - 1) */
6706 op1
= GEN_INT (const_op
);
6708 /* ... fall through to LE case below. */
6714 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
6718 op1
= GEN_INT (const_op
);
6722 /* If we are doing a <= 0 comparison on a value known to have
6723 a zero sign bit, we can replace this with == 0. */
6724 else if (const_op
== 0
6725 && mode_width
<= HOST_BITS_PER_WIDE_INT
6726 && (significant_bits (op0
, mode
)
6727 & ((HOST_WIDE_INT
) 1 << (mode_width
- 1))) == 0)
6732 /* >= C is equivalent to > (C - 1). */
6736 op1
= GEN_INT (const_op
);
6738 /* ... fall through to GT below. */
6744 /* > C is equivalent to >= (C + 1); we do this for C < 0*/
6748 op1
= GEN_INT (const_op
);
6752 /* If we are doing a > 0 comparison on a value known to have
6753 a zero sign bit, we can replace this with != 0. */
6754 else if (const_op
== 0
6755 && mode_width
<= HOST_BITS_PER_WIDE_INT
6756 && (significant_bits (op0
, mode
)
6757 & ((HOST_WIDE_INT
) 1 << (mode_width
- 1))) == 0)
6762 /* < C is equivalent to <= (C - 1). */
6766 op1
= GEN_INT (const_op
);
6768 /* ... fall through ... */
6774 /* unsigned <= 0 is equivalent to == 0 */
6780 /* >= C is equivalent to < (C - 1). */
6784 op1
= GEN_INT (const_op
);
6786 /* ... fall through ... */
6792 /* unsigned > 0 is equivalent to != 0 */
6798 /* Compute some predicates to simplify code below. */
6800 equality_comparison_p
= (code
== EQ
|| code
== NE
);
6801 sign_bit_comparison_p
= ((code
== LT
|| code
== GE
) && const_op
== 0);
6802 unsigned_comparison_p
= (code
== LTU
|| code
== LEU
|| code
== GTU
6805 /* Now try cases based on the opcode of OP0. If none of the cases
6806 does a "continue", we exit this loop immediately after the
6809 switch (GET_CODE (op0
))
6812 /* If we are extracting a single bit from a variable position in
6813 a constant that has only a single bit set and are comparing it
6814 with zero, we can convert this into an equality comparison
6815 between the position and the location of the single bit. We can't
6816 do this if bit endian and we don't have an extzv since we then
6817 can't know what mode to use for the endianness adjustment. */
6819 #if ! BITS_BIG_ENDIAN || defined (HAVE_extzv)
6820 if (GET_CODE (XEXP (op0
, 0)) == CONST_INT
6821 && XEXP (op0
, 1) == const1_rtx
6822 && equality_comparison_p
&& const_op
== 0
6823 && (i
= exact_log2 (INTVAL (XEXP (op0
, 0)))) >= 0)
6826 i
= (GET_MODE_BITSIZE
6827 (insn_operand_mode
[(int) CODE_FOR_extzv
][1]) - 1 - i
);
6830 op0
= XEXP (op0
, 2);
6834 /* Result is nonzero iff shift count is equal to I. */
6835 code
= reverse_condition (code
);
6840 /* ... fall through ... */
6843 tem
= expand_compound_operation (op0
);
6852 /* If testing for equality, we can take the NOT of the constant. */
6853 if (equality_comparison_p
6854 && (tem
= simplify_unary_operation (NOT
, mode
, op1
, mode
)) != 0)
6856 op0
= XEXP (op0
, 0);
6861 /* If just looking at the sign bit, reverse the sense of the
6863 if (sign_bit_comparison_p
)
6865 op0
= XEXP (op0
, 0);
6866 code
= (code
== GE
? LT
: GE
);
6872 /* If testing for equality, we can take the NEG of the constant. */
6873 if (equality_comparison_p
6874 && (tem
= simplify_unary_operation (NEG
, mode
, op1
, mode
)) != 0)
6876 op0
= XEXP (op0
, 0);
6881 /* The remaining cases only apply to comparisons with zero. */
6885 /* When X is ABS or is known positive,
6886 (neg X) is < 0 if and only if X != 0. */
6888 if (sign_bit_comparison_p
6889 && (GET_CODE (XEXP (op0
, 0)) == ABS
6890 || (mode_width
<= HOST_BITS_PER_WIDE_INT
6891 && (significant_bits (XEXP (op0
, 0), mode
)
6892 & ((HOST_WIDE_INT
) 1 << (mode_width
- 1))) == 0)))
6894 op0
= XEXP (op0
, 0);
6895 code
= (code
== LT
? NE
: EQ
);
6899 /* If we have NEG of something that is the result of a
6900 SIGN_EXTEND, SIGN_EXTRACT, or ASHIFTRT, we know that the
6901 two high-order bits must be the same and hence that
6902 "(-a) < 0" is equivalent to "a > 0". Otherwise, we can't
6904 if (GET_CODE (XEXP (op0
, 0)) == SIGN_EXTEND
6905 || (GET_CODE (XEXP (op0
, 0)) == SIGN_EXTRACT
6906 && GET_CODE (XEXP (XEXP (op0
, 0), 1)) == CONST_INT
6907 && (INTVAL (XEXP (XEXP (op0
, 0), 1))
6908 < GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (op0
, 0), 0)))))
6909 || (GET_CODE (XEXP (op0
, 0)) == ASHIFTRT
6910 && GET_CODE (XEXP (XEXP (op0
, 0), 1)) == CONST_INT
6911 && XEXP (XEXP (op0
, 0), 1) != const0_rtx
)
6912 || ((tem
= get_last_value (XEXP (op0
, 0))) != 0
6913 && (GET_CODE (tem
) == SIGN_EXTEND
6914 || (GET_CODE (tem
) == SIGN_EXTRACT
6915 && GET_CODE (XEXP (tem
, 1)) == CONST_INT
6916 && (INTVAL (XEXP (tem
, 1))
6917 < GET_MODE_BITSIZE (GET_MODE (XEXP (tem
, 0)))))
6918 || (GET_CODE (tem
) == ASHIFTRT
6919 && GET_CODE (XEXP (tem
, 1)) == CONST_INT
6920 && XEXP (tem
, 1) != const0_rtx
))))
6922 op0
= XEXP (op0
, 0);
6923 code
= swap_condition (code
);
6929 /* If we are testing equality and our count is a constant, we
6930 can perform the inverse operation on our RHS. */
6931 if (equality_comparison_p
&& GET_CODE (XEXP (op0
, 1)) == CONST_INT
6932 && (tem
= simplify_binary_operation (ROTATERT
, mode
,
6933 op1
, XEXP (op0
, 1))) != 0)
6935 op0
= XEXP (op0
, 0);
6940 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
6941 a particular bit. Convert it to an AND of a constant of that
6942 bit. This will be converted into a ZERO_EXTRACT. */
6943 if (const_op
== 0 && sign_bit_comparison_p
6944 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
6945 && mode_width
<= HOST_BITS_PER_WIDE_INT
)
6947 op0
= simplify_and_const_int (NULL_RTX
, mode
, XEXP (op0
, 0),
6950 - INTVAL (XEXP (op0
, 1)))));
6951 code
= (code
== LT
? NE
: EQ
);
6955 /* ... fall through ... */
6958 /* ABS is ignorable inside an equality comparison with zero. */
6959 if (const_op
== 0 && equality_comparison_p
)
6961 op0
= XEXP (op0
, 0);
6968 /* Can simplify (compare (zero/sign_extend FOO) CONST)
6969 to (compare FOO CONST) if CONST fits in FOO's mode and we
6970 are either testing inequality or have an unsigned comparison
6971 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
6972 if (! unsigned_comparison_p
6973 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
6974 <= HOST_BITS_PER_WIDE_INT
)
6975 && ((unsigned HOST_WIDE_INT
) const_op
6976 < (((HOST_WIDE_INT
) 1
6977 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0))) - 1)))))
6979 op0
= XEXP (op0
, 0);
6985 /* If the inner mode is smaller and we are extracting the low
6986 part, we can treat the SUBREG as if it were a ZERO_EXTEND. */
6987 if (! subreg_lowpart_p (op0
)
6988 || GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0
))) >= mode_width
)
6991 /* ... fall through ... */
6994 if ((unsigned_comparison_p
|| equality_comparison_p
)
6995 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0
, 0)))
6996 <= HOST_BITS_PER_WIDE_INT
)
6997 && ((unsigned HOST_WIDE_INT
) const_op
6998 < GET_MODE_MASK (GET_MODE (XEXP (op0
, 0)))))
7000 op0
= XEXP (op0
, 0);
7006 /* (eq (plus X C1) C2) -> (eq X (minus C2 C1)). We can only do
7007 this for equality comparisons due to pathological cases involving
7009 if (equality_comparison_p
&& GET_CODE (XEXP (op0
, 1)) == CONST_INT
7010 && (tem
= simplify_binary_operation (MINUS
, mode
, op1
,
7011 XEXP (op0
, 1))) != 0)
7013 op0
= XEXP (op0
, 0);
7018 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
7019 if (const_op
== 0 && XEXP (op0
, 1) == constm1_rtx
7020 && GET_CODE (XEXP (op0
, 0)) == ABS
&& sign_bit_comparison_p
)
7022 op0
= XEXP (XEXP (op0
, 0), 0);
7023 code
= (code
== LT
? EQ
: NE
);
7029 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
7030 of bits in X minus 1, is one iff X > 0. */
7031 if (sign_bit_comparison_p
&& GET_CODE (XEXP (op0
, 0)) == ASHIFTRT
7032 && GET_CODE (XEXP (XEXP (op0
, 0), 1)) == CONST_INT
7033 && INTVAL (XEXP (XEXP (op0
, 0), 1)) == mode_width
- 1
7034 && rtx_equal_p (XEXP (XEXP (op0
, 0), 0), XEXP (op0
, 1)))
7036 op0
= XEXP (op0
, 1);
7037 code
= (code
== GE
? LE
: GT
);
7043 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
7044 if C is zero or B is a constant. */
7045 if (equality_comparison_p
7046 && 0 != (tem
= simplify_binary_operation (XOR
, mode
,
7047 XEXP (op0
, 1), op1
)))
7049 op0
= XEXP (op0
, 0);
7056 case LT
: case LTU
: case LE
: case LEU
:
7057 case GT
: case GTU
: case GE
: case GEU
:
7058 /* We can't do anything if OP0 is a condition code value, rather
7059 than an actual data value. */
7062 || XEXP (op0
, 0) == cc0_rtx
7064 || GET_MODE_CLASS (GET_MODE (XEXP (op0
, 0))) == MODE_CC
)
7067 /* Get the two operands being compared. */
7068 if (GET_CODE (XEXP (op0
, 0)) == COMPARE
)
7069 tem
= XEXP (XEXP (op0
, 0), 0), tem1
= XEXP (XEXP (op0
, 0), 1);
7071 tem
= XEXP (op0
, 0), tem1
= XEXP (op0
, 1);
7073 /* Check for the cases where we simply want the result of the
7074 earlier test or the opposite of that result. */
7076 || (code
== EQ
&& reversible_comparison_p (op0
))
7077 || (GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
7078 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7079 && (STORE_FLAG_VALUE
7080 & (((HOST_WIDE_INT
) 1
7081 << (GET_MODE_BITSIZE (GET_MODE (op0
)) - 1))))
7083 || (code
== GE
&& reversible_comparison_p (op0
)))))
7085 code
= (code
== LT
|| code
== NE
7086 ? GET_CODE (op0
) : reverse_condition (GET_CODE (op0
)));
7087 op0
= tem
, op1
= tem1
;
7093 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
7095 if (sign_bit_comparison_p
&& GET_CODE (XEXP (op0
, 0)) == PLUS
7096 && XEXP (XEXP (op0
, 0), 1) == constm1_rtx
7097 && rtx_equal_p (XEXP (XEXP (op0
, 0), 0), XEXP (op0
, 1)))
7099 op0
= XEXP (op0
, 1);
7100 code
= (code
== GE
? GT
: LE
);
7106 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
7107 will be converted to a ZERO_EXTRACT later. */
7108 if (const_op
== 0 && equality_comparison_p
7109 && (GET_CODE (XEXP (op0
, 0)) == ASHIFT
7110 || GET_CODE (XEXP (op0
, 0)) == LSHIFT
)
7111 && XEXP (XEXP (op0
, 0), 0) == const1_rtx
)
7113 op0
= simplify_and_const_int
7114 (op0
, mode
, gen_rtx_combine (LSHIFTRT
, mode
,
7116 XEXP (XEXP (op0
, 0), 1)),
7121 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
7122 zero and X is a comparison and C1 and C2 describe only bits set
7123 in STORE_FLAG_VALUE, we can compare with X. */
7124 if (const_op
== 0 && equality_comparison_p
7125 && mode_width
<= HOST_BITS_PER_WIDE_INT
7126 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
7127 && GET_CODE (XEXP (op0
, 0)) == LSHIFTRT
7128 && GET_CODE (XEXP (XEXP (op0
, 0), 1)) == CONST_INT
7129 && INTVAL (XEXP (XEXP (op0
, 0), 1)) >= 0
7130 && INTVAL (XEXP (XEXP (op0
, 0), 1)) < HOST_BITS_PER_WIDE_INT
)
7132 mask
= ((INTVAL (XEXP (op0
, 1)) & GET_MODE_MASK (mode
))
7133 << INTVAL (XEXP (XEXP (op0
, 0), 1)));
7134 if ((~ STORE_FLAG_VALUE
& mask
) == 0
7135 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0
, 0), 0))) == '<'
7136 || ((tem
= get_last_value (XEXP (XEXP (op0
, 0), 0))) != 0
7137 && GET_RTX_CLASS (GET_CODE (tem
)) == '<')))
7139 op0
= XEXP (XEXP (op0
, 0), 0);
7144 /* If we are doing an equality comparison of an AND of a bit equal
7145 to the sign bit, replace this with a LT or GE comparison of
7146 the underlying value. */
7147 if (equality_comparison_p
7149 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
7150 && mode_width
<= HOST_BITS_PER_WIDE_INT
7151 && ((INTVAL (XEXP (op0
, 1)) & GET_MODE_MASK (mode
))
7152 == (HOST_WIDE_INT
) 1 << (mode_width
- 1)))
7154 op0
= XEXP (op0
, 0);
7155 code
= (code
== EQ
? GE
: LT
);
7159 /* If this AND operation is really a ZERO_EXTEND from a narrower
7160 mode, the constant fits within that mode, and this is either an
7161 equality or unsigned comparison, try to do this comparison in
7162 the narrower mode. */
7163 if ((equality_comparison_p
|| unsigned_comparison_p
)
7164 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
7165 && (i
= exact_log2 ((INTVAL (XEXP (op0
, 1))
7166 & GET_MODE_MASK (mode
))
7168 && const_op
>> i
== 0
7169 && (tmode
= mode_for_size (i
, MODE_INT
, 1)) != BLKmode
)
7171 op0
= gen_lowpart_for_combine (tmode
, XEXP (op0
, 0));
7178 /* If we have (compare (xshift FOO N) (const_int C)) and
7179 the high order N bits of FOO (N+1 if an inequality comparison)
7180 are not significant, we can do this by comparing FOO with C
7181 shifted right N bits so long as the low-order N bits of C are
7183 if (GET_CODE (XEXP (op0
, 1)) == CONST_INT
7184 && INTVAL (XEXP (op0
, 1)) >= 0
7185 && ((INTVAL (XEXP (op0
, 1)) + ! equality_comparison_p
)
7186 < HOST_BITS_PER_WIDE_INT
)
7188 & ~ (((HOST_WIDE_INT
) 1
7189 << INTVAL (XEXP (op0
, 1))) - 1)) == 0)
7190 && mode_width
<= HOST_BITS_PER_WIDE_INT
7191 && (significant_bits (XEXP (op0
, 0), mode
)
7192 & ~ (mask
>> (INTVAL (XEXP (op0
, 1))
7193 + ! equality_comparison_p
))) == 0)
7195 const_op
>>= INTVAL (XEXP (op0
, 1));
7196 op1
= GEN_INT (const_op
);
7197 op0
= XEXP (op0
, 0);
7201 /* If we are doing a sign bit comparison, it means we are testing
7202 a particular bit. Convert it to the appropriate AND. */
7203 if (sign_bit_comparison_p
&& GET_CODE (XEXP (op0
, 1)) == CONST_INT
7204 && mode_width
<= HOST_BITS_PER_WIDE_INT
)
7206 op0
= simplify_and_const_int (NULL_RTX
, mode
, XEXP (op0
, 0),
7209 - INTVAL (XEXP (op0
, 1)))));
7210 code
= (code
== LT
? NE
: EQ
);
7214 /* If this an equality comparison with zero and we are shifting
7215 the low bit to the sign bit, we can convert this to an AND of the
7217 if (const_op
== 0 && equality_comparison_p
7218 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
7219 && INTVAL (XEXP (op0
, 1)) == mode_width
- 1)
7221 op0
= simplify_and_const_int (NULL_RTX
, mode
, XEXP (op0
, 0),
7228 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
7229 do the comparison in a narrower mode. */
7230 if (! unsigned_comparison_p
7231 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
7232 && GET_CODE (XEXP (op0
, 0)) == ASHIFT
7233 && XEXP (op0
, 1) == XEXP (XEXP (op0
, 0), 1)
7234 && (tmode
= mode_for_size (mode_width
- INTVAL (XEXP (op0
, 1)),
7235 MODE_INT
, 1)) != VOIDmode
7236 && ((unsigned HOST_WIDE_INT
) const_op
<= GET_MODE_MASK (tmode
)
7237 || ((unsigned HOST_WIDE_INT
) - const_op
7238 <= GET_MODE_MASK (tmode
))))
7240 op0
= gen_lowpart_for_combine (tmode
, XEXP (XEXP (op0
, 0), 0));
7244 /* ... fall through ... */
7246 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
7247 the low order N bits of FOO are not significant, we can do this
7248 by comparing FOO with C shifted left N bits so long as no
7250 if (GET_CODE (XEXP (op0
, 1)) == CONST_INT
7251 && INTVAL (XEXP (op0
, 1)) >= 0
7252 && INTVAL (XEXP (op0
, 1)) < HOST_BITS_PER_WIDE_INT
7253 && mode_width
<= HOST_BITS_PER_WIDE_INT
7254 && (significant_bits (XEXP (op0
, 0), mode
)
7255 & (((HOST_WIDE_INT
) 1 << INTVAL (XEXP (op0
, 1))) - 1)) == 0
7257 || (floor_log2 (const_op
) + INTVAL (XEXP (op0
, 1))
7260 const_op
<<= INTVAL (XEXP (op0
, 1));
7261 op1
= GEN_INT (const_op
);
7262 op0
= XEXP (op0
, 0);
7266 /* If we are using this shift to extract just the sign bit, we
7267 can replace this with an LT or GE comparison. */
7269 && (equality_comparison_p
|| sign_bit_comparison_p
)
7270 && GET_CODE (XEXP (op0
, 1)) == CONST_INT
7271 && INTVAL (XEXP (op0
, 1)) == mode_width
- 1)
7273 op0
= XEXP (op0
, 0);
7274 code
= (code
== NE
|| code
== GT
? LT
: GE
);
7283 /* Now make any compound operations involved in this comparison. Then,
7284 check for an outmost SUBREG on OP0 that isn't doing anything or is
7285 paradoxical. The latter case can only occur when it is known that the
7286 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
7287 We can never remove a SUBREG for a non-equality comparison because the
7288 sign bit is in a different place in the underlying object. */
7290 op0
= make_compound_operation (op0
, op1
== const0_rtx
? COMPARE
: SET
);
7291 op1
= make_compound_operation (op1
, SET
);
7293 if (GET_CODE (op0
) == SUBREG
&& subreg_lowpart_p (op0
)
7294 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7295 && (code
== NE
|| code
== EQ
)
7296 && ((GET_MODE_SIZE (GET_MODE (op0
))
7297 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0
))))))
7299 op0
= SUBREG_REG (op0
);
7300 op1
= gen_lowpart_for_combine (GET_MODE (op0
), op1
);
7303 else if (GET_CODE (op0
) == SUBREG
&& subreg_lowpart_p (op0
)
7304 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_INT
7305 && (code
== NE
|| code
== EQ
)
7306 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
7307 && (significant_bits (SUBREG_REG (op0
), GET_MODE (SUBREG_REG (op0
)))
7308 & ~ GET_MODE_MASK (GET_MODE (op0
))) == 0
7309 && (tem
= gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0
)),
7311 (significant_bits (tem
, GET_MODE (SUBREG_REG (op0
)))
7312 & ~ GET_MODE_MASK (GET_MODE (op0
))) == 0))
7313 op0
= SUBREG_REG (op0
), op1
= tem
;
7315 /* We now do the opposite procedure: Some machines don't have compare
7316 insns in all modes. If OP0's mode is an integer mode smaller than a
7317 word and we can't do a compare in that mode, see if there is a larger
7318 mode for which we can do the compare and where the only significant
7319 bits in OP0 and OP1 are those in the narrower mode. We can do
7320 this if this is an equality comparison, in which case we can
7321 merely widen the operation, or if we are testing the sign bit, in
7322 which case we can explicitly put in the test. */
7324 mode
= GET_MODE (op0
);
7325 if (mode
!= VOIDmode
&& GET_MODE_CLASS (mode
) == MODE_INT
7326 && GET_MODE_SIZE (mode
) < UNITS_PER_WORD
7327 && cmp_optab
->handlers
[(int) mode
].insn_code
== CODE_FOR_nothing
)
7328 for (tmode
= GET_MODE_WIDER_MODE (mode
);
7330 && GET_MODE_BITSIZE (tmode
) <= HOST_BITS_PER_WIDE_INT
);
7331 tmode
= GET_MODE_WIDER_MODE (tmode
))
7332 if (cmp_optab
->handlers
[(int) tmode
].insn_code
!= CODE_FOR_nothing
7333 && (significant_bits (op0
, tmode
) & ~ GET_MODE_MASK (mode
)) == 0
7334 && (significant_bits (op1
, tmode
) & ~ GET_MODE_MASK (mode
)) == 0
7335 && (code
== EQ
|| code
== NE
7336 || (op1
== const0_rtx
&& (code
== LT
|| code
== GE
)
7337 && GET_MODE_BITSIZE (mode
) <= HOST_BITS_PER_WIDE_INT
)))
7339 op0
= gen_lowpart_for_combine (tmode
, op0
);
7340 op1
= gen_lowpart_for_combine (tmode
, op1
);
7342 if (code
== LT
|| code
== GE
)
7344 op0
= gen_binary (AND
, tmode
, op0
,
7345 GEN_INT ((HOST_WIDE_INT
) 1
7346 << (GET_MODE_BITSIZE (mode
) - 1)));
7347 code
= (code
== LT
) ? NE
: EQ
;
7359 /* Return 1 if we know that X, a comparison operation, is not operating
7360 on a floating-point value or is EQ or NE, meaning that we can safely
7364 reversible_comparison_p (x
)
7367 if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
7368 || GET_CODE (x
) == NE
|| GET_CODE (x
) == EQ
)
7371 switch (GET_MODE_CLASS (GET_MODE (XEXP (x
, 0))))
7377 x
= get_last_value (XEXP (x
, 0));
7378 return (x
&& GET_CODE (x
) == COMPARE
7379 && GET_MODE_CLASS (GET_MODE (XEXP (x
, 0))) == MODE_INT
);
7385 /* Utility function for following routine. Called when X is part of a value
7386 being stored into reg_last_set_value. Sets reg_last_set_table_tick
7387 for each register mentioned. Similar to mention_regs in cse.c */
7390 update_table_tick (x
)
7393 register enum rtx_code code
= GET_CODE (x
);
7394 register char *fmt
= GET_RTX_FORMAT (code
);
7399 int regno
= REGNO (x
);
7400 int endregno
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
7401 ? HARD_REGNO_NREGS (regno
, GET_MODE (x
)) : 1);
7403 for (i
= regno
; i
< endregno
; i
++)
7404 reg_last_set_table_tick
[i
] = label_tick
;
7409 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
7410 /* Note that we can't have an "E" in values stored; see
7411 get_last_value_validate. */
7413 update_table_tick (XEXP (x
, i
));
7416 /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
7417 are saying that the register is clobbered and we no longer know its
7418 value. If INSN is zero, don't update reg_last_set; this call is normally
7419 done with VALUE also zero to invalidate the register. */
7422 record_value_for_reg (reg
, insn
, value
)
7427 int regno
= REGNO (reg
);
7428 int endregno
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
7429 ? HARD_REGNO_NREGS (regno
, GET_MODE (reg
)) : 1);
7432 /* If VALUE contains REG and we have a previous value for REG, substitute
7433 the previous value. */
7434 if (value
&& insn
&& reg_overlap_mentioned_p (reg
, value
))
7438 /* Set things up so get_last_value is allowed to see anything set up to
7440 subst_low_cuid
= INSN_CUID (insn
);
7441 tem
= get_last_value (reg
);
7444 value
= replace_rtx (copy_rtx (value
), reg
, tem
);
7447 /* For each register modified, show we don't know its value, that
7448 its value has been updated, and that we don't know the location of
7449 the death of the register. */
7450 for (i
= regno
; i
< endregno
; i
++)
7453 reg_last_set
[i
] = insn
;
7454 reg_last_set_value
[i
] = 0;
7455 reg_last_death
[i
] = 0;
7458 /* Mark registers that are being referenced in this value. */
7460 update_table_tick (value
);
7462 /* Now update the status of each register being set.
7463 If someone is using this register in this block, set this register
7464 to invalid since we will get confused between the two lives in this
7465 basic block. This makes using this register always invalid. In cse, we
7466 scan the table to invalidate all entries using this register, but this
7467 is too much work for us. */
7469 for (i
= regno
; i
< endregno
; i
++)
7471 reg_last_set_label
[i
] = label_tick
;
7472 if (value
&& reg_last_set_table_tick
[i
] == label_tick
)
7473 reg_last_set_invalid
[i
] = 1;
7475 reg_last_set_invalid
[i
] = 0;
7478 /* The value being assigned might refer to X (like in "x++;"). In that
7479 case, we must replace it with (clobber (const_int 0)) to prevent
7481 if (value
&& ! get_last_value_validate (&value
,
7482 reg_last_set_label
[regno
], 0))
7484 value
= copy_rtx (value
);
7485 if (! get_last_value_validate (&value
, reg_last_set_label
[regno
], 1))
7489 /* For the main register being modified, update the value. */
7490 reg_last_set_value
[regno
] = value
;
7494 /* Used for communication between the following two routines. */
7495 static rtx record_dead_insn
;
7497 /* Called via note_stores from record_dead_and_set_regs to handle one
7498 SET or CLOBBER in an insn. */
7501 record_dead_and_set_regs_1 (dest
, setter
)
7504 if (GET_CODE (dest
) == REG
)
7506 /* If we are setting the whole register, we know its value. Otherwise
7507 show that we don't know the value. We can handle SUBREG in
7509 if (GET_CODE (setter
) == SET
&& dest
== SET_DEST (setter
))
7510 record_value_for_reg (dest
, record_dead_insn
, SET_SRC (setter
));
7511 else if (GET_CODE (setter
) == SET
7512 && GET_CODE (SET_DEST (setter
)) == SUBREG
7513 && SUBREG_REG (SET_DEST (setter
)) == dest
7514 && subreg_lowpart_p (SET_DEST (setter
)))
7515 record_value_for_reg
7516 (dest
, record_dead_insn
,
7517 gen_lowpart_for_combine (GET_MODE (SET_DEST (setter
)),
7520 record_value_for_reg (dest
, record_dead_insn
, NULL_RTX
);
7522 else if (GET_CODE (dest
) == MEM
7523 /* Ignore pushes, they clobber nothing. */
7524 && ! push_operand (dest
, GET_MODE (dest
)))
7525 mem_last_set
= INSN_CUID (record_dead_insn
);
7528 /* Update the records of when each REG was most recently set or killed
7529 for the things done by INSN. This is the last thing done in processing
7530 INSN in the combiner loop.
7532 We update reg_last_set, reg_last_set_value, reg_last_death, and also the
7533 similar information mem_last_set (which insn most recently modified memory)
7534 and last_call_cuid (which insn was the most recent subroutine call). */
7537 record_dead_and_set_regs (insn
)
7541 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
7543 if (REG_NOTE_KIND (link
) == REG_DEAD
)
7544 reg_last_death
[REGNO (XEXP (link
, 0))] = insn
;
7545 else if (REG_NOTE_KIND (link
) == REG_INC
)
7546 record_value_for_reg (XEXP (link
, 0), insn
, NULL_RTX
);
7549 if (GET_CODE (insn
) == CALL_INSN
)
7550 last_call_cuid
= mem_last_set
= INSN_CUID (insn
);
7552 record_dead_insn
= insn
;
7553 note_stores (PATTERN (insn
), record_dead_and_set_regs_1
);
7556 /* Utility routine for the following function. Verify that all the registers
7557 mentioned in *LOC are valid when *LOC was part of a value set when
7558 label_tick == TICK. Return 0 if some are not.
7560 If REPLACE is non-zero, replace the invalid reference with
7561 (clobber (const_int 0)) and return 1. This replacement is useful because
7562 we often can get useful information about the form of a value (e.g., if
7563 it was produced by a shift that always produces -1 or 0) even though
7564 we don't know exactly what registers it was produced from. */
7567 get_last_value_validate (loc
, tick
, replace
)
7573 char *fmt
= GET_RTX_FORMAT (GET_CODE (x
));
7574 int len
= GET_RTX_LENGTH (GET_CODE (x
));
7577 if (GET_CODE (x
) == REG
)
7579 int regno
= REGNO (x
);
7580 int endregno
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
7581 ? HARD_REGNO_NREGS (regno
, GET_MODE (x
)) : 1);
7584 for (j
= regno
; j
< endregno
; j
++)
7585 if (reg_last_set_invalid
[j
]
7586 /* If this is a pseudo-register that was only set once, it is
7588 || (! (regno
>= FIRST_PSEUDO_REGISTER
&& reg_n_sets
[regno
] == 1)
7589 && reg_last_set_label
[j
] > tick
))
7592 *loc
= gen_rtx (CLOBBER
, GET_MODE (x
), const0_rtx
);
7599 for (i
= 0; i
< len
; i
++)
7601 && get_last_value_validate (&XEXP (x
, i
), tick
, replace
) == 0)
7602 /* Don't bother with these. They shouldn't occur anyway. */
7606 /* If we haven't found a reason for it to be invalid, it is valid. */
7610 /* Get the last value assigned to X, if known. Some registers
7611 in the value may be replaced with (clobber (const_int 0)) if their value
7612 is known longer known reliably. */
7621 /* If this is a non-paradoxical SUBREG, get the value of its operand and
7622 then convert it to the desired mode. If this is a paradoxical SUBREG,
7623 we cannot predict what values the "extra" bits might have. */
7624 if (GET_CODE (x
) == SUBREG
7625 && subreg_lowpart_p (x
)
7626 && (GET_MODE_SIZE (GET_MODE (x
))
7627 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
7628 && (value
= get_last_value (SUBREG_REG (x
))) != 0)
7629 return gen_lowpart_for_combine (GET_MODE (x
), value
);
7631 if (GET_CODE (x
) != REG
)
7635 value
= reg_last_set_value
[regno
];
7637 /* If we don't have a value, it isn't for this basic block, or if it was
7638 set in a later insn that the ones we are processing, return 0. */
7641 || (reg_n_sets
[regno
] != 1
7642 && (reg_last_set_label
[regno
] != label_tick
7643 || INSN_CUID (reg_last_set
[regno
]) >= subst_low_cuid
)))
7646 /* If the value has all its register valid, return it. */
7647 if (get_last_value_validate (&value
, reg_last_set_label
[regno
], 0))
7650 /* Otherwise, make a copy and replace any invalid register with
7651 (clobber (const_int 0)). If that fails for some reason, return 0. */
7653 value
= copy_rtx (value
);
7654 if (get_last_value_validate (&value
, reg_last_set_label
[regno
], 1))
7660 /* Return nonzero if expression X refers to a REG or to memory
7661 that is set in an instruction more recent than FROM_CUID. */
7664 use_crosses_set_p (x
, from_cuid
)
7670 register enum rtx_code code
= GET_CODE (x
);
7674 register int regno
= REGNO (x
);
7675 #ifdef PUSH_ROUNDING
7676 /* Don't allow uses of the stack pointer to be moved,
7677 because we don't know whether the move crosses a push insn. */
7678 if (regno
== STACK_POINTER_REGNUM
)
7681 return (reg_last_set
[regno
]
7682 && INSN_CUID (reg_last_set
[regno
]) > from_cuid
);
7685 if (code
== MEM
&& mem_last_set
> from_cuid
)
7688 fmt
= GET_RTX_FORMAT (code
);
7690 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
7695 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
7696 if (use_crosses_set_p (XVECEXP (x
, i
, j
), from_cuid
))
7699 else if (fmt
[i
] == 'e'
7700 && use_crosses_set_p (XEXP (x
, i
), from_cuid
))
7706 /* Define three variables used for communication between the following
7709 static int reg_dead_regno
, reg_dead_endregno
;
7710 static int reg_dead_flag
;
7712 /* Function called via note_stores from reg_dead_at_p.
7714 If DEST is within [reg_dead_rengno, reg_dead_endregno), set
7715 reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET. */
7718 reg_dead_at_p_1 (dest
, x
)
7722 int regno
, endregno
;
7724 if (GET_CODE (dest
) != REG
)
7727 regno
= REGNO (dest
);
7728 endregno
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
7729 ? HARD_REGNO_NREGS (regno
, GET_MODE (dest
)) : 1);
7731 if (reg_dead_endregno
> regno
&& reg_dead_regno
< endregno
)
7732 reg_dead_flag
= (GET_CODE (x
) == CLOBBER
) ? 1 : -1;
7735 /* Return non-zero if REG is known to be dead at INSN.
7737 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
7738 referencing REG, it is dead. If we hit a SET referencing REG, it is
7739 live. Otherwise, see if it is live or dead at the start of the basic
7743 reg_dead_at_p (reg
, insn
)
7749 /* Set variables for reg_dead_at_p_1. */
7750 reg_dead_regno
= REGNO (reg
);
7751 reg_dead_endregno
= reg_dead_regno
+ (reg_dead_regno
< FIRST_PSEUDO_REGISTER
7752 ? HARD_REGNO_NREGS (reg_dead_regno
,
7758 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
7759 beginning of function. */
7760 for (; insn
&& GET_CODE (insn
) != CODE_LABEL
;
7761 insn
= prev_nonnote_insn (insn
))
7763 note_stores (PATTERN (insn
), reg_dead_at_p_1
);
7765 return reg_dead_flag
== 1 ? 1 : 0;
7767 if (find_regno_note (insn
, REG_DEAD
, reg_dead_regno
))
7771 /* Get the basic block number that we were in. */
7776 for (block
= 0; block
< n_basic_blocks
; block
++)
7777 if (insn
== basic_block_head
[block
])
7780 if (block
== n_basic_blocks
)
7784 for (i
= reg_dead_regno
; i
< reg_dead_endregno
; i
++)
7785 if (basic_block_live_at_start
[block
][i
/ REGSET_ELT_BITS
]
7786 & ((REGSET_ELT_TYPE
) 1 << (i
% REGSET_ELT_BITS
)))
7792 /* Remove register number REGNO from the dead registers list of INSN.
7794 Return the note used to record the death, if there was one. */
7797 remove_death (regno
, insn
)
7801 register rtx note
= find_regno_note (insn
, REG_DEAD
, regno
);
7804 remove_note (insn
, note
);
7809 /* For each register (hardware or pseudo) used within expression X, if its
7810 death is in an instruction with cuid between FROM_CUID (inclusive) and
7811 TO_INSN (exclusive), put a REG_DEAD note for that register in the
7812 list headed by PNOTES.
7814 This is done when X is being merged by combination into TO_INSN. These
7815 notes will then be distributed as needed. */
7818 move_deaths (x
, from_cuid
, to_insn
, pnotes
)
7825 register int len
, i
;
7826 register enum rtx_code code
= GET_CODE (x
);
7830 register int regno
= REGNO (x
);
7831 register rtx where_dead
= reg_last_death
[regno
];
7833 if (where_dead
&& INSN_CUID (where_dead
) >= from_cuid
7834 && INSN_CUID (where_dead
) < INSN_CUID (to_insn
))
7836 rtx note
= remove_death (regno
, reg_last_death
[regno
]);
7838 /* It is possible for the call above to return 0. This can occur
7839 when reg_last_death points to I2 or I1 that we combined with.
7840 In that case make a new note. */
7844 XEXP (note
, 1) = *pnotes
;
7848 *pnotes
= gen_rtx (EXPR_LIST
, REG_DEAD
, x
, *pnotes
);
7854 else if (GET_CODE (x
) == SET
)
7856 rtx dest
= SET_DEST (x
);
7858 move_deaths (SET_SRC (x
), from_cuid
, to_insn
, pnotes
);
7860 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
7861 that accesses one word of a multi-word item, some
7862 piece of everything register in the expression is used by
7863 this insn, so remove any old death. */
7865 if (GET_CODE (dest
) == ZERO_EXTRACT
7866 || GET_CODE (dest
) == STRICT_LOW_PART
7867 || (GET_CODE (dest
) == SUBREG
7868 && (((GET_MODE_SIZE (GET_MODE (dest
))
7869 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)
7870 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
)))
7871 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
))))
7873 move_deaths (dest
, from_cuid
, to_insn
, pnotes
);
7877 /* If this is some other SUBREG, we know it replaces the entire
7878 value, so use that as the destination. */
7879 if (GET_CODE (dest
) == SUBREG
)
7880 dest
= SUBREG_REG (dest
);
7882 /* If this is a MEM, adjust deaths of anything used in the address.
7883 For a REG (the only other possibility), the entire value is
7884 being replaced so the old value is not used in this insn. */
7886 if (GET_CODE (dest
) == MEM
)
7887 move_deaths (XEXP (dest
, 0), from_cuid
, to_insn
, pnotes
);
7891 else if (GET_CODE (x
) == CLOBBER
)
7894 len
= GET_RTX_LENGTH (code
);
7895 fmt
= GET_RTX_FORMAT (code
);
7897 for (i
= 0; i
< len
; i
++)
7902 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
7903 move_deaths (XVECEXP (x
, i
, j
), from_cuid
, to_insn
, pnotes
);
7905 else if (fmt
[i
] == 'e')
7906 move_deaths (XEXP (x
, i
), from_cuid
, to_insn
, pnotes
);
7910 /* Return 1 if X is the target of a bit-field assignment in BODY, the
7911 pattern of an insn. X must be a REG. */
7914 reg_bitfield_target_p (x
, body
)
7920 if (GET_CODE (body
) == SET
)
7922 rtx dest
= SET_DEST (body
);
7924 int regno
, tregno
, endregno
, endtregno
;
7926 if (GET_CODE (dest
) == ZERO_EXTRACT
)
7927 target
= XEXP (dest
, 0);
7928 else if (GET_CODE (dest
) == STRICT_LOW_PART
)
7929 target
= SUBREG_REG (XEXP (dest
, 0));
7933 if (GET_CODE (target
) == SUBREG
)
7934 target
= SUBREG_REG (target
);
7936 if (GET_CODE (target
) != REG
)
7939 tregno
= REGNO (target
), regno
= REGNO (x
);
7940 if (tregno
>= FIRST_PSEUDO_REGISTER
|| regno
>= FIRST_PSEUDO_REGISTER
)
7943 endtregno
= tregno
+ HARD_REGNO_NREGS (tregno
, GET_MODE (target
));
7944 endregno
= regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (x
));
7946 return endregno
> tregno
&& regno
< endtregno
;
7949 else if (GET_CODE (body
) == PARALLEL
)
7950 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
7951 if (reg_bitfield_target_p (x
, XVECEXP (body
, 0, i
)))
7957 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
7958 as appropriate. I3 and I2 are the insns resulting from the combination
7959 insns including FROM (I2 may be zero).
7961 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
7962 not need REG_DEAD notes because they are being substituted for. This
7963 saves searching in the most common cases.
7965 Each note in the list is either ignored or placed on some insns, depending
7966 on the type of note. */
7969 distribute_notes (notes
, from_insn
, i3
, i2
, elim_i2
, elim_i1
)
7973 rtx elim_i2
, elim_i1
;
7975 rtx note
, next_note
;
7978 for (note
= notes
; note
; note
= next_note
)
7980 rtx place
= 0, place2
= 0;
7982 /* If this NOTE references a pseudo register, ensure it references
7983 the latest copy of that register. */
7984 if (XEXP (note
, 0) && GET_CODE (XEXP (note
, 0)) == REG
7985 && REGNO (XEXP (note
, 0)) >= FIRST_PSEUDO_REGISTER
)
7986 XEXP (note
, 0) = regno_reg_rtx
[REGNO (XEXP (note
, 0))];
7988 next_note
= XEXP (note
, 1);
7989 switch (REG_NOTE_KIND (note
))
7992 /* If this register is set or clobbered in I3, put the note there
7993 unless there is one already. */
7994 if (reg_set_p (XEXP (note
, 0), PATTERN (i3
)))
7996 if (! (GET_CODE (XEXP (note
, 0)) == REG
7997 ? find_regno_note (i3
, REG_UNUSED
, REGNO (XEXP (note
, 0)))
7998 : find_reg_note (i3
, REG_UNUSED
, XEXP (note
, 0))))
8001 /* Otherwise, if this register is used by I3, then this register
8002 now dies here, so we must put a REG_DEAD note here unless there
8004 else if (reg_referenced_p (XEXP (note
, 0), PATTERN (i3
))
8005 && ! (GET_CODE (XEXP (note
, 0)) == REG
8006 ? find_regno_note (i3
, REG_DEAD
, REGNO (XEXP (note
, 0)))
8007 : find_reg_note (i3
, REG_DEAD
, XEXP (note
, 0))))
8009 PUT_REG_NOTE_KIND (note
, REG_DEAD
);
8017 /* These notes say something about results of an insn. We can
8018 only support them if they used to be on I3 in which case they
8019 remain on I3. Otherwise they are ignored. */
8020 if (from_insn
== i3
)
8025 case REG_NO_CONFLICT
:
8027 /* These notes say something about how a register is used. They must
8028 be present on any use of the register in I2 or I3. */
8029 if (reg_mentioned_p (XEXP (note
, 0), PATTERN (i3
)))
8032 if (i2
&& reg_mentioned_p (XEXP (note
, 0), PATTERN (i2
)))
8042 /* It is too much trouble to try to see if this note is still
8043 correct in all situations. It is better to simply delete it. */
8047 /* If the insn previously containing this note still exists,
8048 put it back where it was. Otherwise move it to the previous
8049 insn. Adjust the corresponding REG_LIBCALL note. */
8050 if (GET_CODE (from_insn
) != NOTE
)
8054 tem
= find_reg_note (XEXP (note
, 0), REG_LIBCALL
, NULL_RTX
);
8055 place
= prev_real_insn (from_insn
);
8057 XEXP (tem
, 0) = place
;
8062 /* This is handled similarly to REG_RETVAL. */
8063 if (GET_CODE (from_insn
) != NOTE
)
8067 tem
= find_reg_note (XEXP (note
, 0), REG_RETVAL
, NULL_RTX
);
8068 place
= next_real_insn (from_insn
);
8070 XEXP (tem
, 0) = place
;
8075 /* If the register is used as an input in I3, it dies there.
8076 Similarly for I2, if it is non-zero and adjacent to I3.
8078 If the register is not used as an input in either I3 or I2
8079 and it is not one of the registers we were supposed to eliminate,
8080 there are two possibilities. We might have a non-adjacent I2
8081 or we might have somehow eliminated an additional register
8082 from a computation. For example, we might have had A & B where
8083 we discover that B will always be zero. In this case we will
8084 eliminate the reference to A.
8086 In both cases, we must search to see if we can find a previous
8087 use of A and put the death note there. */
8089 if (reg_referenced_p (XEXP (note
, 0), PATTERN (i3
)))
8091 else if (i2
!= 0 && next_nonnote_insn (i2
) == i3
8092 && reg_referenced_p (XEXP (note
, 0), PATTERN (i2
)))
8095 if (XEXP (note
, 0) == elim_i2
|| XEXP (note
, 0) == elim_i1
)
8098 /* If the register is used in both I2 and I3 and it dies in I3,
8099 we might have added another reference to it. If reg_n_refs
8100 was 2, bump it to 3. This has to be correct since the
8101 register must have been set somewhere. The reason this is
8102 done is because local-alloc.c treats 2 references as a
8105 if (place
== i3
&& i2
!= 0 && GET_CODE (XEXP (note
, 0)) == REG
8106 && reg_n_refs
[REGNO (XEXP (note
, 0))]== 2
8107 && reg_referenced_p (XEXP (note
, 0), PATTERN (i2
)))
8108 reg_n_refs
[REGNO (XEXP (note
, 0))] = 3;
8111 for (tem
= prev_nonnote_insn (i3
);
8112 tem
&& (GET_CODE (tem
) == INSN
8113 || GET_CODE (tem
) == CALL_INSN
);
8114 tem
= prev_nonnote_insn (tem
))
8116 /* If the register is being set at TEM, see if that is all
8117 TEM is doing. If so, delete TEM. Otherwise, make this
8118 into a REG_UNUSED note instead. */
8119 if (reg_set_p (XEXP (note
, 0), PATTERN (tem
)))
8121 rtx set
= single_set (tem
);
8123 /* Verify that it was the set, and not a clobber that
8124 modified the register. */
8126 if (set
!= 0 && ! side_effects_p (SET_SRC (set
))
8127 && rtx_equal_p (XEXP (note
, 0), SET_DEST (set
)))
8129 /* Move the notes and links of TEM elsewhere.
8130 This might delete other dead insns recursively.
8131 First set the pattern to something that won't use
8134 PATTERN (tem
) = pc_rtx
;
8136 distribute_notes (REG_NOTES (tem
), tem
, tem
,
8137 NULL_RTX
, NULL_RTX
, NULL_RTX
);
8138 distribute_links (LOG_LINKS (tem
));
8140 PUT_CODE (tem
, NOTE
);
8141 NOTE_LINE_NUMBER (tem
) = NOTE_INSN_DELETED
;
8142 NOTE_SOURCE_FILE (tem
) = 0;
8146 PUT_REG_NOTE_KIND (note
, REG_UNUSED
);
8148 /* If there isn't already a REG_UNUSED note, put one
8150 if (! find_regno_note (tem
, REG_UNUSED
,
8151 REGNO (XEXP (note
, 0))))
8156 else if (reg_referenced_p (XEXP (note
, 0), PATTERN (tem
)))
8163 /* If the register is set or already dead at PLACE, we needn't do
8164 anything with this note if it is still a REG_DEAD note.
8166 Note that we cannot use just `dead_or_set_p' here since we can
8167 convert an assignment to a register into a bit-field assignment.
8168 Therefore, we must also omit the note if the register is the
8169 target of a bitfield assignment. */
8171 if (place
&& REG_NOTE_KIND (note
) == REG_DEAD
)
8173 int regno
= REGNO (XEXP (note
, 0));
8175 if (dead_or_set_p (place
, XEXP (note
, 0))
8176 || reg_bitfield_target_p (XEXP (note
, 0), PATTERN (place
)))
8178 /* Unless the register previously died in PLACE, clear
8179 reg_last_death. [I no longer understand why this is
8181 if (reg_last_death
[regno
] != place
)
8182 reg_last_death
[regno
] = 0;
8186 reg_last_death
[regno
] = place
;
8188 /* If this is a death note for a hard reg that is occupying
8189 multiple registers, ensure that we are still using all
8190 parts of the object. If we find a piece of the object
8191 that is unused, we must add a USE for that piece before
8192 PLACE and put the appropriate REG_DEAD note on it.
8194 An alternative would be to put a REG_UNUSED for the pieces
8195 on the insn that set the register, but that can't be done if
8196 it is not in the same block. It is simpler, though less
8197 efficient, to add the USE insns. */
8199 if (place
&& regno
< FIRST_PSEUDO_REGISTER
8200 && HARD_REGNO_NREGS (regno
, GET_MODE (XEXP (note
, 0))) > 1)
8203 = regno
+ HARD_REGNO_NREGS (regno
,
8204 GET_MODE (XEXP (note
, 0)));
8208 for (i
= regno
; i
< endregno
; i
++)
8209 if (! refers_to_regno_p (i
, i
+ 1, PATTERN (place
), 0))
8211 rtx piece
= gen_rtx (REG
, word_mode
, i
);
8214 /* See if we already placed a USE note for this
8215 register in front of PLACE. */
8217 GET_CODE (PREV_INSN (p
)) == INSN
8218 && GET_CODE (PATTERN (PREV_INSN (p
))) == USE
;
8220 if (rtx_equal_p (piece
,
8221 XEXP (PATTERN (PREV_INSN (p
)), 0)))
8230 = emit_insn_before (gen_rtx (USE
, VOIDmode
,
8233 REG_NOTES (use_insn
)
8234 = gen_rtx (EXPR_LIST
, REG_DEAD
, piece
,
8235 REG_NOTES (use_insn
));
8243 /* Put only REG_DEAD notes for pieces that are
8244 still used and that are not already dead or set. */
8246 for (i
= regno
; i
< endregno
; i
++)
8248 rtx piece
= gen_rtx (REG
, word_mode
, i
);
8250 if (reg_referenced_p (piece
, PATTERN (place
))
8251 && ! dead_or_set_p (place
, piece
)
8252 && ! reg_bitfield_target_p (piece
,
8254 REG_NOTES (place
) = gen_rtx (EXPR_LIST
, REG_DEAD
,
8266 /* Any other notes should not be present at this point in the
8273 XEXP (note
, 1) = REG_NOTES (place
);
8274 REG_NOTES (place
) = note
;
8278 REG_NOTES (place2
) = gen_rtx (GET_CODE (note
), REG_NOTE_KIND (note
),
8279 XEXP (note
, 0), REG_NOTES (place2
));
8283 /* Similarly to above, distribute the LOG_LINKS that used to be present on
8284 I3, I2, and I1 to new locations. This is also called in one case to
8285 add a link pointing at I3 when I3's destination is changed. */
8288 distribute_links (links
)
8291 rtx link
, next_link
;
8293 for (link
= links
; link
; link
= next_link
)
8299 next_link
= XEXP (link
, 1);
8301 /* If the insn that this link points to is a NOTE or isn't a single
8302 set, ignore it. In the latter case, it isn't clear what we
8303 can do other than ignore the link, since we can't tell which
8304 register it was for. Such links wouldn't be used by combine
8307 It is not possible for the destination of the target of the link to
8308 have been changed by combine. The only potential of this is if we
8309 replace I3, I2, and I1 by I3 and I2. But in that case the
8310 destination of I2 also remains unchanged. */
8312 if (GET_CODE (XEXP (link
, 0)) == NOTE
8313 || (set
= single_set (XEXP (link
, 0))) == 0)
8316 reg
= SET_DEST (set
);
8317 while (GET_CODE (reg
) == SUBREG
|| GET_CODE (reg
) == ZERO_EXTRACT
8318 || GET_CODE (reg
) == SIGN_EXTRACT
8319 || GET_CODE (reg
) == STRICT_LOW_PART
)
8320 reg
= XEXP (reg
, 0);
8322 /* A LOG_LINK is defined as being placed on the first insn that uses
8323 a register and points to the insn that sets the register. Start
8324 searching at the next insn after the target of the link and stop
8325 when we reach a set of the register or the end of the basic block.
8327 Note that this correctly handles the link that used to point from
8328 I3 to I2. Also note that not much searching is typically done here
8329 since most links don't point very far away. */
8331 for (insn
= NEXT_INSN (XEXP (link
, 0));
8332 (insn
&& GET_CODE (insn
) != CODE_LABEL
8333 && GET_CODE (PREV_INSN (insn
)) != JUMP_INSN
);
8334 insn
= NEXT_INSN (insn
))
8335 if (GET_RTX_CLASS (GET_CODE (insn
)) == 'i'
8336 && reg_overlap_mentioned_p (reg
, PATTERN (insn
)))
8338 if (reg_referenced_p (reg
, PATTERN (insn
)))
8343 /* If we found a place to put the link, place it there unless there
8344 is already a link to the same insn as LINK at that point. */
8350 for (link2
= LOG_LINKS (place
); link2
; link2
= XEXP (link2
, 1))
8351 if (XEXP (link2
, 0) == XEXP (link
, 0))
8356 XEXP (link
, 1) = LOG_LINKS (place
);
8357 LOG_LINKS (place
) = link
;
8364 dump_combine_stats (file
)
8369 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
8370 combine_attempts
, combine_merges
, combine_extras
, combine_successes
);
8374 dump_combine_total_stats (file
)
8379 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
8380 total_attempts
, total_merges
, total_extras
, total_successes
);