]> gcc.gnu.org Git - gcc.git/blob - gcc/combine.c
*** empty log message ***
[gcc.git] / gcc / combine.c
1 /* Optimize by combining instructions for GNU compiler.
2 Copyright (C) 1987, 1988, 1992 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 /* This module is essentially the "combiner" phase of the U. of Arizona
22 Portable Optimizer, but redone to work on our list-structured
23 representation for RTL instead of their string representation.
24
25 The LOG_LINKS of each insn identify the most recent assignment
26 to each REG used in the insn. It is a list of previous insns,
27 each of which contains a SET for a REG that is used in this insn
28 and not used or set in between. LOG_LINKs never cross basic blocks.
29 They were set up by the preceding pass (lifetime analysis).
30
31 We try to combine each pair of insns joined by a logical link.
32 We also try to combine triples of insns A, B and C when
33 C has a link back to B and B has a link back to A.
34
35 LOG_LINKS does not have links for use of the CC0. They don't
36 need to, because the insn that sets the CC0 is always immediately
37 before the insn that tests it. So we always regard a branch
38 insn as having a logical link to the preceding insn. The same is true
39 for an insn explicitly using CC0.
40
41 We check (with use_crosses_set_p) to avoid combining in such a way
42 as to move a computation to a place where its value would be different.
43
44 Combination is done by mathematically substituting the previous
45 insn(s) values for the regs they set into the expressions in
46 the later insns that refer to these regs. If the result is a valid insn
47 for our target machine, according to the machine description,
48 we install it, delete the earlier insns, and update the data flow
49 information (LOG_LINKS and REG_NOTES) for what we did.
50
51 There are a few exceptions where the dataflow information created by
52 flow.c aren't completely updated:
53
54 - reg_live_length is not updated
55 - reg_n_refs is not adjusted in the rare case when a register is
56 no longer required in a computation
57 - there are extremely rare cases (see distribute_regnotes) when a
58 REG_DEAD note is lost
59 - a LOG_LINKS entry that refers to an insn with multiple SETs may be
60 removed because there is no way to know which register it was
61 linking
62
63 To simplify substitution, we combine only when the earlier insn(s)
64 consist of only a single assignment. To simplify updating afterward,
65 we never combine when a subroutine call appears in the middle.
66
67 Since we do not represent assignments to CC0 explicitly except when that
68 is all an insn does, there is no LOG_LINKS entry in an insn that uses
69 the condition code for the insn that set the condition code.
70 Fortunately, these two insns must be consecutive.
71 Therefore, every JUMP_INSN is taken to have an implicit logical link
72 to the preceding insn. This is not quite right, since non-jumps can
73 also use the condition code; but in practice such insns would not
74 combine anyway. */
75
76 #include <stdio.h>
77
78 #include "config.h"
79 #include "gvarargs.h"
80 #include "rtl.h"
81 #include "flags.h"
82 #include "regs.h"
83 #include "expr.h"
84 #include "basic-block.h"
85 #include "insn-config.h"
86 #include "insn-flags.h"
87 #include "insn-codes.h"
88 #include "insn-attr.h"
89 #include "recog.h"
90 #include "real.h"
91
92 /* It is not safe to use ordinary gen_lowpart in combine.
93 Use gen_lowpart_for_combine instead. See comments there. */
94 #define gen_lowpart dont_use_gen_lowpart_you_dummy
95
96 /* Number of attempts to combine instructions in this function. */
97
98 static int combine_attempts;
99
100 /* Number of attempts that got as far as substitution in this function. */
101
102 static int combine_merges;
103
104 /* Number of instructions combined with added SETs in this function. */
105
106 static int combine_extras;
107
108 /* Number of instructions combined in this function. */
109
110 static int combine_successes;
111
112 /* Totals over entire compilation. */
113
114 static int total_attempts, total_merges, total_extras, total_successes;
115 \f
116 /* Vector mapping INSN_UIDs to cuids.
117 The cuids are like uids but increase monotonically always.
118 Combine always uses cuids so that it can compare them.
119 But actually renumbering the uids, which we used to do,
120 proves to be a bad idea because it makes it hard to compare
121 the dumps produced by earlier passes with those from later passes. */
122
123 static int *uid_cuid;
124
125 /* Get the cuid of an insn. */
126
127 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
128
129 /* Maximum register number, which is the size of the tables below. */
130
131 static int combine_max_regno;
132
133 /* Record last point of death of (hard or pseudo) register n. */
134
135 static rtx *reg_last_death;
136
137 /* Record last point of modification of (hard or pseudo) register n. */
138
139 static rtx *reg_last_set;
140
141 /* Record the cuid of the last insn that invalidated memory
142 (anything that writes memory, and subroutine calls, but not pushes). */
143
144 static int mem_last_set;
145
146 /* Record the cuid of the last CALL_INSN
147 so we can tell whether a potential combination crosses any calls. */
148
149 static int last_call_cuid;
150
151 /* When `subst' is called, this is the insn that is being modified
152 (by combining in a previous insn). The PATTERN of this insn
153 is still the old pattern partially modified and it should not be
154 looked at, but this may be used to examine the successors of the insn
155 to judge whether a simplification is valid. */
156
157 static rtx subst_insn;
158
159 /* This is the lowest CUID that `subst' is currently dealing with.
160 get_last_value will not return a value if the register was set at or
161 after this CUID. If not for this mechanism, we could get confused if
162 I2 or I1 in try_combine were an insn that used the old value of a register
163 to obtain a new value. In that case, we might erroneously get the
164 new value of the register when we wanted the old one. */
165
166 static int subst_low_cuid;
167
168 /* This is the value of undobuf.num_undo when we started processing this
169 substitution. This will prevent gen_rtx_combine from re-used a piece
170 from the previous expression. Doing so can produce circular rtl
171 structures. */
172
173 static int previous_num_undos;
174 \f
175 /* The next group of arrays allows the recording of the last value assigned
176 to (hard or pseudo) register n. We use this information to see if a
177 operation being processed is redundant given a prior operation performed
178 on the register. For example, an `and' with a constant is redundant if
179 all the zero bits are already known to be turned off.
180
181 We use an approach similar to that used by cse, but change it in the
182 following ways:
183
184 (1) We do not want to reinitialize at each label.
185 (2) It is useful, but not critical, to know the actual value assigned
186 to a register. Often just its form is helpful.
187
188 Therefore, we maintain the following arrays:
189
190 reg_last_set_value the last value assigned
191 reg_last_set_label records the value of label_tick when the
192 register was assigned
193 reg_last_set_table_tick records the value of label_tick when a
194 value using the register is assigned
195 reg_last_set_invalid set to non-zero when it is not valid
196 to use the value of this register in some
197 register's value
198
199 To understand the usage of these tables, it is important to understand
200 the distinction between the value in reg_last_set_value being valid
201 and the register being validly contained in some other expression in the
202 table.
203
204 Entry I in reg_last_set_value is valid if it is non-zero, and either
205 reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.
206
207 Register I may validly appear in any expression returned for the value
208 of another register if reg_n_sets[i] is 1. It may also appear in the
209 value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
210 reg_last_set_invalid[j] is zero.
211
212 If an expression is found in the table containing a register which may
213 not validly appear in an expression, the register is replaced by
214 something that won't match, (clobber (const_int 0)).
215
216 reg_last_set_invalid[i] is set non-zero when register I is being assigned
217 to and reg_last_set_table_tick[i] == label_tick. */
218
219 /* Record last value assigned to (hard or pseudo) register n. */
220
221 static rtx *reg_last_set_value;
222
223 /* Record the value of label_tick when the value for register n is placed in
224 reg_last_set_value[n]. */
225
226 static short *reg_last_set_label;
227
228 /* Record the value of label_tick when an expression involving register n
229 is placed in reg_last_set_value. */
230
231 static short *reg_last_set_table_tick;
232
233 /* Set non-zero if references to register n in expressions should not be
234 used. */
235
236 static char *reg_last_set_invalid;
237
238 /* Incremented for each label. */
239
240 static short label_tick;
241
242 /* Some registers that are set more than once and used in more than one
243 basic block are nevertheless always set in similar ways. For example,
244 a QImode register may be loaded from memory in two places on a machine
245 where byte loads zero extend.
246
247 We record in the following array what we know about the significant
248 bits of a register, specifically which bits are known to be zero.
249
250 If an entry is zero, it means that we don't know anything special. */
251
252 static HOST_WIDE_INT *reg_significant;
253
254 /* Mode used to compute significance in reg_significant. It is the largest
255 integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
256
257 static enum machine_mode significant_mode;
258
259 /* Nonzero when reg_significant can be safely used. It is zero while
260 computing reg_significant. This prevents propagating values based
261 on previously set values, which can be incorrect if a variable
262 is modified in a loop. */
263
264 static int significant_valid;
265 \f
266 /* Record one modification to rtl structure
267 to be undone by storing old_contents into *where.
268 is_int is 1 if the contents are an int. */
269
270 struct undo
271 {
272 rtx *where;
273 rtx old_contents;
274 int is_int;
275 };
276
277 struct undo_int
278 {
279 int *where;
280 int old_contents;
281 int is_int;
282 };
283
284 /* Record a bunch of changes to be undone, up to MAX_UNDO of them.
285 num_undo says how many are currently recorded.
286
287 storage is nonzero if we must undo the allocation of new storage.
288 The value of storage is what to pass to obfree.
289
290 other_insn is nonzero if we have modified some other insn in the process
291 of working on subst_insn. It must be verified too. */
292
293 #define MAX_UNDO 50
294
295 struct undobuf
296 {
297 int num_undo;
298 char *storage;
299 struct undo undo[MAX_UNDO];
300 rtx other_insn;
301 };
302
303 static struct undobuf undobuf;
304
305 /* Substitute NEWVAL, an rtx expression, into INTO, a place in some
306 insn. The substitution can be undone by undo_all. If INTO is already
307 set to NEWVAL, do not record this change. Because computing NEWVAL might
308 also call SUBST, we have to compute it before we put anything into
309 the undo table. */
310
311 #define SUBST(INTO, NEWVAL) \
312 do { rtx _new = (NEWVAL); \
313 if (undobuf.num_undo < MAX_UNDO) \
314 { \
315 undobuf.undo[undobuf.num_undo].where = &INTO; \
316 undobuf.undo[undobuf.num_undo].old_contents = INTO; \
317 undobuf.undo[undobuf.num_undo].is_int = 0; \
318 INTO = _new; \
319 if (undobuf.undo[undobuf.num_undo].old_contents != INTO) \
320 undobuf.num_undo++; \
321 } \
322 } while (0)
323
324 /* Similar to SUBST, but NEWVAL is an int. INTO will normally be an XINT
325 expression.
326 Note that substitution for the value of a CONST_INT is not safe. */
327
328 #define SUBST_INT(INTO, NEWVAL) \
329 do { if (undobuf.num_undo < MAX_UNDO) \
330 { \
331 struct undo_int *u \
332 = (struct undo_int *)&undobuf.undo[undobuf.num_undo]; \
333 u->where = (int *) &INTO; \
334 u->old_contents = INTO; \
335 u->is_int = 1; \
336 INTO = NEWVAL; \
337 if (u->old_contents != INTO) \
338 undobuf.num_undo++; \
339 } \
340 } while (0)
341
342 /* Number of times the pseudo being substituted for
343 was found and replaced. */
344
345 static int n_occurrences;
346
347 static void set_significant ();
348 static void move_deaths ();
349 rtx remove_death ();
350 static void record_value_for_reg ();
351 static void record_dead_and_set_regs ();
352 static int use_crosses_set_p ();
353 static rtx try_combine ();
354 static rtx *find_split_point ();
355 static rtx subst ();
356 static void undo_all ();
357 static int reg_dead_at_p ();
358 static rtx expand_compound_operation ();
359 static rtx expand_field_assignment ();
360 static rtx make_extraction ();
361 static int get_pos_from_mask ();
362 static rtx force_to_mode ();
363 static rtx make_field_assignment ();
364 static rtx make_compound_operation ();
365 static rtx apply_distributive_law ();
366 static rtx simplify_and_const_int ();
367 static unsigned HOST_WIDE_INT significant_bits ();
368 static int merge_outer_ops ();
369 static rtx simplify_shift_const ();
370 static int recog_for_combine ();
371 static rtx gen_lowpart_for_combine ();
372 static rtx gen_rtx_combine ();
373 static rtx gen_binary ();
374 static rtx gen_unary ();
375 static enum rtx_code simplify_comparison ();
376 static int reversible_comparison_p ();
377 static int get_last_value_validate ();
378 static rtx get_last_value ();
379 static void distribute_notes ();
380 static void distribute_links ();
381 \f
382 /* Main entry point for combiner. F is the first insn of the function.
383 NREGS is the first unused pseudo-reg number. */
384
385 void
386 combine_instructions (f, nregs)
387 rtx f;
388 int nregs;
389 {
390 register rtx insn, next, prev;
391 register int i;
392 register rtx links, nextlinks;
393
394 combine_attempts = 0;
395 combine_merges = 0;
396 combine_extras = 0;
397 combine_successes = 0;
398
399 combine_max_regno = nregs;
400
401 reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
402 reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
403 reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
404 reg_last_set_table_tick = (short *) alloca (nregs * sizeof (short));
405 reg_last_set_label = (short *) alloca (nregs * sizeof (short));
406 reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
407 reg_significant = (HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
408
409 bzero (reg_last_death, nregs * sizeof (rtx));
410 bzero (reg_last_set, nregs * sizeof (rtx));
411 bzero (reg_last_set_value, nregs * sizeof (rtx));
412 bzero (reg_last_set_table_tick, nregs * sizeof (short));
413 bzero (reg_last_set_invalid, nregs * sizeof (char));
414 bzero (reg_significant, nregs * sizeof (HOST_WIDE_INT));
415
416 init_recog_no_volatile ();
417
418 /* Compute maximum uid value so uid_cuid can be allocated. */
419
420 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
421 if (INSN_UID (insn) > i)
422 i = INSN_UID (insn);
423
424 uid_cuid = (int *) alloca ((i + 1) * sizeof (int));
425
426 significant_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
427
428 /* Don't use reg_significant when computing it. This can cause problems
429 when, for example, we have j <<= 1 in a loop. */
430
431 significant_valid = 0;
432
433 /* Compute the mapping from uids to cuids.
434 Cuids are numbers assigned to insns, like uids,
435 except that cuids increase monotonically through the code.
436
437 Scan all SETs and see if we can deduce anything about what
438 bits are significant for some registers. */
439
440 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
441 {
442 INSN_CUID (insn) = ++i;
443 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
444 note_stores (PATTERN (insn), set_significant);
445 }
446
447 significant_valid = 1;
448
449 /* Now scan all the insns in forward order. */
450
451 label_tick = 1;
452 last_call_cuid = 0;
453 mem_last_set = 0;
454
455 for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
456 {
457 next = 0;
458
459 if (GET_CODE (insn) == CODE_LABEL)
460 label_tick++;
461
462 else if (GET_CODE (insn) == INSN
463 || GET_CODE (insn) == CALL_INSN
464 || GET_CODE (insn) == JUMP_INSN)
465 {
466 /* Try this insn with each insn it links back to. */
467
468 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
469 if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
470 goto retry;
471
472 /* Try each sequence of three linked insns ending with this one. */
473
474 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
475 for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
476 nextlinks = XEXP (nextlinks, 1))
477 if ((next = try_combine (insn, XEXP (links, 0),
478 XEXP (nextlinks, 0))) != 0)
479 goto retry;
480
481 #ifdef HAVE_cc0
482 /* Try to combine a jump insn that uses CC0
483 with a preceding insn that sets CC0, and maybe with its
484 logical predecessor as well.
485 This is how we make decrement-and-branch insns.
486 We need this special code because data flow connections
487 via CC0 do not get entered in LOG_LINKS. */
488
489 if (GET_CODE (insn) == JUMP_INSN
490 && (prev = prev_nonnote_insn (insn)) != 0
491 && GET_CODE (prev) == INSN
492 && sets_cc0_p (PATTERN (prev)))
493 {
494 if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
495 goto retry;
496
497 for (nextlinks = LOG_LINKS (prev); nextlinks;
498 nextlinks = XEXP (nextlinks, 1))
499 if ((next = try_combine (insn, prev,
500 XEXP (nextlinks, 0))) != 0)
501 goto retry;
502 }
503
504 /* Do the same for an insn that explicitly references CC0. */
505 if (GET_CODE (insn) == INSN
506 && (prev = prev_nonnote_insn (insn)) != 0
507 && GET_CODE (prev) == INSN
508 && sets_cc0_p (PATTERN (prev))
509 && GET_CODE (PATTERN (insn)) == SET
510 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
511 {
512 if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
513 goto retry;
514
515 for (nextlinks = LOG_LINKS (prev); nextlinks;
516 nextlinks = XEXP (nextlinks, 1))
517 if ((next = try_combine (insn, prev,
518 XEXP (nextlinks, 0))) != 0)
519 goto retry;
520 }
521
522 /* Finally, see if any of the insns that this insn links to
523 explicitly references CC0. If so, try this insn, that insn,
524 and its predecessor if it sets CC0. */
525 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
526 if (GET_CODE (XEXP (links, 0)) == INSN
527 && GET_CODE (PATTERN (XEXP (links, 0))) == SET
528 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
529 && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
530 && GET_CODE (prev) == INSN
531 && sets_cc0_p (PATTERN (prev))
532 && (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
533 goto retry;
534 #endif
535
536 /* Try combining an insn with two different insns whose results it
537 uses. */
538 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
539 for (nextlinks = XEXP (links, 1); nextlinks;
540 nextlinks = XEXP (nextlinks, 1))
541 if ((next = try_combine (insn, XEXP (links, 0),
542 XEXP (nextlinks, 0))) != 0)
543 goto retry;
544
545 if (GET_CODE (insn) != NOTE)
546 record_dead_and_set_regs (insn);
547
548 retry:
549 ;
550 }
551 }
552
553 total_attempts += combine_attempts;
554 total_merges += combine_merges;
555 total_extras += combine_extras;
556 total_successes += combine_successes;
557 }
558 \f
559 /* Called via note_stores. If X is a pseudo that is used in more than
560 one basic block, is narrower that HOST_BITS_PER_WIDE_INT, and is being
561 set, record what bits are significant. If we are clobbering X,
562 ignore this "set" because the clobbered value won't be used.
563
564 If we are setting only a portion of X and we can't figure out what
565 portion, assume all bits will be used since we don't know what will
566 be happening. */
567
568 static void
569 set_significant (x, set)
570 rtx x;
571 rtx set;
572 {
573 if (GET_CODE (x) == REG
574 && REGNO (x) >= FIRST_PSEUDO_REGISTER
575 && reg_n_sets[REGNO (x)] > 1
576 && reg_basic_block[REGNO (x)] < 0
577 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
578 {
579 if (GET_CODE (set) == CLOBBER)
580 return;
581
582 /* If this is a complex assignment, see if we can convert it into a
583 simple assignment. */
584 set = expand_field_assignment (set);
585 if (SET_DEST (set) == x)
586 reg_significant[REGNO (x)]
587 |= significant_bits (SET_SRC (set), significant_mode);
588 else
589 reg_significant[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
590 }
591 }
592 \f
593 /* See if INSN can be combined into I3. PRED and SUCC are optionally
594 insns that were previously combined into I3 or that will be combined
595 into the merger of INSN and I3.
596
597 Return 0 if the combination is not allowed for any reason.
598
599 If the combination is allowed, *PDEST will be set to the single
600 destination of INSN and *PSRC to the single source, and this function
601 will return 1. */
602
603 static int
604 can_combine_p (insn, i3, pred, succ, pdest, psrc)
605 rtx insn;
606 rtx i3;
607 rtx pred, succ;
608 rtx *pdest, *psrc;
609 {
610 int i;
611 rtx set = 0, src, dest;
612 rtx p, link;
613 int all_adjacent = (succ ? (next_active_insn (insn) == succ
614 && next_active_insn (succ) == i3)
615 : next_active_insn (insn) == i3);
616
617 /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0.
618 or a PARALLEL consisting of such a SET and CLOBBERs.
619
620 If INSN has CLOBBER parallel parts, ignore them for our processing.
621 By definition, these happen during the execution of the insn. When it
622 is merged with another insn, all bets are off. If they are, in fact,
623 needed and aren't also supplied in I3, they may be added by
624 recog_for_combine. Otherwise, it won't match.
625
626 We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
627 note.
628
629 Get the source and destination of INSN. If more than one, can't
630 combine. */
631
632 if (GET_CODE (PATTERN (insn)) == SET)
633 set = PATTERN (insn);
634 else if (GET_CODE (PATTERN (insn)) == PARALLEL
635 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
636 {
637 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
638 {
639 rtx elt = XVECEXP (PATTERN (insn), 0, i);
640
641 switch (GET_CODE (elt))
642 {
643 /* We can ignore CLOBBERs. */
644 case CLOBBER:
645 break;
646
647 case SET:
648 /* Ignore SETs whose result isn't used but not those that
649 have side-effects. */
650 if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
651 && ! side_effects_p (elt))
652 break;
653
654 /* If we have already found a SET, this is a second one and
655 so we cannot combine with this insn. */
656 if (set)
657 return 0;
658
659 set = elt;
660 break;
661
662 default:
663 /* Anything else means we can't combine. */
664 return 0;
665 }
666 }
667
668 if (set == 0
669 /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
670 so don't do anything with it. */
671 || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
672 return 0;
673 }
674 else
675 return 0;
676
677 if (set == 0)
678 return 0;
679
680 set = expand_field_assignment (set);
681 src = SET_SRC (set), dest = SET_DEST (set);
682
683 /* Don't eliminate a store in the stack pointer. */
684 if (dest == stack_pointer_rtx
685 /* Don't install a subreg involving two modes not tieable.
686 It can worsen register allocation, and can even make invalid reload
687 insns, since the reg inside may need to be copied from in the
688 outside mode, and that may be invalid if it is an fp reg copied in
689 integer mode. As a special exception, we can allow this if
690 I3 is simply copying DEST, a REG, to CC0. */
691 || (GET_CODE (src) == SUBREG
692 && ! MODES_TIEABLE_P (GET_MODE (src), GET_MODE (SUBREG_REG (src)))
693 #ifdef HAVE_cc0
694 && ! (GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
695 && SET_DEST (PATTERN (i3)) == cc0_rtx
696 && GET_CODE (dest) == REG && dest == SET_SRC (PATTERN (i3)))
697 #endif
698 )
699 /* If we couldn't eliminate a field assignment, we can't combine. */
700 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
701 /* Don't combine with an insn that sets a register to itself if it has
702 a REG_EQUAL note. This may be part of a REG_NO_CONFLICT sequence. */
703 || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
704 /* Can't merge a function call. */
705 || GET_CODE (src) == CALL
706 /* Don't substitute into an incremented register. */
707 || FIND_REG_INC_NOTE (i3, dest)
708 || (succ && FIND_REG_INC_NOTE (succ, dest))
709 /* Don't combine the end of a libcall into anything. */
710 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
711 /* Make sure that DEST is not used after SUCC but before I3. */
712 || (succ && ! all_adjacent
713 && reg_used_between_p (dest, succ, i3))
714 /* Make sure that the value that is to be substituted for the register
715 does not use any registers whose values alter in between. However,
716 If the insns are adjacent, a use can't cross a set even though we
717 think it might (this can happen for a sequence of insns each setting
718 the same destination; reg_last_set of that register might point to
719 a NOTE). Also, don't move a volatile asm across any other insns. */
720 || (! all_adjacent
721 && (use_crosses_set_p (src, INSN_CUID (insn))
722 || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))))
723 /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
724 better register allocation by not doing the combine. */
725 || find_reg_note (i3, REG_NO_CONFLICT, dest)
726 || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
727 /* Don't combine across a CALL_INSN, because that would possibly
728 change whether the life span of some REGs crosses calls or not,
729 and it is a pain to update that information.
730 Exception: if source is a constant, moving it later can't hurt.
731 Accept that special case, because it helps -fforce-addr a lot. */
732 || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
733 return 0;
734
735 /* DEST must either be a REG or CC0. */
736 if (GET_CODE (dest) == REG)
737 {
738 /* If register alignment is being enforced for multi-word items in all
739 cases except for parameters, it is possible to have a register copy
740 insn referencing a hard register that is not allowed to contain the
741 mode being copied and which would not be valid as an operand of most
742 insns. Eliminate this problem by not combining with such an insn.
743
744 Also, on some machines we don't want to extend the life of a hard
745 register. */
746
747 if (GET_CODE (src) == REG
748 && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
749 && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
750 #ifdef SMALL_REGISTER_CLASSES
751 /* Don't extend the life of a hard register. */
752 || REGNO (src) < FIRST_PSEUDO_REGISTER
753 #else
754 || (REGNO (src) < FIRST_PSEUDO_REGISTER
755 && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))
756 #endif
757 ))
758 return 0;
759 }
760 else if (GET_CODE (dest) != CC0)
761 return 0;
762
763 /* Don't substitute for a register intended as a clobberable operand. */
764 if (GET_CODE (PATTERN (i3)) == PARALLEL)
765 for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
766 if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
767 && rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest))
768 return 0;
769
770 /* If INSN contains anything volatile, or is an `asm' (whether volatile
771 or not), reject, unless nothing volatile comes between it and I3,
772 with the exception of SUCC. */
773
774 if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
775 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
776 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
777 && p != succ && volatile_refs_p (PATTERN (p)))
778 return 0;
779
780 /* If INSN or I2 contains an autoincrement or autodecrement,
781 make sure that register is not used between there and I3,
782 and not already used in I3 either.
783 Also insist that I3 not be a jump; if it were one
784 and the incremented register were spilled, we would lose. */
785
786 #ifdef AUTO_INC_DEC
787 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
788 if (REG_NOTE_KIND (link) == REG_INC
789 && (GET_CODE (i3) == JUMP_INSN
790 || reg_used_between_p (XEXP (link, 0), insn, i3)
791 || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
792 return 0;
793 #endif
794
795 #ifdef HAVE_cc0
796 /* Don't combine an insn that follows a CC0-setting insn.
797 An insn that uses CC0 must not be separated from the one that sets it.
798 We do, however, allow I2 to follow a CC0-setting insn if that insn
799 is passed as I1; in that case it will be deleted also.
800 We also allow combining in this case if all the insns are adjacent
801 because that would leave the two CC0 insns adjacent as well.
802 It would be more logical to test whether CC0 occurs inside I1 or I2,
803 but that would be much slower, and this ought to be equivalent. */
804
805 p = prev_nonnote_insn (insn);
806 if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
807 && ! all_adjacent)
808 return 0;
809 #endif
810
811 /* If we get here, we have passed all the tests and the combination is
812 to be allowed. */
813
814 *pdest = dest;
815 *psrc = src;
816
817 return 1;
818 }
819 \f
820 /* LOC is the location within I3 that contains its pattern or the component
821 of a PARALLEL of the pattern. We validate that it is valid for combining.
822
823 One problem is if I3 modifies its output, as opposed to replacing it
824 entirely, we can't allow the output to contain I2DEST or I1DEST as doing
825 so would produce an insn that is not equivalent to the original insns.
826
827 Consider:
828
829 (set (reg:DI 101) (reg:DI 100))
830 (set (subreg:SI (reg:DI 101) 0) <foo>)
831
832 This is NOT equivalent to:
833
834 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
835 (set (reg:DI 101) (reg:DI 100))])
836
837 Not only does this modify 100 (in which case it might still be valid
838 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
839
840 We can also run into a problem if I2 sets a register that I1
841 uses and I1 gets directly substituted into I3 (not via I2). In that
842 case, we would be getting the wrong value of I2DEST into I3, so we
843 must reject the combination. This case occurs when I2 and I1 both
844 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
845 If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
846 of a SET must prevent combination from occurring.
847
848 On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
849 if the destination of a SET is a hard register.
850
851 Before doing the above check, we first try to expand a field assignment
852 into a set of logical operations.
853
854 If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
855 we place a register that is both set and used within I3. If more than one
856 such register is detected, we fail.
857
858 Return 1 if the combination is valid, zero otherwise. */
859
860 static int
861 combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
862 rtx i3;
863 rtx *loc;
864 rtx i2dest;
865 rtx i1dest;
866 int i1_not_in_src;
867 rtx *pi3dest_killed;
868 {
869 rtx x = *loc;
870
871 if (GET_CODE (x) == SET)
872 {
873 rtx set = expand_field_assignment (x);
874 rtx dest = SET_DEST (set);
875 rtx src = SET_SRC (set);
876 rtx inner_dest = dest, inner_src = src;
877
878 SUBST (*loc, set);
879
880 while (GET_CODE (inner_dest) == STRICT_LOW_PART
881 || GET_CODE (inner_dest) == SUBREG
882 || GET_CODE (inner_dest) == ZERO_EXTRACT)
883 inner_dest = XEXP (inner_dest, 0);
884
885 /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
886 was added. */
887 #if 0
888 while (GET_CODE (inner_src) == STRICT_LOW_PART
889 || GET_CODE (inner_src) == SUBREG
890 || GET_CODE (inner_src) == ZERO_EXTRACT)
891 inner_src = XEXP (inner_src, 0);
892
893 /* If it is better that two different modes keep two different pseudos,
894 avoid combining them. This avoids producing the following pattern
895 on a 386:
896 (set (subreg:SI (reg/v:QI 21) 0)
897 (lshiftrt:SI (reg/v:SI 20)
898 (const_int 24)))
899 If that were made, reload could not handle the pair of
900 reg 20/21, since it would try to get any GENERAL_REGS
901 but some of them don't handle QImode. */
902
903 if (rtx_equal_p (inner_src, i2dest)
904 && GET_CODE (inner_dest) == REG
905 && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
906 return 0;
907 #endif
908
909 /* Check for the case where I3 modifies its output, as
910 discussed above. */
911 if ((inner_dest != dest
912 && (reg_overlap_mentioned_p (i2dest, inner_dest)
913 || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
914 /* This is the same test done in can_combine_p except that we
915 allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
916 CALL operation. */
917 || (GET_CODE (inner_dest) == REG
918 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
919 #ifdef SMALL_REGISTER_CLASSES
920 && GET_CODE (src) != CALL
921 #else
922 && ! HARD_REGNO_MODE_OK (REGNO (inner_dest),
923 GET_MODE (inner_dest))
924 #endif
925 )
926
927 || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
928 return 0;
929
930 /* If DEST is used in I3, it is being killed in this insn,
931 so record that for later. */
932 if (pi3dest_killed && GET_CODE (dest) == REG
933 && reg_referenced_p (dest, PATTERN (i3)))
934 {
935 if (*pi3dest_killed)
936 return 0;
937
938 *pi3dest_killed = dest;
939 }
940 }
941
942 else if (GET_CODE (x) == PARALLEL)
943 {
944 int i;
945
946 for (i = 0; i < XVECLEN (x, 0); i++)
947 if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
948 i1_not_in_src, pi3dest_killed))
949 return 0;
950 }
951
952 return 1;
953 }
954 \f
955 /* Try to combine the insns I1 and I2 into I3.
956 Here I1 and I2 appear earlier than I3.
957 I1 can be zero; then we combine just I2 into I3.
958
959 It we are combining three insns and the resulting insn is not recognized,
960 try splitting it into two insns. If that happens, I2 and I3 are retained
961 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
962 are pseudo-deleted.
963
964 If we created two insns, return I2; otherwise return I3.
965 Return 0 if the combination does not work. Then nothing is changed. */
966
967 static rtx
968 try_combine (i3, i2, i1)
969 register rtx i3, i2, i1;
970 {
971 /* New patterns for I3 and I3, respectively. */
972 rtx newpat, newi2pat = 0;
973 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
974 int added_sets_1, added_sets_2;
975 /* Total number of SETs to put into I3. */
976 int total_sets;
977 /* Nonzero is I2's body now appears in I3. */
978 int i2_is_used;
979 /* INSN_CODEs for new I3, new I2, and user of condition code. */
980 int insn_code_number, i2_code_number, other_code_number;
981 /* Contains I3 if the destination of I3 is used in its source, which means
982 that the old life of I3 is being killed. If that usage is placed into
983 I2 and not in I3, a REG_DEAD note must be made. */
984 rtx i3dest_killed = 0;
985 /* SET_DEST and SET_SRC of I2 and I1. */
986 rtx i2dest, i2src, i1dest = 0, i1src = 0;
987 /* PATTERN (I2), or a copy of it in certain cases. */
988 rtx i2pat;
989 /* Indicates if I2DEST or I1DEST is in I2SRC or I1_SRC. */
990 int i2dest_in_i2src, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
991 int i1_feeds_i3 = 0;
992 /* Notes that must be added to REG_NOTES in I3 and I2. */
993 rtx new_i3_notes, new_i2_notes;
994
995 int maxreg;
996 rtx temp;
997 register rtx link;
998 int i;
999
1000 /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
1001 This can occur when flow deletes an insn that it has merged into an
1002 auto-increment address. We also can't do anything if I3 has a
1003 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1004 libcall. */
1005
1006 if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
1007 || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
1008 || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
1009 || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
1010 return 0;
1011
1012 combine_attempts++;
1013
1014 undobuf.num_undo = previous_num_undos = 0;
1015 undobuf.other_insn = 0;
1016
1017 /* Save the current high-water-mark so we can free storage if we didn't
1018 accept this combination. */
1019 undobuf.storage = (char *) oballoc (0);
1020
1021 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1022 code below, set I1 to be the earlier of the two insns. */
1023 if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1024 temp = i1, i1 = i2, i2 = temp;
1025
1026 /* First check for one important special-case that the code below will
1027 not handle. Namely, the case where I1 is zero, I2 has multiple sets,
1028 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1029 we may be able to replace that destination with the destination of I3.
1030 This occurs in the common code where we compute both a quotient and
1031 remainder into a structure, in which case we want to do the computation
1032 directly into the structure to avoid register-register copies.
1033
1034 We make very conservative checks below and only try to handle the
1035 most common cases of this. For example, we only handle the case
1036 where I2 and I3 are adjacent to avoid making difficult register
1037 usage tests. */
1038
1039 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1040 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1041 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1042 #ifdef SMALL_REGISTER_CLASSES
1043 && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
1044 || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER)
1045 #endif
1046 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1047 && GET_CODE (PATTERN (i2)) == PARALLEL
1048 && ! side_effects_p (SET_DEST (PATTERN (i3)))
1049 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1050 below would need to check what is inside (and reg_overlap_mentioned_p
1051 doesn't support those codes anyway). Don't allow those destinations;
1052 the resulting insn isn't likely to be recognized anyway. */
1053 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1054 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
1055 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1056 SET_DEST (PATTERN (i3)))
1057 && next_real_insn (i2) == i3)
1058 {
1059 rtx p2 = PATTERN (i2);
1060
1061 /* Make sure that the destination of I3,
1062 which we are going to substitute into one output of I2,
1063 is not used within another output of I2. We must avoid making this:
1064 (parallel [(set (mem (reg 69)) ...)
1065 (set (reg 69) ...)])
1066 which is not well-defined as to order of actions.
1067 (Besides, reload can't handle output reloads for this.)
1068
1069 The problem can also happen if the dest of I3 is a memory ref,
1070 if another dest in I2 is an indirect memory ref. */
1071 for (i = 0; i < XVECLEN (p2, 0); i++)
1072 if (GET_CODE (XVECEXP (p2, 0, i)) == SET
1073 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1074 SET_DEST (XVECEXP (p2, 0, i))))
1075 break;
1076
1077 if (i == XVECLEN (p2, 0))
1078 for (i = 0; i < XVECLEN (p2, 0); i++)
1079 if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1080 {
1081 combine_merges++;
1082
1083 subst_insn = i3;
1084 subst_low_cuid = INSN_CUID (i2);
1085
1086 added_sets_2 = 0;
1087 i2dest = SET_SRC (PATTERN (i3));
1088
1089 /* Replace the dest in I2 with our dest and make the resulting
1090 insn the new pattern for I3. Then skip to where we
1091 validate the pattern. Everything was set up above. */
1092 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1093 SET_DEST (PATTERN (i3)));
1094
1095 newpat = p2;
1096 goto validate_replacement;
1097 }
1098 }
1099
1100 #ifndef HAVE_cc0
1101 /* If we have no I1 and I2 looks like:
1102 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1103 (set Y OP)])
1104 make up a dummy I1 that is
1105 (set Y OP)
1106 and change I2 to be
1107 (set (reg:CC X) (compare:CC Y (const_int 0)))
1108
1109 (We can ignore any trailing CLOBBERs.)
1110
1111 This undoes a previous combination and allows us to match a branch-and-
1112 decrement insn. */
1113
1114 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1115 && XVECLEN (PATTERN (i2), 0) >= 2
1116 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1117 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1118 == MODE_CC)
1119 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1120 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1121 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1122 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1123 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1124 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1125 {
1126 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1127 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1128 break;
1129
1130 if (i == 1)
1131 {
1132 /* We make I1 with the same INSN_UID as I2. This gives it
1133 the same INSN_CUID for value tracking. Our fake I1 will
1134 never appear in the insn stream so giving it the same INSN_UID
1135 as I2 will not cause a problem. */
1136
1137 i1 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
1138 XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);
1139
1140 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1141 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1142 SET_DEST (PATTERN (i1)));
1143 }
1144 }
1145 #endif
1146
1147 /* Verify that I2 and I1 are valid for combining. */
1148 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1149 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
1150 {
1151 undo_all ();
1152 return 0;
1153 }
1154
1155 /* Record whether I2DEST is used in I2SRC and similarly for the other
1156 cases. Knowing this will help in register status updating below. */
1157 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1158 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1159 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1160
1161 /* See if I1 directly feeds into I3. It does if I1DEST is not used
1162 in I2SRC. */
1163 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1164
1165 /* Ensure that I3's pattern can be the destination of combines. */
1166 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1167 i1 && i2dest_in_i1src && i1_feeds_i3,
1168 &i3dest_killed))
1169 {
1170 undo_all ();
1171 return 0;
1172 }
1173
1174 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1175 We used to do this EXCEPT in one case: I3 has a post-inc in an
1176 output operand. However, that exception can give rise to insns like
1177 mov r3,(r3)+
1178 which is a famous insn on the PDP-11 where the value of r3 used as the
1179 source was model-dependent. Avoid this sort of thing. */
1180
1181 #if 0
1182 if (!(GET_CODE (PATTERN (i3)) == SET
1183 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1184 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1185 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1186 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1187 /* It's not the exception. */
1188 #endif
1189 #ifdef AUTO_INC_DEC
1190 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1191 if (REG_NOTE_KIND (link) == REG_INC
1192 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1193 || (i1 != 0
1194 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1195 {
1196 undo_all ();
1197 return 0;
1198 }
1199 #endif
1200
1201 /* See if the SETs in I1 or I2 need to be kept around in the merged
1202 instruction: whenever the value set there is still needed past I3.
1203 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1204
1205 For the SET in I1, we have two cases: If I1 and I2 independently
1206 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1207 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1208 in I1 needs to be kept around unless I1DEST dies or is set in either
1209 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1210 I1DEST. If so, we know I1 feeds into I2. */
1211
1212 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1213
1214 added_sets_1
1215 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1216 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1217
1218 /* If the set in I2 needs to be kept around, we must make a copy of
1219 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1220 PATTERN (I2), we are only substituting for the original I1DEST, not into
1221 an already-substituted copy. This also prevents making self-referential
1222 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1223 I2DEST. */
1224
1225 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1226 ? gen_rtx (SET, VOIDmode, i2dest, i2src)
1227 : PATTERN (i2));
1228
1229 if (added_sets_2)
1230 i2pat = copy_rtx (i2pat);
1231
1232 combine_merges++;
1233
1234 /* Substitute in the latest insn for the regs set by the earlier ones. */
1235
1236 maxreg = max_reg_num ();
1237
1238 subst_insn = i3;
1239 subst_low_cuid = i1 ? INSN_CUID (i1) : INSN_CUID (i2);
1240
1241 /* It is possible that the source of I2 or I1 may be performing an
1242 unneeded operation, such as a ZERO_EXTEND of something that is known
1243 to have the high part zero. Handle that case by letting subst look at
1244 the innermost one of them.
1245
1246 Another way to do this would be to have a function that tries to
1247 simplify a single insn instead of merging two or more insns. We don't
1248 do this because of the potential of infinite loops and because
1249 of the potential extra memory required. However, doing it the way
1250 we are is a bit of a kludge and doesn't catch all cases.
1251
1252 But only do this if -fexpensive-optimizations since it slows things down
1253 and doesn't usually win. */
1254
1255 if (flag_expensive_optimizations)
1256 {
1257 /* Pass pc_rtx so no substitutions are done, just simplifications.
1258 The cases that we are interested in here do not involve the few
1259 cases were is_replaced is checked. */
1260 if (i1)
1261 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1262 else
1263 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1264
1265 previous_num_undos = undobuf.num_undo;
1266 }
1267
1268 #ifndef HAVE_cc0
1269 /* Many machines that don't use CC0 have insns that can both perform an
1270 arithmetic operation and set the condition code. These operations will
1271 be represented as a PARALLEL with the first element of the vector
1272 being a COMPARE of an arithmetic operation with the constant zero.
1273 The second element of the vector will set some pseudo to the result
1274 of the same arithmetic operation. If we simplify the COMPARE, we won't
1275 match such a pattern and so will generate an extra insn. Here we test
1276 for this case, where both the comparison and the operation result are
1277 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1278 I2SRC. Later we will make the PARALLEL that contains I2. */
1279
1280 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1281 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1282 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1283 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1284 {
1285 rtx *cc_use;
1286 enum machine_mode compare_mode;
1287
1288 newpat = PATTERN (i3);
1289 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1290
1291 i2_is_used = 1;
1292
1293 #ifdef EXTRA_CC_MODES
1294 /* See if a COMPARE with the operand we substituted in should be done
1295 with the mode that is currently being used. If not, do the same
1296 processing we do in `subst' for a SET; namely, if the destination
1297 is used only once, try to replace it with a register of the proper
1298 mode and also replace the COMPARE. */
1299 if (undobuf.other_insn == 0
1300 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1301 &undobuf.other_insn))
1302 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1303 i2src, const0_rtx))
1304 != GET_MODE (SET_DEST (newpat))))
1305 {
1306 int regno = REGNO (SET_DEST (newpat));
1307 rtx new_dest = gen_rtx (REG, compare_mode, regno);
1308
1309 if (regno < FIRST_PSEUDO_REGISTER
1310 || (reg_n_sets[regno] == 1 && ! added_sets_2
1311 && ! REG_USERVAR_P (SET_DEST (newpat))))
1312 {
1313 if (regno >= FIRST_PSEUDO_REGISTER)
1314 SUBST (regno_reg_rtx[regno], new_dest);
1315
1316 SUBST (SET_DEST (newpat), new_dest);
1317 SUBST (XEXP (*cc_use, 0), new_dest);
1318 SUBST (SET_SRC (newpat),
1319 gen_rtx_combine (COMPARE, compare_mode,
1320 i2src, const0_rtx));
1321 }
1322 else
1323 undobuf.other_insn = 0;
1324 }
1325 #endif
1326 }
1327 else
1328 #endif
1329 {
1330 n_occurrences = 0; /* `subst' counts here */
1331
1332 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1333 need to make a unique copy of I2SRC each time we substitute it
1334 to avoid self-referential rtl. */
1335
1336 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1337 ! i1_feeds_i3 && i1dest_in_i1src);
1338 previous_num_undos = undobuf.num_undo;
1339
1340 /* Record whether i2's body now appears within i3's body. */
1341 i2_is_used = n_occurrences;
1342 }
1343
1344 /* If we already got a failure, don't try to do more. Otherwise,
1345 try to substitute in I1 if we have it. */
1346
1347 if (i1 && GET_CODE (newpat) != CLOBBER)
1348 {
1349 /* Before we can do this substitution, we must redo the test done
1350 above (see detailed comments there) that ensures that I1DEST
1351 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1352
1353 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1354 0, NULL_PTR))
1355 {
1356 undo_all ();
1357 return 0;
1358 }
1359
1360 n_occurrences = 0;
1361 newpat = subst (newpat, i1dest, i1src, 0, 0);
1362 previous_num_undos = undobuf.num_undo;
1363 }
1364
1365 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1366 to count all the ways that I2SRC and I1SRC can be used. */
1367 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
1368 && i2_is_used + added_sets_2 > 1)
1369 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
1370 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1371 > 1))
1372 /* Fail if we tried to make a new register (we used to abort, but there's
1373 really no reason to). */
1374 || max_reg_num () != maxreg
1375 /* Fail if we couldn't do something and have a CLOBBER. */
1376 || GET_CODE (newpat) == CLOBBER)
1377 {
1378 undo_all ();
1379 return 0;
1380 }
1381
1382 /* If the actions of the earlier insns must be kept
1383 in addition to substituting them into the latest one,
1384 we must make a new PARALLEL for the latest insn
1385 to hold additional the SETs. */
1386
1387 if (added_sets_1 || added_sets_2)
1388 {
1389 combine_extras++;
1390
1391 if (GET_CODE (newpat) == PARALLEL)
1392 {
1393 rtvec old = XVEC (newpat, 0);
1394 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1395 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1396 bcopy (&old->elem[0], &XVECEXP (newpat, 0, 0),
1397 sizeof (old->elem[0]) * old->num_elem);
1398 }
1399 else
1400 {
1401 rtx old = newpat;
1402 total_sets = 1 + added_sets_1 + added_sets_2;
1403 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1404 XVECEXP (newpat, 0, 0) = old;
1405 }
1406
1407 if (added_sets_1)
1408 XVECEXP (newpat, 0, --total_sets)
1409 = (GET_CODE (PATTERN (i1)) == PARALLEL
1410 ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));
1411
1412 if (added_sets_2)
1413 {
1414 /* If there is no I1, use I2's body as is. We used to also not do
1415 the subst call below if I2 was substituted into I3,
1416 but that could lose a simplification. */
1417 if (i1 == 0)
1418 XVECEXP (newpat, 0, --total_sets) = i2pat;
1419 else
1420 /* See comment where i2pat is assigned. */
1421 XVECEXP (newpat, 0, --total_sets)
1422 = subst (i2pat, i1dest, i1src, 0, 0);
1423 }
1424 }
1425
1426 /* We come here when we are replacing a destination in I2 with the
1427 destination of I3. */
1428 validate_replacement:
1429
1430 /* Is the result of combination a valid instruction? */
1431 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1432
1433 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1434 the second SET's destination is a register that is unused. In that case,
1435 we just need the first SET. This can occur when simplifying a divmod
1436 insn. We *must* test for this case here because the code below that
1437 splits two independent SETs doesn't handle this case correctly when it
1438 updates the register status. Also check the case where the first
1439 SET's destination is unused. That would not cause incorrect code, but
1440 does cause an unneeded insn to remain. */
1441
1442 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1443 && XVECLEN (newpat, 0) == 2
1444 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1445 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1446 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1447 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1448 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1449 && asm_noperands (newpat) < 0)
1450 {
1451 newpat = XVECEXP (newpat, 0, 0);
1452 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1453 }
1454
1455 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1456 && XVECLEN (newpat, 0) == 2
1457 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1458 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1459 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1460 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1461 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1462 && asm_noperands (newpat) < 0)
1463 {
1464 newpat = XVECEXP (newpat, 0, 1);
1465 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1466 }
1467
1468 /* If we were combining three insns and the result is a simple SET
1469 with no ASM_OPERANDS that wasn't recognized, try to split it into two
1470 insns. There are two ways to do this. It can be split using a
1471 machine-specific method (like when you have an addition of a large
1472 constant) or by combine in the function find_split_point. */
1473
1474 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1475 && asm_noperands (newpat) < 0)
1476 {
1477 rtx m_split, *split;
1478 rtx ni2dest = i2dest;
1479
1480 /* See if the MD file can split NEWPAT. If it can't, see if letting it
1481 use I2DEST as a scratch register will help. In the latter case,
1482 convert I2DEST to the mode of the source of NEWPAT if we can. */
1483
1484 m_split = split_insns (newpat, i3);
1485 if (m_split == 0)
1486 {
1487 /* If I2DEST is a hard register or the only use of a pseudo,
1488 we can change its mode. */
1489 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
1490 && GET_MODE (SET_DEST (newpat)) != VOIDmode
1491 && GET_CODE (i2dest) == REG
1492 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1493 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1494 && ! REG_USERVAR_P (i2dest))))
1495 ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
1496 REGNO (i2dest));
1497
1498 m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
1499 gen_rtvec (2, newpat,
1500 gen_rtx (CLOBBER,
1501 VOIDmode,
1502 ni2dest))),
1503 i3);
1504 }
1505
1506 if (m_split && GET_CODE (m_split) == SEQUENCE
1507 && XVECLEN (m_split, 0) == 2
1508 && (next_real_insn (i2) == i3
1509 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1510 INSN_CUID (i2))))
1511 {
1512 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
1513 newpat = PATTERN (XVECEXP (m_split, 0, 1));
1514
1515 /* In case we changed the mode of I2DEST, replace it in the
1516 pseudo-register table here. We can't do it above in case this
1517 code doesn't get executed and we do a split the other way. */
1518
1519 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1520 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1521
1522 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1523 if (i2_code_number >= 0)
1524 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1525
1526 /* It is possible that both insns now set the destination of I3.
1527 If so, we must show an extra use of it and update
1528 reg_significant. */
1529
1530 if (insn_code_number >= 0 && GET_CODE (SET_DEST (newpat)) == REG
1531 && GET_CODE (SET_DEST (newi2pat)) == REG
1532 && REGNO (SET_DEST (newpat)) == REGNO (SET_DEST (newi2pat)))
1533 {
1534 reg_n_sets[REGNO (SET_DEST (newpat))]++;
1535 set_significant (SET_DEST (newi2pat), newi2pat);
1536 set_significant (SET_DEST (newpat), newpat);
1537 }
1538 }
1539
1540 /* If we can split it and use I2DEST, go ahead and see if that
1541 helps things be recognized. Verify that none of the registers
1542 are set between I2 and I3. */
1543 else if ((split = find_split_point (&newpat)) != 0
1544 #ifdef HAVE_cc0
1545 && GET_CODE (i2dest) == REG
1546 #endif
1547 /* We need I2DEST in the proper mode. If it is a hard register
1548 or the only use of a pseudo, we can change its mode. */
1549 && (GET_MODE (*split) == GET_MODE (i2dest)
1550 || GET_MODE (*split) == VOIDmode
1551 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1552 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1553 && ! REG_USERVAR_P (i2dest)))
1554 && (next_real_insn (i2) == i3
1555 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1556 /* We can't overwrite I2DEST if its value is still used by
1557 NEWPAT. */
1558 && ! reg_referenced_p (i2dest, newpat))
1559 {
1560 rtx newdest = i2dest;
1561
1562 /* Get NEWDEST as a register in the proper mode. We have already
1563 validated that we can do this. */
1564 if (GET_MODE (i2dest) != GET_MODE (*split)
1565 && GET_MODE (*split) != VOIDmode)
1566 {
1567 newdest = gen_rtx (REG, GET_MODE (*split), REGNO (i2dest));
1568
1569 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1570 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1571 }
1572
1573 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1574 an ASHIFT. This can occur if it was inside a PLUS and hence
1575 appeared to be a memory address. This is a kludge. */
1576 if (GET_CODE (*split) == MULT
1577 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1578 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1579 SUBST (*split, gen_rtx_combine (ASHIFT, GET_MODE (*split),
1580 XEXP (*split, 0), GEN_INT (i)));
1581
1582 #ifdef INSN_SCHEDULING
1583 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1584 be written as a ZERO_EXTEND. */
1585 if (GET_CODE (*split) == SUBREG
1586 && GET_CODE (SUBREG_REG (*split)) == MEM)
1587 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, GET_MODE (*split),
1588 XEXP (*split, 0)));
1589 #endif
1590
1591 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
1592 SUBST (*split, newdest);
1593 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1594 if (i2_code_number >= 0)
1595 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1596 }
1597 }
1598
1599 /* Check for a case where we loaded from memory in a narrow mode and
1600 then sign extended it, but we need both registers. In that case,
1601 we have a PARALLEL with both loads from the same memory location.
1602 We can split this into a load from memory followed by a register-register
1603 copy. This saves at least one insn, more if register allocation can
1604 eliminate the copy. */
1605
1606 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1607 && GET_CODE (newpat) == PARALLEL
1608 && XVECLEN (newpat, 0) == 2
1609 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1610 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
1611 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1612 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1613 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
1614 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1615 INSN_CUID (i2))
1616 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1617 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1618 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1619 SET_SRC (XVECEXP (newpat, 0, 1)))
1620 && ! find_reg_note (i3, REG_UNUSED,
1621 SET_DEST (XVECEXP (newpat, 0, 0))))
1622 {
1623 newi2pat = XVECEXP (newpat, 0, 0);
1624 newpat = XVECEXP (newpat, 0, 1);
1625 SUBST (SET_SRC (newpat),
1626 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)),
1627 SET_DEST (newi2pat)));
1628 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1629 if (i2_code_number >= 0)
1630 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1631
1632 if (insn_code_number >= 0)
1633 {
1634 rtx insn;
1635 rtx link;
1636
1637 /* If we will be able to accept this, we have made a change to the
1638 destination of I3. This can invalidate a LOG_LINKS pointing
1639 to I3. No other part of combine.c makes such a transformation.
1640
1641 The new I3 will have a destination that was previously the
1642 destination of I1 or I2 and which was used in i2 or I3. Call
1643 distribute_links to make a LOG_LINK from the next use of
1644 that destination. */
1645
1646 PATTERN (i3) = newpat;
1647 distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
1648
1649 /* I3 now uses what used to be its destination and which is
1650 now I2's destination. That means we need a LOG_LINK from
1651 I3 to I2. But we used to have one, so we still will.
1652
1653 However, some later insn might be using I2's dest and have
1654 a LOG_LINK pointing at I3. We must remove this link.
1655 The simplest way to remove the link is to point it at I1,
1656 which we know will be a NOTE. */
1657
1658 for (insn = NEXT_INSN (i3);
1659 insn && GET_CODE (insn) != CODE_LABEL
1660 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN;
1661 insn = NEXT_INSN (insn))
1662 {
1663 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1664 && reg_referenced_p (SET_DEST (newi2pat), PATTERN (insn)))
1665 {
1666 for (link = LOG_LINKS (insn); link;
1667 link = XEXP (link, 1))
1668 if (XEXP (link, 0) == i3)
1669 XEXP (link, 0) = i1;
1670
1671 break;
1672 }
1673 }
1674 }
1675 }
1676
1677 /* Similarly, check for a case where we have a PARALLEL of two independent
1678 SETs but we started with three insns. In this case, we can do the sets
1679 as two separate insns. This case occurs when some SET allows two
1680 other insns to combine, but the destination of that SET is still live. */
1681
1682 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1683 && GET_CODE (newpat) == PARALLEL
1684 && XVECLEN (newpat, 0) == 2
1685 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1686 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
1687 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
1688 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1689 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1690 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1691 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1692 INSN_CUID (i2))
1693 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
1694 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
1695 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
1696 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1697 XVECEXP (newpat, 0, 0))
1698 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
1699 XVECEXP (newpat, 0, 1)))
1700 {
1701 newi2pat = XVECEXP (newpat, 0, 1);
1702 newpat = XVECEXP (newpat, 0, 0);
1703
1704 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1705 if (i2_code_number >= 0)
1706 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1707 }
1708
1709 /* If it still isn't recognized, fail and change things back the way they
1710 were. */
1711 if ((insn_code_number < 0
1712 /* Is the result a reasonable ASM_OPERANDS? */
1713 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
1714 {
1715 undo_all ();
1716 return 0;
1717 }
1718
1719 /* If we had to change another insn, make sure it is valid also. */
1720 if (undobuf.other_insn)
1721 {
1722 rtx other_notes = REG_NOTES (undobuf.other_insn);
1723 rtx other_pat = PATTERN (undobuf.other_insn);
1724 rtx new_other_notes;
1725 rtx note, next;
1726
1727 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
1728 &new_other_notes);
1729
1730 if (other_code_number < 0 && ! check_asm_operands (other_pat))
1731 {
1732 undo_all ();
1733 return 0;
1734 }
1735
1736 PATTERN (undobuf.other_insn) = other_pat;
1737
1738 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
1739 are still valid. Then add any non-duplicate notes added by
1740 recog_for_combine. */
1741 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
1742 {
1743 next = XEXP (note, 1);
1744
1745 if (REG_NOTE_KIND (note) == REG_UNUSED
1746 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
1747 remove_note (undobuf.other_insn, note);
1748 }
1749
1750 distribute_notes (new_other_notes, undobuf.other_insn,
1751 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
1752 }
1753
1754 /* We now know that we can do this combination. Merge the insns and
1755 update the status of registers and LOG_LINKS. */
1756
1757 {
1758 rtx i3notes, i2notes, i1notes = 0;
1759 rtx i3links, i2links, i1links = 0;
1760 rtx midnotes = 0;
1761 int all_adjacent = (next_real_insn (i2) == i3
1762 && (i1 == 0 || next_real_insn (i1) == i2));
1763 register int regno;
1764 /* Compute which registers we expect to eliminate. */
1765 rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
1766 ? 0 : i2dest);
1767 rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;
1768
1769 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
1770 clear them. */
1771 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
1772 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
1773 if (i1)
1774 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
1775
1776 /* Ensure that we do not have something that should not be shared but
1777 occurs multiple times in the new insns. Check this by first
1778 resetting all the `used' flags and then copying anything is shared. */
1779
1780 reset_used_flags (i3notes);
1781 reset_used_flags (i2notes);
1782 reset_used_flags (i1notes);
1783 reset_used_flags (newpat);
1784 reset_used_flags (newi2pat);
1785 if (undobuf.other_insn)
1786 reset_used_flags (PATTERN (undobuf.other_insn));
1787
1788 i3notes = copy_rtx_if_shared (i3notes);
1789 i2notes = copy_rtx_if_shared (i2notes);
1790 i1notes = copy_rtx_if_shared (i1notes);
1791 newpat = copy_rtx_if_shared (newpat);
1792 newi2pat = copy_rtx_if_shared (newi2pat);
1793 if (undobuf.other_insn)
1794 reset_used_flags (PATTERN (undobuf.other_insn));
1795
1796 INSN_CODE (i3) = insn_code_number;
1797 PATTERN (i3) = newpat;
1798 if (undobuf.other_insn)
1799 INSN_CODE (undobuf.other_insn) = other_code_number;
1800
1801 /* We had one special case above where I2 had more than one set and
1802 we replaced a destination of one of those sets with the destination
1803 of I3. In that case, we have to update LOG_LINKS of insns later
1804 in this basic block. Note that this (expensive) case is rare. */
1805
1806 if (GET_CODE (PATTERN (i2)) == PARALLEL)
1807 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
1808 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
1809 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
1810 && ! find_reg_note (i2, REG_UNUSED,
1811 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
1812 {
1813 register rtx insn;
1814
1815 for (insn = NEXT_INSN (i2); insn; insn = NEXT_INSN (insn))
1816 {
1817 if (insn != i3 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1818 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
1819 if (XEXP (link, 0) == i2)
1820 XEXP (link, 0) = i3;
1821
1822 if (GET_CODE (insn) == CODE_LABEL
1823 || GET_CODE (insn) == JUMP_INSN)
1824 break;
1825 }
1826 }
1827
1828 LOG_LINKS (i3) = 0;
1829 REG_NOTES (i3) = 0;
1830 LOG_LINKS (i2) = 0;
1831 REG_NOTES (i2) = 0;
1832
1833 if (newi2pat)
1834 {
1835 INSN_CODE (i2) = i2_code_number;
1836 PATTERN (i2) = newi2pat;
1837 }
1838 else
1839 {
1840 PUT_CODE (i2, NOTE);
1841 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
1842 NOTE_SOURCE_FILE (i2) = 0;
1843 }
1844
1845 if (i1)
1846 {
1847 LOG_LINKS (i1) = 0;
1848 REG_NOTES (i1) = 0;
1849 PUT_CODE (i1, NOTE);
1850 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
1851 NOTE_SOURCE_FILE (i1) = 0;
1852 }
1853
1854 /* Get death notes for everything that is now used in either I3 or
1855 I2 and used to die in a previous insn. */
1856
1857 move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
1858 if (newi2pat)
1859 move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);
1860
1861 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
1862 if (i3notes)
1863 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
1864 elim_i2, elim_i1);
1865 if (i2notes)
1866 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
1867 elim_i2, elim_i1);
1868 if (i1notes)
1869 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
1870 elim_i2, elim_i1);
1871 if (midnotes)
1872 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1873 elim_i2, elim_i1);
1874
1875 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
1876 know these are REG_UNUSED and want them to go to the desired insn,
1877 so we always pass it as i3. */
1878 if (newi2pat && new_i2_notes)
1879 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
1880 if (new_i3_notes)
1881 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
1882
1883 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
1884 put a REG_DEAD note for it somewhere. Similarly for I2 and I1. */
1885 if (i3dest_killed)
1886 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed, NULL_RTX),
1887 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1888 NULL_RTX, NULL_RTX);
1889 if (i2dest_in_i2src)
1890 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
1891 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1892 NULL_RTX, NULL_RTX);
1893 if (i1dest_in_i1src)
1894 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
1895 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1896 NULL_RTX, NULL_RTX);
1897
1898 distribute_links (i3links);
1899 distribute_links (i2links);
1900 distribute_links (i1links);
1901
1902 if (GET_CODE (i2dest) == REG)
1903 {
1904 /* The insn that previously set this register doesn't exist, and
1905 this life of the register may not exist either. Show that
1906 we don't know its value any more. If we don't do this and
1907 I2 set the register to a value that depended on its old
1908 contents, we will get confused. If this insn is used, thing
1909 will be set correctly in combine_instructions. */
1910 record_value_for_reg (i2dest, NULL_RTX, NULL_RTX);
1911
1912 /* If the reg formerly set in I2 died only once and that was in I3,
1913 zero its use count so it won't make `reload' do any work. */
1914 if (! added_sets_2 && newi2pat == 0)
1915 {
1916 regno = REGNO (i2dest);
1917 reg_n_sets[regno]--;
1918 if (reg_n_sets[regno] == 0
1919 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
1920 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
1921 reg_n_refs[regno] = 0;
1922 }
1923 }
1924
1925 if (i1 && GET_CODE (i1dest) == REG)
1926 {
1927 record_value_for_reg (i1dest, NULL_RTX, NULL_RTX);
1928 regno = REGNO (i1dest);
1929 if (! added_sets_1)
1930 {
1931 reg_n_sets[regno]--;
1932 if (reg_n_sets[regno] == 0
1933 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
1934 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
1935 reg_n_refs[regno] = 0;
1936 }
1937 }
1938
1939 /* If I3 is now an unconditional jump, ensure that it has a
1940 BARRIER following it since it may have initially been a
1941 conditional jump. */
1942
1943 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
1944 && GET_CODE (next_nonnote_insn (i3)) != BARRIER)
1945 emit_barrier_after (i3);
1946 }
1947
1948 combine_successes++;
1949
1950 return newi2pat ? i2 : i3;
1951 }
1952 \f
1953 /* Undo all the modifications recorded in undobuf. */
1954
1955 static void
1956 undo_all ()
1957 {
1958 register int i;
1959 if (undobuf.num_undo > MAX_UNDO)
1960 undobuf.num_undo = MAX_UNDO;
1961 for (i = undobuf.num_undo - 1; i >= 0; i--)
1962 *undobuf.undo[i].where = undobuf.undo[i].old_contents;
1963
1964 obfree (undobuf.storage);
1965 undobuf.num_undo = 0;
1966 }
1967 \f
1968 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
1969 where we have an arithmetic expression and return that point.
1970
1971 try_combine will call this function to see if an insn can be split into
1972 two insns. */
1973
1974 static rtx *
1975 find_split_point (loc)
1976 rtx *loc;
1977 {
1978 rtx x = *loc;
1979 enum rtx_code code = GET_CODE (x);
1980 rtx *split;
1981 int len = 0, pos, unsignedp;
1982 rtx inner;
1983
1984 /* First special-case some codes. */
1985 switch (code)
1986 {
1987 case SUBREG:
1988 #ifdef INSN_SCHEDULING
1989 /* If we are making a paradoxical SUBREG invalid, it becomes a split
1990 point. */
1991 if (GET_CODE (SUBREG_REG (x)) == MEM)
1992 return loc;
1993 #endif
1994 return find_split_point (&SUBREG_REG (x));
1995
1996 case MEM:
1997 #ifdef HAVE_lo_sum
1998 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
1999 using LO_SUM and HIGH. */
2000 if (GET_CODE (XEXP (x, 0)) == CONST
2001 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2002 {
2003 SUBST (XEXP (x, 0),
2004 gen_rtx_combine (LO_SUM, Pmode,
2005 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2006 XEXP (x, 0)));
2007 return &XEXP (XEXP (x, 0), 0);
2008 }
2009 #endif
2010
2011 /* If we have a PLUS whose second operand is a constant and the
2012 address is not valid, perhaps will can split it up using
2013 the machine-specific way to split large constants. We use
2014 the first pseudo-reg (one of the virtual regs) as a placeholder;
2015 it will not remain in the result. */
2016 if (GET_CODE (XEXP (x, 0)) == PLUS
2017 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2018 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2019 {
2020 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2021 rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
2022 subst_insn);
2023
2024 /* This should have produced two insns, each of which sets our
2025 placeholder. If the source of the second is a valid address,
2026 we can make put both sources together and make a split point
2027 in the middle. */
2028
2029 if (seq && XVECLEN (seq, 0) == 2
2030 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2031 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2032 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2033 && ! reg_mentioned_p (reg,
2034 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2035 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2036 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2037 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2038 && memory_address_p (GET_MODE (x),
2039 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2040 {
2041 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2042 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2043
2044 /* Replace the placeholder in SRC2 with SRC1. If we can
2045 find where in SRC2 it was placed, that can become our
2046 split point and we can replace this address with SRC2.
2047 Just try two obvious places. */
2048
2049 src2 = replace_rtx (src2, reg, src1);
2050 split = 0;
2051 if (XEXP (src2, 0) == src1)
2052 split = &XEXP (src2, 0);
2053 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2054 && XEXP (XEXP (src2, 0), 0) == src1)
2055 split = &XEXP (XEXP (src2, 0), 0);
2056
2057 if (split)
2058 {
2059 SUBST (XEXP (x, 0), src2);
2060 return split;
2061 }
2062 }
2063 }
2064 break;
2065
2066 case SET:
2067 #ifdef HAVE_cc0
2068 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2069 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2070 we need to put the operand into a register. So split at that
2071 point. */
2072
2073 if (SET_DEST (x) == cc0_rtx
2074 && GET_CODE (SET_SRC (x)) != COMPARE
2075 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2076 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2077 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2078 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2079 return &SET_SRC (x);
2080 #endif
2081
2082 /* See if we can split SET_SRC as it stands. */
2083 split = find_split_point (&SET_SRC (x));
2084 if (split && split != &SET_SRC (x))
2085 return split;
2086
2087 /* See if this is a bitfield assignment with everything constant. If
2088 so, this is an IOR of an AND, so split it into that. */
2089 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2090 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2091 <= HOST_BITS_PER_WIDE_INT)
2092 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2093 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2094 && GET_CODE (SET_SRC (x)) == CONST_INT
2095 && ((INTVAL (XEXP (SET_DEST (x), 1))
2096 + INTVAL (XEXP (SET_DEST (x), 2)))
2097 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2098 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2099 {
2100 int pos = INTVAL (XEXP (SET_DEST (x), 2));
2101 int len = INTVAL (XEXP (SET_DEST (x), 1));
2102 int src = INTVAL (SET_SRC (x));
2103 rtx dest = XEXP (SET_DEST (x), 0);
2104 enum machine_mode mode = GET_MODE (dest);
2105 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
2106
2107 #if BITS_BIG_ENDIAN
2108 pos = GET_MODE_BITSIZE (mode) - len - pos;
2109 #endif
2110
2111 if (src == mask)
2112 SUBST (SET_SRC (x),
2113 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
2114 else
2115 SUBST (SET_SRC (x),
2116 gen_binary (IOR, mode,
2117 gen_binary (AND, mode, dest,
2118 GEN_INT (~ (mask << pos)
2119 & GET_MODE_MASK (mode))),
2120 GEN_INT (src << pos)));
2121
2122 SUBST (SET_DEST (x), dest);
2123
2124 split = find_split_point (&SET_SRC (x));
2125 if (split && split != &SET_SRC (x))
2126 return split;
2127 }
2128
2129 /* Otherwise, see if this is an operation that we can split into two.
2130 If so, try to split that. */
2131 code = GET_CODE (SET_SRC (x));
2132
2133 switch (code)
2134 {
2135 case SIGN_EXTEND:
2136 inner = XEXP (SET_SRC (x), 0);
2137 pos = 0;
2138 len = GET_MODE_BITSIZE (GET_MODE (inner));
2139 unsignedp = 0;
2140 break;
2141
2142 case SIGN_EXTRACT:
2143 case ZERO_EXTRACT:
2144 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2145 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2146 {
2147 inner = XEXP (SET_SRC (x), 0);
2148 len = INTVAL (XEXP (SET_SRC (x), 1));
2149 pos = INTVAL (XEXP (SET_SRC (x), 2));
2150
2151 #if BITS_BIG_ENDIAN
2152 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2153 #endif
2154 unsignedp = (code == ZERO_EXTRACT);
2155 }
2156 break;
2157 }
2158
2159 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2160 {
2161 enum machine_mode mode = GET_MODE (SET_SRC (x));
2162
2163 if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
2164 {
2165 SUBST (SET_SRC (x),
2166 gen_rtx_combine
2167 (AND, mode,
2168 gen_rtx_combine (LSHIFTRT, mode,
2169 gen_lowpart_for_combine (mode, inner),
2170 GEN_INT (pos)),
2171 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
2172
2173 split = find_split_point (&SET_SRC (x));
2174 if (split && split != &SET_SRC (x))
2175 return split;
2176 }
2177 else
2178 {
2179 SUBST (SET_SRC (x),
2180 gen_rtx_combine
2181 (ASHIFTRT, mode,
2182 gen_rtx_combine (ASHIFT, mode,
2183 gen_lowpart_for_combine (mode, inner),
2184 GEN_INT (GET_MODE_BITSIZE (mode)
2185 - len - pos)),
2186 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
2187
2188 split = find_split_point (&SET_SRC (x));
2189 if (split && split != &SET_SRC (x))
2190 return split;
2191 }
2192 }
2193
2194 /* See if this is a simple operation with a constant as the second
2195 operand. It might be that this constant is out of range and hence
2196 could be used as a split point. */
2197 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2198 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2199 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2200 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2201 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2202 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2203 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2204 == 'o'))))
2205 return &XEXP (SET_SRC (x), 1);
2206
2207 /* Finally, see if this is a simple operation with its first operand
2208 not in a register. The operation might require this operand in a
2209 register, so return it as a split point. We can always do this
2210 because if the first operand were another operation, we would have
2211 already found it as a split point. */
2212 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2213 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2214 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2215 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2216 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2217 return &XEXP (SET_SRC (x), 0);
2218
2219 return 0;
2220
2221 case AND:
2222 case IOR:
2223 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2224 it is better to write this as (not (ior A B)) so we can split it.
2225 Similarly for IOR. */
2226 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2227 {
2228 SUBST (*loc,
2229 gen_rtx_combine (NOT, GET_MODE (x),
2230 gen_rtx_combine (code == IOR ? AND : IOR,
2231 GET_MODE (x),
2232 XEXP (XEXP (x, 0), 0),
2233 XEXP (XEXP (x, 1), 0))));
2234 return find_split_point (loc);
2235 }
2236
2237 /* Many RISC machines have a large set of logical insns. If the
2238 second operand is a NOT, put it first so we will try to split the
2239 other operand first. */
2240 if (GET_CODE (XEXP (x, 1)) == NOT)
2241 {
2242 rtx tem = XEXP (x, 0);
2243 SUBST (XEXP (x, 0), XEXP (x, 1));
2244 SUBST (XEXP (x, 1), tem);
2245 }
2246 break;
2247 }
2248
2249 /* Otherwise, select our actions depending on our rtx class. */
2250 switch (GET_RTX_CLASS (code))
2251 {
2252 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2253 case '3':
2254 split = find_split_point (&XEXP (x, 2));
2255 if (split)
2256 return split;
2257 /* ... fall through ... */
2258 case '2':
2259 case 'c':
2260 case '<':
2261 split = find_split_point (&XEXP (x, 1));
2262 if (split)
2263 return split;
2264 /* ... fall through ... */
2265 case '1':
2266 /* Some machines have (and (shift ...) ...) insns. If X is not
2267 an AND, but XEXP (X, 0) is, use it as our split point. */
2268 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2269 return &XEXP (x, 0);
2270
2271 split = find_split_point (&XEXP (x, 0));
2272 if (split)
2273 return split;
2274 return loc;
2275 }
2276
2277 /* Otherwise, we don't have a split point. */
2278 return 0;
2279 }
2280 \f
2281 /* Throughout X, replace FROM with TO, and return the result.
2282 The result is TO if X is FROM;
2283 otherwise the result is X, but its contents may have been modified.
2284 If they were modified, a record was made in undobuf so that
2285 undo_all will (among other things) return X to its original state.
2286
2287 If the number of changes necessary is too much to record to undo,
2288 the excess changes are not made, so the result is invalid.
2289 The changes already made can still be undone.
2290 undobuf.num_undo is incremented for such changes, so by testing that
2291 the caller can tell whether the result is valid.
2292
2293 `n_occurrences' is incremented each time FROM is replaced.
2294
2295 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2296
2297 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
2298 by copying if `n_occurrences' is non-zero. */
2299
2300 static rtx
2301 subst (x, from, to, in_dest, unique_copy)
2302 register rtx x, from, to;
2303 int in_dest;
2304 int unique_copy;
2305 {
2306 register char *fmt;
2307 register int len, i;
2308 register enum rtx_code code = GET_CODE (x), orig_code = code;
2309 rtx temp;
2310 enum machine_mode mode = GET_MODE (x);
2311 enum machine_mode op0_mode = VOIDmode;
2312 rtx other_insn;
2313 rtx *cc_use;
2314 int n_restarts = 0;
2315
2316 /* FAKE_EXTEND_SAFE_P (MODE, FROM) is 1 if (subreg:MODE FROM 0) is a safe
2317 replacement for (zero_extend:MODE FROM) or (sign_extend:MODE FROM).
2318 If it is 0, that cannot be done. We can now do this for any MEM
2319 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be reloaded.
2320 If not for that, MEM's would very rarely be safe. */
2321
2322 /* Reject MODEs bigger than a word, because we might not be able
2323 to reference a two-register group starting with an arbitrary register
2324 (and currently gen_lowpart might crash for a SUBREG). */
2325
2326 #define FAKE_EXTEND_SAFE_P(MODE, FROM) \
2327 (GET_MODE_SIZE (MODE) <= UNITS_PER_WORD)
2328
2329 /* Two expressions are equal if they are identical copies of a shared
2330 RTX or if they are both registers with the same register number
2331 and mode. */
2332
2333 #define COMBINE_RTX_EQUAL_P(X,Y) \
2334 ((X) == (Y) \
2335 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2336 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2337
2338 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
2339 {
2340 n_occurrences++;
2341 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
2342 }
2343
2344 /* If X and FROM are the same register but different modes, they will
2345 not have been seen as equal above. However, flow.c will make a
2346 LOG_LINKS entry for that case. If we do nothing, we will try to
2347 rerecognize our original insn and, when it succeeds, we will
2348 delete the feeding insn, which is incorrect.
2349
2350 So force this insn not to match in this (rare) case. */
2351 if (! in_dest && code == REG && GET_CODE (from) == REG
2352 && REGNO (x) == REGNO (from))
2353 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
2354
2355 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2356 of which may contain things that can be combined. */
2357 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2358 return x;
2359
2360 /* It is possible to have a subexpression appear twice in the insn.
2361 Suppose that FROM is a register that appears within TO.
2362 Then, after that subexpression has been scanned once by `subst',
2363 the second time it is scanned, TO may be found. If we were
2364 to scan TO here, we would find FROM within it and create a
2365 self-referent rtl structure which is completely wrong. */
2366 if (COMBINE_RTX_EQUAL_P (x, to))
2367 return to;
2368
2369 len = GET_RTX_LENGTH (code);
2370 fmt = GET_RTX_FORMAT (code);
2371
2372 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2373 set up to skip this common case. All other cases where we want to
2374 suppress replacing something inside a SET_SRC are handled via the
2375 IN_DEST operand. */
2376 if (code == SET
2377 && (GET_CODE (SET_DEST (x)) == REG
2378 || GET_CODE (SET_DEST (x)) == CC0
2379 || GET_CODE (SET_DEST (x)) == PC))
2380 fmt = "ie";
2381
2382 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2383 if (fmt[0] == 'e')
2384 op0_mode = GET_MODE (XEXP (x, 0));
2385
2386 for (i = 0; i < len; i++)
2387 {
2388 if (fmt[i] == 'E')
2389 {
2390 register int j;
2391 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2392 {
2393 register rtx new;
2394 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
2395 {
2396 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2397 n_occurrences++;
2398 }
2399 else
2400 {
2401 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
2402
2403 /* If this substitution failed, this whole thing fails. */
2404 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2405 return new;
2406 }
2407
2408 SUBST (XVECEXP (x, i, j), new);
2409 }
2410 }
2411 else if (fmt[i] == 'e')
2412 {
2413 register rtx new;
2414
2415 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
2416 {
2417 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2418 n_occurrences++;
2419 }
2420 else
2421 /* If we are in a SET_DEST, suppress most cases unless we
2422 have gone inside a MEM, in which case we want to
2423 simplify the address. We assume here that things that
2424 are actually part of the destination have their inner
2425 parts in the first expression. This is true for SUBREG,
2426 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2427 things aside from REG and MEM that should appear in a
2428 SET_DEST. */
2429 new = subst (XEXP (x, i), from, to,
2430 (((in_dest
2431 && (code == SUBREG || code == STRICT_LOW_PART
2432 || code == ZERO_EXTRACT))
2433 || code == SET)
2434 && i == 0), unique_copy);
2435
2436 /* If we found that we will have to reject this combination,
2437 indicate that by returning the CLOBBER ourselves, rather than
2438 an expression containing it. This will speed things up as
2439 well as prevent accidents where two CLOBBERs are considered
2440 to be equal, thus producing an incorrect simplification. */
2441
2442 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2443 return new;
2444
2445 SUBST (XEXP (x, i), new);
2446 }
2447 }
2448
2449 /* If this is a commutative operation, put a constant last and a complex
2450 expression first. We don't need to do this for comparisons here. */
2451 if (GET_RTX_CLASS (code) == 'c'
2452 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2453 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
2454 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
2455 || (GET_CODE (XEXP (x, 0)) == SUBREG
2456 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
2457 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
2458 {
2459 temp = XEXP (x, 0);
2460 SUBST (XEXP (x, 0), XEXP (x, 1));
2461 SUBST (XEXP (x, 1), temp);
2462 }
2463
2464 /* Try to fold this expression in case we have constants that weren't
2465 present before. */
2466 temp = 0;
2467 switch (GET_RTX_CLASS (code))
2468 {
2469 case '1':
2470 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
2471 break;
2472 case '<':
2473 temp = simplify_relational_operation (code, op0_mode,
2474 XEXP (x, 0), XEXP (x, 1));
2475 #ifdef FLOAT_STORE_FLAG_VALUE
2476 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2477 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2478 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
2479 #endif
2480 break;
2481 case 'c':
2482 case '2':
2483 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
2484 break;
2485 case 'b':
2486 case '3':
2487 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
2488 XEXP (x, 1), XEXP (x, 2));
2489 break;
2490 }
2491
2492 if (temp)
2493 x = temp;
2494
2495 /* We come back to here if we have replaced the expression with one of
2496 a different code and it is likely that further simplification will be
2497 possible. */
2498
2499 restart:
2500
2501 /* If we have restarted more than 4 times, we are probably looping, so
2502 give up. */
2503 if (++n_restarts > 4)
2504 return x;
2505
2506 code = GET_CODE (x);
2507
2508 /* First see if we can apply the inverse distributive law. */
2509 if (code == PLUS || code == MINUS || code == IOR || code == XOR)
2510 {
2511 x = apply_distributive_law (x);
2512 code = GET_CODE (x);
2513 }
2514
2515 /* If CODE is an associative operation not otherwise handled, see if we
2516 can associate some operands. This can win if they are constants or
2517 if they are logically related (i.e. (a & b) & a. */
2518 if ((code == PLUS || code == MINUS
2519 || code == MULT || code == AND || code == IOR || code == XOR
2520 || code == DIV || code == UDIV
2521 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
2522 && GET_MODE_CLASS (mode) == MODE_INT)
2523 {
2524 if (GET_CODE (XEXP (x, 0)) == code)
2525 {
2526 rtx other = XEXP (XEXP (x, 0), 0);
2527 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
2528 rtx inner_op1 = XEXP (x, 1);
2529 rtx inner;
2530
2531 /* Make sure we pass the constant operand if any as the second
2532 one if this is a commutative operation. */
2533 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
2534 {
2535 rtx tem = inner_op0;
2536 inner_op0 = inner_op1;
2537 inner_op1 = tem;
2538 }
2539 inner = simplify_binary_operation (code == MINUS ? PLUS
2540 : code == DIV ? MULT
2541 : code == UDIV ? MULT
2542 : code,
2543 mode, inner_op0, inner_op1);
2544
2545 /* For commutative operations, try the other pair if that one
2546 didn't simplify. */
2547 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
2548 {
2549 other = XEXP (XEXP (x, 0), 1);
2550 inner = simplify_binary_operation (code, mode,
2551 XEXP (XEXP (x, 0), 0),
2552 XEXP (x, 1));
2553 }
2554
2555 if (inner)
2556 {
2557 x = gen_binary (code, mode, other, inner);
2558 goto restart;
2559
2560 }
2561 }
2562 }
2563
2564 /* A little bit of algebraic simplification here. */
2565 switch (code)
2566 {
2567 case MEM:
2568 /* Ensure that our address has any ASHIFTs converted to MULT in case
2569 address-recognizing predicates are called later. */
2570 temp = make_compound_operation (XEXP (x, 0), MEM);
2571 SUBST (XEXP (x, 0), temp);
2572 break;
2573
2574 case SUBREG:
2575 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
2576 is paradoxical. If we can't do that safely, then it becomes
2577 something nonsensical so that this combination won't take place. */
2578
2579 if (GET_CODE (SUBREG_REG (x)) == MEM
2580 && (GET_MODE_SIZE (mode)
2581 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2582 {
2583 rtx inner = SUBREG_REG (x);
2584 int endian_offset = 0;
2585 /* Don't change the mode of the MEM
2586 if that would change the meaning of the address. */
2587 if (MEM_VOLATILE_P (SUBREG_REG (x))
2588 || mode_dependent_address_p (XEXP (inner, 0)))
2589 return gen_rtx (CLOBBER, mode, const0_rtx);
2590
2591 #if BYTES_BIG_ENDIAN
2592 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2593 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
2594 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
2595 endian_offset -= UNITS_PER_WORD - GET_MODE_SIZE (GET_MODE (inner));
2596 #endif
2597 /* Note if the plus_constant doesn't make a valid address
2598 then this combination won't be accepted. */
2599 x = gen_rtx (MEM, mode,
2600 plus_constant (XEXP (inner, 0),
2601 (SUBREG_WORD (x) * UNITS_PER_WORD
2602 + endian_offset)));
2603 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
2604 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
2605 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
2606 return x;
2607 }
2608
2609 /* If we are in a SET_DEST, these other cases can't apply. */
2610 if (in_dest)
2611 return x;
2612
2613 /* Changing mode twice with SUBREG => just change it once,
2614 or not at all if changing back to starting mode. */
2615 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
2616 {
2617 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
2618 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
2619 return SUBREG_REG (SUBREG_REG (x));
2620
2621 SUBST_INT (SUBREG_WORD (x),
2622 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
2623 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
2624 }
2625
2626 /* SUBREG of a hard register => just change the register number
2627 and/or mode. If the hard register is not valid in that mode,
2628 suppress this combination. */
2629
2630 if (GET_CODE (SUBREG_REG (x)) == REG
2631 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2632 {
2633 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
2634 mode))
2635 return gen_rtx (REG, mode,
2636 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
2637 else
2638 return gen_rtx (CLOBBER, mode, const0_rtx);
2639 }
2640
2641 /* For a constant, try to pick up the part we want. Handle a full
2642 word and low-order part. Only do this if we are narrowing
2643 the constant; if it is being widened, we have no idea what
2644 the extra bits will have been set to. */
2645
2646 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
2647 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
2648 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
2649 && GET_MODE_CLASS (mode) == MODE_INT)
2650 {
2651 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
2652 0, op0_mode);
2653 if (temp)
2654 return temp;
2655 }
2656
2657 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
2658 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (op0_mode))
2659 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
2660
2661 /* If we are narrowing the object, we need to see if we can simplify
2662 the expression for the object knowing that we only need the
2663 low-order bits. We do this by computing an AND of the object
2664 with only the bits we care about. That will produce any needed
2665 simplifications. If the resulting computation is just the
2666 AND with the significant bits, our operand is the first operand
2667 of the AND. Otherwise, it is the resulting expression. */
2668 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2669 && subreg_lowpart_p (x)
2670 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
2671 <= HOST_BITS_PER_WIDE_INT))
2672 {
2673 temp = simplify_and_const_int (NULL_RTX, GET_MODE (SUBREG_REG (x)),
2674 SUBREG_REG (x), GET_MODE_MASK (mode));
2675 if (GET_CODE (temp) == AND && GET_CODE (XEXP (temp, 1)) == CONST_INT
2676 && INTVAL (XEXP (temp, 1)) == GET_MODE_MASK (mode))
2677 temp = XEXP (temp, 0);
2678 return gen_lowpart_for_combine (mode, temp);
2679 }
2680
2681 break;
2682
2683 case NOT:
2684 /* (not (plus X -1)) can become (neg X). */
2685 if (GET_CODE (XEXP (x, 0)) == PLUS
2686 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
2687 {
2688 x = gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
2689 goto restart;
2690 }
2691
2692 /* Similarly, (not (neg X)) is (plus X -1). */
2693 if (GET_CODE (XEXP (x, 0)) == NEG)
2694 {
2695 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
2696 goto restart;
2697 }
2698
2699 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
2700 other than 1, but that is not valid. We could do a similar
2701 simplification for (not (lshiftrt C X)) where C is just the sign bit,
2702 but this doesn't seem common enough to bother with. */
2703 if (GET_CODE (XEXP (x, 0)) == ASHIFT
2704 && XEXP (XEXP (x, 0), 0) == const1_rtx)
2705 {
2706 x = gen_rtx (ROTATE, mode, gen_unary (NOT, mode, const1_rtx),
2707 XEXP (XEXP (x, 0), 1));
2708 goto restart;
2709 }
2710
2711 if (GET_CODE (XEXP (x, 0)) == SUBREG
2712 && subreg_lowpart_p (XEXP (x, 0))
2713 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
2714 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
2715 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
2716 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
2717 {
2718 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
2719
2720 x = gen_rtx (ROTATE, inner_mode,
2721 gen_unary (NOT, inner_mode, const1_rtx),
2722 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
2723 x = gen_lowpart_for_combine (mode, x);
2724 goto restart;
2725 }
2726
2727 #if STORE_FLAG_VALUE == -1
2728 /* (not (comparison foo bar)) can be done by reversing the comparison
2729 code if valid. */
2730 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
2731 && reversible_comparison_p (XEXP (x, 0)))
2732 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
2733 mode, XEXP (XEXP (x, 0), 0),
2734 XEXP (XEXP (x, 0), 1));
2735 #endif
2736
2737 /* Apply De Morgan's laws to reduce number of patterns for machines
2738 with negating logical insns (and-not, nand, etc.). If result has
2739 only one NOT, put it first, since that is how the patterns are
2740 coded. */
2741
2742 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
2743 {
2744 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
2745
2746 if (GET_CODE (in1) == NOT)
2747 in1 = XEXP (in1, 0);
2748 else
2749 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
2750
2751 if (GET_CODE (in2) == NOT)
2752 in2 = XEXP (in2, 0);
2753 else if (GET_CODE (in2) == CONST_INT
2754 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2755 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
2756 else
2757 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
2758
2759 if (GET_CODE (in2) == NOT)
2760 {
2761 rtx tem = in2;
2762 in2 = in1; in1 = tem;
2763 }
2764
2765 x = gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
2766 mode, in1, in2);
2767 goto restart;
2768 }
2769 break;
2770
2771 case NEG:
2772 /* (neg (plus X 1)) can become (not X). */
2773 if (GET_CODE (XEXP (x, 0)) == PLUS
2774 && XEXP (XEXP (x, 0), 1) == const1_rtx)
2775 {
2776 x = gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
2777 goto restart;
2778 }
2779
2780 /* Similarly, (neg (not X)) is (plus X 1). */
2781 if (GET_CODE (XEXP (x, 0)) == NOT)
2782 {
2783 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), const1_rtx);
2784 goto restart;
2785 }
2786
2787 /* (neg (abs X)) is X if X is a value known to be either -1 or 0. */
2788 if (GET_CODE (XEXP (x, 0)) == ABS
2789 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == SIGN_EXTRACT
2790 && XEXP (XEXP (XEXP (x, 0), 0), 1) == const1_rtx)
2791 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFTRT
2792 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
2793 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
2794 == GET_MODE_BITSIZE (mode) - 1))
2795 || ((temp = get_last_value (XEXP (XEXP (x, 0), 0))) != 0
2796 && ((GET_CODE (temp) == SIGN_EXTRACT
2797 && XEXP (temp, 1) == const1_rtx)
2798 || (GET_CODE (temp) == ASHIFTRT
2799 && GET_CODE (XEXP (temp, 1)) == CONST_INT
2800 && (INTVAL (XEXP (temp, 1))
2801 == GET_MODE_BITSIZE (mode) - 1))))))
2802 return XEXP (XEXP (x, 0), 0);
2803
2804 /* (neg (minus X Y)) can become (minus Y X). */
2805 if (GET_CODE (XEXP (x, 0)) == MINUS
2806 && (GET_MODE_CLASS (mode) != MODE_FLOAT
2807 /* x-y != -(y-x) with IEEE floating point. */
2808 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT))
2809 {
2810 x = gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
2811 XEXP (XEXP (x, 0), 0));
2812 goto restart;
2813 }
2814
2815 /* NEG commutes with ASHIFT since it is multiplication. Only do this
2816 if we can then eliminate the NEG (e.g.,
2817 if the operand is a constant). */
2818
2819 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
2820 {
2821 temp = simplify_unary_operation (NEG, mode,
2822 XEXP (XEXP (x, 0), 0), mode);
2823 if (temp)
2824 {
2825 SUBST (XEXP (XEXP (x, 0), 0), temp);
2826 return XEXP (x, 0);
2827 }
2828 }
2829
2830 temp = expand_compound_operation (XEXP (x, 0));
2831
2832 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
2833 replaced by (lshiftrt X C). This will convert
2834 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
2835
2836 if (GET_CODE (temp) == ASHIFTRT
2837 && GET_CODE (XEXP (temp, 1)) == CONST_INT
2838 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
2839 {
2840 x = simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
2841 INTVAL (XEXP (temp, 1)));
2842 goto restart;
2843 }
2844
2845 /* If X has only a single bit significant, say, bit I, convert
2846 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
2847 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
2848 (sign_extract X 1 Y). But only do this if TEMP isn't a register
2849 or a SUBREG of one since we'd be making the expression more
2850 complex if it was just a register. */
2851
2852 if (GET_CODE (temp) != REG
2853 && ! (GET_CODE (temp) == SUBREG
2854 && GET_CODE (SUBREG_REG (temp)) == REG)
2855 && (i = exact_log2 (significant_bits (temp, mode))) >= 0)
2856 {
2857 rtx temp1 = simplify_shift_const
2858 (NULL_RTX, ASHIFTRT, mode,
2859 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
2860 GET_MODE_BITSIZE (mode) - 1 - i),
2861 GET_MODE_BITSIZE (mode) - 1 - i);
2862
2863 /* If all we did was surround TEMP with the two shifts, we
2864 haven't improved anything, so don't use it. Otherwise,
2865 we are better off with TEMP1. */
2866 if (GET_CODE (temp1) != ASHIFTRT
2867 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
2868 || XEXP (XEXP (temp1, 0), 0) != temp)
2869 {
2870 x = temp1;
2871 goto restart;
2872 }
2873 }
2874 break;
2875
2876 case FLOAT_TRUNCATE:
2877 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
2878 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
2879 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
2880 return XEXP (XEXP (x, 0), 0);
2881 break;
2882
2883 #ifdef HAVE_cc0
2884 case COMPARE:
2885 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
2886 using cc0, in which case we want to leave it as a COMPARE
2887 so we can distinguish it from a register-register-copy. */
2888 if (XEXP (x, 1) == const0_rtx)
2889 return XEXP (x, 0);
2890
2891 /* In IEEE floating point, x-0 is not the same as x. */
2892 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
2893 || GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT)
2894 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
2895 return XEXP (x, 0);
2896 break;
2897 #endif
2898
2899 case CONST:
2900 /* (const (const X)) can become (const X). Do it this way rather than
2901 returning the inner CONST since CONST can be shared with a
2902 REG_EQUAL note. */
2903 if (GET_CODE (XEXP (x, 0)) == CONST)
2904 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
2905 break;
2906
2907 #ifdef HAVE_lo_sum
2908 case LO_SUM:
2909 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
2910 can add in an offset. find_split_point will split this address up
2911 again if it doesn't match. */
2912 if (GET_CODE (XEXP (x, 0)) == HIGH
2913 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
2914 return XEXP (x, 1);
2915 break;
2916 #endif
2917
2918 case PLUS:
2919 /* If we have (plus (plus (A const) B)), associate it so that CONST is
2920 outermost. That's because that's the way indexed addresses are
2921 supposed to appear. This code used to check many more cases, but
2922 they are now checked elsewhere. */
2923 if (GET_CODE (XEXP (x, 0)) == PLUS
2924 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
2925 return gen_binary (PLUS, mode,
2926 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
2927 XEXP (x, 1)),
2928 XEXP (XEXP (x, 0), 1));
2929
2930 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
2931 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
2932 bit-field and can be replaced by either a sign_extend or a
2933 sign_extract. The `and' may be a zero_extend. */
2934 if (GET_CODE (XEXP (x, 0)) == XOR
2935 && GET_CODE (XEXP (x, 1)) == CONST_INT
2936 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2937 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
2938 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
2939 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
2940 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
2941 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
2942 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
2943 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
2944 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
2945 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
2946 == i + 1))))
2947 {
2948 x = simplify_shift_const
2949 (NULL_RTX, ASHIFTRT, mode,
2950 simplify_shift_const (NULL_RTX, ASHIFT, mode,
2951 XEXP (XEXP (XEXP (x, 0), 0), 0),
2952 GET_MODE_BITSIZE (mode) - (i + 1)),
2953 GET_MODE_BITSIZE (mode) - (i + 1));
2954 goto restart;
2955 }
2956
2957 /* If only the low-order bit of X is significant, (plus x -1)
2958 can become (ashiftrt (ashift (xor x 1) C) C) where C is
2959 the bitsize of the mode - 1. This allows simplification of
2960 "a = (b & 8) == 0;" */
2961 if (XEXP (x, 1) == constm1_rtx
2962 && GET_CODE (XEXP (x, 0)) != REG
2963 && ! (GET_CODE (XEXP (x,0)) == SUBREG
2964 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
2965 && significant_bits (XEXP (x, 0), mode) == 1)
2966 {
2967 x = simplify_shift_const
2968 (NULL_RTX, ASHIFTRT, mode,
2969 simplify_shift_const (NULL_RTX, ASHIFT, mode,
2970 gen_rtx_combine (XOR, mode,
2971 XEXP (x, 0), const1_rtx),
2972 GET_MODE_BITSIZE (mode) - 1),
2973 GET_MODE_BITSIZE (mode) - 1);
2974 goto restart;
2975 }
2976
2977 /* If we are adding two things that have no bits in common, convert
2978 the addition into an IOR. This will often be further simplified,
2979 for example in cases like ((a & 1) + (a & 2)), which can
2980 become a & 3. */
2981
2982 if ((significant_bits (XEXP (x, 0), mode)
2983 & significant_bits (XEXP (x, 1), mode)) == 0)
2984 {
2985 x = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
2986 goto restart;
2987 }
2988 break;
2989
2990 case MINUS:
2991 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
2992 (and <foo> (const_int pow2-1)) */
2993 if (GET_CODE (XEXP (x, 1)) == AND
2994 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
2995 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
2996 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
2997 {
2998 x = simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
2999 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3000 goto restart;
3001 }
3002 break;
3003
3004 case MULT:
3005 /* If we have (mult (plus A B) C), apply the distributive law and then
3006 the inverse distributive law to see if things simplify. This
3007 occurs mostly in addresses, often when unrolling loops. */
3008
3009 if (GET_CODE (XEXP (x, 0)) == PLUS)
3010 {
3011 x = apply_distributive_law
3012 (gen_binary (PLUS, mode,
3013 gen_binary (MULT, mode,
3014 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3015 gen_binary (MULT, mode,
3016 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3017
3018 if (GET_CODE (x) != MULT)
3019 goto restart;
3020 }
3021
3022 /* If this is multiplication by a power of two and its first operand is
3023 a shift, treat the multiply as a shift to allow the shifts to
3024 possibly combine. */
3025 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3026 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3027 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3028 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3029 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3030 || GET_CODE (XEXP (x, 0)) == ROTATE
3031 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3032 {
3033 x = simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0), i);
3034 goto restart;
3035 }
3036
3037 /* Convert (mult (ashift (const_int 1) A) B) to (ashift B A). */
3038 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3039 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3040 return gen_rtx_combine (ASHIFT, mode, XEXP (x, 1),
3041 XEXP (XEXP (x, 0), 1));
3042 break;
3043
3044 case UDIV:
3045 /* If this is a divide by a power of two, treat it as a shift if
3046 its first operand is a shift. */
3047 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3048 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3049 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3050 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3051 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3052 || GET_CODE (XEXP (x, 0)) == ROTATE
3053 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3054 {
3055 x = simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
3056 goto restart;
3057 }
3058 break;
3059
3060 case EQ: case NE:
3061 case GT: case GTU: case GE: case GEU:
3062 case LT: case LTU: case LE: case LEU:
3063 /* If the first operand is a condition code, we can't do anything
3064 with it. */
3065 if (GET_CODE (XEXP (x, 0)) == COMPARE
3066 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3067 #ifdef HAVE_cc0
3068 && XEXP (x, 0) != cc0_rtx
3069 #endif
3070 ))
3071 {
3072 rtx op0 = XEXP (x, 0);
3073 rtx op1 = XEXP (x, 1);
3074 enum rtx_code new_code;
3075
3076 if (GET_CODE (op0) == COMPARE)
3077 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3078
3079 /* Simplify our comparison, if possible. */
3080 new_code = simplify_comparison (code, &op0, &op1);
3081
3082 #if STORE_FLAG_VALUE == 1
3083 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3084 if only the low-order bit is significant in X (such as when
3085 X is a ZERO_EXTRACT of one bit. Similarly, we can convert
3086 EQ to (xor X 1). */
3087 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3088 && op1 == const0_rtx
3089 && significant_bits (op0, GET_MODE (op0)) == 1)
3090 return gen_lowpart_for_combine (mode, op0);
3091 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3092 && op1 == const0_rtx
3093 && significant_bits (op0, GET_MODE (op0)) == 1)
3094 return gen_rtx_combine (XOR, mode,
3095 gen_lowpart_for_combine (mode, op0),
3096 const1_rtx);
3097 #endif
3098
3099 #if STORE_FLAG_VALUE == -1
3100 /* If STORE_FLAG_VALUE is -1, we can convert (ne x 0)
3101 to (neg x) if only the low-order bit of X is significant.
3102 This converts (ne (zero_extract X 1 Y) 0) to
3103 (sign_extract X 1 Y). */
3104 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3105 && op1 == const0_rtx
3106 && significant_bits (op0, GET_MODE (op0)) == 1)
3107 {
3108 x = gen_rtx_combine (NEG, mode,
3109 gen_lowpart_for_combine (mode, op0));
3110 goto restart;
3111 }
3112 #endif
3113
3114 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3115 one significant bit, we can convert (ne x 0) to (ashift x c)
3116 where C puts the bit in the sign bit. Remove any AND with
3117 STORE_FLAG_VALUE when we are done, since we are only going to
3118 test the sign bit. */
3119 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3120 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3121 && (STORE_FLAG_VALUE
3122 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3123 && op1 == const0_rtx
3124 && mode == GET_MODE (op0)
3125 && (i = exact_log2 (significant_bits (op0, GET_MODE (op0)))) >= 0)
3126 {
3127 x = simplify_shift_const (NULL_RTX, ASHIFT, mode, op0,
3128 GET_MODE_BITSIZE (mode) - 1 - i);
3129 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3130 return XEXP (x, 0);
3131 else
3132 return x;
3133 }
3134
3135 /* If the code changed, return a whole new comparison. */
3136 if (new_code != code)
3137 return gen_rtx_combine (new_code, mode, op0, op1);
3138
3139 /* Otherwise, keep this operation, but maybe change its operands.
3140 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3141 SUBST (XEXP (x, 0), op0);
3142 SUBST (XEXP (x, 1), op1);
3143 }
3144 break;
3145
3146 case IF_THEN_ELSE:
3147 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3148 reversed, do so to avoid needing two sets of patterns for
3149 subtract-and-branch insns. */
3150 if (XEXP (x, 1) == pc_rtx
3151 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3152 && reversible_comparison_p (XEXP (x, 0)))
3153 {
3154 SUBST (XEXP (x, 0),
3155 gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3156 GET_MODE (XEXP (x, 0)),
3157 XEXP (XEXP (x, 0), 0),
3158 XEXP (XEXP (x, 0), 1)));
3159 SUBST (XEXP (x, 1), XEXP (x, 2));
3160 SUBST (XEXP (x, 2), pc_rtx);
3161 }
3162 break;
3163
3164 case ZERO_EXTRACT:
3165 case SIGN_EXTRACT:
3166 case ZERO_EXTEND:
3167 case SIGN_EXTEND:
3168 /* If we are processing SET_DEST, we are done. */
3169 if (in_dest)
3170 return x;
3171
3172 x = expand_compound_operation (x);
3173 if (GET_CODE (x) != code)
3174 goto restart;
3175 break;
3176
3177 case SET:
3178 /* (set (pc) (return)) gets written as (return). */
3179 if (GET_CODE (SET_DEST (x)) == PC && GET_CODE (SET_SRC (x)) == RETURN)
3180 return SET_SRC (x);
3181
3182 /* Convert this into a field assignment operation, if possible. */
3183 x = make_field_assignment (x);
3184
3185 /* If we are setting CC0 or if the source is a COMPARE, look for the
3186 use of the comparison result and try to simplify it unless we already
3187 have used undobuf.other_insn. */
3188 if ((GET_CODE (SET_SRC (x)) == COMPARE
3189 #ifdef HAVE_cc0
3190 || SET_DEST (x) == cc0_rtx
3191 #endif
3192 )
3193 && (cc_use = find_single_use (SET_DEST (x), subst_insn,
3194 &other_insn)) != 0
3195 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
3196 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
3197 && XEXP (*cc_use, 0) == SET_DEST (x))
3198 {
3199 enum rtx_code old_code = GET_CODE (*cc_use);
3200 enum rtx_code new_code;
3201 rtx op0, op1;
3202 int other_changed = 0;
3203 enum machine_mode compare_mode = GET_MODE (SET_DEST (x));
3204
3205 if (GET_CODE (SET_SRC (x)) == COMPARE)
3206 op0 = XEXP (SET_SRC (x), 0), op1 = XEXP (SET_SRC (x), 1);
3207 else
3208 op0 = SET_SRC (x), op1 = const0_rtx;
3209
3210 /* Simplify our comparison, if possible. */
3211 new_code = simplify_comparison (old_code, &op0, &op1);
3212
3213 #if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
3214 /* If this machine has CC modes other than CCmode, check to see
3215 if we need to use a different CC mode here. */
3216 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
3217
3218 /* If the mode changed, we have to change SET_DEST, the mode
3219 in the compare, and the mode in the place SET_DEST is used.
3220 If SET_DEST is a hard register, just build new versions with
3221 the proper mode. If it is a pseudo, we lose unless it is only
3222 time we set the pseudo, in which case we can safely change
3223 its mode. */
3224 if (compare_mode != GET_MODE (SET_DEST (x)))
3225 {
3226 int regno = REGNO (SET_DEST (x));
3227 rtx new_dest = gen_rtx (REG, compare_mode, regno);
3228
3229 if (regno < FIRST_PSEUDO_REGISTER
3230 || (reg_n_sets[regno] == 1
3231 && ! REG_USERVAR_P (SET_DEST (x))))
3232 {
3233 if (regno >= FIRST_PSEUDO_REGISTER)
3234 SUBST (regno_reg_rtx[regno], new_dest);
3235
3236 SUBST (SET_DEST (x), new_dest);
3237 SUBST (XEXP (*cc_use, 0), new_dest);
3238 other_changed = 1;
3239 }
3240 }
3241 #endif
3242
3243 /* If the code changed, we have to build a new comparison
3244 in undobuf.other_insn. */
3245 if (new_code != old_code)
3246 {
3247 unsigned mask;
3248
3249 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
3250 SET_DEST (x), const0_rtx));
3251
3252 /* If the only change we made was to change an EQ into an
3253 NE or vice versa, OP0 has only one significant bit,
3254 and OP1 is zero, check if changing the user of the condition
3255 code will produce a valid insn. If it won't, we can keep
3256 the original code in that insn by surrounding our operation
3257 with an XOR. */
3258
3259 if (((old_code == NE && new_code == EQ)
3260 || (old_code == EQ && new_code == NE))
3261 && ! other_changed && op1 == const0_rtx
3262 && (GET_MODE_BITSIZE (GET_MODE (op0))
3263 <= HOST_BITS_PER_WIDE_INT)
3264 && (exact_log2 (mask = significant_bits (op0,
3265 GET_MODE (op0)))
3266 >= 0))
3267 {
3268 rtx pat = PATTERN (other_insn), note = 0;
3269
3270 if ((recog_for_combine (&pat, undobuf.other_insn, &note) < 0
3271 && ! check_asm_operands (pat)))
3272 {
3273 PUT_CODE (*cc_use, old_code);
3274 other_insn = 0;
3275
3276 op0 = gen_binary (XOR, GET_MODE (op0), op0,
3277 GEN_INT (mask));
3278 }
3279 }
3280
3281 other_changed = 1;
3282 }
3283
3284 if (other_changed)
3285 undobuf.other_insn = other_insn;
3286
3287 #ifdef HAVE_cc0
3288 /* If we are now comparing against zero, change our source if
3289 needed. If we do not use cc0, we always have a COMPARE. */
3290 if (op1 == const0_rtx && SET_DEST (x) == cc0_rtx)
3291 SUBST (SET_SRC (x), op0);
3292 else
3293 #endif
3294
3295 /* Otherwise, if we didn't previously have a COMPARE in the
3296 correct mode, we need one. */
3297 if (GET_CODE (SET_SRC (x)) != COMPARE
3298 || GET_MODE (SET_SRC (x)) != compare_mode)
3299 SUBST (SET_SRC (x), gen_rtx_combine (COMPARE, compare_mode,
3300 op0, op1));
3301 else
3302 {
3303 /* Otherwise, update the COMPARE if needed. */
3304 SUBST (XEXP (SET_SRC (x), 0), op0);
3305 SUBST (XEXP (SET_SRC (x), 1), op1);
3306 }
3307 }
3308 else
3309 {
3310 /* Get SET_SRC in a form where we have placed back any
3311 compound expressions. Then do the checks below. */
3312 temp = make_compound_operation (SET_SRC (x), SET);
3313 SUBST (SET_SRC (x), temp);
3314 }
3315
3316 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some
3317 operation, and X being a REG or (subreg (reg)), we may be able to
3318 convert this to (set (subreg:m2 x) (op)).
3319
3320 We can always do this if M1 is narrower than M2 because that
3321 means that we only care about the low bits of the result.
3322
3323 However, on most machines (those with BYTE_LOADS_ZERO_EXTEND
3324 not defined), we cannot perform a narrower operation that
3325 requested since the high-order bits will be undefined. On
3326 machine where BYTE_LOADS_ZERO_EXTEND are defined, however, this
3327 transformation is safe as long as M1 and M2 have the same number
3328 of words. */
3329
3330 if (GET_CODE (SET_SRC (x)) == SUBREG
3331 && subreg_lowpart_p (SET_SRC (x))
3332 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) != 'o'
3333 && (((GET_MODE_SIZE (GET_MODE (SET_SRC (x))) + (UNITS_PER_WORD - 1))
3334 / UNITS_PER_WORD)
3335 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x))))
3336 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
3337 #ifndef BYTE_LOADS_ZERO_EXTEND
3338 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3339 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3340 #endif
3341 && (GET_CODE (SET_DEST (x)) == REG
3342 || (GET_CODE (SET_DEST (x)) == SUBREG
3343 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG)))
3344 {
3345 /* Get the object that will be the SUBREG_REG of the
3346 SUBREG we are making. Note that SUBREG_WORD will always
3347 be zero because this will either be a paradoxical SUBREG
3348 or a SUBREG with the same number of words on the outside and
3349 inside. */
3350 rtx object = (GET_CODE (SET_DEST (x)) == REG ? SET_DEST (x)
3351 : SUBREG_REG (SET_DEST (x)));
3352
3353 SUBST (SET_DEST (x),
3354 gen_rtx (SUBREG, GET_MODE (SUBREG_REG (SET_SRC (x))),
3355 object, 0));
3356 SUBST (SET_SRC (x), SUBREG_REG (SET_SRC (x)));
3357 }
3358
3359 #ifdef BYTE_LOADS_ZERO_EXTEND
3360 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with
3361 M wider than N, this would require a paradoxical subreg.
3362 Replace the subreg with a zero_extend to avoid the reload that
3363 would otherwise be required. */
3364 if (GET_CODE (SET_SRC (x)) == SUBREG
3365 && subreg_lowpart_p (SET_SRC (x))
3366 && SUBREG_WORD (SET_SRC (x)) == 0
3367 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3368 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3369 && GET_CODE (SUBREG_REG (SET_SRC (x))) == MEM)
3370 SUBST (SET_SRC (x), gen_rtx_combine (ZERO_EXTEND,
3371 GET_MODE (SET_SRC (x)),
3372 XEXP (SET_SRC (x), 0)));
3373 #endif
3374
3375 break;
3376
3377 case AND:
3378 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3379 {
3380 x = simplify_and_const_int (x, mode, XEXP (x, 0),
3381 INTVAL (XEXP (x, 1)));
3382
3383 /* If we have (ior (and (X C1) C2)) and the next restart would be
3384 the last, simplify this by making C1 as small as possible
3385 and then exit. */
3386 if (n_restarts >= 3 && GET_CODE (x) == IOR
3387 && GET_CODE (XEXP (x, 0)) == AND
3388 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3389 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3390 {
3391 temp = gen_binary (AND, mode, XEXP (XEXP (x, 0), 0),
3392 GEN_INT (INTVAL (XEXP (XEXP (x, 0), 1))
3393 & ~ INTVAL (XEXP (x, 1))));
3394 return gen_binary (IOR, mode, temp, XEXP (x, 1));
3395 }
3396
3397 if (GET_CODE (x) != AND)
3398 goto restart;
3399 }
3400
3401 /* Convert (A | B) & A to A. */
3402 if (GET_CODE (XEXP (x, 0)) == IOR
3403 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3404 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
3405 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
3406 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
3407 return XEXP (x, 1);
3408
3409 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
3410 insn (and may simplify more). */
3411 else if (GET_CODE (XEXP (x, 0)) == XOR
3412 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3413 && ! side_effects_p (XEXP (x, 1)))
3414 {
3415 x = gen_binary (AND, mode,
3416 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
3417 XEXP (x, 1));
3418 goto restart;
3419 }
3420 else if (GET_CODE (XEXP (x, 0)) == XOR
3421 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
3422 && ! side_effects_p (XEXP (x, 1)))
3423 {
3424 x = gen_binary (AND, mode,
3425 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
3426 XEXP (x, 1));
3427 goto restart;
3428 }
3429
3430 /* Similarly for (~ (A ^ B)) & A. */
3431 else if (GET_CODE (XEXP (x, 0)) == NOT
3432 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
3433 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 0), XEXP (x, 1))
3434 && ! side_effects_p (XEXP (x, 1)))
3435 {
3436 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 1),
3437 XEXP (x, 1));
3438 goto restart;
3439 }
3440 else if (GET_CODE (XEXP (x, 0)) == NOT
3441 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
3442 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 1), XEXP (x, 1))
3443 && ! side_effects_p (XEXP (x, 1)))
3444 {
3445 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 0),
3446 XEXP (x, 1));
3447 goto restart;
3448 }
3449
3450 /* In the follow group of tests (and those in case IOR below),
3451 we start with some combination of logical operations and apply
3452 the distributive law followed by the inverse distributive law.
3453 Most of the time, this results in no change. However, if some of
3454 the operands are the same or inverses of each other, simplifications
3455 will result.
3456
3457 For example, (and (ior A B) (not B)) can occur as the result of
3458 expanding a bit field assignment. When we apply the distributive
3459 law to this, we get (ior (and (A (not B))) (and (B (not B)))),
3460 which then simplifies to (and (A (not B))). */
3461
3462 /* If we have (and (ior A B) C), apply the distributive law and then
3463 the inverse distributive law to see if things simplify. */
3464
3465 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == XOR)
3466 {
3467 x = apply_distributive_law
3468 (gen_binary (GET_CODE (XEXP (x, 0)), mode,
3469 gen_binary (AND, mode,
3470 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3471 gen_binary (AND, mode,
3472 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3473 if (GET_CODE (x) != AND)
3474 goto restart;
3475 }
3476
3477 if (GET_CODE (XEXP (x, 1)) == IOR || GET_CODE (XEXP (x, 1)) == XOR)
3478 {
3479 x = apply_distributive_law
3480 (gen_binary (GET_CODE (XEXP (x, 1)), mode,
3481 gen_binary (AND, mode,
3482 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
3483 gen_binary (AND, mode,
3484 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
3485 if (GET_CODE (x) != AND)
3486 goto restart;
3487 }
3488
3489 /* Similarly, taking advantage of the fact that
3490 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
3491
3492 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == XOR)
3493 {
3494 x = apply_distributive_law
3495 (gen_binary (XOR, mode,
3496 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
3497 XEXP (XEXP (x, 1), 0)),
3498 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
3499 XEXP (XEXP (x, 1), 1))));
3500 if (GET_CODE (x) != AND)
3501 goto restart;
3502 }
3503
3504 else if (GET_CODE (XEXP (x, 1)) == NOT && GET_CODE (XEXP (x, 0)) == XOR)
3505 {
3506 x = apply_distributive_law
3507 (gen_binary (XOR, mode,
3508 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
3509 XEXP (XEXP (x, 0), 0)),
3510 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
3511 XEXP (XEXP (x, 0), 1))));
3512 if (GET_CODE (x) != AND)
3513 goto restart;
3514 }
3515 break;
3516
3517 case IOR:
3518 /* Convert (A & B) | A to A. */
3519 if (GET_CODE (XEXP (x, 0)) == AND
3520 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3521 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
3522 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
3523 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
3524 return XEXP (x, 1);
3525
3526 /* If we have (ior (and A B) C), apply the distributive law and then
3527 the inverse distributive law to see if things simplify. */
3528
3529 if (GET_CODE (XEXP (x, 0)) == AND)
3530 {
3531 x = apply_distributive_law
3532 (gen_binary (AND, mode,
3533 gen_binary (IOR, mode,
3534 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3535 gen_binary (IOR, mode,
3536 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3537
3538 if (GET_CODE (x) != IOR)
3539 goto restart;
3540 }
3541
3542 if (GET_CODE (XEXP (x, 1)) == AND)
3543 {
3544 x = apply_distributive_law
3545 (gen_binary (AND, mode,
3546 gen_binary (IOR, mode,
3547 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
3548 gen_binary (IOR, mode,
3549 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
3550
3551 if (GET_CODE (x) != IOR)
3552 goto restart;
3553 }
3554
3555 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
3556 mode size to (rotate A CX). */
3557
3558 if (((GET_CODE (XEXP (x, 0)) == ASHIFT
3559 && GET_CODE (XEXP (x, 1)) == LSHIFTRT)
3560 || (GET_CODE (XEXP (x, 1)) == ASHIFT
3561 && GET_CODE (XEXP (x, 0)) == LSHIFTRT))
3562 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 1), 0))
3563 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3564 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3565 && (INTVAL (XEXP (XEXP (x, 0), 1)) + INTVAL (XEXP (XEXP (x, 1), 1))
3566 == GET_MODE_BITSIZE (mode)))
3567 {
3568 rtx shift_count;
3569
3570 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3571 shift_count = XEXP (XEXP (x, 0), 1);
3572 else
3573 shift_count = XEXP (XEXP (x, 1), 1);
3574 x = gen_rtx (ROTATE, mode, XEXP (XEXP (x, 0), 0), shift_count);
3575 goto restart;
3576 }
3577 break;
3578
3579 case XOR:
3580 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
3581 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
3582 (NOT y). */
3583 {
3584 int num_negated = 0;
3585 rtx in1 = XEXP (x, 0), in2 = XEXP (x, 1);
3586
3587 if (GET_CODE (in1) == NOT)
3588 num_negated++, in1 = XEXP (in1, 0);
3589 if (GET_CODE (in2) == NOT)
3590 num_negated++, in2 = XEXP (in2, 0);
3591
3592 if (num_negated == 2)
3593 {
3594 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3595 SUBST (XEXP (x, 1), XEXP (XEXP (x, 1), 0));
3596 }
3597 else if (num_negated == 1)
3598 return gen_rtx_combine (NOT, mode,
3599 gen_rtx_combine (XOR, mode, in1, in2));
3600 }
3601
3602 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
3603 correspond to a machine insn or result in further simplifications
3604 if B is a constant. */
3605
3606 if (GET_CODE (XEXP (x, 0)) == AND
3607 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
3608 && ! side_effects_p (XEXP (x, 1)))
3609 {
3610 x = gen_binary (AND, mode,
3611 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
3612 XEXP (x, 1));
3613 goto restart;
3614 }
3615 else if (GET_CODE (XEXP (x, 0)) == AND
3616 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3617 && ! side_effects_p (XEXP (x, 1)))
3618 {
3619 x = gen_binary (AND, mode,
3620 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
3621 XEXP (x, 1));
3622 goto restart;
3623 }
3624
3625
3626 #if STORE_FLAG_VALUE == 1
3627 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
3628 comparison. */
3629 if (XEXP (x, 1) == const1_rtx
3630 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3631 && reversible_comparison_p (XEXP (x, 0)))
3632 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3633 mode, XEXP (XEXP (x, 0), 0),
3634 XEXP (XEXP (x, 0), 1));
3635 #endif
3636
3637 /* (xor (comparison foo bar) (const_int sign-bit))
3638 when STORE_FLAG_VALUE is the sign bit. */
3639 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3640 && (STORE_FLAG_VALUE
3641 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3642 && XEXP (x, 1) == const_true_rtx
3643 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3644 && reversible_comparison_p (XEXP (x, 0)))
3645 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3646 mode, XEXP (XEXP (x, 0), 0),
3647 XEXP (XEXP (x, 0), 1));
3648 break;
3649
3650 case ABS:
3651 /* (abs (neg <foo>)) -> (abs <foo>) */
3652 if (GET_CODE (XEXP (x, 0)) == NEG)
3653 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3654
3655 /* If operand is something known to be positive, ignore the ABS. */
3656 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
3657 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
3658 <= HOST_BITS_PER_WIDE_INT)
3659 && ((significant_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3660 & ((HOST_WIDE_INT) 1
3661 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
3662 == 0)))
3663 return XEXP (x, 0);
3664
3665
3666 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
3667 if ((GET_CODE (XEXP (x, 0)) == SIGN_EXTRACT
3668 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3669 || (GET_CODE (XEXP (x, 0)) == ASHIFTRT
3670 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3671 && INTVAL (XEXP (XEXP (x, 0), 1)) == GET_MODE_BITSIZE (mode) - 1)
3672 || ((temp = get_last_value (XEXP (x, 0))) != 0
3673 && ((GET_CODE (temp) == SIGN_EXTRACT
3674 && XEXP (temp, 1) == const1_rtx)
3675 || (GET_CODE (temp) == ASHIFTRT
3676 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3677 && (INTVAL (XEXP (temp, 1))
3678 == GET_MODE_BITSIZE (mode) - 1)))))
3679 {
3680 x = gen_rtx_combine (NEG, mode, XEXP (x, 0));
3681 goto restart;
3682 }
3683 break;
3684
3685 case FFS:
3686 /* (ffs (*_extend <X>)) = (ffs <X>) */
3687 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3688 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3689 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3690 break;
3691
3692 case FLOAT:
3693 /* (float (sign_extend <X>)) = (float <X>). */
3694 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
3695 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3696 break;
3697
3698 case LSHIFT:
3699 case ASHIFT:
3700 case LSHIFTRT:
3701 case ASHIFTRT:
3702 case ROTATE:
3703 case ROTATERT:
3704 /* If this is a shift by a constant amount, simplify it. */
3705 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3706 {
3707 x = simplify_shift_const (x, code, mode, XEXP (x, 0),
3708 INTVAL (XEXP (x, 1)));
3709 if (GET_CODE (x) != code)
3710 goto restart;
3711 }
3712
3713 #ifdef SHIFT_COUNT_TRUNCATED
3714 else if (GET_CODE (XEXP (x, 1)) != REG)
3715 SUBST (XEXP (x, 1),
3716 force_to_mode (XEXP (x, 1), GET_MODE (x),
3717 exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))),
3718 NULL_RTX));
3719 #endif
3720
3721 break;
3722 }
3723
3724 return x;
3725 }
3726 \f
3727 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
3728 operations" because they can be replaced with two more basic operations.
3729 ZERO_EXTEND is also considered "compound" because it can be replaced with
3730 an AND operation, which is simpler, though only one operation.
3731
3732 The function expand_compound_operation is called with an rtx expression
3733 and will convert it to the appropriate shifts and AND operations,
3734 simplifying at each stage.
3735
3736 The function make_compound_operation is called to convert an expression
3737 consisting of shifts and ANDs into the equivalent compound expression.
3738 It is the inverse of this function, loosely speaking. */
3739
3740 static rtx
3741 expand_compound_operation (x)
3742 rtx x;
3743 {
3744 int pos = 0, len;
3745 int unsignedp = 0;
3746 int modewidth;
3747 rtx tem;
3748
3749 switch (GET_CODE (x))
3750 {
3751 case ZERO_EXTEND:
3752 unsignedp = 1;
3753 case SIGN_EXTEND:
3754 /* We can't necessarily use a const_int for a multiword mode;
3755 it depends on implicitly extending the value.
3756 Since we don't know the right way to extend it,
3757 we can't tell whether the implicit way is right.
3758
3759 Even for a mode that is no wider than a const_int,
3760 we can't win, because we need to sign extend one of its bits through
3761 the rest of it, and we don't know which bit. */
3762 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
3763 return x;
3764
3765 if (! FAKE_EXTEND_SAFE_P (GET_MODE (XEXP (x, 0)), XEXP (x, 0)))
3766 return x;
3767
3768 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
3769 /* If the inner object has VOIDmode (the only way this can happen
3770 is if it is a ASM_OPERANDS), we can't do anything since we don't
3771 know how much masking to do. */
3772 if (len == 0)
3773 return x;
3774
3775 break;
3776
3777 case ZERO_EXTRACT:
3778 unsignedp = 1;
3779 case SIGN_EXTRACT:
3780 /* If the operand is a CLOBBER, just return it. */
3781 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
3782 return XEXP (x, 0);
3783
3784 if (GET_CODE (XEXP (x, 1)) != CONST_INT
3785 || GET_CODE (XEXP (x, 2)) != CONST_INT
3786 || GET_MODE (XEXP (x, 0)) == VOIDmode)
3787 return x;
3788
3789 len = INTVAL (XEXP (x, 1));
3790 pos = INTVAL (XEXP (x, 2));
3791
3792 /* If this goes outside the object being extracted, replace the object
3793 with a (use (mem ...)) construct that only combine understands
3794 and is used only for this purpose. */
3795 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
3796 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
3797
3798 #if BITS_BIG_ENDIAN
3799 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
3800 #endif
3801 break;
3802
3803 default:
3804 return x;
3805 }
3806
3807 /* If we reach here, we want to return a pair of shifts. The inner
3808 shift is a left shift of BITSIZE - POS - LEN bits. The outer
3809 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
3810 logical depending on the value of UNSIGNEDP.
3811
3812 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
3813 converted into an AND of a shift.
3814
3815 We must check for the case where the left shift would have a negative
3816 count. This can happen in a case like (x >> 31) & 255 on machines
3817 that can't shift by a constant. On those machines, we would first
3818 combine the shift with the AND to produce a variable-position
3819 extraction. Then the constant of 31 would be substituted in to produce
3820 a such a position. */
3821
3822 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
3823 if (modewidth >= pos - len)
3824 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
3825 GET_MODE (x),
3826 simplify_shift_const (NULL_RTX, ASHIFT,
3827 GET_MODE (x),
3828 XEXP (x, 0),
3829 modewidth - pos - len),
3830 modewidth - len);
3831
3832 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
3833 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
3834 simplify_shift_const (NULL_RTX, LSHIFTRT,
3835 GET_MODE (x),
3836 XEXP (x, 0), pos),
3837 ((HOST_WIDE_INT) 1 << len) - 1);
3838 else
3839 /* Any other cases we can't handle. */
3840 return x;
3841
3842
3843 /* If we couldn't do this for some reason, return the original
3844 expression. */
3845 if (GET_CODE (tem) == CLOBBER)
3846 return x;
3847
3848 return tem;
3849 }
3850 \f
3851 /* X is a SET which contains an assignment of one object into
3852 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
3853 or certain SUBREGS). If possible, convert it into a series of
3854 logical operations.
3855
3856 We half-heartedly support variable positions, but do not at all
3857 support variable lengths. */
3858
3859 static rtx
3860 expand_field_assignment (x)
3861 rtx x;
3862 {
3863 rtx inner;
3864 rtx pos; /* Always counts from low bit. */
3865 int len;
3866 rtx mask;
3867 enum machine_mode compute_mode;
3868
3869 /* Loop until we find something we can't simplify. */
3870 while (1)
3871 {
3872 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
3873 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
3874 {
3875 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
3876 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
3877 pos = const0_rtx;
3878 }
3879 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
3880 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
3881 {
3882 inner = XEXP (SET_DEST (x), 0);
3883 len = INTVAL (XEXP (SET_DEST (x), 1));
3884 pos = XEXP (SET_DEST (x), 2);
3885
3886 /* If the position is constant and spans the width of INNER,
3887 surround INNER with a USE to indicate this. */
3888 if (GET_CODE (pos) == CONST_INT
3889 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
3890 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
3891
3892 #if BITS_BIG_ENDIAN
3893 if (GET_CODE (pos) == CONST_INT)
3894 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
3895 - INTVAL (pos));
3896 else if (GET_CODE (pos) == MINUS
3897 && GET_CODE (XEXP (pos, 1)) == CONST_INT
3898 && (INTVAL (XEXP (pos, 1))
3899 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
3900 /* If position is ADJUST - X, new position is X. */
3901 pos = XEXP (pos, 0);
3902 else
3903 pos = gen_binary (MINUS, GET_MODE (pos),
3904 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
3905 - len),
3906 pos);
3907 #endif
3908 }
3909
3910 /* A SUBREG between two modes that occupy the same numbers of words
3911 can be done by moving the SUBREG to the source. */
3912 else if (GET_CODE (SET_DEST (x)) == SUBREG
3913 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
3914 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
3915 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
3916 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
3917 {
3918 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
3919 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
3920 SET_SRC (x)));
3921 continue;
3922 }
3923 else
3924 break;
3925
3926 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
3927 inner = SUBREG_REG (inner);
3928
3929 compute_mode = GET_MODE (inner);
3930
3931 /* Compute a mask of LEN bits, if we can do this on the host machine. */
3932 if (len < HOST_BITS_PER_WIDE_INT)
3933 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
3934 else
3935 break;
3936
3937 /* Now compute the equivalent expression. Make a copy of INNER
3938 for the SET_DEST in case it is a MEM into which we will substitute;
3939 we don't want shared RTL in that case. */
3940 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
3941 gen_binary (IOR, compute_mode,
3942 gen_binary (AND, compute_mode,
3943 gen_unary (NOT, compute_mode,
3944 gen_binary (ASHIFT,
3945 compute_mode,
3946 mask, pos)),
3947 inner),
3948 gen_binary (ASHIFT, compute_mode,
3949 gen_binary (AND, compute_mode,
3950 gen_lowpart_for_combine
3951 (compute_mode,
3952 SET_SRC (x)),
3953 mask),
3954 pos)));
3955 }
3956
3957 return x;
3958 }
3959 \f
3960 /* Return an RTX for a reference to LEN bits of INNER. POS is the starting
3961 bit position (counted from the LSB) if >= 0; otherwise POS_RTX represents
3962 the starting bit position.
3963
3964 INNER may be a USE. This will occur when we started with a bitfield
3965 that went outside the boundary of the object in memory, which is
3966 allowed on most machines. To isolate this case, we produce a USE
3967 whose mode is wide enough and surround the MEM with it. The only
3968 code that understands the USE is this routine. If it is not removed,
3969 it will cause the resulting insn not to match.
3970
3971 UNSIGNEDP is non-zero for an unsigned reference and zero for a
3972 signed reference.
3973
3974 IN_DEST is non-zero if this is a reference in the destination of a
3975 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
3976 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
3977 be used.
3978
3979 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
3980 ZERO_EXTRACT should be built even for bits starting at bit 0.
3981
3982 MODE is the desired mode of the result (if IN_DEST == 0). */
3983
3984 static rtx
3985 make_extraction (mode, inner, pos, pos_rtx, len,
3986 unsignedp, in_dest, in_compare)
3987 enum machine_mode mode;
3988 rtx inner;
3989 int pos;
3990 rtx pos_rtx;
3991 int len;
3992 int unsignedp;
3993 int in_dest, in_compare;
3994 {
3995 enum machine_mode is_mode = GET_MODE (inner);
3996 enum machine_mode inner_mode;
3997 enum machine_mode wanted_mem_mode = byte_mode;
3998 enum machine_mode pos_mode = word_mode;
3999 enum machine_mode extraction_mode = word_mode;
4000 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
4001 int spans_byte = 0;
4002 rtx new = 0;
4003
4004 /* Get some information about INNER and get the innermost object. */
4005 if (GET_CODE (inner) == USE)
4006 /* We don't need to adjust the position because we set up the USE
4007 to pretend that it was a full-word object. */
4008 spans_byte = 1, inner = XEXP (inner, 0);
4009 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4010 inner = SUBREG_REG (inner);
4011
4012 inner_mode = GET_MODE (inner);
4013
4014 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
4015 pos = INTVAL (pos_rtx);
4016
4017 /* See if this can be done without an extraction. We never can if the
4018 width of the field is not the same as that of some integer mode. For
4019 registers, we can only avoid the extraction if the position is at the
4020 low-order bit and this is either not in the destination or we have the
4021 appropriate STRICT_LOW_PART operation available.
4022
4023 For MEM, we can avoid an extract if the field starts on an appropriate
4024 boundary and we can change the mode of the memory reference. However,
4025 we cannot directly access the MEM if we have a USE and the underlying
4026 MEM is not TMODE. This combination means that MEM was being used in a
4027 context where bits outside its mode were being referenced; that is only
4028 valid in bit-field insns. */
4029
4030 if (tmode != BLKmode
4031 && ! (spans_byte && inner_mode != tmode)
4032 && ((pos == 0 && GET_CODE (inner) != MEM
4033 && (! in_dest
4034 || (GET_CODE (inner) == REG
4035 && (movstrict_optab->handlers[(int) tmode].insn_code
4036 != CODE_FOR_nothing))))
4037 || (GET_CODE (inner) == MEM && pos >= 0
4038 && (pos
4039 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
4040 : BITS_PER_UNIT)) == 0
4041 /* We can't do this if we are widening INNER_MODE (it
4042 may not be aligned, for one thing). */
4043 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
4044 && (inner_mode == tmode
4045 || (! mode_dependent_address_p (XEXP (inner, 0))
4046 && ! MEM_VOLATILE_P (inner))))))
4047 {
4048 int offset = pos / BITS_PER_UNIT;
4049
4050 /* If INNER is a MEM, make a new MEM that encompasses just the desired
4051 field. If the original and current mode are the same, we need not
4052 adjust the offset. Otherwise, we do if bytes big endian.
4053
4054 If INNER is not a MEM, get a piece consisting of the just the field
4055 of interest (in this case POS must be 0). */
4056
4057 if (GET_CODE (inner) == MEM)
4058 {
4059 #if BYTES_BIG_ENDIAN
4060 if (inner_mode != tmode)
4061 offset = (GET_MODE_SIZE (inner_mode)
4062 - GET_MODE_SIZE (tmode) - offset);
4063 #endif
4064
4065 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
4066 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
4067 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
4068 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
4069 }
4070 else if (GET_CODE (inner) == REG)
4071 /* We can't call gen_lowpart_for_combine here since we always want
4072 a SUBREG and it would sometimes return a new hard register. */
4073 new = gen_rtx (SUBREG, tmode, inner,
4074 (WORDS_BIG_ENDIAN
4075 && GET_MODE_SIZE (is_mode) > UNITS_PER_WORD)
4076 ? ((GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (tmode)
4077 / UNITS_PER_WORD))
4078 : 0);
4079 else
4080 new = gen_lowpart_for_combine (tmode, inner);
4081
4082 /* If this extraction is going into the destination of a SET,
4083 make a STRICT_LOW_PART unless we made a MEM. */
4084
4085 if (in_dest)
4086 return (GET_CODE (new) == MEM ? new
4087 : (GET_CODE (new) != SUBREG
4088 ? gen_rtx (CLOBBER, tmode, const0_rtx)
4089 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
4090
4091 /* Otherwise, sign- or zero-extend unless we already are in the
4092 proper mode. */
4093
4094 return (mode == tmode ? new
4095 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
4096 mode, new));
4097 }
4098
4099 /* Unless this is in a COMPARE or we have a funny memory reference,
4100 don't do anything with field extracts starting at the low-order
4101 bit since they are simple AND operations. */
4102 if (pos == 0 && ! in_dest && ! in_compare && ! spans_byte)
4103 return 0;
4104
4105 /* Get the mode to use should INNER be a MEM, the mode for the position,
4106 and the mode for the result. */
4107 #ifdef HAVE_insv
4108 if (in_dest)
4109 {
4110 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
4111 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
4112 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
4113 }
4114 #endif
4115
4116 #ifdef HAVE_extzv
4117 if (! in_dest && unsignedp)
4118 {
4119 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
4120 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
4121 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
4122 }
4123 #endif
4124
4125 #ifdef HAVE_extv
4126 if (! in_dest && ! unsignedp)
4127 {
4128 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
4129 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
4130 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
4131 }
4132 #endif
4133
4134 /* Never narrow an object, since that might not be safe. */
4135
4136 if (mode != VOIDmode
4137 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
4138 extraction_mode = mode;
4139
4140 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
4141 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4142 pos_mode = GET_MODE (pos_rtx);
4143
4144 /* If this is not from memory or we have to change the mode of memory and
4145 cannot, the desired mode is EXTRACTION_MODE. */
4146 if (GET_CODE (inner) != MEM
4147 || (inner_mode != wanted_mem_mode
4148 && (mode_dependent_address_p (XEXP (inner, 0))
4149 || MEM_VOLATILE_P (inner))))
4150 wanted_mem_mode = extraction_mode;
4151
4152 #if BITS_BIG_ENDIAN
4153 /* If position is constant, compute new position. Otherwise, build
4154 subtraction. */
4155 if (pos >= 0)
4156 pos = (MAX (GET_MODE_BITSIZE (is_mode), GET_MODE_BITSIZE (wanted_mem_mode))
4157 - len - pos);
4158 else
4159 pos_rtx
4160 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
4161 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode),
4162 GET_MODE_BITSIZE (wanted_mem_mode))
4163 - len),
4164 pos_rtx);
4165 #endif
4166
4167 /* If INNER has a wider mode, make it smaller. If this is a constant
4168 extract, try to adjust the byte to point to the byte containing
4169 the value. */
4170 if (wanted_mem_mode != VOIDmode
4171 && GET_MODE_SIZE (wanted_mem_mode) < GET_MODE_SIZE (is_mode)
4172 && ((GET_CODE (inner) == MEM
4173 && (inner_mode == wanted_mem_mode
4174 || (! mode_dependent_address_p (XEXP (inner, 0))
4175 && ! MEM_VOLATILE_P (inner))))))
4176 {
4177 int offset = 0;
4178
4179 /* The computations below will be correct if the machine is big
4180 endian in both bits and bytes or little endian in bits and bytes.
4181 If it is mixed, we must adjust. */
4182
4183 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
4184 if (! spans_byte && is_mode != wanted_mem_mode)
4185 offset = (GET_MODE_SIZE (is_mode)
4186 - GET_MODE_SIZE (wanted_mem_mode) - offset);
4187 #endif
4188
4189 /* If bytes are big endian and we had a paradoxical SUBREG, we must
4190 adjust OFFSET to compensate. */
4191 #if BYTES_BIG_ENDIAN
4192 if (! spans_byte
4193 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
4194 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
4195 #endif
4196
4197 /* If this is a constant position, we can move to the desired byte. */
4198 if (pos >= 0)
4199 {
4200 offset += pos / BITS_PER_UNIT;
4201 pos %= GET_MODE_BITSIZE (wanted_mem_mode);
4202 }
4203
4204 if (offset != 0 || inner_mode != wanted_mem_mode)
4205 {
4206 rtx newmem = gen_rtx (MEM, wanted_mem_mode,
4207 plus_constant (XEXP (inner, 0), offset));
4208 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
4209 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
4210 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
4211 inner = newmem;
4212 }
4213 }
4214
4215 /* If INNER is not memory, we can always get it into the proper mode. */
4216 else if (GET_CODE (inner) != MEM)
4217 inner = gen_lowpart_for_combine (extraction_mode, inner);
4218
4219 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
4220 have to zero extend. Otherwise, we can just use a SUBREG. */
4221 if (pos < 0
4222 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
4223 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
4224 else if (pos < 0
4225 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4226 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
4227
4228 /* Make POS_RTX unless we already have it and it is correct. */
4229 if (pos_rtx == 0 || (pos >= 0 && INTVAL (pos_rtx) != pos))
4230 pos_rtx = GEN_INT (pos);
4231
4232 /* Make the required operation. See if we can use existing rtx. */
4233 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
4234 extraction_mode, inner, GEN_INT (len), pos_rtx);
4235 if (! in_dest)
4236 new = gen_lowpart_for_combine (mode, new);
4237
4238 return new;
4239 }
4240 \f
4241 /* Look at the expression rooted at X. Look for expressions
4242 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
4243 Form these expressions.
4244
4245 Return the new rtx, usually just X.
4246
4247 Also, for machines like the Vax that don't have logical shift insns,
4248 try to convert logical to arithmetic shift operations in cases where
4249 they are equivalent. This undoes the canonicalizations to logical
4250 shifts done elsewhere.
4251
4252 We try, as much as possible, to re-use rtl expressions to save memory.
4253
4254 IN_CODE says what kind of expression we are processing. Normally, it is
4255 SET. In a memory address (inside a MEM, PLUS or minus, the latter two
4256 being kludges), it is MEM. When processing the arguments of a comparison
4257 or a COMPARE against zero, it is COMPARE. */
4258
4259 static rtx
4260 make_compound_operation (x, in_code)
4261 rtx x;
4262 enum rtx_code in_code;
4263 {
4264 enum rtx_code code = GET_CODE (x);
4265 enum machine_mode mode = GET_MODE (x);
4266 int mode_width = GET_MODE_BITSIZE (mode);
4267 enum rtx_code next_code;
4268 int i;
4269 rtx new = 0;
4270 char *fmt;
4271
4272 /* Select the code to be used in recursive calls. Once we are inside an
4273 address, we stay there. If we have a comparison, set to COMPARE,
4274 but once inside, go back to our default of SET. */
4275
4276 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
4277 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
4278 && XEXP (x, 1) == const0_rtx) ? COMPARE
4279 : in_code == COMPARE ? SET : in_code);
4280
4281 /* Process depending on the code of this operation. If NEW is set
4282 non-zero, it will be returned. */
4283
4284 switch (code)
4285 {
4286 case ASHIFT:
4287 case LSHIFT:
4288 /* Convert shifts by constants into multiplications if inside
4289 an address. */
4290 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
4291 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
4292 && INTVAL (XEXP (x, 1)) >= 0)
4293 new = gen_rtx_combine (MULT, mode, XEXP (x, 0),
4294 GEN_INT ((HOST_WIDE_INT) 1
4295 << INTVAL (XEXP (x, 1))));
4296 break;
4297
4298 case AND:
4299 /* If the second operand is not a constant, we can't do anything
4300 with it. */
4301 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4302 break;
4303
4304 /* If the constant is a power of two minus one and the first operand
4305 is a logical right shift, make an extraction. */
4306 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
4307 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4308 new = make_extraction (mode, XEXP (XEXP (x, 0), 0), -1,
4309 XEXP (XEXP (x, 0), 1), i, 1,
4310 0, in_code == COMPARE);
4311
4312 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
4313 else if (GET_CODE (XEXP (x, 0)) == SUBREG
4314 && subreg_lowpart_p (XEXP (x, 0))
4315 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
4316 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4317 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))),
4318 XEXP (SUBREG_REG (XEXP (x, 0)), 0), -1,
4319 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
4320 0, in_code == COMPARE);
4321
4322
4323 /* If we are have (and (rotate X C) M) and C is larger than the number
4324 of bits in M, this is an extraction. */
4325
4326 else if (GET_CODE (XEXP (x, 0)) == ROTATE
4327 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4328 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
4329 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
4330 new = make_extraction (mode, XEXP (XEXP (x, 0), 0),
4331 (GET_MODE_BITSIZE (mode)
4332 - INTVAL (XEXP (XEXP (x, 0), 1))),
4333 NULL_RTX, i, 1, 0, in_code == COMPARE);
4334
4335 /* On machines without logical shifts, if the operand of the AND is
4336 a logical shift and our mask turns off all the propagated sign
4337 bits, we can replace the logical shift with an arithmetic shift. */
4338 else if (
4339 #ifdef HAVE_ashrsi3
4340 HAVE_ashrsi3
4341 #else
4342 0
4343 #endif
4344 #ifdef HAVE_lshrsi3
4345 && ! HAVE_lshrsi3
4346 #else
4347 && 1
4348 #endif
4349 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
4350 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4351 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
4352 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
4353 && mode_width <= HOST_BITS_PER_WIDE_INT)
4354 {
4355 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
4356
4357 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
4358 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
4359 SUBST (XEXP (x, 0),
4360 gen_rtx_combine (ASHIFTRT, mode, XEXP (XEXP (x, 0), 0),
4361 XEXP (XEXP (x, 0), 1)));
4362 }
4363
4364 /* If the constant is one less than a power of two, this might be
4365 representable by an extraction even if no shift is present.
4366 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
4367 we are in a COMPARE. */
4368 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4369 new = make_extraction (mode, XEXP (x, 0), 0, NULL_RTX, i, 1,
4370 0, in_code == COMPARE);
4371
4372 /* If we are in a comparison and this is an AND with a power of two,
4373 convert this into the appropriate bit extract. */
4374 else if (in_code == COMPARE
4375 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
4376 new = make_extraction (mode, XEXP (x, 0), i, NULL_RTX, 1, 1, 0, 1);
4377
4378 break;
4379
4380 case LSHIFTRT:
4381 /* If the sign bit is known to be zero, replace this with an
4382 arithmetic shift. */
4383 if (
4384 #ifdef HAVE_ashrsi3
4385 HAVE_ashrsi3
4386 #else
4387 0
4388 #endif
4389 #ifdef HAVE_lshrsi3
4390 && ! HAVE_lshrsi3
4391 #else
4392 && 1
4393 #endif
4394 && mode_width <= HOST_BITS_PER_WIDE_INT
4395 && (significant_bits (XEXP (x, 0), mode)
4396 & (1 << (mode_width - 1))) == 0)
4397 {
4398 new = gen_rtx_combine (ASHIFTRT, mode, XEXP (x, 0), XEXP (x, 1));
4399 break;
4400 }
4401
4402 /* ... fall through ... */
4403
4404 case ASHIFTRT:
4405 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
4406 this is a SIGN_EXTRACT. */
4407 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4408 && GET_CODE (XEXP (x, 0)) == ASHIFT
4409 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4410 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (x, 0), 1)))
4411 new = make_extraction (mode, XEXP (XEXP (x, 0), 0),
4412 (INTVAL (XEXP (x, 1))
4413 - INTVAL (XEXP (XEXP (x, 0), 1))),
4414 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
4415 code == LSHIFTRT, 0, in_code == COMPARE);
4416 break;
4417 }
4418
4419 if (new)
4420 {
4421 x = gen_lowpart_for_combine (mode, new);
4422 code = GET_CODE (x);
4423 }
4424
4425 /* Now recursively process each operand of this operation. */
4426 fmt = GET_RTX_FORMAT (code);
4427 for (i = 0; i < GET_RTX_LENGTH (code); i++)
4428 if (fmt[i] == 'e')
4429 {
4430 new = make_compound_operation (XEXP (x, i), next_code);
4431 SUBST (XEXP (x, i), new);
4432 }
4433
4434 return x;
4435 }
4436 \f
4437 /* Given M see if it is a value that would select a field of bits
4438 within an item, but not the entire word. Return -1 if not.
4439 Otherwise, return the starting position of the field, where 0 is the
4440 low-order bit.
4441
4442 *PLEN is set to the length of the field. */
4443
4444 static int
4445 get_pos_from_mask (m, plen)
4446 unsigned HOST_WIDE_INT m;
4447 int *plen;
4448 {
4449 /* Get the bit number of the first 1 bit from the right, -1 if none. */
4450 int pos = exact_log2 (m & - m);
4451
4452 if (pos < 0)
4453 return -1;
4454
4455 /* Now shift off the low-order zero bits and see if we have a power of
4456 two minus 1. */
4457 *plen = exact_log2 ((m >> pos) + 1);
4458
4459 if (*plen <= 0)
4460 return -1;
4461
4462 return pos;
4463 }
4464 \f
4465 /* Rewrite X so that it is an expression in MODE. We only care about the
4466 low-order BITS bits so we can ignore AND operations that just clear
4467 higher-order bits.
4468
4469 Also, if REG is non-zero and X is a register equal in value to REG,
4470 replace X with REG. */
4471
4472 static rtx
4473 force_to_mode (x, mode, bits, reg)
4474 rtx x;
4475 enum machine_mode mode;
4476 int bits;
4477 rtx reg;
4478 {
4479 enum rtx_code code = GET_CODE (x);
4480
4481 /* If X is narrower than MODE or if BITS is larger than the size of MODE,
4482 just get X in the proper mode. */
4483
4484 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
4485 || bits > GET_MODE_BITSIZE (mode))
4486 return gen_lowpart_for_combine (mode, x);
4487
4488 switch (code)
4489 {
4490 case SIGN_EXTEND:
4491 case ZERO_EXTEND:
4492 case ZERO_EXTRACT:
4493 case SIGN_EXTRACT:
4494 x = expand_compound_operation (x);
4495 if (GET_CODE (x) != code)
4496 return force_to_mode (x, mode, bits, reg);
4497 break;
4498
4499 case REG:
4500 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
4501 || rtx_equal_p (reg, get_last_value (x))))
4502 x = reg;
4503 break;
4504
4505 case CONST_INT:
4506 if (bits < HOST_BITS_PER_WIDE_INT)
4507 x = GEN_INT (INTVAL (x) & (((HOST_WIDE_INT) 1 << bits) - 1));
4508 return x;
4509
4510 case SUBREG:
4511 /* Ignore low-order SUBREGs. */
4512 if (subreg_lowpart_p (x))
4513 return force_to_mode (SUBREG_REG (x), mode, bits, reg);
4514 break;
4515
4516 case AND:
4517 /* If this is an AND with a constant. Otherwise, we fall through to
4518 do the general binary case. */
4519
4520 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4521 {
4522 HOST_WIDE_INT mask = INTVAL (XEXP (x, 1));
4523 int len = exact_log2 (mask + 1);
4524 rtx op = XEXP (x, 0);
4525
4526 /* If this is masking some low-order bits, we may be able to
4527 impose a stricter constraint on what bits of the operand are
4528 required. */
4529
4530 op = force_to_mode (op, mode, len > 0 ? MIN (len, bits) : bits,
4531 reg);
4532
4533 if (bits < HOST_BITS_PER_WIDE_INT)
4534 mask &= ((HOST_WIDE_INT) 1 << bits) - 1;
4535
4536 x = simplify_and_const_int (x, mode, op, mask);
4537
4538 /* If X is still an AND, see if it is an AND with a mask that
4539 is just some low-order bits. If so, and it is BITS wide (it
4540 can't be wider), we don't need it. */
4541
4542 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
4543 && bits < HOST_BITS_PER_WIDE_INT
4544 && INTVAL (XEXP (x, 1)) == ((HOST_WIDE_INT) 1 << bits) - 1)
4545 x = XEXP (x, 0);
4546 return x;
4547 }
4548
4549 /* ... fall through ... */
4550
4551 case PLUS:
4552 case MINUS:
4553 case MULT:
4554 case IOR:
4555 case XOR:
4556 /* For most binary operations, just propagate into the operation and
4557 change the mode. */
4558
4559 return gen_binary (code, mode,
4560 force_to_mode (XEXP (x, 0), mode, bits, reg),
4561 force_to_mode (XEXP (x, 1), mode, bits, reg));
4562
4563 case ASHIFT:
4564 case LSHIFT:
4565 /* For left shifts, do the same, but just for the first operand.
4566 If the shift count is a constant, we need even fewer bits of the
4567 first operand. */
4568
4569 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) < bits)
4570 bits -= INTVAL (XEXP (x, 1));
4571
4572 return gen_binary (code, mode,
4573 force_to_mode (XEXP (x, 0), mode, bits, reg),
4574 XEXP (x, 1));
4575
4576 case LSHIFTRT:
4577 /* Here we can only do something if the shift count is a constant and
4578 the count plus BITS is no larger than the width of MODE, we can do
4579 the shift in MODE. */
4580
4581 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4582 && INTVAL (XEXP (x, 1)) + bits <= GET_MODE_BITSIZE (mode))
4583 return gen_binary (LSHIFTRT, mode,
4584 force_to_mode (XEXP (x, 0), mode,
4585 bits + INTVAL (XEXP (x, 1)), reg),
4586 XEXP (x, 1));
4587 break;
4588
4589 case NEG:
4590 case NOT:
4591 /* Handle these similarly to the way we handle most binary operations. */
4592 return gen_unary (code, mode,
4593 force_to_mode (XEXP (x, 0), mode, bits, reg));
4594 }
4595
4596 /* Otherwise, just do the operation canonically. */
4597 return gen_lowpart_for_combine (mode, x);
4598 }
4599 \f
4600 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
4601 Return that assignment if so.
4602
4603 We only handle the most common cases. */
4604
4605 static rtx
4606 make_field_assignment (x)
4607 rtx x;
4608 {
4609 rtx dest = SET_DEST (x);
4610 rtx src = SET_SRC (x);
4611 rtx ourdest;
4612 rtx assign;
4613 HOST_WIDE_INT c1;
4614 int pos, len;
4615 rtx other;
4616 enum machine_mode mode;
4617
4618 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
4619 a clear of a one-bit field. We will have changed it to
4620 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
4621 for a SUBREG. */
4622
4623 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
4624 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
4625 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
4626 && (rtx_equal_p (dest, XEXP (src, 1))
4627 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
4628 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
4629 {
4630 assign = make_extraction (VOIDmode, dest, -1, XEXP (XEXP (src, 0), 1),
4631 1, 1, 1, 0);
4632 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
4633 }
4634
4635 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
4636 && subreg_lowpart_p (XEXP (src, 0))
4637 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
4638 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
4639 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
4640 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
4641 && (rtx_equal_p (dest, XEXP (src, 1))
4642 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
4643 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
4644 {
4645 assign = make_extraction (VOIDmode, dest, -1,
4646 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
4647 1, 1, 1, 0);
4648 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
4649 }
4650
4651 /* If SRC is (ior (ashift (const_int 1) POS DEST)), this is a set of a
4652 one-bit field. */
4653 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
4654 && XEXP (XEXP (src, 0), 0) == const1_rtx
4655 && (rtx_equal_p (dest, XEXP (src, 1))
4656 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
4657 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
4658 {
4659 assign = make_extraction (VOIDmode, dest, -1, XEXP (XEXP (src, 0), 1),
4660 1, 1, 1, 0);
4661 return gen_rtx (SET, VOIDmode, assign, const1_rtx);
4662 }
4663
4664 /* The other case we handle is assignments into a constant-position
4665 field. They look like (ior (and DEST C1) OTHER). If C1 represents
4666 a mask that has all one bits except for a group of zero bits and
4667 OTHER is known to have zeros where C1 has ones, this is such an
4668 assignment. Compute the position and length from C1. Shift OTHER
4669 to the appropriate position, force it to the required mode, and
4670 make the extraction. Check for the AND in both operands. */
4671
4672 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == AND
4673 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
4674 && (rtx_equal_p (XEXP (XEXP (src, 0), 0), dest)
4675 || rtx_equal_p (XEXP (XEXP (src, 0), 0), get_last_value (dest))
4676 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 0), 1)), dest)))
4677 c1 = INTVAL (XEXP (XEXP (src, 0), 1)), other = XEXP (src, 1);
4678 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 1)) == AND
4679 && GET_CODE (XEXP (XEXP (src, 1), 1)) == CONST_INT
4680 && (rtx_equal_p (XEXP (XEXP (src, 1), 0), dest)
4681 || rtx_equal_p (XEXP (XEXP (src, 1), 0), get_last_value (dest))
4682 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 1), 0)),
4683 dest)))
4684 c1 = INTVAL (XEXP (XEXP (src, 1), 1)), other = XEXP (src, 0);
4685 else
4686 return x;
4687
4688 pos = get_pos_from_mask (~c1, &len);
4689 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
4690 || (c1 & significant_bits (other, GET_MODE (other))) != 0)
4691 return x;
4692
4693 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
4694
4695 /* The mode to use for the source is the mode of the assignment, or of
4696 what is inside a possible STRICT_LOW_PART. */
4697 mode = (GET_CODE (assign) == STRICT_LOW_PART
4698 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
4699
4700 /* Shift OTHER right POS places and make it the source, restricting it
4701 to the proper length and mode. */
4702
4703 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
4704 GET_MODE (src), other, pos),
4705 mode, len, dest);
4706
4707 return gen_rtx_combine (SET, VOIDmode, assign, src);
4708 }
4709 \f
4710 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
4711 if so. */
4712
4713 static rtx
4714 apply_distributive_law (x)
4715 rtx x;
4716 {
4717 enum rtx_code code = GET_CODE (x);
4718 rtx lhs, rhs, other;
4719 rtx tem;
4720 enum rtx_code inner_code;
4721
4722 /* The outer operation can only be one of the following: */
4723 if (code != IOR && code != AND && code != XOR
4724 && code != PLUS && code != MINUS)
4725 return x;
4726
4727 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
4728
4729 /* If either operand is a primitive we can't do anything, so get out fast. */
4730 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
4731 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
4732 return x;
4733
4734 lhs = expand_compound_operation (lhs);
4735 rhs = expand_compound_operation (rhs);
4736 inner_code = GET_CODE (lhs);
4737 if (inner_code != GET_CODE (rhs))
4738 return x;
4739
4740 /* See if the inner and outer operations distribute. */
4741 switch (inner_code)
4742 {
4743 case LSHIFTRT:
4744 case ASHIFTRT:
4745 case AND:
4746 case IOR:
4747 /* These all distribute except over PLUS. */
4748 if (code == PLUS || code == MINUS)
4749 return x;
4750 break;
4751
4752 case MULT:
4753 if (code != PLUS && code != MINUS)
4754 return x;
4755 break;
4756
4757 case ASHIFT:
4758 case LSHIFT:
4759 /* These are also multiplies, so they distribute over everything. */
4760 break;
4761
4762 case SUBREG:
4763 /* Non-paradoxical SUBREGs distributes over all operations, provided
4764 the inner modes and word numbers are the same, this is an extraction
4765 of a low-order part, we don't convert an fp operation to int or
4766 vice versa, and we would not be converting a single-word
4767 operation into a multi-word operation. The latter test is not
4768 required, but it prevents generating unneeded multi-word operations.
4769 Some of the previous tests are redundant given the latter test, but
4770 are retained because they are required for correctness.
4771
4772 We produce the result slightly differently in this case. */
4773
4774 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
4775 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
4776 || ! subreg_lowpart_p (lhs)
4777 || (GET_MODE_CLASS (GET_MODE (lhs))
4778 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
4779 || (GET_MODE_SIZE (GET_MODE (lhs))
4780 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
4781 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
4782 return x;
4783
4784 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
4785 SUBREG_REG (lhs), SUBREG_REG (rhs));
4786 return gen_lowpart_for_combine (GET_MODE (x), tem);
4787
4788 default:
4789 return x;
4790 }
4791
4792 /* Set LHS and RHS to the inner operands (A and B in the example
4793 above) and set OTHER to the common operand (C in the example).
4794 These is only one way to do this unless the inner operation is
4795 commutative. */
4796 if (GET_RTX_CLASS (inner_code) == 'c'
4797 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
4798 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
4799 else if (GET_RTX_CLASS (inner_code) == 'c'
4800 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
4801 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
4802 else if (GET_RTX_CLASS (inner_code) == 'c'
4803 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
4804 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
4805 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
4806 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
4807 else
4808 return x;
4809
4810 /* Form the new inner operation, seeing if it simplifies first. */
4811 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
4812
4813 /* There is one exception to the general way of distributing:
4814 (a ^ b) | (a ^ c) -> (~a) & (b ^ c) */
4815 if (code == XOR && inner_code == IOR)
4816 {
4817 inner_code = AND;
4818 other = gen_unary (NOT, GET_MODE (x), other);
4819 }
4820
4821 /* We may be able to continuing distributing the result, so call
4822 ourselves recursively on the inner operation before forming the
4823 outer operation, which we return. */
4824 return gen_binary (inner_code, GET_MODE (x),
4825 apply_distributive_law (tem), other);
4826 }
4827 \f
4828 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
4829 in MODE.
4830
4831 Return an equivalent form, if different from X. Otherwise, return X. If
4832 X is zero, we are to always construct the equivalent form. */
4833
4834 static rtx
4835 simplify_and_const_int (x, mode, varop, constop)
4836 rtx x;
4837 enum machine_mode mode;
4838 rtx varop;
4839 unsigned HOST_WIDE_INT constop;
4840 {
4841 register enum machine_mode tmode;
4842 register rtx temp;
4843 unsigned HOST_WIDE_INT significant;
4844
4845 /* There is a large class of optimizations based on the principle that
4846 some operations produce results where certain bits are known to be zero,
4847 and hence are not significant to the AND. For example, if we have just
4848 done a left shift of one bit, the low-order bit is known to be zero and
4849 hence an AND with a mask of ~1 would not do anything.
4850
4851 At the end of the following loop, we set:
4852
4853 VAROP to be the item to be AND'ed with;
4854 CONSTOP to the constant value to AND it with. */
4855
4856 while (1)
4857 {
4858 /* If we ever encounter a mode wider than the host machine's widest
4859 integer size, we can't compute the masks accurately, so give up. */
4860 if (GET_MODE_BITSIZE (GET_MODE (varop)) > HOST_BITS_PER_WIDE_INT)
4861 break;
4862
4863 /* Unless one of the cases below does a `continue',
4864 a `break' will be executed to exit the loop. */
4865
4866 switch (GET_CODE (varop))
4867 {
4868 case CLOBBER:
4869 /* If VAROP is a (clobber (const_int)), return it since we know
4870 we are generating something that won't match. */
4871 return varop;
4872
4873 #if ! BITS_BIG_ENDIAN
4874 case USE:
4875 /* VAROP is a (use (mem ..)) that was made from a bit-field
4876 extraction that spanned the boundary of the MEM. If we are
4877 now masking so it is within that boundary, we don't need the
4878 USE any more. */
4879 if ((constop & ~ GET_MODE_MASK (GET_MODE (XEXP (varop, 0)))) == 0)
4880 {
4881 varop = XEXP (varop, 0);
4882 continue;
4883 }
4884 break;
4885 #endif
4886
4887 case SUBREG:
4888 if (subreg_lowpart_p (varop)
4889 /* We can ignore the effect this SUBREG if it narrows the mode
4890 or, on machines where byte operations zero extend, if the
4891 constant masks to zero all the bits the mode doesn't have. */
4892 && ((GET_MODE_SIZE (GET_MODE (varop))
4893 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop))))
4894 #ifdef BYTE_LOADS_ZERO_EXTEND
4895 || (0 == (constop
4896 & GET_MODE_MASK (GET_MODE (varop))
4897 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (varop)))))
4898 #endif
4899 ))
4900 {
4901 varop = SUBREG_REG (varop);
4902 continue;
4903 }
4904 break;
4905
4906 case ZERO_EXTRACT:
4907 case SIGN_EXTRACT:
4908 case ZERO_EXTEND:
4909 case SIGN_EXTEND:
4910 /* Try to expand these into a series of shifts and then work
4911 with that result. If we can't, for example, if the extract
4912 isn't at a fixed position, give up. */
4913 temp = expand_compound_operation (varop);
4914 if (temp != varop)
4915 {
4916 varop = temp;
4917 continue;
4918 }
4919 break;
4920
4921 case AND:
4922 if (GET_CODE (XEXP (varop, 1)) == CONST_INT)
4923 {
4924 constop &= INTVAL (XEXP (varop, 1));
4925 varop = XEXP (varop, 0);
4926 continue;
4927 }
4928 break;
4929
4930 case IOR:
4931 case XOR:
4932 /* If VAROP is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
4933 LSHIFT so we end up with an (and (lshiftrt (ior ...) ...) ...)
4934 operation which may be a bitfield extraction. */
4935
4936 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
4937 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
4938 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
4939 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT
4940 && GET_CODE (XEXP (varop, 1)) == CONST_INT
4941 && (INTVAL (XEXP (varop, 1))
4942 & ~ significant_bits (XEXP (varop, 0),
4943 GET_MODE (varop)) == 0))
4944 {
4945 temp = GEN_INT ((INTVAL (XEXP (varop, 1)) & constop)
4946 << INTVAL (XEXP (XEXP (varop, 0), 1)));
4947 temp = gen_binary (GET_CODE (varop), GET_MODE (varop),
4948 XEXP (XEXP (varop, 0), 0), temp);
4949 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
4950 temp, XEXP (varop, 1));
4951 continue;
4952 }
4953
4954 /* Apply the AND to both branches of the IOR or XOR, then try to
4955 apply the distributive law. This may eliminate operations
4956 if either branch can be simplified because of the AND.
4957 It may also make some cases more complex, but those cases
4958 probably won't match a pattern either with or without this. */
4959 return
4960 gen_lowpart_for_combine
4961 (mode, apply_distributive_law
4962 (gen_rtx_combine
4963 (GET_CODE (varop), GET_MODE (varop),
4964 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
4965 XEXP (varop, 0), constop),
4966 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
4967 XEXP (varop, 1), constop))));
4968
4969 case NOT:
4970 /* (and (not FOO)) is (and (xor FOO CONST_OP)) so if FOO is an
4971 LSHIFTRT we can do the same as above. */
4972
4973 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
4974 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
4975 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
4976 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT)
4977 {
4978 temp = GEN_INT (constop << INTVAL (XEXP (XEXP (varop, 0), 1)));
4979 temp = gen_binary (XOR, GET_MODE (varop),
4980 XEXP (XEXP (varop, 0), 0), temp);
4981 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
4982 temp, XEXP (XEXP (varop, 0), 1));
4983 continue;
4984 }
4985 break;
4986
4987 case ASHIFTRT:
4988 /* If we are just looking for the sign bit, we don't need this
4989 shift at all, even if it has a variable count. */
4990 if (constop == ((HOST_WIDE_INT) 1
4991 << (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)))
4992 {
4993 varop = XEXP (varop, 0);
4994 continue;
4995 }
4996
4997 /* If this is a shift by a constant, get a mask that contains
4998 those bits that are not copies of the sign bit. We then have
4999 two cases: If CONSTOP only includes those bits, this can be
5000 a logical shift, which may allow simplifications. If CONSTOP
5001 is a single-bit field not within those bits, we are requesting
5002 a copy of the sign bit and hence can shift the sign bit to
5003 the appropriate location. */
5004 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5005 && INTVAL (XEXP (varop, 1)) >= 0
5006 && INTVAL (XEXP (varop, 1)) < HOST_BITS_PER_WIDE_INT)
5007 {
5008 int i = -1;
5009
5010 significant = GET_MODE_MASK (GET_MODE (varop));
5011 significant >>= INTVAL (XEXP (varop, 1));
5012
5013 if ((constop & ~significant) == 0
5014 || (i = exact_log2 (constop)) >= 0)
5015 {
5016 varop = simplify_shift_const
5017 (varop, LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5018 i < 0 ? INTVAL (XEXP (varop, 1))
5019 : GET_MODE_BITSIZE (GET_MODE (varop)) - 1 - i);
5020 if (GET_CODE (varop) != ASHIFTRT)
5021 continue;
5022 }
5023 }
5024
5025 /* If our mask is 1, convert this to a LSHIFTRT. This can be done
5026 even if the shift count isn't a constant. */
5027 if (constop == 1)
5028 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5029 XEXP (varop, 0), XEXP (varop, 1));
5030 break;
5031
5032 case NE:
5033 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is
5034 included in STORE_FLAG_VALUE and FOO has no significant bits
5035 not in CONST. */
5036 if ((constop & ~ STORE_FLAG_VALUE) == 0
5037 && XEXP (varop, 0) == const0_rtx
5038 && (significant_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
5039 {
5040 varop = XEXP (varop, 0);
5041 continue;
5042 }
5043 break;
5044
5045 case PLUS:
5046 /* In (and (plus FOO C1) M), if M is a mask that just turns off
5047 low-order bits (as in an alignment operation) and FOO is already
5048 aligned to that boundary, we can convert remove this AND
5049 and possibly the PLUS if it is now adding zero. */
5050 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5051 && exact_log2 (-constop) >= 0
5052 && (significant_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
5053 {
5054 varop = plus_constant (XEXP (varop, 0),
5055 INTVAL (XEXP (varop, 1)) & constop);
5056 constop = ~0;
5057 break;
5058 }
5059
5060 /* ... fall through ... */
5061
5062 case MINUS:
5063 /* In (and (plus (and FOO M1) BAR) M2), if M1 and M2 are one
5064 less than powers of two and M2 is narrower than M1, we can
5065 eliminate the inner AND. This occurs when incrementing
5066 bit fields. */
5067
5068 if (GET_CODE (XEXP (varop, 0)) == ZERO_EXTRACT
5069 || GET_CODE (XEXP (varop, 0)) == ZERO_EXTEND)
5070 SUBST (XEXP (varop, 0),
5071 expand_compound_operation (XEXP (varop, 0)));
5072
5073 if (GET_CODE (XEXP (varop, 0)) == AND
5074 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5075 && exact_log2 (constop + 1) >= 0
5076 && exact_log2 (INTVAL (XEXP (XEXP (varop, 0), 1)) + 1) >= 0
5077 && (~ INTVAL (XEXP (XEXP (varop, 0), 1)) & constop) == 0)
5078 SUBST (XEXP (varop, 0), XEXP (XEXP (varop, 0), 0));
5079 break;
5080 }
5081
5082 break;
5083 }
5084
5085 /* If we have reached a constant, this whole thing is constant. */
5086 if (GET_CODE (varop) == CONST_INT)
5087 return GEN_INT (constop & INTVAL (varop));
5088
5089 /* See what bits are significant in VAROP. */
5090 significant = significant_bits (varop, mode);
5091
5092 /* Turn off all bits in the constant that are known to already be zero.
5093 Thus, if the AND isn't needed at all, we will have CONSTOP == SIGNIFICANT
5094 which is tested below. */
5095
5096 constop &= significant;
5097
5098 /* If we don't have any bits left, return zero. */
5099 if (constop == 0)
5100 return const0_rtx;
5101
5102 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
5103 if we already had one (just check for the simplest cases). */
5104 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
5105 && GET_MODE (XEXP (x, 0)) == mode
5106 && SUBREG_REG (XEXP (x, 0)) == varop)
5107 varop = XEXP (x, 0);
5108 else
5109 varop = gen_lowpart_for_combine (mode, varop);
5110
5111 /* If we can't make the SUBREG, try to return what we were given. */
5112 if (GET_CODE (varop) == CLOBBER)
5113 return x ? x : varop;
5114
5115 /* If we are only masking insignificant bits, return VAROP. */
5116 if (constop == significant)
5117 x = varop;
5118
5119 /* Otherwise, return an AND. See how much, if any, of X we can use. */
5120 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
5121 x = gen_rtx_combine (AND, mode, varop, GEN_INT (constop));
5122
5123 else
5124 {
5125 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5126 || INTVAL (XEXP (x, 1)) != constop)
5127 SUBST (XEXP (x, 1), GEN_INT (constop));
5128
5129 SUBST (XEXP (x, 0), varop);
5130 }
5131
5132 return x;
5133 }
5134 \f
5135 /* Given an expression, X, compute which bits in X can be non-zero.
5136 We don't care about bits outside of those defined in MODE.
5137
5138 For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
5139 a shift, AND, or zero_extract, we can do better. */
5140
5141 static unsigned HOST_WIDE_INT
5142 significant_bits (x, mode)
5143 rtx x;
5144 enum machine_mode mode;
5145 {
5146 unsigned HOST_WIDE_INT significant = GET_MODE_MASK (mode);
5147 unsigned HOST_WIDE_INT inner_sig;
5148 enum rtx_code code;
5149 int mode_width = GET_MODE_BITSIZE (mode);
5150 rtx tem;
5151
5152 /* If X is wider than MODE, use its mode instead. */
5153 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
5154 {
5155 mode = GET_MODE (x);
5156 significant = GET_MODE_MASK (mode);
5157 mode_width = GET_MODE_BITSIZE (mode);
5158 }
5159
5160 if (mode_width > HOST_BITS_PER_WIDE_INT)
5161 /* Our only callers in this case look for single bit values. So
5162 just return the mode mask. Those tests will then be false. */
5163 return significant;
5164
5165 code = GET_CODE (x);
5166 switch (code)
5167 {
5168 case REG:
5169 #ifdef STACK_BOUNDARY
5170 /* If this is the stack pointer, we may know something about its
5171 alignment. If PUSH_ROUNDING is defined, it is possible for the
5172 stack to be momentarily aligned only to that amount, so we pick
5173 the least alignment. */
5174
5175 if (x == stack_pointer_rtx)
5176 {
5177 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
5178
5179 #ifdef PUSH_ROUNDING
5180 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
5181 #endif
5182
5183 return significant & ~ (sp_alignment - 1);
5184 }
5185 #endif
5186
5187 /* If X is a register whose value we can find, use that value.
5188 Otherwise, use the previously-computed significant bits for this
5189 register. */
5190
5191 tem = get_last_value (x);
5192 if (tem)
5193 return significant_bits (tem, mode);
5194 else if (significant_valid && reg_significant[REGNO (x)])
5195 return reg_significant[REGNO (x)] & significant;
5196 else
5197 return significant;
5198
5199 case CONST_INT:
5200 return INTVAL (x);
5201
5202 #ifdef BYTE_LOADS_ZERO_EXTEND
5203 case MEM:
5204 /* In many, if not most, RISC machines, reading a byte from memory
5205 zeros the rest of the register. Noticing that fact saves a lot
5206 of extra zero-extends. */
5207 significant &= GET_MODE_MASK (GET_MODE (x));
5208 break;
5209 #endif
5210
5211 #if STORE_FLAG_VALUE == 1
5212 case EQ: case NE:
5213 case GT: case GTU:
5214 case LT: case LTU:
5215 case GE: case GEU:
5216 case LE: case LEU:
5217
5218 if (GET_MODE_CLASS (mode) == MODE_INT)
5219 significant = 1;
5220
5221 /* A comparison operation only sets the bits given by its mode. The
5222 rest are set undefined. */
5223 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
5224 significant |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
5225 break;
5226 #endif
5227
5228 #if STORE_FLAG_VALUE == -1
5229 case NEG:
5230 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
5231 || ((tem = get_last_value (XEXP (x, 0))) != 0
5232 && GET_RTX_CLASS (GET_CODE (tem)) == '<'))
5233 significant = 1;
5234
5235 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
5236 significant |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
5237 break;
5238 #endif
5239
5240 case TRUNCATE:
5241 significant &= (significant_bits (XEXP (x, 0), mode)
5242 & GET_MODE_MASK (mode));
5243 break;
5244
5245 case ZERO_EXTEND:
5246 significant &= significant_bits (XEXP (x, 0), mode);
5247 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
5248 significant &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
5249 break;
5250
5251 case SIGN_EXTEND:
5252 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
5253 Otherwise, show all the bits in the outer mode but not the inner
5254 may be non-zero. */
5255 inner_sig = significant_bits (XEXP (x, 0), mode);
5256 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
5257 {
5258 inner_sig &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
5259 if (inner_sig &
5260 (((HOST_WIDE_INT) 1
5261 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
5262 inner_sig |= (GET_MODE_MASK (mode)
5263 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
5264 }
5265
5266 significant &= inner_sig;
5267 break;
5268
5269 case AND:
5270 significant &= (significant_bits (XEXP (x, 0), mode)
5271 & significant_bits (XEXP (x, 1), mode));
5272 break;
5273
5274 case XOR:
5275 case IOR:
5276 significant &= (significant_bits (XEXP (x, 0), mode)
5277 | significant_bits (XEXP (x, 1), mode));
5278 break;
5279
5280 case PLUS: case MINUS:
5281 case MULT:
5282 case DIV: case UDIV:
5283 case MOD: case UMOD:
5284 /* We can apply the rules of arithmetic to compute the number of
5285 high- and low-order zero bits of these operations. We start by
5286 computing the width (position of the highest-order non-zero bit)
5287 and the number of low-order zero bits for each value. */
5288 {
5289 unsigned HOST_WIDE_INT sig0 = significant_bits (XEXP (x, 0), mode);
5290 unsigned HOST_WIDE_INT sig1 = significant_bits (XEXP (x, 1), mode);
5291 int width0 = floor_log2 (sig0) + 1;
5292 int width1 = floor_log2 (sig1) + 1;
5293 int low0 = floor_log2 (sig0 & -sig0);
5294 int low1 = floor_log2 (sig1 & -sig1);
5295 int op0_maybe_minusp = (sig0 & (1 << (mode_width - 1)));
5296 int op1_maybe_minusp = (sig1 & (1 << (mode_width - 1)));
5297 int result_width = mode_width;
5298 int result_low = 0;
5299
5300 switch (code)
5301 {
5302 case PLUS:
5303 result_width = MAX (width0, width1) + 1;
5304 result_low = MIN (low0, low1);
5305 break;
5306 case MINUS:
5307 result_low = MIN (low0, low1);
5308 break;
5309 case MULT:
5310 result_width = width0 + width1;
5311 result_low = low0 + low1;
5312 break;
5313 case DIV:
5314 if (! op0_maybe_minusp && ! op1_maybe_minusp)
5315 result_width = width0;
5316 break;
5317 case UDIV:
5318 result_width = width0;
5319 break;
5320 case MOD:
5321 if (! op0_maybe_minusp && ! op1_maybe_minusp)
5322 result_width = MIN (width0, width1);
5323 result_low = MIN (low0, low1);
5324 break;
5325 case UMOD:
5326 result_width = MIN (width0, width1);
5327 result_low = MIN (low0, low1);
5328 break;
5329 }
5330
5331 if (result_width < mode_width)
5332 significant &= ((HOST_WIDE_INT) 1 << result_width) - 1;
5333
5334 if (result_low > 0)
5335 significant &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
5336 }
5337 break;
5338
5339 case ZERO_EXTRACT:
5340 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5341 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
5342 significant &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
5343 break;
5344
5345 case SUBREG:
5346 /* If the inner mode is a single word for both the host and target
5347 machines, we can compute this from which bits of the inner
5348 object are known significant. */
5349 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
5350 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
5351 <= HOST_BITS_PER_WIDE_INT))
5352 {
5353 significant &= significant_bits (SUBREG_REG (x), mode);
5354 #ifndef BYTE_LOADS_ZERO_EXTEND
5355 /* On many CISC machines, accessing an object in a wider mode
5356 causes the high-order bits to become undefined. So they are
5357 not known to be zero. */
5358 if (GET_MODE_SIZE (GET_MODE (x))
5359 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
5360 significant |= (GET_MODE_MASK (GET_MODE (x))
5361 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
5362 #endif
5363 }
5364 break;
5365
5366 case ASHIFTRT:
5367 case LSHIFTRT:
5368 case ASHIFT:
5369 case LSHIFT:
5370 case ROTATE:
5371 /* The significant bits are in two classes: any bits within MODE
5372 that aren't in GET_MODE (x) are always significant. The rest of the
5373 significant bits are those that are significant in the operand of
5374 the shift when shifted the appropriate number of bits. This
5375 shows that high-order bits are cleared by the right shift and
5376 low-order bits by left shifts. */
5377 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5378 && INTVAL (XEXP (x, 1)) >= 0
5379 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
5380 {
5381 enum machine_mode inner_mode = GET_MODE (x);
5382 int width = GET_MODE_BITSIZE (inner_mode);
5383 int count = INTVAL (XEXP (x, 1));
5384 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
5385 unsigned HOST_WIDE_INT op_significant
5386 = significant_bits (XEXP (x, 0), mode);
5387 unsigned HOST_WIDE_INT inner = op_significant & mode_mask;
5388 unsigned HOST_WIDE_INT outer = 0;
5389
5390 if (mode_width > width)
5391 outer = (op_significant & significant & ~ mode_mask);
5392
5393 if (code == LSHIFTRT)
5394 inner >>= count;
5395 else if (code == ASHIFTRT)
5396 {
5397 inner >>= count;
5398
5399 /* If the sign bit was significant at before the shift, we
5400 need to mark all the places it could have been copied to
5401 by the shift significant. */
5402 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
5403 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
5404 }
5405 else if (code == LSHIFT || code == ASHIFT)
5406 inner <<= count;
5407 else
5408 inner = ((inner << (count % width)
5409 | (inner >> (width - (count % width)))) & mode_mask);
5410
5411 significant &= (outer | inner);
5412 }
5413 break;
5414
5415 case FFS:
5416 /* This is at most the number of bits in the mode. */
5417 significant = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
5418 break;
5419 }
5420
5421 return significant;
5422 }
5423 \f
5424 /* This function is called from `simplify_shift_const' to merge two
5425 outer operations. Specifically, we have already found that we need
5426 to perform operation *POP0 with constant *PCONST0 at the outermost
5427 position. We would now like to also perform OP1 with constant CONST1
5428 (with *POP0 being done last).
5429
5430 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
5431 the resulting operation. *PCOMP_P is set to 1 if we would need to
5432 complement the innermost operand, otherwise it is unchanged.
5433
5434 MODE is the mode in which the operation will be done. No bits outside
5435 the width of this mode matter. It is assumed that the width of this mode
5436 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
5437
5438 If *POP0 or OP1 are NIL, it means no operation is required. Only NEG, PLUS,
5439 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
5440 result is simply *PCONST0.
5441
5442 If the resulting operation cannot be expressed as one operation, we
5443 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
5444
5445 static int
5446 merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
5447 enum rtx_code *pop0;
5448 HOST_WIDE_INT *pconst0;
5449 enum rtx_code op1;
5450 HOST_WIDE_INT const1;
5451 enum machine_mode mode;
5452 int *pcomp_p;
5453 {
5454 enum rtx_code op0 = *pop0;
5455 HOST_WIDE_INT const0 = *pconst0;
5456
5457 const0 &= GET_MODE_MASK (mode);
5458 const1 &= GET_MODE_MASK (mode);
5459
5460 /* If OP0 is an AND, clear unimportant bits in CONST1. */
5461 if (op0 == AND)
5462 const1 &= const0;
5463
5464 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
5465 if OP0 is SET. */
5466
5467 if (op1 == NIL || op0 == SET)
5468 return 1;
5469
5470 else if (op0 == NIL)
5471 op0 = op1, const0 = const1;
5472
5473 else if (op0 == op1)
5474 {
5475 switch (op0)
5476 {
5477 case AND:
5478 const0 &= const1;
5479 break;
5480 case IOR:
5481 const0 |= const1;
5482 break;
5483 case XOR:
5484 const0 ^= const1;
5485 break;
5486 case PLUS:
5487 const0 += const1;
5488 break;
5489 case NEG:
5490 op0 = NIL;
5491 break;
5492 }
5493 }
5494
5495 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
5496 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
5497 return 0;
5498
5499 /* If the two constants aren't the same, we can't do anything. The
5500 remaining six cases can all be done. */
5501 else if (const0 != const1)
5502 return 0;
5503
5504 else
5505 switch (op0)
5506 {
5507 case IOR:
5508 if (op1 == AND)
5509 /* (a & b) | b == b */
5510 op0 = SET;
5511 else /* op1 == XOR */
5512 /* (a ^ b) | b == a | b */
5513 ;
5514 break;
5515
5516 case XOR:
5517 if (op1 == AND)
5518 /* (a & b) ^ b == (~a) & b */
5519 op0 = AND, *pcomp_p = 1;
5520 else /* op1 == IOR */
5521 /* (a | b) ^ b == a & ~b */
5522 op0 = AND, *pconst0 = ~ const0;
5523 break;
5524
5525 case AND:
5526 if (op1 == IOR)
5527 /* (a | b) & b == b */
5528 op0 = SET;
5529 else /* op1 == XOR */
5530 /* (a ^ b) & b) == (~a) & b */
5531 *pcomp_p = 1;
5532 break;
5533 }
5534
5535 /* Check for NO-OP cases. */
5536 const0 &= GET_MODE_MASK (mode);
5537 if (const0 == 0
5538 && (op0 == IOR || op0 == XOR || op0 == PLUS))
5539 op0 = NIL;
5540 else if (const0 == 0 && op0 == AND)
5541 op0 = SET;
5542 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
5543 op0 = NIL;
5544
5545 *pop0 = op0;
5546 *pconst0 = const0;
5547
5548 return 1;
5549 }
5550 \f
5551 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
5552 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
5553 that we started with.
5554
5555 The shift is normally computed in the widest mode we find in VAROP, as
5556 long as it isn't a different number of words than RESULT_MODE. Exceptions
5557 are ASHIFTRT and ROTATE, which are always done in their original mode, */
5558
5559 static rtx
5560 simplify_shift_const (x, code, result_mode, varop, count)
5561 rtx x;
5562 enum rtx_code code;
5563 enum machine_mode result_mode;
5564 rtx varop;
5565 int count;
5566 {
5567 enum rtx_code orig_code = code;
5568 int orig_count = count;
5569 enum machine_mode mode = result_mode;
5570 enum machine_mode shift_mode, tmode;
5571 int mode_words
5572 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
5573 /* We form (outer_op (code varop count) (outer_const)). */
5574 enum rtx_code outer_op = NIL;
5575 HOST_WIDE_INT outer_const;
5576 rtx const_rtx;
5577 int complement_p = 0;
5578 rtx new;
5579
5580 /* If we were given an invalid count, don't do anything except exactly
5581 what was requested. */
5582
5583 if (count < 0 || count > GET_MODE_BITSIZE (mode))
5584 {
5585 if (x)
5586 return x;
5587
5588 return gen_rtx (code, mode, varop, GEN_INT (count));
5589 }
5590
5591 /* Unless one of the branches of the `if' in this loop does a `continue',
5592 we will `break' the loop after the `if'. */
5593
5594 while (count != 0)
5595 {
5596 /* If we have an operand of (clobber (const_int 0)), just return that
5597 value. */
5598 if (GET_CODE (varop) == CLOBBER)
5599 return varop;
5600
5601 /* If we discovered we had to complement VAROP, leave. Making a NOT
5602 here would cause an infinite loop. */
5603 if (complement_p)
5604 break;
5605
5606 /* Convert ROTATETRT to ROTATE. */
5607 if (code == ROTATERT)
5608 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
5609
5610 /* Canonicalize LSHIFT to ASHIFT. */
5611 if (code == LSHIFT)
5612 code = ASHIFT;
5613
5614 /* We need to determine what mode we will do the shift in. If the
5615 shift is a ASHIFTRT or ROTATE, we must always do it in the mode it
5616 was originally done in. Otherwise, we can do it in MODE, the widest
5617 mode encountered. */
5618 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
5619
5620 /* Handle cases where the count is greater than the size of the mode
5621 minus 1. For ASHIFT, use the size minus one as the count (this can
5622 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
5623 take the count modulo the size. For other shifts, the result is
5624 zero.
5625
5626 Since these shifts are being produced by the compiler by combining
5627 multiple operations, each of which are defined, we know what the
5628 result is supposed to be. */
5629
5630 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
5631 {
5632 if (code == ASHIFTRT)
5633 count = GET_MODE_BITSIZE (shift_mode) - 1;
5634 else if (code == ROTATE || code == ROTATERT)
5635 count %= GET_MODE_BITSIZE (shift_mode);
5636 else
5637 {
5638 /* We can't simply return zero because there may be an
5639 outer op. */
5640 varop = const0_rtx;
5641 count = 0;
5642 break;
5643 }
5644 }
5645
5646 /* Negative counts are invalid and should not have been made (a
5647 programmer-specified negative count should have been handled
5648 above). */
5649 else if (count < 0)
5650 abort ();
5651
5652 /* We simplify the tests below and elsewhere by converting
5653 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
5654 `make_compound_operation' will convert it to a ASHIFTRT for
5655 those machines (such as Vax) that don't have a LSHIFTRT. */
5656 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
5657 && code == ASHIFTRT
5658 && ((significant_bits (varop, shift_mode)
5659 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
5660 == 0))
5661 code = LSHIFTRT;
5662
5663 switch (GET_CODE (varop))
5664 {
5665 case SIGN_EXTEND:
5666 case ZERO_EXTEND:
5667 case SIGN_EXTRACT:
5668 case ZERO_EXTRACT:
5669 new = expand_compound_operation (varop);
5670 if (new != varop)
5671 {
5672 varop = new;
5673 continue;
5674 }
5675 break;
5676
5677 case MEM:
5678 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
5679 minus the width of a smaller mode, we can do this with a
5680 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
5681 if ((code == ASHIFTRT || code == LSHIFTRT)
5682 && ! mode_dependent_address_p (XEXP (varop, 0))
5683 && ! MEM_VOLATILE_P (varop)
5684 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
5685 MODE_INT, 1)) != BLKmode)
5686 {
5687 #if BYTES_BIG_ENDIAN
5688 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
5689 #else
5690 new = gen_rtx (MEM, tmode,
5691 plus_constant (XEXP (varop, 0),
5692 count / BITS_PER_UNIT));
5693 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
5694 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
5695 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
5696 #endif
5697 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
5698 : ZERO_EXTEND, mode, new);
5699 count = 0;
5700 continue;
5701 }
5702 break;
5703
5704 case USE:
5705 /* Similar to the case above, except that we can only do this if
5706 the resulting mode is the same as that of the underlying
5707 MEM and adjust the address depending on the *bits* endianness
5708 because of the way that bit-field extract insns are defined. */
5709 if ((code == ASHIFTRT || code == LSHIFTRT)
5710 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
5711 MODE_INT, 1)) != BLKmode
5712 && tmode == GET_MODE (XEXP (varop, 0)))
5713 {
5714 #if BITS_BIG_ENDIAN
5715 new = XEXP (varop, 0);
5716 #else
5717 new = copy_rtx (XEXP (varop, 0));
5718 SUBST (XEXP (new, 0),
5719 plus_constant (XEXP (new, 0),
5720 count / BITS_PER_UNIT));
5721 #endif
5722
5723 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
5724 : ZERO_EXTEND, mode, new);
5725 count = 0;
5726 continue;
5727 }
5728 break;
5729
5730 case SUBREG:
5731 /* If VAROP is a SUBREG, strip it as long as the inner operand has
5732 the same number of words as what we've seen so far. Then store
5733 the widest mode in MODE. */
5734 if (SUBREG_WORD (varop) == 0
5735 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
5736 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
5737 == mode_words))
5738 {
5739 varop = SUBREG_REG (varop);
5740 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
5741 mode = GET_MODE (varop);
5742 continue;
5743 }
5744 break;
5745
5746 case MULT:
5747 /* Some machines use MULT instead of ASHIFT because MULT
5748 is cheaper. But it is still better on those machines to
5749 merge two shifts into one. */
5750 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5751 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
5752 {
5753 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
5754 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));;
5755 continue;
5756 }
5757 break;
5758
5759 case UDIV:
5760 /* Similar, for when divides are cheaper. */
5761 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5762 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
5763 {
5764 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5765 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
5766 continue;
5767 }
5768 break;
5769
5770 case ASHIFTRT:
5771 /* If we are extracting just the sign bit of an arithmetic right
5772 shift, that shift is not needed. */
5773 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
5774 {
5775 varop = XEXP (varop, 0);
5776 continue;
5777 }
5778
5779 /* ... fall through ... */
5780
5781 case LSHIFTRT:
5782 case ASHIFT:
5783 case LSHIFT:
5784 case ROTATE:
5785 /* Here we have two nested shifts. The result is usually the
5786 AND of a new shift with a mask. We compute the result below. */
5787 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5788 && INTVAL (XEXP (varop, 1)) >= 0
5789 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
5790 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
5791 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
5792 {
5793 enum rtx_code first_code = GET_CODE (varop);
5794 int first_count = INTVAL (XEXP (varop, 1));
5795 unsigned HOST_WIDE_INT mask;
5796 rtx mask_rtx;
5797 rtx inner;
5798
5799 if (first_code == LSHIFT)
5800 first_code = ASHIFT;
5801
5802 /* We have one common special case. We can't do any merging if
5803 the inner code is an ASHIFTRT of a smaller mode. However, if
5804 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
5805 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
5806 we can convert it to
5807 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0 C2) C3) C1).
5808 This simplifies certain SIGN_EXTEND operations. */
5809 if (code == ASHIFT && first_code == ASHIFTRT
5810 && (GET_MODE_BITSIZE (result_mode)
5811 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
5812 {
5813 /* C3 has the low-order C1 bits zero. */
5814
5815 mask = (GET_MODE_MASK (mode)
5816 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
5817
5818 varop = simplify_and_const_int (NULL_RTX, result_mode,
5819 XEXP (varop, 0), mask);
5820 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
5821 varop, count);
5822 count = first_count;
5823 code = ASHIFTRT;
5824 continue;
5825 }
5826
5827 /* If this was (ashiftrt (ashift foo C1) C2) and we know
5828 something about FOO's previous value, we may be able to
5829 optimize this even though the code below can't handle this
5830 case.
5831
5832 If FOO has J high-order bits equal to the sign bit with
5833 J > C1, then we can convert this to either an ASHIFT or
5834 a ASHIFTRT depending on the two counts.
5835
5836 We cannot do this if VAROP's mode is not SHIFT_MODE. */
5837
5838 if (code == ASHIFTRT && first_code == ASHIFT
5839 && GET_MODE (varop) == shift_mode
5840 && (inner = get_last_value (XEXP (varop, 0))) != 0)
5841 {
5842 if ((GET_CODE (inner) == CONST_INT
5843 && (((INTVAL (inner)
5844 >> (HOST_BITS_PER_WIDE_INT - (first_count + 1)))
5845 == 0)
5846 || ((INTVAL (inner)
5847 >> (HOST_BITS_PER_WIDE_INT - (first_count + 1)))
5848 == -1)))
5849 || (GET_CODE (inner) == SIGN_EXTEND
5850 && ((GET_MODE_BITSIZE (GET_MODE (inner))
5851 - GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (inner))))
5852 >= first_count))
5853 || (GET_CODE (inner) == ASHIFTRT
5854 && GET_CODE (XEXP (inner, 1)) == CONST_INT
5855 && INTVAL (XEXP (inner, 1)) >= first_count))
5856 {
5857 count -= first_count;
5858 if (count < 0)
5859 count = - count, code = ASHIFT;
5860 varop = XEXP (varop, 0);
5861 continue;
5862 }
5863 }
5864
5865 /* There are some cases we can't do. If CODE is ASHIFTRT,
5866 we can only do this if FIRST_CODE is also ASHIFTRT.
5867
5868 We can't do the case when CODE is ROTATE and FIRST_CODE is
5869 ASHIFTRT.
5870
5871 If the mode of this shift is not the mode of the outer shift,
5872 we can't do this if either shift is ASHIFTRT or ROTATE.
5873
5874 Finally, we can't do any of these if the mode is too wide
5875 unless the codes are the same.
5876
5877 Handle the case where the shift codes are the same
5878 first. */
5879
5880 if (code == first_code)
5881 {
5882 if (GET_MODE (varop) != result_mode
5883 && (code == ASHIFTRT || code == ROTATE))
5884 break;
5885
5886 count += first_count;
5887 varop = XEXP (varop, 0);
5888 continue;
5889 }
5890
5891 if (code == ASHIFTRT
5892 || (code == ROTATE && first_code == ASHIFTRT)
5893 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
5894 || (GET_MODE (varop) != result_mode
5895 && (first_code == ASHIFTRT || first_code == ROTATE
5896 || code == ROTATE)))
5897 break;
5898
5899 /* To compute the mask to apply after the shift, shift the
5900 significant bits of the inner shift the same way the
5901 outer shift will. */
5902
5903 mask_rtx = GEN_INT (significant_bits (varop, GET_MODE (varop)));
5904
5905 mask_rtx
5906 = simplify_binary_operation (code, result_mode, mask_rtx,
5907 GEN_INT (count));
5908
5909 /* Give up if we can't compute an outer operation to use. */
5910 if (mask_rtx == 0
5911 || GET_CODE (mask_rtx) != CONST_INT
5912 || ! merge_outer_ops (&outer_op, &outer_const, AND,
5913 INTVAL (mask_rtx),
5914 result_mode, &complement_p))
5915 break;
5916
5917 /* If the shifts are in the same direction, we add the
5918 counts. Otherwise, we subtract them. */
5919 if ((code == ASHIFTRT || code == LSHIFTRT)
5920 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
5921 count += first_count;
5922 else
5923 count -= first_count;
5924
5925 /* If COUNT is positive, the new shift is usually CODE,
5926 except for the two exceptions below, in which case it is
5927 FIRST_CODE. If the count is negative, FIRST_CODE should
5928 always be used */
5929 if (count > 0
5930 && ((first_code == ROTATE && code == ASHIFT)
5931 || (first_code == ASHIFTRT && code == LSHIFTRT)))
5932 code = first_code;
5933 else if (count < 0)
5934 code = first_code, count = - count;
5935
5936 varop = XEXP (varop, 0);
5937 continue;
5938 }
5939
5940 /* If we have (A << B << C) for any shift, we can convert this to
5941 (A << C << B). This wins if A is a constant. Only try this if
5942 B is not a constant. */
5943
5944 else if (GET_CODE (varop) == code
5945 && GET_CODE (XEXP (varop, 1)) != CONST_INT
5946 && 0 != (new
5947 = simplify_binary_operation (code, mode,
5948 XEXP (varop, 0),
5949 GEN_INT (count))))
5950 {
5951 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
5952 count = 0;
5953 continue;
5954 }
5955 break;
5956
5957 case NOT:
5958 /* Make this fit the case below. */
5959 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
5960 GEN_INT (GET_MODE_MASK (mode)));
5961 continue;
5962
5963 case IOR:
5964 case AND:
5965 case XOR:
5966 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
5967 with C the size of VAROP - 1 and the shift is logical if
5968 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
5969 we have an (le X 0) operation. If we have an arithmetic shift
5970 and STORE_FLAG_VALUE is 1 or we have a logical shift with
5971 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
5972
5973 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
5974 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
5975 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
5976 && (code == LSHIFTRT || code == ASHIFTRT)
5977 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
5978 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
5979 {
5980 count = 0;
5981 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
5982 const0_rtx);
5983
5984 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
5985 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
5986
5987 continue;
5988 }
5989
5990 /* If we have (shift (logical)), move the logical to the outside
5991 to allow it to possibly combine with another logical and the
5992 shift to combine with another shift. This also canonicalizes to
5993 what a ZERO_EXTRACT looks like. Also, some machines have
5994 (and (shift)) insns. */
5995
5996 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5997 && (new = simplify_binary_operation (code, result_mode,
5998 XEXP (varop, 1),
5999 GEN_INT (count))) != 0
6000 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
6001 INTVAL (new), result_mode, &complement_p))
6002 {
6003 varop = XEXP (varop, 0);
6004 continue;
6005 }
6006
6007 /* If we can't do that, try to simplify the shift in each arm of the
6008 logical expression, make a new logical expression, and apply
6009 the inverse distributive law. */
6010 {
6011 rtx lhs = simplify_shift_const (NULL_RTX, code, result_mode,
6012 XEXP (varop, 0), count);
6013 rtx rhs = simplify_shift_const (NULL_RTX, code, result_mode,
6014 XEXP (varop, 1), count);
6015
6016 varop = gen_binary (GET_CODE (varop), result_mode, lhs, rhs);
6017 varop = apply_distributive_law (varop);
6018
6019 count = 0;
6020 }
6021 break;
6022
6023 case EQ:
6024 /* convert (lshift (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
6025 says that the sign bit can be tested, FOO has mode MODE, C is
6026 GET_MODE_BITSIZE (MODE) - 1, and FOO has only the low-order bit
6027 significant. */
6028 if (code == LSHIFT
6029 && XEXP (varop, 1) == const0_rtx
6030 && GET_MODE (XEXP (varop, 0)) == result_mode
6031 && count == GET_MODE_BITSIZE (result_mode) - 1
6032 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
6033 && ((STORE_FLAG_VALUE
6034 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
6035 && significant_bits (XEXP (varop, 0), result_mode) == 1
6036 && merge_outer_ops (&outer_op, &outer_const, XOR,
6037 (HOST_WIDE_INT) 1, result_mode,
6038 &complement_p))
6039 {
6040 varop = XEXP (varop, 0);
6041 count = 0;
6042 continue;
6043 }
6044 break;
6045
6046 case NEG:
6047 /* If we are doing an arithmetic right shift of something known
6048 to be -1 or 0, we don't need the shift. */
6049 if (code == ASHIFTRT
6050 && significant_bits (XEXP (varop, 0), result_mode) == 1)
6051 {
6052 count = 0;
6053 continue;
6054 }
6055
6056 /* NEG commutes with ASHIFT since it is multiplication. Move the
6057 NEG outside to allow shifts to combine. */
6058 if (code == ASHIFT
6059 && merge_outer_ops (&outer_op, &outer_const, NEG,
6060 (HOST_WIDE_INT) 0, result_mode,
6061 &complement_p))
6062 {
6063 varop = XEXP (varop, 0);
6064 continue;
6065 }
6066 break;
6067
6068 case PLUS:
6069 /* Similar to case above. If X is 0 or 1 then X - 1 is -1 or 0. */
6070 if (XEXP (varop, 1) == constm1_rtx && code == ASHIFTRT
6071 && significant_bits (XEXP (varop, 0), result_mode) == 1)
6072 {
6073 count = 0;
6074 continue;
6075 }
6076
6077 /* If we have the same operands as above but we are shifting the
6078 sign bit into the low-order bit, we are exclusive-or'ing
6079 the operand of the PLUS with a one. */
6080 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
6081 && XEXP (varop, 1) == constm1_rtx
6082 && significant_bits (XEXP (varop, 0), result_mode) == 1
6083 && merge_outer_ops (&outer_op, &outer_const, XOR,
6084 (HOST_WIDE_INT) 1, result_mode,
6085 &complement_p))
6086 {
6087 count = 0;
6088 varop = XEXP (varop, 0);
6089 continue;
6090 }
6091
6092 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
6093 significant in BAR are those being shifted out and those
6094 bits are known zero in FOO, we can replace the PLUS with FOO.
6095 Similarly in the other operand order. This code occurs when
6096 we are computing the size of a variable-size array. */
6097
6098 if ((code == ASHIFTRT || code == LSHIFTRT)
6099 && count < HOST_BITS_PER_WIDE_INT
6100 && significant_bits (XEXP (varop, 1), result_mode) >> count == 0
6101 && (significant_bits (XEXP (varop, 1), result_mode)
6102 & significant_bits (XEXP (varop, 0), result_mode)) == 0)
6103 {
6104 varop = XEXP (varop, 0);
6105 continue;
6106 }
6107 else if ((code == ASHIFTRT || code == LSHIFTRT)
6108 && count < HOST_BITS_PER_WIDE_INT
6109 && 0 == (significant_bits (XEXP (varop, 0), result_mode)
6110 >> count)
6111 && 0 == (significant_bits (XEXP (varop, 0), result_mode)
6112 & significant_bits (XEXP (varop, 1),
6113 result_mode)))
6114 {
6115 varop = XEXP (varop, 1);
6116 continue;
6117 }
6118
6119 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
6120 if (code == ASHIFT
6121 && GET_CODE (XEXP (varop, 1)) == CONST_INT
6122 && (new = simplify_binary_operation (ASHIFT, result_mode,
6123 XEXP (varop, 1),
6124 GEN_INT (count))) != 0
6125 && merge_outer_ops (&outer_op, &outer_const, PLUS,
6126 INTVAL (new), result_mode, &complement_p))
6127 {
6128 varop = XEXP (varop, 0);
6129 continue;
6130 }
6131 break;
6132
6133 case MINUS:
6134 /* If we have (xshiftrt (minus (ashiftrt X C)) X) C)
6135 with C the size of VAROP - 1 and the shift is logical if
6136 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
6137 we have a (gt X 0) operation. If the shift is arithmetic with
6138 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
6139 we have a (neg (gt X 0)) operation. */
6140
6141 if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
6142 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
6143 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6144 && (code == LSHIFTRT || code == ASHIFTRT)
6145 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
6146 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
6147 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
6148 {
6149 count = 0;
6150 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
6151 const0_rtx);
6152
6153 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
6154 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
6155
6156 continue;
6157 }
6158 break;
6159 }
6160
6161 break;
6162 }
6163
6164 /* We need to determine what mode to do the shift in. If the shift is
6165 a ASHIFTRT or ROTATE, we must always do it in the mode it was originally
6166 done in. Otherwise, we can do it in MODE, the widest mode encountered.
6167 The code we care about is that of the shift that will actually be done,
6168 not the shift that was originally requested. */
6169 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
6170
6171 /* We have now finished analyzing the shift. The result should be
6172 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
6173 OUTER_OP is non-NIL, it is an operation that needs to be applied
6174 to the result of the shift. OUTER_CONST is the relevant constant,
6175 but we must turn off all bits turned off in the shift.
6176
6177 If we were passed a value for X, see if we can use any pieces of
6178 it. If not, make new rtx. */
6179
6180 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
6181 && GET_CODE (XEXP (x, 1)) == CONST_INT
6182 && INTVAL (XEXP (x, 1)) == count)
6183 const_rtx = XEXP (x, 1);
6184 else
6185 const_rtx = GEN_INT (count);
6186
6187 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
6188 && GET_MODE (XEXP (x, 0)) == shift_mode
6189 && SUBREG_REG (XEXP (x, 0)) == varop)
6190 varop = XEXP (x, 0);
6191 else if (GET_MODE (varop) != shift_mode)
6192 varop = gen_lowpart_for_combine (shift_mode, varop);
6193
6194 /* If we can't make the SUBREG, try to return what we were given. */
6195 if (GET_CODE (varop) == CLOBBER)
6196 return x ? x : varop;
6197
6198 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
6199 if (new != 0)
6200 x = new;
6201 else
6202 {
6203 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
6204 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
6205
6206 SUBST (XEXP (x, 0), varop);
6207 SUBST (XEXP (x, 1), const_rtx);
6208 }
6209
6210 /* If we were doing a LSHIFTRT in a wider mode than it was originally,
6211 turn off all the bits that the shift would have turned off. */
6212 if (orig_code == LSHIFTRT && result_mode != shift_mode)
6213 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
6214 GET_MODE_MASK (result_mode) >> orig_count);
6215
6216 /* Do the remainder of the processing in RESULT_MODE. */
6217 x = gen_lowpart_for_combine (result_mode, x);
6218
6219 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
6220 operation. */
6221 if (complement_p)
6222 x = gen_unary (NOT, result_mode, x);
6223
6224 if (outer_op != NIL)
6225 {
6226 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
6227 outer_const &= GET_MODE_MASK (result_mode);
6228
6229 if (outer_op == AND)
6230 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
6231 else if (outer_op == SET)
6232 /* This means that we have determined that the result is
6233 equivalent to a constant. This should be rare. */
6234 x = GEN_INT (outer_const);
6235 else if (GET_RTX_CLASS (outer_op) == '1')
6236 x = gen_unary (outer_op, result_mode, x);
6237 else
6238 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
6239 }
6240
6241 return x;
6242 }
6243 \f
6244 /* Like recog, but we receive the address of a pointer to a new pattern.
6245 We try to match the rtx that the pointer points to.
6246 If that fails, we may try to modify or replace the pattern,
6247 storing the replacement into the same pointer object.
6248
6249 Modifications include deletion or addition of CLOBBERs.
6250
6251 PNOTES is a pointer to a location where any REG_UNUSED notes added for
6252 the CLOBBERs are placed.
6253
6254 The value is the final insn code from the pattern ultimately matched,
6255 or -1. */
6256
6257 static int
6258 recog_for_combine (pnewpat, insn, pnotes)
6259 rtx *pnewpat;
6260 rtx insn;
6261 rtx *pnotes;
6262 {
6263 register rtx pat = *pnewpat;
6264 int insn_code_number;
6265 int num_clobbers_to_add = 0;
6266 int i;
6267 rtx notes = 0;
6268
6269 /* Is the result of combination a valid instruction? */
6270 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
6271
6272 /* If it isn't, there is the possibility that we previously had an insn
6273 that clobbered some register as a side effect, but the combined
6274 insn doesn't need to do that. So try once more without the clobbers
6275 unless this represents an ASM insn. */
6276
6277 if (insn_code_number < 0 && ! check_asm_operands (pat)
6278 && GET_CODE (pat) == PARALLEL)
6279 {
6280 int pos;
6281
6282 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
6283 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
6284 {
6285 if (i != pos)
6286 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
6287 pos++;
6288 }
6289
6290 SUBST_INT (XVECLEN (pat, 0), pos);
6291
6292 if (pos == 1)
6293 pat = XVECEXP (pat, 0, 0);
6294
6295 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
6296 }
6297
6298 /* If we had any clobbers to add, make a new pattern than contains
6299 them. Then check to make sure that all of them are dead. */
6300 if (num_clobbers_to_add)
6301 {
6302 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
6303 gen_rtvec (GET_CODE (pat) == PARALLEL
6304 ? XVECLEN (pat, 0) + num_clobbers_to_add
6305 : num_clobbers_to_add + 1));
6306
6307 if (GET_CODE (pat) == PARALLEL)
6308 for (i = 0; i < XVECLEN (pat, 0); i++)
6309 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
6310 else
6311 XVECEXP (newpat, 0, 0) = pat;
6312
6313 add_clobbers (newpat, insn_code_number);
6314
6315 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
6316 i < XVECLEN (newpat, 0); i++)
6317 {
6318 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
6319 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
6320 return -1;
6321 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
6322 XEXP (XVECEXP (newpat, 0, i), 0), notes);
6323 }
6324 pat = newpat;
6325 }
6326
6327 *pnewpat = pat;
6328 *pnotes = notes;
6329
6330 return insn_code_number;
6331 }
6332 \f
6333 /* Like gen_lowpart but for use by combine. In combine it is not possible
6334 to create any new pseudoregs. However, it is safe to create
6335 invalid memory addresses, because combine will try to recognize
6336 them and all they will do is make the combine attempt fail.
6337
6338 If for some reason this cannot do its job, an rtx
6339 (clobber (const_int 0)) is returned.
6340 An insn containing that will not be recognized. */
6341
6342 #undef gen_lowpart
6343
6344 static rtx
6345 gen_lowpart_for_combine (mode, x)
6346 enum machine_mode mode;
6347 register rtx x;
6348 {
6349 rtx result;
6350
6351 if (GET_MODE (x) == mode)
6352 return x;
6353
6354 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6355 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
6356
6357 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
6358 won't know what to do. So we will strip off the SUBREG here and
6359 process normally. */
6360 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
6361 {
6362 x = SUBREG_REG (x);
6363 if (GET_MODE (x) == mode)
6364 return x;
6365 }
6366
6367 result = gen_lowpart_common (mode, x);
6368 if (result)
6369 return result;
6370
6371 if (GET_CODE (x) == MEM)
6372 {
6373 register int offset = 0;
6374 rtx new;
6375
6376 /* Refuse to work on a volatile memory ref or one with a mode-dependent
6377 address. */
6378 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
6379 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
6380
6381 /* If we want to refer to something bigger than the original memref,
6382 generate a perverse subreg instead. That will force a reload
6383 of the original memref X. */
6384 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
6385 return gen_rtx (SUBREG, mode, x, 0);
6386
6387 #if WORDS_BIG_ENDIAN
6388 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
6389 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
6390 #endif
6391 #if BYTES_BIG_ENDIAN
6392 /* Adjust the address so that the address-after-the-data
6393 is unchanged. */
6394 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
6395 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
6396 #endif
6397 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
6398 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
6399 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
6400 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
6401 return new;
6402 }
6403
6404 /* If X is a comparison operator, rewrite it in a new mode. This
6405 probably won't match, but may allow further simplifications. */
6406 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
6407 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
6408
6409 /* If we couldn't simplify X any other way, just enclose it in a
6410 SUBREG. Normally, this SUBREG won't match, but some patterns may
6411 include an explicit SUBREG or we may simplify it further in combine. */
6412 else
6413 {
6414 int word = 0;
6415
6416 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
6417 word = ((GET_MODE_SIZE (GET_MODE (x))
6418 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
6419 / UNITS_PER_WORD);
6420 return gen_rtx (SUBREG, mode, x, word);
6421 }
6422 }
6423 \f
6424 /* Make an rtx expression. This is a subset of gen_rtx and only supports
6425 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
6426
6427 If the identical expression was previously in the insn (in the undobuf),
6428 it will be returned. Only if it is not found will a new expression
6429 be made. */
6430
6431 /*VARARGS2*/
6432 static rtx
6433 gen_rtx_combine (va_alist)
6434 va_dcl
6435 {
6436 va_list p;
6437 enum rtx_code code;
6438 enum machine_mode mode;
6439 int n_args;
6440 rtx args[3];
6441 int i, j;
6442 char *fmt;
6443 rtx rt;
6444
6445 va_start (p);
6446 code = va_arg (p, enum rtx_code);
6447 mode = va_arg (p, enum machine_mode);
6448 n_args = GET_RTX_LENGTH (code);
6449 fmt = GET_RTX_FORMAT (code);
6450
6451 if (n_args == 0 || n_args > 3)
6452 abort ();
6453
6454 /* Get each arg and verify that it is supposed to be an expression. */
6455 for (j = 0; j < n_args; j++)
6456 {
6457 if (*fmt++ != 'e')
6458 abort ();
6459
6460 args[j] = va_arg (p, rtx);
6461 }
6462
6463 /* See if this is in undobuf. Be sure we don't use objects that came
6464 from another insn; this could produce circular rtl structures. */
6465
6466 for (i = previous_num_undos; i < undobuf.num_undo; i++)
6467 if (!undobuf.undo[i].is_int
6468 && GET_CODE (undobuf.undo[i].old_contents) == code
6469 && GET_MODE (undobuf.undo[i].old_contents) == mode)
6470 {
6471 for (j = 0; j < n_args; j++)
6472 if (XEXP (undobuf.undo[i].old_contents, j) != args[j])
6473 break;
6474
6475 if (j == n_args)
6476 return undobuf.undo[i].old_contents;
6477 }
6478
6479 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
6480 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
6481 rt = rtx_alloc (code);
6482 PUT_MODE (rt, mode);
6483 XEXP (rt, 0) = args[0];
6484 if (n_args > 1)
6485 {
6486 XEXP (rt, 1) = args[1];
6487 if (n_args > 2)
6488 XEXP (rt, 2) = args[2];
6489 }
6490 return rt;
6491 }
6492
6493 /* These routines make binary and unary operations by first seeing if they
6494 fold; if not, a new expression is allocated. */
6495
6496 static rtx
6497 gen_binary (code, mode, op0, op1)
6498 enum rtx_code code;
6499 enum machine_mode mode;
6500 rtx op0, op1;
6501 {
6502 rtx result;
6503
6504 if (GET_RTX_CLASS (code) == '<')
6505 {
6506 enum machine_mode op_mode = GET_MODE (op0);
6507 if (op_mode == VOIDmode)
6508 op_mode = GET_MODE (op1);
6509 result = simplify_relational_operation (code, op_mode, op0, op1);
6510 }
6511 else
6512 result = simplify_binary_operation (code, mode, op0, op1);
6513
6514 if (result)
6515 return result;
6516
6517 /* Put complex operands first and constants second. */
6518 if (GET_RTX_CLASS (code) == 'c'
6519 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
6520 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
6521 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
6522 || (GET_CODE (op0) == SUBREG
6523 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
6524 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
6525 return gen_rtx_combine (code, mode, op1, op0);
6526
6527 return gen_rtx_combine (code, mode, op0, op1);
6528 }
6529
6530 static rtx
6531 gen_unary (code, mode, op0)
6532 enum rtx_code code;
6533 enum machine_mode mode;
6534 rtx op0;
6535 {
6536 rtx result = simplify_unary_operation (code, mode, op0, mode);
6537
6538 if (result)
6539 return result;
6540
6541 return gen_rtx_combine (code, mode, op0);
6542 }
6543 \f
6544 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
6545 comparison code that will be tested.
6546
6547 The result is a possibly different comparison code to use. *POP0 and
6548 *POP1 may be updated.
6549
6550 It is possible that we might detect that a comparison is either always
6551 true or always false. However, we do not perform general constant
6552 folding in combine, so this knowledge isn't useful. Such tautologies
6553 should have been detected earlier. Hence we ignore all such cases. */
6554
6555 static enum rtx_code
6556 simplify_comparison (code, pop0, pop1)
6557 enum rtx_code code;
6558 rtx *pop0;
6559 rtx *pop1;
6560 {
6561 rtx op0 = *pop0;
6562 rtx op1 = *pop1;
6563 rtx tem, tem1;
6564 int i;
6565 enum machine_mode mode, tmode;
6566
6567 /* Try a few ways of applying the same transformation to both operands. */
6568 while (1)
6569 {
6570 /* If both operands are the same constant shift, see if we can ignore the
6571 shift. We can if the shift is a rotate or if the bits shifted out of
6572 this shift are not significant for either input and if the type of
6573 comparison is compatible with the shift. */
6574 if (GET_CODE (op0) == GET_CODE (op1)
6575 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
6576 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
6577 || ((GET_CODE (op0) == LSHIFTRT
6578 || GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
6579 && (code != GT && code != LT && code != GE && code != LE))
6580 || (GET_CODE (op0) == ASHIFTRT
6581 && (code != GTU && code != LTU
6582 && code != GEU && code != GEU)))
6583 && GET_CODE (XEXP (op0, 1)) == CONST_INT
6584 && INTVAL (XEXP (op0, 1)) >= 0
6585 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
6586 && XEXP (op0, 1) == XEXP (op1, 1))
6587 {
6588 enum machine_mode mode = GET_MODE (op0);
6589 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
6590 int shift_count = INTVAL (XEXP (op0, 1));
6591
6592 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
6593 mask &= (mask >> shift_count) << shift_count;
6594 else if (GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
6595 mask = (mask & (mask << shift_count)) >> shift_count;
6596
6597 if ((significant_bits (XEXP (op0, 0), mode) & ~ mask) == 0
6598 && (significant_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
6599 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
6600 else
6601 break;
6602 }
6603
6604 /* If both operands are AND's of a paradoxical SUBREG by constant, the
6605 SUBREGs are of the same mode, and, in both cases, the AND would
6606 be redundant if the comparison was done in the narrower mode,
6607 do the comparison in the narrower mode (e.g., we are AND'ing with 1
6608 and the operand's significant bits are 0xffffff01; in that case if
6609 we only care about QImode, we don't need the AND). This case occurs
6610 if the output mode of an scc insn is not SImode and
6611 STORE_FLAG_VALUE == 1 (e.g., the 386). */
6612
6613 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
6614 && GET_CODE (XEXP (op0, 1)) == CONST_INT
6615 && GET_CODE (XEXP (op1, 1)) == CONST_INT
6616 && GET_CODE (XEXP (op0, 0)) == SUBREG
6617 && GET_CODE (XEXP (op1, 0)) == SUBREG
6618 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
6619 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
6620 && (GET_MODE (SUBREG_REG (XEXP (op0, 0)))
6621 == GET_MODE (SUBREG_REG (XEXP (op1, 0))))
6622 && (significant_bits (SUBREG_REG (XEXP (op0, 0)),
6623 GET_MODE (SUBREG_REG (XEXP (op0, 0))))
6624 & ~ INTVAL (XEXP (op0, 1))) == 0
6625 && (significant_bits (SUBREG_REG (XEXP (op1, 0)),
6626 GET_MODE (SUBREG_REG (XEXP (op1, 0))))
6627 & ~ INTVAL (XEXP (op1, 1))) == 0)
6628 {
6629 op0 = SUBREG_REG (XEXP (op0, 0));
6630 op1 = SUBREG_REG (XEXP (op1, 0));
6631
6632 /* the resulting comparison is always unsigned since we masked off
6633 the original sign bit. */
6634 code = unsigned_condition (code);
6635 }
6636 else
6637 break;
6638 }
6639
6640 /* If the first operand is a constant, swap the operands and adjust the
6641 comparison code appropriately. */
6642 if (CONSTANT_P (op0))
6643 {
6644 tem = op0, op0 = op1, op1 = tem;
6645 code = swap_condition (code);
6646 }
6647
6648 /* We now enter a loop during which we will try to simplify the comparison.
6649 For the most part, we only are concerned with comparisons with zero,
6650 but some things may really be comparisons with zero but not start
6651 out looking that way. */
6652
6653 while (GET_CODE (op1) == CONST_INT)
6654 {
6655 enum machine_mode mode = GET_MODE (op0);
6656 int mode_width = GET_MODE_BITSIZE (mode);
6657 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
6658 int equality_comparison_p;
6659 int sign_bit_comparison_p;
6660 int unsigned_comparison_p;
6661 HOST_WIDE_INT const_op;
6662
6663 /* We only want to handle integral modes. This catches VOIDmode,
6664 CCmode, and the floating-point modes. An exception is that we
6665 can handle VOIDmode if OP0 is a COMPARE or a comparison
6666 operation. */
6667
6668 if (GET_MODE_CLASS (mode) != MODE_INT
6669 && ! (mode == VOIDmode
6670 && (GET_CODE (op0) == COMPARE
6671 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
6672 break;
6673
6674 /* Get the constant we are comparing against and turn off all bits
6675 not on in our mode. */
6676 const_op = INTVAL (op1);
6677 if (mode_width <= HOST_BITS_PER_WIDE_INT)
6678 const_op &= mask;
6679
6680 /* If we are comparing against a constant power of two and the value
6681 being compared has only that single significant bit (e.g., it was
6682 `and'ed with that bit), we can replace this with a comparison
6683 with zero. */
6684 if (const_op
6685 && (code == EQ || code == NE || code == GE || code == GEU
6686 || code == LT || code == LTU)
6687 && mode_width <= HOST_BITS_PER_WIDE_INT
6688 && exact_log2 (const_op) >= 0
6689 && significant_bits (op0, mode) == const_op)
6690 {
6691 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
6692 op1 = const0_rtx, const_op = 0;
6693 }
6694
6695 /* Do some canonicalizations based on the comparison code. We prefer
6696 comparisons against zero and then prefer equality comparisons.
6697 If we can reduce the size of a constant, we will do that too. */
6698
6699 switch (code)
6700 {
6701 case LT:
6702 /* < C is equivalent to <= (C - 1) */
6703 if (const_op > 0)
6704 {
6705 const_op -= 1;
6706 op1 = GEN_INT (const_op);
6707 code = LE;
6708 /* ... fall through to LE case below. */
6709 }
6710 else
6711 break;
6712
6713 case LE:
6714 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
6715 if (const_op < 0)
6716 {
6717 const_op += 1;
6718 op1 = GEN_INT (const_op);
6719 code = LT;
6720 }
6721
6722 /* If we are doing a <= 0 comparison on a value known to have
6723 a zero sign bit, we can replace this with == 0. */
6724 else if (const_op == 0
6725 && mode_width <= HOST_BITS_PER_WIDE_INT
6726 && (significant_bits (op0, mode)
6727 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
6728 code = EQ;
6729 break;
6730
6731 case GE:
6732 /* >= C is equivalent to > (C - 1). */
6733 if (const_op > 0)
6734 {
6735 const_op -= 1;
6736 op1 = GEN_INT (const_op);
6737 code = GT;
6738 /* ... fall through to GT below. */
6739 }
6740 else
6741 break;
6742
6743 case GT:
6744 /* > C is equivalent to >= (C + 1); we do this for C < 0*/
6745 if (const_op < 0)
6746 {
6747 const_op += 1;
6748 op1 = GEN_INT (const_op);
6749 code = GE;
6750 }
6751
6752 /* If we are doing a > 0 comparison on a value known to have
6753 a zero sign bit, we can replace this with != 0. */
6754 else if (const_op == 0
6755 && mode_width <= HOST_BITS_PER_WIDE_INT
6756 && (significant_bits (op0, mode)
6757 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
6758 code = NE;
6759 break;
6760
6761 case LTU:
6762 /* < C is equivalent to <= (C - 1). */
6763 if (const_op > 0)
6764 {
6765 const_op -= 1;
6766 op1 = GEN_INT (const_op);
6767 code = LEU;
6768 /* ... fall through ... */
6769 }
6770 else
6771 break;
6772
6773 case LEU:
6774 /* unsigned <= 0 is equivalent to == 0 */
6775 if (const_op == 0)
6776 code = EQ;
6777 break;
6778
6779 case GEU:
6780 /* >= C is equivalent to < (C - 1). */
6781 if (const_op > 1)
6782 {
6783 const_op -= 1;
6784 op1 = GEN_INT (const_op);
6785 code = GTU;
6786 /* ... fall through ... */
6787 }
6788 else
6789 break;
6790
6791 case GTU:
6792 /* unsigned > 0 is equivalent to != 0 */
6793 if (const_op == 0)
6794 code = NE;
6795 break;
6796 }
6797
6798 /* Compute some predicates to simplify code below. */
6799
6800 equality_comparison_p = (code == EQ || code == NE);
6801 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
6802 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
6803 || code == LEU);
6804
6805 /* Now try cases based on the opcode of OP0. If none of the cases
6806 does a "continue", we exit this loop immediately after the
6807 switch. */
6808
6809 switch (GET_CODE (op0))
6810 {
6811 case ZERO_EXTRACT:
6812 /* If we are extracting a single bit from a variable position in
6813 a constant that has only a single bit set and are comparing it
6814 with zero, we can convert this into an equality comparison
6815 between the position and the location of the single bit. We can't
6816 do this if bit endian and we don't have an extzv since we then
6817 can't know what mode to use for the endianness adjustment. */
6818
6819 #if ! BITS_BIG_ENDIAN || defined (HAVE_extzv)
6820 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
6821 && XEXP (op0, 1) == const1_rtx
6822 && equality_comparison_p && const_op == 0
6823 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
6824 {
6825 #if BITS_BIG_ENDIAN
6826 i = (GET_MODE_BITSIZE
6827 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
6828 #endif
6829
6830 op0 = XEXP (op0, 2);
6831 op1 = GEN_INT (i);
6832 const_op = i;
6833
6834 /* Result is nonzero iff shift count is equal to I. */
6835 code = reverse_condition (code);
6836 continue;
6837 }
6838 #endif
6839
6840 /* ... fall through ... */
6841
6842 case SIGN_EXTRACT:
6843 tem = expand_compound_operation (op0);
6844 if (tem != op0)
6845 {
6846 op0 = tem;
6847 continue;
6848 }
6849 break;
6850
6851 case NOT:
6852 /* If testing for equality, we can take the NOT of the constant. */
6853 if (equality_comparison_p
6854 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
6855 {
6856 op0 = XEXP (op0, 0);
6857 op1 = tem;
6858 continue;
6859 }
6860
6861 /* If just looking at the sign bit, reverse the sense of the
6862 comparison. */
6863 if (sign_bit_comparison_p)
6864 {
6865 op0 = XEXP (op0, 0);
6866 code = (code == GE ? LT : GE);
6867 continue;
6868 }
6869 break;
6870
6871 case NEG:
6872 /* If testing for equality, we can take the NEG of the constant. */
6873 if (equality_comparison_p
6874 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
6875 {
6876 op0 = XEXP (op0, 0);
6877 op1 = tem;
6878 continue;
6879 }
6880
6881 /* The remaining cases only apply to comparisons with zero. */
6882 if (const_op != 0)
6883 break;
6884
6885 /* When X is ABS or is known positive,
6886 (neg X) is < 0 if and only if X != 0. */
6887
6888 if (sign_bit_comparison_p
6889 && (GET_CODE (XEXP (op0, 0)) == ABS
6890 || (mode_width <= HOST_BITS_PER_WIDE_INT
6891 && (significant_bits (XEXP (op0, 0), mode)
6892 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
6893 {
6894 op0 = XEXP (op0, 0);
6895 code = (code == LT ? NE : EQ);
6896 continue;
6897 }
6898
6899 /* If we have NEG of something that is the result of a
6900 SIGN_EXTEND, SIGN_EXTRACT, or ASHIFTRT, we know that the
6901 two high-order bits must be the same and hence that
6902 "(-a) < 0" is equivalent to "a > 0". Otherwise, we can't
6903 do this. */
6904 if (GET_CODE (XEXP (op0, 0)) == SIGN_EXTEND
6905 || (GET_CODE (XEXP (op0, 0)) == SIGN_EXTRACT
6906 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
6907 && (INTVAL (XEXP (XEXP (op0, 0), 1))
6908 < GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (op0, 0), 0)))))
6909 || (GET_CODE (XEXP (op0, 0)) == ASHIFTRT
6910 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
6911 && XEXP (XEXP (op0, 0), 1) != const0_rtx)
6912 || ((tem = get_last_value (XEXP (op0, 0))) != 0
6913 && (GET_CODE (tem) == SIGN_EXTEND
6914 || (GET_CODE (tem) == SIGN_EXTRACT
6915 && GET_CODE (XEXP (tem, 1)) == CONST_INT
6916 && (INTVAL (XEXP (tem, 1))
6917 < GET_MODE_BITSIZE (GET_MODE (XEXP (tem, 0)))))
6918 || (GET_CODE (tem) == ASHIFTRT
6919 && GET_CODE (XEXP (tem, 1)) == CONST_INT
6920 && XEXP (tem, 1) != const0_rtx))))
6921 {
6922 op0 = XEXP (op0, 0);
6923 code = swap_condition (code);
6924 continue;
6925 }
6926 break;
6927
6928 case ROTATE:
6929 /* If we are testing equality and our count is a constant, we
6930 can perform the inverse operation on our RHS. */
6931 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
6932 && (tem = simplify_binary_operation (ROTATERT, mode,
6933 op1, XEXP (op0, 1))) != 0)
6934 {
6935 op0 = XEXP (op0, 0);
6936 op1 = tem;
6937 continue;
6938 }
6939
6940 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
6941 a particular bit. Convert it to an AND of a constant of that
6942 bit. This will be converted into a ZERO_EXTRACT. */
6943 if (const_op == 0 && sign_bit_comparison_p
6944 && GET_CODE (XEXP (op0, 1)) == CONST_INT
6945 && mode_width <= HOST_BITS_PER_WIDE_INT)
6946 {
6947 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
6948 ((HOST_WIDE_INT) 1
6949 << (mode_width - 1
6950 - INTVAL (XEXP (op0, 1)))));
6951 code = (code == LT ? NE : EQ);
6952 continue;
6953 }
6954
6955 /* ... fall through ... */
6956
6957 case ABS:
6958 /* ABS is ignorable inside an equality comparison with zero. */
6959 if (const_op == 0 && equality_comparison_p)
6960 {
6961 op0 = XEXP (op0, 0);
6962 continue;
6963 }
6964 break;
6965
6966
6967 case SIGN_EXTEND:
6968 /* Can simplify (compare (zero/sign_extend FOO) CONST)
6969 to (compare FOO CONST) if CONST fits in FOO's mode and we
6970 are either testing inequality or have an unsigned comparison
6971 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
6972 if (! unsigned_comparison_p
6973 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
6974 <= HOST_BITS_PER_WIDE_INT)
6975 && ((unsigned HOST_WIDE_INT) const_op
6976 < (((HOST_WIDE_INT) 1
6977 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
6978 {
6979 op0 = XEXP (op0, 0);
6980 continue;
6981 }
6982 break;
6983
6984 case SUBREG:
6985 /* If the inner mode is smaller and we are extracting the low
6986 part, we can treat the SUBREG as if it were a ZERO_EXTEND. */
6987 if (! subreg_lowpart_p (op0)
6988 || GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) >= mode_width)
6989 break;
6990
6991 /* ... fall through ... */
6992
6993 case ZERO_EXTEND:
6994 if ((unsigned_comparison_p || equality_comparison_p)
6995 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
6996 <= HOST_BITS_PER_WIDE_INT)
6997 && ((unsigned HOST_WIDE_INT) const_op
6998 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
6999 {
7000 op0 = XEXP (op0, 0);
7001 continue;
7002 }
7003 break;
7004
7005 case PLUS:
7006 /* (eq (plus X C1) C2) -> (eq X (minus C2 C1)). We can only do
7007 this for equality comparisons due to pathological cases involving
7008 overflows. */
7009 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
7010 && (tem = simplify_binary_operation (MINUS, mode, op1,
7011 XEXP (op0, 1))) != 0)
7012 {
7013 op0 = XEXP (op0, 0);
7014 op1 = tem;
7015 continue;
7016 }
7017
7018 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
7019 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
7020 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
7021 {
7022 op0 = XEXP (XEXP (op0, 0), 0);
7023 code = (code == LT ? EQ : NE);
7024 continue;
7025 }
7026 break;
7027
7028 case MINUS:
7029 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
7030 of bits in X minus 1, is one iff X > 0. */
7031 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
7032 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
7033 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
7034 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
7035 {
7036 op0 = XEXP (op0, 1);
7037 code = (code == GE ? LE : GT);
7038 continue;
7039 }
7040 break;
7041
7042 case XOR:
7043 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
7044 if C is zero or B is a constant. */
7045 if (equality_comparison_p
7046 && 0 != (tem = simplify_binary_operation (XOR, mode,
7047 XEXP (op0, 1), op1)))
7048 {
7049 op0 = XEXP (op0, 0);
7050 op1 = tem;
7051 continue;
7052 }
7053 break;
7054
7055 case EQ: case NE:
7056 case LT: case LTU: case LE: case LEU:
7057 case GT: case GTU: case GE: case GEU:
7058 /* We can't do anything if OP0 is a condition code value, rather
7059 than an actual data value. */
7060 if (const_op != 0
7061 #ifdef HAVE_cc0
7062 || XEXP (op0, 0) == cc0_rtx
7063 #endif
7064 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
7065 break;
7066
7067 /* Get the two operands being compared. */
7068 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
7069 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
7070 else
7071 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
7072
7073 /* Check for the cases where we simply want the result of the
7074 earlier test or the opposite of that result. */
7075 if (code == NE
7076 || (code == EQ && reversible_comparison_p (op0))
7077 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
7078 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7079 && (STORE_FLAG_VALUE
7080 & (((HOST_WIDE_INT) 1
7081 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
7082 && (code == LT
7083 || (code == GE && reversible_comparison_p (op0)))))
7084 {
7085 code = (code == LT || code == NE
7086 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
7087 op0 = tem, op1 = tem1;
7088 continue;
7089 }
7090 break;
7091
7092 case IOR:
7093 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
7094 iff X <= 0. */
7095 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
7096 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
7097 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
7098 {
7099 op0 = XEXP (op0, 1);
7100 code = (code == GE ? GT : LE);
7101 continue;
7102 }
7103 break;
7104
7105 case AND:
7106 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
7107 will be converted to a ZERO_EXTRACT later. */
7108 if (const_op == 0 && equality_comparison_p
7109 && (GET_CODE (XEXP (op0, 0)) == ASHIFT
7110 || GET_CODE (XEXP (op0, 0)) == LSHIFT)
7111 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
7112 {
7113 op0 = simplify_and_const_int
7114 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
7115 XEXP (op0, 1),
7116 XEXP (XEXP (op0, 0), 1)),
7117 (HOST_WIDE_INT) 1);
7118 continue;
7119 }
7120
7121 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
7122 zero and X is a comparison and C1 and C2 describe only bits set
7123 in STORE_FLAG_VALUE, we can compare with X. */
7124 if (const_op == 0 && equality_comparison_p
7125 && mode_width <= HOST_BITS_PER_WIDE_INT
7126 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7127 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
7128 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
7129 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
7130 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
7131 {
7132 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
7133 << INTVAL (XEXP (XEXP (op0, 0), 1)));
7134 if ((~ STORE_FLAG_VALUE & mask) == 0
7135 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
7136 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
7137 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
7138 {
7139 op0 = XEXP (XEXP (op0, 0), 0);
7140 continue;
7141 }
7142 }
7143
7144 /* If we are doing an equality comparison of an AND of a bit equal
7145 to the sign bit, replace this with a LT or GE comparison of
7146 the underlying value. */
7147 if (equality_comparison_p
7148 && const_op == 0
7149 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7150 && mode_width <= HOST_BITS_PER_WIDE_INT
7151 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
7152 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
7153 {
7154 op0 = XEXP (op0, 0);
7155 code = (code == EQ ? GE : LT);
7156 continue;
7157 }
7158
7159 /* If this AND operation is really a ZERO_EXTEND from a narrower
7160 mode, the constant fits within that mode, and this is either an
7161 equality or unsigned comparison, try to do this comparison in
7162 the narrower mode. */
7163 if ((equality_comparison_p || unsigned_comparison_p)
7164 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7165 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
7166 & GET_MODE_MASK (mode))
7167 + 1)) >= 0
7168 && const_op >> i == 0
7169 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
7170 {
7171 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
7172 continue;
7173 }
7174 break;
7175
7176 case ASHIFT:
7177 case LSHIFT:
7178 /* If we have (compare (xshift FOO N) (const_int C)) and
7179 the high order N bits of FOO (N+1 if an inequality comparison)
7180 are not significant, we can do this by comparing FOO with C
7181 shifted right N bits so long as the low-order N bits of C are
7182 zero. */
7183 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
7184 && INTVAL (XEXP (op0, 1)) >= 0
7185 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
7186 < HOST_BITS_PER_WIDE_INT)
7187 && ((const_op
7188 & ~ (((HOST_WIDE_INT) 1
7189 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
7190 && mode_width <= HOST_BITS_PER_WIDE_INT
7191 && (significant_bits (XEXP (op0, 0), mode)
7192 & ~ (mask >> (INTVAL (XEXP (op0, 1))
7193 + ! equality_comparison_p))) == 0)
7194 {
7195 const_op >>= INTVAL (XEXP (op0, 1));
7196 op1 = GEN_INT (const_op);
7197 op0 = XEXP (op0, 0);
7198 continue;
7199 }
7200
7201 /* If we are doing a sign bit comparison, it means we are testing
7202 a particular bit. Convert it to the appropriate AND. */
7203 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
7204 && mode_width <= HOST_BITS_PER_WIDE_INT)
7205 {
7206 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
7207 ((HOST_WIDE_INT) 1
7208 << (mode_width - 1
7209 - INTVAL (XEXP (op0, 1)))));
7210 code = (code == LT ? NE : EQ);
7211 continue;
7212 }
7213
7214 /* If this an equality comparison with zero and we are shifting
7215 the low bit to the sign bit, we can convert this to an AND of the
7216 low-order bit. */
7217 if (const_op == 0 && equality_comparison_p
7218 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7219 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
7220 {
7221 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
7222 (HOST_WIDE_INT) 1);
7223 continue;
7224 }
7225 break;
7226
7227 case ASHIFTRT:
7228 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
7229 do the comparison in a narrower mode. */
7230 if (! unsigned_comparison_p
7231 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7232 && GET_CODE (XEXP (op0, 0)) == ASHIFT
7233 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
7234 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
7235 MODE_INT, 1)) != VOIDmode
7236 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
7237 || ((unsigned HOST_WIDE_INT) - const_op
7238 <= GET_MODE_MASK (tmode))))
7239 {
7240 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
7241 continue;
7242 }
7243
7244 /* ... fall through ... */
7245 case LSHIFTRT:
7246 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
7247 the low order N bits of FOO are not significant, we can do this
7248 by comparing FOO with C shifted left N bits so long as no
7249 overflow occurs. */
7250 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
7251 && INTVAL (XEXP (op0, 1)) >= 0
7252 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
7253 && mode_width <= HOST_BITS_PER_WIDE_INT
7254 && (significant_bits (XEXP (op0, 0), mode)
7255 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
7256 && (const_op == 0
7257 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
7258 < mode_width)))
7259 {
7260 const_op <<= INTVAL (XEXP (op0, 1));
7261 op1 = GEN_INT (const_op);
7262 op0 = XEXP (op0, 0);
7263 continue;
7264 }
7265
7266 /* If we are using this shift to extract just the sign bit, we
7267 can replace this with an LT or GE comparison. */
7268 if (const_op == 0
7269 && (equality_comparison_p || sign_bit_comparison_p)
7270 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7271 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
7272 {
7273 op0 = XEXP (op0, 0);
7274 code = (code == NE || code == GT ? LT : GE);
7275 continue;
7276 }
7277 break;
7278 }
7279
7280 break;
7281 }
7282
7283 /* Now make any compound operations involved in this comparison. Then,
7284 check for an outmost SUBREG on OP0 that isn't doing anything or is
7285 paradoxical. The latter case can only occur when it is known that the
7286 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
7287 We can never remove a SUBREG for a non-equality comparison because the
7288 sign bit is in a different place in the underlying object. */
7289
7290 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
7291 op1 = make_compound_operation (op1, SET);
7292
7293 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
7294 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7295 && (code == NE || code == EQ)
7296 && ((GET_MODE_SIZE (GET_MODE (op0))
7297 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
7298 {
7299 op0 = SUBREG_REG (op0);
7300 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
7301 }
7302
7303 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
7304 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7305 && (code == NE || code == EQ)
7306 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
7307 && (significant_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
7308 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
7309 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
7310 op1),
7311 (significant_bits (tem, GET_MODE (SUBREG_REG (op0)))
7312 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
7313 op0 = SUBREG_REG (op0), op1 = tem;
7314
7315 /* We now do the opposite procedure: Some machines don't have compare
7316 insns in all modes. If OP0's mode is an integer mode smaller than a
7317 word and we can't do a compare in that mode, see if there is a larger
7318 mode for which we can do the compare and where the only significant
7319 bits in OP0 and OP1 are those in the narrower mode. We can do
7320 this if this is an equality comparison, in which case we can
7321 merely widen the operation, or if we are testing the sign bit, in
7322 which case we can explicitly put in the test. */
7323
7324 mode = GET_MODE (op0);
7325 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
7326 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
7327 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
7328 for (tmode = GET_MODE_WIDER_MODE (mode);
7329 (tmode != VOIDmode
7330 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
7331 tmode = GET_MODE_WIDER_MODE (tmode))
7332 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing
7333 && (significant_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
7334 && (significant_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0
7335 && (code == EQ || code == NE
7336 || (op1 == const0_rtx && (code == LT || code == GE)
7337 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)))
7338 {
7339 op0 = gen_lowpart_for_combine (tmode, op0);
7340 op1 = gen_lowpart_for_combine (tmode, op1);
7341
7342 if (code == LT || code == GE)
7343 {
7344 op0 = gen_binary (AND, tmode, op0,
7345 GEN_INT ((HOST_WIDE_INT) 1
7346 << (GET_MODE_BITSIZE (mode) - 1)));
7347 code = (code == LT) ? NE : EQ;
7348 }
7349
7350 break;
7351 }
7352
7353 *pop0 = op0;
7354 *pop1 = op1;
7355
7356 return code;
7357 }
7358 \f
7359 /* Return 1 if we know that X, a comparison operation, is not operating
7360 on a floating-point value or is EQ or NE, meaning that we can safely
7361 reverse it. */
7362
7363 static int
7364 reversible_comparison_p (x)
7365 rtx x;
7366 {
7367 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
7368 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
7369 return 1;
7370
7371 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
7372 {
7373 case MODE_INT:
7374 return 1;
7375
7376 case MODE_CC:
7377 x = get_last_value (XEXP (x, 0));
7378 return (x && GET_CODE (x) == COMPARE
7379 && GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT);
7380 }
7381
7382 return 0;
7383 }
7384 \f
7385 /* Utility function for following routine. Called when X is part of a value
7386 being stored into reg_last_set_value. Sets reg_last_set_table_tick
7387 for each register mentioned. Similar to mention_regs in cse.c */
7388
7389 static void
7390 update_table_tick (x)
7391 rtx x;
7392 {
7393 register enum rtx_code code = GET_CODE (x);
7394 register char *fmt = GET_RTX_FORMAT (code);
7395 register int i;
7396
7397 if (code == REG)
7398 {
7399 int regno = REGNO (x);
7400 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
7401 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
7402
7403 for (i = regno; i < endregno; i++)
7404 reg_last_set_table_tick[i] = label_tick;
7405
7406 return;
7407 }
7408
7409 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7410 /* Note that we can't have an "E" in values stored; see
7411 get_last_value_validate. */
7412 if (fmt[i] == 'e')
7413 update_table_tick (XEXP (x, i));
7414 }
7415
7416 /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
7417 are saying that the register is clobbered and we no longer know its
7418 value. If INSN is zero, don't update reg_last_set; this call is normally
7419 done with VALUE also zero to invalidate the register. */
7420
7421 static void
7422 record_value_for_reg (reg, insn, value)
7423 rtx reg;
7424 rtx insn;
7425 rtx value;
7426 {
7427 int regno = REGNO (reg);
7428 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
7429 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
7430 int i;
7431
7432 /* If VALUE contains REG and we have a previous value for REG, substitute
7433 the previous value. */
7434 if (value && insn && reg_overlap_mentioned_p (reg, value))
7435 {
7436 rtx tem;
7437
7438 /* Set things up so get_last_value is allowed to see anything set up to
7439 our insn. */
7440 subst_low_cuid = INSN_CUID (insn);
7441 tem = get_last_value (reg);
7442
7443 if (tem)
7444 value = replace_rtx (copy_rtx (value), reg, tem);
7445 }
7446
7447 /* For each register modified, show we don't know its value, that
7448 its value has been updated, and that we don't know the location of
7449 the death of the register. */
7450 for (i = regno; i < endregno; i ++)
7451 {
7452 if (insn)
7453 reg_last_set[i] = insn;
7454 reg_last_set_value[i] = 0;
7455 reg_last_death[i] = 0;
7456 }
7457
7458 /* Mark registers that are being referenced in this value. */
7459 if (value)
7460 update_table_tick (value);
7461
7462 /* Now update the status of each register being set.
7463 If someone is using this register in this block, set this register
7464 to invalid since we will get confused between the two lives in this
7465 basic block. This makes using this register always invalid. In cse, we
7466 scan the table to invalidate all entries using this register, but this
7467 is too much work for us. */
7468
7469 for (i = regno; i < endregno; i++)
7470 {
7471 reg_last_set_label[i] = label_tick;
7472 if (value && reg_last_set_table_tick[i] == label_tick)
7473 reg_last_set_invalid[i] = 1;
7474 else
7475 reg_last_set_invalid[i] = 0;
7476 }
7477
7478 /* The value being assigned might refer to X (like in "x++;"). In that
7479 case, we must replace it with (clobber (const_int 0)) to prevent
7480 infinite loops. */
7481 if (value && ! get_last_value_validate (&value,
7482 reg_last_set_label[regno], 0))
7483 {
7484 value = copy_rtx (value);
7485 if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
7486 value = 0;
7487 }
7488
7489 /* For the main register being modified, update the value. */
7490 reg_last_set_value[regno] = value;
7491
7492 }
7493
7494 /* Used for communication between the following two routines. */
7495 static rtx record_dead_insn;
7496
7497 /* Called via note_stores from record_dead_and_set_regs to handle one
7498 SET or CLOBBER in an insn. */
7499
7500 static void
7501 record_dead_and_set_regs_1 (dest, setter)
7502 rtx dest, setter;
7503 {
7504 if (GET_CODE (dest) == REG)
7505 {
7506 /* If we are setting the whole register, we know its value. Otherwise
7507 show that we don't know the value. We can handle SUBREG in
7508 some cases. */
7509 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
7510 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
7511 else if (GET_CODE (setter) == SET
7512 && GET_CODE (SET_DEST (setter)) == SUBREG
7513 && SUBREG_REG (SET_DEST (setter)) == dest
7514 && subreg_lowpart_p (SET_DEST (setter)))
7515 record_value_for_reg
7516 (dest, record_dead_insn,
7517 gen_lowpart_for_combine (GET_MODE (SET_DEST (setter)),
7518 SET_SRC (setter)));
7519 else
7520 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
7521 }
7522 else if (GET_CODE (dest) == MEM
7523 /* Ignore pushes, they clobber nothing. */
7524 && ! push_operand (dest, GET_MODE (dest)))
7525 mem_last_set = INSN_CUID (record_dead_insn);
7526 }
7527
7528 /* Update the records of when each REG was most recently set or killed
7529 for the things done by INSN. This is the last thing done in processing
7530 INSN in the combiner loop.
7531
7532 We update reg_last_set, reg_last_set_value, reg_last_death, and also the
7533 similar information mem_last_set (which insn most recently modified memory)
7534 and last_call_cuid (which insn was the most recent subroutine call). */
7535
7536 static void
7537 record_dead_and_set_regs (insn)
7538 rtx insn;
7539 {
7540 register rtx link;
7541 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7542 {
7543 if (REG_NOTE_KIND (link) == REG_DEAD)
7544 reg_last_death[REGNO (XEXP (link, 0))] = insn;
7545 else if (REG_NOTE_KIND (link) == REG_INC)
7546 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
7547 }
7548
7549 if (GET_CODE (insn) == CALL_INSN)
7550 last_call_cuid = mem_last_set = INSN_CUID (insn);
7551
7552 record_dead_insn = insn;
7553 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
7554 }
7555 \f
7556 /* Utility routine for the following function. Verify that all the registers
7557 mentioned in *LOC are valid when *LOC was part of a value set when
7558 label_tick == TICK. Return 0 if some are not.
7559
7560 If REPLACE is non-zero, replace the invalid reference with
7561 (clobber (const_int 0)) and return 1. This replacement is useful because
7562 we often can get useful information about the form of a value (e.g., if
7563 it was produced by a shift that always produces -1 or 0) even though
7564 we don't know exactly what registers it was produced from. */
7565
7566 static int
7567 get_last_value_validate (loc, tick, replace)
7568 rtx *loc;
7569 int tick;
7570 int replace;
7571 {
7572 rtx x = *loc;
7573 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
7574 int len = GET_RTX_LENGTH (GET_CODE (x));
7575 int i;
7576
7577 if (GET_CODE (x) == REG)
7578 {
7579 int regno = REGNO (x);
7580 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
7581 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
7582 int j;
7583
7584 for (j = regno; j < endregno; j++)
7585 if (reg_last_set_invalid[j]
7586 /* If this is a pseudo-register that was only set once, it is
7587 always valid. */
7588 || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
7589 && reg_last_set_label[j] > tick))
7590 {
7591 if (replace)
7592 *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
7593 return replace;
7594 }
7595
7596 return 1;
7597 }
7598
7599 for (i = 0; i < len; i++)
7600 if ((fmt[i] == 'e'
7601 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
7602 /* Don't bother with these. They shouldn't occur anyway. */
7603 || fmt[i] == 'E')
7604 return 0;
7605
7606 /* If we haven't found a reason for it to be invalid, it is valid. */
7607 return 1;
7608 }
7609
7610 /* Get the last value assigned to X, if known. Some registers
7611 in the value may be replaced with (clobber (const_int 0)) if their value
7612 is known longer known reliably. */
7613
7614 static rtx
7615 get_last_value (x)
7616 rtx x;
7617 {
7618 int regno;
7619 rtx value;
7620
7621 /* If this is a non-paradoxical SUBREG, get the value of its operand and
7622 then convert it to the desired mode. If this is a paradoxical SUBREG,
7623 we cannot predict what values the "extra" bits might have. */
7624 if (GET_CODE (x) == SUBREG
7625 && subreg_lowpart_p (x)
7626 && (GET_MODE_SIZE (GET_MODE (x))
7627 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
7628 && (value = get_last_value (SUBREG_REG (x))) != 0)
7629 return gen_lowpart_for_combine (GET_MODE (x), value);
7630
7631 if (GET_CODE (x) != REG)
7632 return 0;
7633
7634 regno = REGNO (x);
7635 value = reg_last_set_value[regno];
7636
7637 /* If we don't have a value, it isn't for this basic block, or if it was
7638 set in a later insn that the ones we are processing, return 0. */
7639
7640 if (value == 0
7641 || (reg_n_sets[regno] != 1
7642 && (reg_last_set_label[regno] != label_tick
7643 || INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)))
7644 return 0;
7645
7646 /* If the value has all its register valid, return it. */
7647 if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
7648 return value;
7649
7650 /* Otherwise, make a copy and replace any invalid register with
7651 (clobber (const_int 0)). If that fails for some reason, return 0. */
7652
7653 value = copy_rtx (value);
7654 if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
7655 return value;
7656
7657 return 0;
7658 }
7659 \f
7660 /* Return nonzero if expression X refers to a REG or to memory
7661 that is set in an instruction more recent than FROM_CUID. */
7662
7663 static int
7664 use_crosses_set_p (x, from_cuid)
7665 register rtx x;
7666 int from_cuid;
7667 {
7668 register char *fmt;
7669 register int i;
7670 register enum rtx_code code = GET_CODE (x);
7671
7672 if (code == REG)
7673 {
7674 register int regno = REGNO (x);
7675 #ifdef PUSH_ROUNDING
7676 /* Don't allow uses of the stack pointer to be moved,
7677 because we don't know whether the move crosses a push insn. */
7678 if (regno == STACK_POINTER_REGNUM)
7679 return 1;
7680 #endif
7681 return (reg_last_set[regno]
7682 && INSN_CUID (reg_last_set[regno]) > from_cuid);
7683 }
7684
7685 if (code == MEM && mem_last_set > from_cuid)
7686 return 1;
7687
7688 fmt = GET_RTX_FORMAT (code);
7689
7690 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7691 {
7692 if (fmt[i] == 'E')
7693 {
7694 register int j;
7695 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7696 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
7697 return 1;
7698 }
7699 else if (fmt[i] == 'e'
7700 && use_crosses_set_p (XEXP (x, i), from_cuid))
7701 return 1;
7702 }
7703 return 0;
7704 }
7705 \f
7706 /* Define three variables used for communication between the following
7707 routines. */
7708
7709 static int reg_dead_regno, reg_dead_endregno;
7710 static int reg_dead_flag;
7711
7712 /* Function called via note_stores from reg_dead_at_p.
7713
7714 If DEST is within [reg_dead_rengno, reg_dead_endregno), set
7715 reg_dead_flag to 1 if X is a CLOBBER and to -1 it is a SET. */
7716
7717 static void
7718 reg_dead_at_p_1 (dest, x)
7719 rtx dest;
7720 rtx x;
7721 {
7722 int regno, endregno;
7723
7724 if (GET_CODE (dest) != REG)
7725 return;
7726
7727 regno = REGNO (dest);
7728 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
7729 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
7730
7731 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
7732 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
7733 }
7734
7735 /* Return non-zero if REG is known to be dead at INSN.
7736
7737 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
7738 referencing REG, it is dead. If we hit a SET referencing REG, it is
7739 live. Otherwise, see if it is live or dead at the start of the basic
7740 block we are in. */
7741
7742 static int
7743 reg_dead_at_p (reg, insn)
7744 rtx reg;
7745 rtx insn;
7746 {
7747 int block, i;
7748
7749 /* Set variables for reg_dead_at_p_1. */
7750 reg_dead_regno = REGNO (reg);
7751 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
7752 ? HARD_REGNO_NREGS (reg_dead_regno,
7753 GET_MODE (reg))
7754 : 1);
7755
7756 reg_dead_flag = 0;
7757
7758 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
7759 beginning of function. */
7760 for (; insn && GET_CODE (insn) != CODE_LABEL;
7761 insn = prev_nonnote_insn (insn))
7762 {
7763 note_stores (PATTERN (insn), reg_dead_at_p_1);
7764 if (reg_dead_flag)
7765 return reg_dead_flag == 1 ? 1 : 0;
7766
7767 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
7768 return 1;
7769 }
7770
7771 /* Get the basic block number that we were in. */
7772 if (insn == 0)
7773 block = 0;
7774 else
7775 {
7776 for (block = 0; block < n_basic_blocks; block++)
7777 if (insn == basic_block_head[block])
7778 break;
7779
7780 if (block == n_basic_blocks)
7781 return 0;
7782 }
7783
7784 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
7785 if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
7786 & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
7787 return 0;
7788
7789 return 1;
7790 }
7791 \f
7792 /* Remove register number REGNO from the dead registers list of INSN.
7793
7794 Return the note used to record the death, if there was one. */
7795
7796 rtx
7797 remove_death (regno, insn)
7798 int regno;
7799 rtx insn;
7800 {
7801 register rtx note = find_regno_note (insn, REG_DEAD, regno);
7802
7803 if (note)
7804 remove_note (insn, note);
7805
7806 return note;
7807 }
7808
7809 /* For each register (hardware or pseudo) used within expression X, if its
7810 death is in an instruction with cuid between FROM_CUID (inclusive) and
7811 TO_INSN (exclusive), put a REG_DEAD note for that register in the
7812 list headed by PNOTES.
7813
7814 This is done when X is being merged by combination into TO_INSN. These
7815 notes will then be distributed as needed. */
7816
7817 static void
7818 move_deaths (x, from_cuid, to_insn, pnotes)
7819 rtx x;
7820 int from_cuid;
7821 rtx to_insn;
7822 rtx *pnotes;
7823 {
7824 register char *fmt;
7825 register int len, i;
7826 register enum rtx_code code = GET_CODE (x);
7827
7828 if (code == REG)
7829 {
7830 register int regno = REGNO (x);
7831 register rtx where_dead = reg_last_death[regno];
7832
7833 if (where_dead && INSN_CUID (where_dead) >= from_cuid
7834 && INSN_CUID (where_dead) < INSN_CUID (to_insn))
7835 {
7836 rtx note = remove_death (regno, reg_last_death[regno]);
7837
7838 /* It is possible for the call above to return 0. This can occur
7839 when reg_last_death points to I2 or I1 that we combined with.
7840 In that case make a new note. */
7841
7842 if (note)
7843 {
7844 XEXP (note, 1) = *pnotes;
7845 *pnotes = note;
7846 }
7847 else
7848 *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);
7849 }
7850
7851 return;
7852 }
7853
7854 else if (GET_CODE (x) == SET)
7855 {
7856 rtx dest = SET_DEST (x);
7857
7858 move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);
7859
7860 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
7861 that accesses one word of a multi-word item, some
7862 piece of everything register in the expression is used by
7863 this insn, so remove any old death. */
7864
7865 if (GET_CODE (dest) == ZERO_EXTRACT
7866 || GET_CODE (dest) == STRICT_LOW_PART
7867 || (GET_CODE (dest) == SUBREG
7868 && (((GET_MODE_SIZE (GET_MODE (dest))
7869 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
7870 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
7871 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
7872 {
7873 move_deaths (dest, from_cuid, to_insn, pnotes);
7874 return;
7875 }
7876
7877 /* If this is some other SUBREG, we know it replaces the entire
7878 value, so use that as the destination. */
7879 if (GET_CODE (dest) == SUBREG)
7880 dest = SUBREG_REG (dest);
7881
7882 /* If this is a MEM, adjust deaths of anything used in the address.
7883 For a REG (the only other possibility), the entire value is
7884 being replaced so the old value is not used in this insn. */
7885
7886 if (GET_CODE (dest) == MEM)
7887 move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
7888 return;
7889 }
7890
7891 else if (GET_CODE (x) == CLOBBER)
7892 return;
7893
7894 len = GET_RTX_LENGTH (code);
7895 fmt = GET_RTX_FORMAT (code);
7896
7897 for (i = 0; i < len; i++)
7898 {
7899 if (fmt[i] == 'E')
7900 {
7901 register int j;
7902 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7903 move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
7904 }
7905 else if (fmt[i] == 'e')
7906 move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
7907 }
7908 }
7909 \f
7910 /* Return 1 if X is the target of a bit-field assignment in BODY, the
7911 pattern of an insn. X must be a REG. */
7912
7913 static int
7914 reg_bitfield_target_p (x, body)
7915 rtx x;
7916 rtx body;
7917 {
7918 int i;
7919
7920 if (GET_CODE (body) == SET)
7921 {
7922 rtx dest = SET_DEST (body);
7923 rtx target;
7924 int regno, tregno, endregno, endtregno;
7925
7926 if (GET_CODE (dest) == ZERO_EXTRACT)
7927 target = XEXP (dest, 0);
7928 else if (GET_CODE (dest) == STRICT_LOW_PART)
7929 target = SUBREG_REG (XEXP (dest, 0));
7930 else
7931 return 0;
7932
7933 if (GET_CODE (target) == SUBREG)
7934 target = SUBREG_REG (target);
7935
7936 if (GET_CODE (target) != REG)
7937 return 0;
7938
7939 tregno = REGNO (target), regno = REGNO (x);
7940 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
7941 return target == x;
7942
7943 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
7944 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
7945
7946 return endregno > tregno && regno < endtregno;
7947 }
7948
7949 else if (GET_CODE (body) == PARALLEL)
7950 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
7951 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
7952 return 1;
7953
7954 return 0;
7955 }
7956 \f
7957 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
7958 as appropriate. I3 and I2 are the insns resulting from the combination
7959 insns including FROM (I2 may be zero).
7960
7961 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
7962 not need REG_DEAD notes because they are being substituted for. This
7963 saves searching in the most common cases.
7964
7965 Each note in the list is either ignored or placed on some insns, depending
7966 on the type of note. */
7967
7968 static void
7969 distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
7970 rtx notes;
7971 rtx from_insn;
7972 rtx i3, i2;
7973 rtx elim_i2, elim_i1;
7974 {
7975 rtx note, next_note;
7976 rtx tem;
7977
7978 for (note = notes; note; note = next_note)
7979 {
7980 rtx place = 0, place2 = 0;
7981
7982 /* If this NOTE references a pseudo register, ensure it references
7983 the latest copy of that register. */
7984 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
7985 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
7986 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
7987
7988 next_note = XEXP (note, 1);
7989 switch (REG_NOTE_KIND (note))
7990 {
7991 case REG_UNUSED:
7992 /* If this register is set or clobbered in I3, put the note there
7993 unless there is one already. */
7994 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
7995 {
7996 if (! (GET_CODE (XEXP (note, 0)) == REG
7997 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
7998 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
7999 place = i3;
8000 }
8001 /* Otherwise, if this register is used by I3, then this register
8002 now dies here, so we must put a REG_DEAD note here unless there
8003 is one already. */
8004 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
8005 && ! (GET_CODE (XEXP (note, 0)) == REG
8006 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
8007 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
8008 {
8009 PUT_REG_NOTE_KIND (note, REG_DEAD);
8010 place = i3;
8011 }
8012 break;
8013
8014 case REG_EQUAL:
8015 case REG_EQUIV:
8016 case REG_NONNEG:
8017 /* These notes say something about results of an insn. We can
8018 only support them if they used to be on I3 in which case they
8019 remain on I3. Otherwise they are ignored. */
8020 if (from_insn == i3)
8021 place = i3;
8022 break;
8023
8024 case REG_INC:
8025 case REG_NO_CONFLICT:
8026 case REG_LABEL:
8027 /* These notes say something about how a register is used. They must
8028 be present on any use of the register in I2 or I3. */
8029 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
8030 place = i3;
8031
8032 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
8033 {
8034 if (place)
8035 place2 = i2;
8036 else
8037 place = i2;
8038 }
8039 break;
8040
8041 case REG_WAS_0:
8042 /* It is too much trouble to try to see if this note is still
8043 correct in all situations. It is better to simply delete it. */
8044 break;
8045
8046 case REG_RETVAL:
8047 /* If the insn previously containing this note still exists,
8048 put it back where it was. Otherwise move it to the previous
8049 insn. Adjust the corresponding REG_LIBCALL note. */
8050 if (GET_CODE (from_insn) != NOTE)
8051 place = from_insn;
8052 else
8053 {
8054 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
8055 place = prev_real_insn (from_insn);
8056 if (tem && place)
8057 XEXP (tem, 0) = place;
8058 }
8059 break;
8060
8061 case REG_LIBCALL:
8062 /* This is handled similarly to REG_RETVAL. */
8063 if (GET_CODE (from_insn) != NOTE)
8064 place = from_insn;
8065 else
8066 {
8067 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
8068 place = next_real_insn (from_insn);
8069 if (tem && place)
8070 XEXP (tem, 0) = place;
8071 }
8072 break;
8073
8074 case REG_DEAD:
8075 /* If the register is used as an input in I3, it dies there.
8076 Similarly for I2, if it is non-zero and adjacent to I3.
8077
8078 If the register is not used as an input in either I3 or I2
8079 and it is not one of the registers we were supposed to eliminate,
8080 there are two possibilities. We might have a non-adjacent I2
8081 or we might have somehow eliminated an additional register
8082 from a computation. For example, we might have had A & B where
8083 we discover that B will always be zero. In this case we will
8084 eliminate the reference to A.
8085
8086 In both cases, we must search to see if we can find a previous
8087 use of A and put the death note there. */
8088
8089 if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
8090 place = i3;
8091 else if (i2 != 0 && next_nonnote_insn (i2) == i3
8092 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
8093 place = i2;
8094
8095 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
8096 break;
8097
8098 /* If the register is used in both I2 and I3 and it dies in I3,
8099 we might have added another reference to it. If reg_n_refs
8100 was 2, bump it to 3. This has to be correct since the
8101 register must have been set somewhere. The reason this is
8102 done is because local-alloc.c treats 2 references as a
8103 special case. */
8104
8105 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
8106 && reg_n_refs[REGNO (XEXP (note, 0))]== 2
8107 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
8108 reg_n_refs[REGNO (XEXP (note, 0))] = 3;
8109
8110 if (place == 0)
8111 for (tem = prev_nonnote_insn (i3);
8112 tem && (GET_CODE (tem) == INSN
8113 || GET_CODE (tem) == CALL_INSN);
8114 tem = prev_nonnote_insn (tem))
8115 {
8116 /* If the register is being set at TEM, see if that is all
8117 TEM is doing. If so, delete TEM. Otherwise, make this
8118 into a REG_UNUSED note instead. */
8119 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
8120 {
8121 rtx set = single_set (tem);
8122
8123 /* Verify that it was the set, and not a clobber that
8124 modified the register. */
8125
8126 if (set != 0 && ! side_effects_p (SET_SRC (set))
8127 && rtx_equal_p (XEXP (note, 0), SET_DEST (set)))
8128 {
8129 /* Move the notes and links of TEM elsewhere.
8130 This might delete other dead insns recursively.
8131 First set the pattern to something that won't use
8132 any register. */
8133
8134 PATTERN (tem) = pc_rtx;
8135
8136 distribute_notes (REG_NOTES (tem), tem, tem,
8137 NULL_RTX, NULL_RTX, NULL_RTX);
8138 distribute_links (LOG_LINKS (tem));
8139
8140 PUT_CODE (tem, NOTE);
8141 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
8142 NOTE_SOURCE_FILE (tem) = 0;
8143 }
8144 else
8145 {
8146 PUT_REG_NOTE_KIND (note, REG_UNUSED);
8147
8148 /* If there isn't already a REG_UNUSED note, put one
8149 here. */
8150 if (! find_regno_note (tem, REG_UNUSED,
8151 REGNO (XEXP (note, 0))))
8152 place = tem;
8153 break;
8154 }
8155 }
8156 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem)))
8157 {
8158 place = tem;
8159 break;
8160 }
8161 }
8162
8163 /* If the register is set or already dead at PLACE, we needn't do
8164 anything with this note if it is still a REG_DEAD note.
8165
8166 Note that we cannot use just `dead_or_set_p' here since we can
8167 convert an assignment to a register into a bit-field assignment.
8168 Therefore, we must also omit the note if the register is the
8169 target of a bitfield assignment. */
8170
8171 if (place && REG_NOTE_KIND (note) == REG_DEAD)
8172 {
8173 int regno = REGNO (XEXP (note, 0));
8174
8175 if (dead_or_set_p (place, XEXP (note, 0))
8176 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
8177 {
8178 /* Unless the register previously died in PLACE, clear
8179 reg_last_death. [I no longer understand why this is
8180 being done.] */
8181 if (reg_last_death[regno] != place)
8182 reg_last_death[regno] = 0;
8183 place = 0;
8184 }
8185 else
8186 reg_last_death[regno] = place;
8187
8188 /* If this is a death note for a hard reg that is occupying
8189 multiple registers, ensure that we are still using all
8190 parts of the object. If we find a piece of the object
8191 that is unused, we must add a USE for that piece before
8192 PLACE and put the appropriate REG_DEAD note on it.
8193
8194 An alternative would be to put a REG_UNUSED for the pieces
8195 on the insn that set the register, but that can't be done if
8196 it is not in the same block. It is simpler, though less
8197 efficient, to add the USE insns. */
8198
8199 if (place && regno < FIRST_PSEUDO_REGISTER
8200 && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
8201 {
8202 int endregno
8203 = regno + HARD_REGNO_NREGS (regno,
8204 GET_MODE (XEXP (note, 0)));
8205 int all_used = 1;
8206 int i;
8207
8208 for (i = regno; i < endregno; i++)
8209 if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0))
8210 {
8211 rtx piece = gen_rtx (REG, word_mode, i);
8212 rtx p;
8213
8214 /* See if we already placed a USE note for this
8215 register in front of PLACE. */
8216 for (p = place;
8217 GET_CODE (PREV_INSN (p)) == INSN
8218 && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
8219 p = PREV_INSN (p))
8220 if (rtx_equal_p (piece,
8221 XEXP (PATTERN (PREV_INSN (p)), 0)))
8222 {
8223 p = 0;
8224 break;
8225 }
8226
8227 if (p)
8228 {
8229 rtx use_insn
8230 = emit_insn_before (gen_rtx (USE, VOIDmode,
8231 piece),
8232 p);
8233 REG_NOTES (use_insn)
8234 = gen_rtx (EXPR_LIST, REG_DEAD, piece,
8235 REG_NOTES (use_insn));
8236 }
8237
8238 all_used = 0;
8239 }
8240
8241 if (! all_used)
8242 {
8243 /* Put only REG_DEAD notes for pieces that are
8244 still used and that are not already dead or set. */
8245
8246 for (i = regno; i < endregno; i++)
8247 {
8248 rtx piece = gen_rtx (REG, word_mode, i);
8249
8250 if (reg_referenced_p (piece, PATTERN (place))
8251 && ! dead_or_set_p (place, piece)
8252 && ! reg_bitfield_target_p (piece,
8253 PATTERN (place)))
8254 REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
8255 piece,
8256 REG_NOTES (place));
8257 }
8258
8259 place = 0;
8260 }
8261 }
8262 }
8263 break;
8264
8265 default:
8266 /* Any other notes should not be present at this point in the
8267 compilation. */
8268 abort ();
8269 }
8270
8271 if (place)
8272 {
8273 XEXP (note, 1) = REG_NOTES (place);
8274 REG_NOTES (place) = note;
8275 }
8276
8277 if (place2)
8278 REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
8279 XEXP (note, 0), REG_NOTES (place2));
8280 }
8281 }
8282 \f
8283 /* Similarly to above, distribute the LOG_LINKS that used to be present on
8284 I3, I2, and I1 to new locations. This is also called in one case to
8285 add a link pointing at I3 when I3's destination is changed. */
8286
8287 static void
8288 distribute_links (links)
8289 rtx links;
8290 {
8291 rtx link, next_link;
8292
8293 for (link = links; link; link = next_link)
8294 {
8295 rtx place = 0;
8296 rtx insn;
8297 rtx set, reg;
8298
8299 next_link = XEXP (link, 1);
8300
8301 /* If the insn that this link points to is a NOTE or isn't a single
8302 set, ignore it. In the latter case, it isn't clear what we
8303 can do other than ignore the link, since we can't tell which
8304 register it was for. Such links wouldn't be used by combine
8305 anyway.
8306
8307 It is not possible for the destination of the target of the link to
8308 have been changed by combine. The only potential of this is if we
8309 replace I3, I2, and I1 by I3 and I2. But in that case the
8310 destination of I2 also remains unchanged. */
8311
8312 if (GET_CODE (XEXP (link, 0)) == NOTE
8313 || (set = single_set (XEXP (link, 0))) == 0)
8314 continue;
8315
8316 reg = SET_DEST (set);
8317 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
8318 || GET_CODE (reg) == SIGN_EXTRACT
8319 || GET_CODE (reg) == STRICT_LOW_PART)
8320 reg = XEXP (reg, 0);
8321
8322 /* A LOG_LINK is defined as being placed on the first insn that uses
8323 a register and points to the insn that sets the register. Start
8324 searching at the next insn after the target of the link and stop
8325 when we reach a set of the register or the end of the basic block.
8326
8327 Note that this correctly handles the link that used to point from
8328 I3 to I2. Also note that not much searching is typically done here
8329 since most links don't point very far away. */
8330
8331 for (insn = NEXT_INSN (XEXP (link, 0));
8332 (insn && GET_CODE (insn) != CODE_LABEL
8333 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN);
8334 insn = NEXT_INSN (insn))
8335 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
8336 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
8337 {
8338 if (reg_referenced_p (reg, PATTERN (insn)))
8339 place = insn;
8340 break;
8341 }
8342
8343 /* If we found a place to put the link, place it there unless there
8344 is already a link to the same insn as LINK at that point. */
8345
8346 if (place)
8347 {
8348 rtx link2;
8349
8350 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
8351 if (XEXP (link2, 0) == XEXP (link, 0))
8352 break;
8353
8354 if (link2 == 0)
8355 {
8356 XEXP (link, 1) = LOG_LINKS (place);
8357 LOG_LINKS (place) = link;
8358 }
8359 }
8360 }
8361 }
8362 \f
8363 void
8364 dump_combine_stats (file)
8365 FILE *file;
8366 {
8367 fprintf
8368 (file,
8369 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
8370 combine_attempts, combine_merges, combine_extras, combine_successes);
8371 }
8372
8373 void
8374 dump_combine_total_stats (file)
8375 FILE *file;
8376 {
8377 fprintf
8378 (file,
8379 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
8380 total_attempts, total_merges, total_extras, total_successes);
8381 }
This page took 0.500117 seconds and 6 git commands to generate.