1 /* Optimize by combining instructions for GNU compiler.
2 Copyright (C) 1987, 1988, 1992 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21 /* This module is essentially the "combiner" phase of the U. of Arizona
22 Portable Optimizer, but redone to work on our list-structured
23 representation for RTL instead of their string representation.
24
25 The LOG_LINKS of each insn identify the most recent assignment
26 to each REG used in the insn. It is a list of previous insns,
27 each of which contains a SET for a REG that is used in this insn
28 and not used or set in between. LOG_LINKs never cross basic blocks.
29 They were set up by the preceding pass (lifetime analysis).
30
31 We try to combine each pair of insns joined by a logical link.
32 We also try to combine triples of insns A, B and C when
33 C has a link back to B and B has a link back to A.
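
   (For illustration, a hedged sketch of the list-walking idiom this
   refers to, as it appears later in this file:

	for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	  next = try_combine (insn, XEXP (links, 0), NULL_RTX);

   where XEXP (links, 0) is the linked insn and XEXP (links, 1) is the
   rest of the list.)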
34
35 LOG_LINKS does not have links for uses of CC0.  None are needed,
36 because the insn that sets CC0 is always immediately before the
37 insn that tests it.  So we always regard a branch insn as having
38 a logical link to the preceding insn.  The same is true for an
39 insn explicitly using CC0.
40
41 We check (with use_crosses_set_p) to avoid combining in such a way
42 as to move a computation to a place where its value would be different.
43
44 Combination is done by mathematically substituting the previous
45 insn(s) values for the regs they set into the expressions in
46 the later insns that refer to these regs. If the result is a valid insn
47 for our target machine, according to the machine description,
48 we install it, delete the earlier insns, and update the data flow
49 information (LOG_LINKS and REG_NOTES) for what we did.
50
51 There are a few exceptions where the dataflow information created by
52 flow.c isn't completely updated:
53
54 - reg_live_length is not updated
55 - reg_n_refs is not adjusted in the rare case when a register is
56 no longer required in a computation
57 - there are extremely rare cases (see distribute_regnotes) when a
58 REG_DEAD note is lost
59 - a LOG_LINKS entry that refers to an insn with multiple SETs may be
60 removed because there is no way to know which register it was
61 linking
62
63 To simplify substitution, we combine only when the earlier insn(s)
64 consist of only a single assignment. To simplify updating afterward,
65 we never combine when a subroutine call appears in the middle.
66
67 Since we do not represent assignments to CC0 explicitly except when that
68 is all an insn does, there is no LOG_LINKS entry in an insn that uses
69 the condition code for the insn that set the condition code.
70 Fortunately, these two insns must be consecutive.
71 Therefore, every JUMP_INSN is taken to have an implicit logical link
72 to the preceding insn. This is not quite right, since non-jumps can
73 also use the condition code; but in practice such insns would not
74 combine anyway. */
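
/* For illustration, a minimal sketch of the substitution described above;
   the insns and register numbers are made up for the example.  Combining

	(set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 4)))
	(set (mem:SI (reg:SI 100)) (const_int 0))

   substitutes the value set by the first insn for (reg:SI 100) in the
   second, giving

	(set (mem:SI (plus:SI (reg:SI 99) (const_int 4))) (const_int 0))

   which is installed only if the machine description recognizes it.  */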
75
76 #include <stdio.h>
77
78 #include "config.h"
79 #include "gvarargs.h"
80 #include "rtl.h"
81 #include "flags.h"
82 #include "regs.h"
83 #include "expr.h"
84 #include "basic-block.h"
85 #include "insn-config.h"
86 #include "insn-flags.h"
87 #include "insn-codes.h"
88 #include "insn-attr.h"
89 #include "recog.h"
90 #include "real.h"
91
92 /* It is not safe to use ordinary gen_lowpart in combine.
93 Use gen_lowpart_for_combine instead. See comments there. */
94 #define gen_lowpart dont_use_gen_lowpart_you_dummy
95
96 /* Number of attempts to combine instructions in this function. */
97
98 static int combine_attempts;
99
100 /* Number of attempts that got as far as substitution in this function. */
101
102 static int combine_merges;
103
104 /* Number of instructions combined with added SETs in this function. */
105
106 static int combine_extras;
107
108 /* Number of instructions combined in this function. */
109
110 static int combine_successes;
111
112 /* Totals over entire compilation. */
113
114 static int total_attempts, total_merges, total_extras, total_successes;
115 \f
116 /* Vector mapping INSN_UIDs to cuids.
117 The cuids are like uids but increase monotonically always.
118 Combine always uses cuids so that it can compare them.
119 But actually renumbering the uids, which we used to do,
120 proved to be a bad idea because it makes it hard to compare
121 the dumps produced by earlier passes with those from later passes. */
122
123 static int *uid_cuid;
124
125 /* Get the cuid of an insn. */
126
127 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
128
129 /* Maximum register number, which is the size of the tables below. */
130
131 static int combine_max_regno;
132
133 /* Record last point of death of (hard or pseudo) register n. */
134
135 static rtx *reg_last_death;
136
137 /* Record last point of modification of (hard or pseudo) register n. */
138
139 static rtx *reg_last_set;
140
141 /* Record the cuid of the last insn that invalidated memory
142 (anything that writes memory, and subroutine calls, but not pushes). */
143
144 static int mem_last_set;
145
146 /* Record the cuid of the last CALL_INSN
147 so we can tell whether a potential combination crosses any calls. */
148
149 static int last_call_cuid;
150
151 /* When `subst' is called, this is the insn that is being modified
152 (by combining in a previous insn). The PATTERN of this insn
153 is still the old pattern partially modified and it should not be
154 looked at, but this may be used to examine the successors of the insn
155 to judge whether a simplification is valid. */
156
157 static rtx subst_insn;
158
159 /* This is the lowest CUID that `subst' is currently dealing with.
160 get_last_value will not return a value if the register was set at or
161 after this CUID. If not for this mechanism, we could get confused if
162 I2 or I1 in try_combine were an insn that used the old value of a register
163 to obtain a new value. In that case, we might erroneously get the
164 new value of the register when we wanted the old one. */
165
166 static int subst_low_cuid;
167
168 /* This is the value of undobuf.num_undo when we started processing this
169 substitution.  This will prevent gen_rtx_combine from re-using a piece
170 from the previous expression. Doing so can produce circular rtl
171 structures. */
172
173 static int previous_num_undos;
174 \f
175 /* The next group of arrays allows the recording of the last value assigned
176 to (hard or pseudo) register n.  We use this information to see if an
177 operation being processed is redundant given a prior operation performed
178 on the register.  For example, an `and' with a constant is redundant if
179 all the bits that the constant clears are already known to be zero.
180
181 We use an approach similar to that used by cse, but change it in the
182 following ways:
183
184 (1) We do not want to reinitialize at each label.
185 (2) It is useful, but not critical, to know the actual value assigned
186 to a register. Often just its form is helpful.
187
188 Therefore, we maintain the following arrays:
189
190 reg_last_set_value		the last value assigned
191 reg_last_set_label		records the value of label_tick when the
192				register was assigned
193 reg_last_set_table_tick	records the value of label_tick when a
194				value using the register is assigned
195 reg_last_set_invalid	set to non-zero when it is not valid
196				to use the value of this register in some
197				register's value
198
199 To understand the usage of these tables, it is important to understand
200 the distinction between the value in reg_last_set_value being valid
201 and the register being validly contained in some other expression in the
202 table.
203
204 Entry I in reg_last_set_value is valid if it is non-zero, and either
205 reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.
206
207 Register I may validly appear in any expression returned for the value
208 of another register if reg_n_sets[i] is 1. It may also appear in the
209 value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
210 reg_last_set_invalid[j] is zero.
211
212 If an expression is found in the table containing a register which may
213 not validly appear in an expression, the register is replaced by
214 something that won't match, (clobber (const_int 0)).
215
216 reg_last_set_invalid[i] is set non-zero when register I is being assigned
217 to and reg_last_set_table_tick[i] == label_tick. */
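
/* A minimal sketch of the validity test described above, assuming I is a
   register number; illustrative only, hence not compiled in.  */
#if 0
  if (reg_last_set_value[i] != 0
      && (reg_n_sets[i] == 1 || reg_last_set_label[i] == label_tick))
    /* Entry I in reg_last_set_value may be used.  */ ;
#endif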
218
219 /* Record last value assigned to (hard or pseudo) register n. */
220
221 static rtx *reg_last_set_value;
222
223 /* Record the value of label_tick when the value for register n is placed in
224 reg_last_set_value[n]. */
225
226 static short *reg_last_set_label;
227
228 /* Record the value of label_tick when an expression involving register n
229 is placed in reg_last_set_value. */
230
231 static short *reg_last_set_table_tick;
232
233 /* Set non-zero if references to register n in expressions should not be
234 used. */
235
236 static char *reg_last_set_invalid;
237
238 /* Incremented for each label. */
239
240 static short label_tick;
241
242 /* Some registers that are set more than once and used in more than one
243 basic block are nevertheless always set in similar ways. For example,
244 a QImode register may be loaded from memory in two places on a machine
245 where byte loads zero extend.
246
247 We record in the following array what we know about the significant
248 bits of a register: a mask of the bits that may be nonzero; all
249 other bits are known to be zero.
250 If an entry is zero, it means that we don't know anything special. */
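
/* For example (a hedged illustration; the register number is made up):
   if every set of pseudo 65 zero extends a QImode memory load,
   reg_significant[65] will be 0xff, so a later

	(and:SI (reg:SI 65) (const_int 255))

   is known to be redundant.  */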
251
252 static HOST_WIDE_INT *reg_significant;
253
254 /* Mode used to compute significance in reg_significant. It is the largest
255 integer mode that can fit in HOST_BITS_PER_WIDE_INT. */
256
257 static enum machine_mode significant_mode;
258
259 /* Nonzero if we know that a register has some leading bits that are always
260 equal to the sign bit. */
261
262 static char *reg_sign_bit_copies;
263
264 /* Nonzero when reg_significant and reg_sign_bit_copies can be safely used.
265 It is zero while computing them. This prevents propagating values based
266 on previously set values, which can be incorrect if a variable
267 is modified in a loop. */
268
269 static int significant_valid;
270 \f
271 /* Record one modification to rtl structure
272 to be undone by storing old_contents into *where.
273 is_int is 1 if the contents are an int. */
274
275 struct undo
276 {
277 rtx *where;
278 rtx old_contents;
279 int is_int;
280 };
281
282 struct undo_int
283 {
284 int *where;
285 int old_contents;
286 int is_int;
287 };
288
289 /* Record a bunch of changes to be undone, up to MAX_UNDO of them.
290 num_undo says how many are currently recorded.
291
292 storage is nonzero if we must undo the allocation of new storage.
293 The value of storage is what to pass to obfree.
294
295 other_insn is nonzero if we have modified some other insn in the process
296 of working on subst_insn. It must be verified too. */
297
298 #define MAX_UNDO 50
299
300 struct undobuf
301 {
302 int num_undo;
303 char *storage;
304 struct undo undo[MAX_UNDO];
305 rtx other_insn;
306 };
307
308 static struct undobuf undobuf;
309
310 /* Substitute NEWVAL, an rtx expression, into INTO, a place in some
311 insn. The substitution can be undone by undo_all. If INTO is already
312 set to NEWVAL, do not record this change. Because computing NEWVAL might
313 also call SUBST, we have to compute it before we put anything into
314 the undo table. */
315
316 #define SUBST(INTO, NEWVAL) \
317 do { rtx _new = (NEWVAL); \
318 if (undobuf.num_undo < MAX_UNDO) \
319 { \
320 undobuf.undo[undobuf.num_undo].where = &INTO; \
321 undobuf.undo[undobuf.num_undo].old_contents = INTO; \
322 undobuf.undo[undobuf.num_undo].is_int = 0; \
323 INTO = _new; \
324 if (undobuf.undo[undobuf.num_undo].old_contents != INTO) \
325 undobuf.num_undo++; \
326 } \
327 } while (0)
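
/* A hedged sketch of typical use, illustrative only: a substitution is
   recorded with SUBST and backed out with undo_all if the rewritten
   pattern is not recognized.  */
#if 0
  SUBST (XEXP (x, 0), new_rtx);		/* change is recorded in undobuf */
  if (recog_for_combine (&newpat, i3, &new_i3_notes) < 0)
    undo_all ();			/* restores XEXP (x, 0) */
#endif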
328
329 /* Similar to SUBST, but NEWVAL is an int. INTO will normally be an XINT
330 expression.
331 Note that substitution for the value of a CONST_INT is not safe. */
332
333 #define SUBST_INT(INTO, NEWVAL) \
334 do { if (undobuf.num_undo < MAX_UNDO) \
335 { \
336 struct undo_int *u \
337 = (struct undo_int *)&undobuf.undo[undobuf.num_undo]; \
338 u->where = (int *) &INTO; \
339 u->old_contents = INTO; \
340 u->is_int = 1; \
341 INTO = NEWVAL; \
342 if (u->old_contents != INTO) \
343 undobuf.num_undo++; \
344 } \
345 } while (0)
346
347 /* Number of times the pseudo being substituted for
348 was found and replaced. */
349
350 static int n_occurrences;
351
352 static void set_significant ();
353 static void move_deaths ();
354 rtx remove_death ();
355 static void record_value_for_reg ();
356 static void record_dead_and_set_regs ();
357 static int use_crosses_set_p ();
358 static rtx try_combine ();
359 static rtx *find_split_point ();
360 static rtx subst ();
361 static void undo_all ();
362 static int reg_dead_at_p ();
363 static rtx expand_compound_operation ();
364 static rtx expand_field_assignment ();
365 static rtx make_extraction ();
366 static int get_pos_from_mask ();
367 static rtx force_to_mode ();
368 static rtx make_field_assignment ();
369 static rtx make_compound_operation ();
370 static rtx apply_distributive_law ();
371 static rtx simplify_and_const_int ();
372 static unsigned HOST_WIDE_INT significant_bits ();
373 static int num_sign_bit_copies ();
374 static int merge_outer_ops ();
375 static rtx simplify_shift_const ();
376 static int recog_for_combine ();
377 static rtx gen_lowpart_for_combine ();
378 static rtx gen_rtx_combine ();
379 static rtx gen_binary ();
380 static rtx gen_unary ();
381 static enum rtx_code simplify_comparison ();
382 static int reversible_comparison_p ();
383 static int get_last_value_validate ();
384 static rtx get_last_value ();
385 static void distribute_notes ();
386 static void distribute_links ();
387 \f
388 /* Main entry point for combiner. F is the first insn of the function.
389 NREGS is the first unused pseudo-reg number. */
390
391 void
392 combine_instructions (f, nregs)
393 rtx f;
394 int nregs;
395 {
396 register rtx insn, next, prev;
397 register int i;
398 register rtx links, nextlinks;
399
400 combine_attempts = 0;
401 combine_merges = 0;
402 combine_extras = 0;
403 combine_successes = 0;
404
405 combine_max_regno = nregs;
406
407 reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
408 reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
409 reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
410 reg_last_set_table_tick = (short *) alloca (nregs * sizeof (short));
411 reg_last_set_label = (short *) alloca (nregs * sizeof (short));
412 reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
413 reg_significant = (HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
414 reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));
415
416 bzero (reg_last_death, nregs * sizeof (rtx));
417 bzero (reg_last_set, nregs * sizeof (rtx));
418 bzero (reg_last_set_value, nregs * sizeof (rtx));
419 bzero (reg_last_set_table_tick, nregs * sizeof (short));
420 bzero (reg_last_set_invalid, nregs * sizeof (char));
421 bzero (reg_significant, nregs * sizeof (HOST_WIDE_INT));
422 bzero (reg_sign_bit_copies, nregs * sizeof (char));
423
424 init_recog_no_volatile ();
425
426 /* Compute maximum uid value so uid_cuid can be allocated. */
427
428 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
429 if (INSN_UID (insn) > i)
430 i = INSN_UID (insn);
431
432 uid_cuid = (int *) alloca ((i + 1) * sizeof (int));
433
434 significant_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
435
436 /* Don't use reg_significant when computing it. This can cause problems
437 when, for example, we have j <<= 1 in a loop. */
438
439 significant_valid = 0;
440
441 /* Compute the mapping from uids to cuids.
442 Cuids are numbers assigned to insns, like uids,
443 except that cuids increase monotonically through the code.
444
445 Scan all SETs and see if we can deduce anything about what
446 bits are significant for some registers. */
447
448 for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
449 {
450 INSN_CUID (insn) = ++i;
451 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
452 note_stores (PATTERN (insn), set_significant);
453 }
454
455 significant_valid = 1;
456
457 /* Now scan all the insns in forward order. */
458
459 label_tick = 1;
460 last_call_cuid = 0;
461 mem_last_set = 0;
462
463 for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
464 {
465 next = 0;
466
467 if (GET_CODE (insn) == CODE_LABEL)
468 label_tick++;
469
470 else if (GET_CODE (insn) == INSN
471 || GET_CODE (insn) == CALL_INSN
472 || GET_CODE (insn) == JUMP_INSN)
473 {
474 /* Try this insn with each insn it links back to. */
475
476 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
477 if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
478 goto retry;
479
480 /* Try each sequence of three linked insns ending with this one. */
481
482 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
483 for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
484 nextlinks = XEXP (nextlinks, 1))
485 if ((next = try_combine (insn, XEXP (links, 0),
486 XEXP (nextlinks, 0))) != 0)
487 goto retry;
488
489 #ifdef HAVE_cc0
490 /* Try to combine a jump insn that uses CC0
491 with a preceding insn that sets CC0, and maybe with its
492 logical predecessor as well.
493 This is how we make decrement-and-branch insns.
494 We need this special code because data flow connections
495 via CC0 do not get entered in LOG_LINKS. */
496
497 if (GET_CODE (insn) == JUMP_INSN
498 && (prev = prev_nonnote_insn (insn)) != 0
499 && GET_CODE (prev) == INSN
500 && sets_cc0_p (PATTERN (prev)))
501 {
502 if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
503 goto retry;
504
505 for (nextlinks = LOG_LINKS (prev); nextlinks;
506 nextlinks = XEXP (nextlinks, 1))
507 if ((next = try_combine (insn, prev,
508 XEXP (nextlinks, 0))) != 0)
509 goto retry;
510 }
511
512 /* Do the same for an insn that explicitly references CC0. */
513 if (GET_CODE (insn) == INSN
514 && (prev = prev_nonnote_insn (insn)) != 0
515 && GET_CODE (prev) == INSN
516 && sets_cc0_p (PATTERN (prev))
517 && GET_CODE (PATTERN (insn)) == SET
518 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
519 {
520 if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
521 goto retry;
522
523 for (nextlinks = LOG_LINKS (prev); nextlinks;
524 nextlinks = XEXP (nextlinks, 1))
525 if ((next = try_combine (insn, prev,
526 XEXP (nextlinks, 0))) != 0)
527 goto retry;
528 }
529
530 /* Finally, see if any of the insns that this insn links to
531 explicitly references CC0. If so, try this insn, that insn,
532 and its predecessor if it sets CC0. */
533 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
534 if (GET_CODE (XEXP (links, 0)) == INSN
535 && GET_CODE (PATTERN (XEXP (links, 0))) == SET
536 && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
537 && (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
538 && GET_CODE (prev) == INSN
539 && sets_cc0_p (PATTERN (prev))
540 && (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
541 goto retry;
542 #endif
543
544 /* Try combining an insn with two different insns whose results it
545 uses. */
546 for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
547 for (nextlinks = XEXP (links, 1); nextlinks;
548 nextlinks = XEXP (nextlinks, 1))
549 if ((next = try_combine (insn, XEXP (links, 0),
550 XEXP (nextlinks, 0))) != 0)
551 goto retry;
552
553 if (GET_CODE (insn) != NOTE)
554 record_dead_and_set_regs (insn);
555
556 retry:
557 ;
558 }
559 }
560
561 total_attempts += combine_attempts;
562 total_merges += combine_merges;
563 total_extras += combine_extras;
564 total_successes += combine_successes;
565 }
566 \f
567 /* Called via note_stores. If X is a pseudo that is used in more than
568 one basic block, is narrower than HOST_BITS_PER_WIDE_INT, and is being
569 set, record what bits are significant. If we are clobbering X,
570 ignore this "set" because the clobbered value won't be used.
571
572 If we are setting only a portion of X and we can't figure out what
573 portion, assume all bits may be nonzero, since we don't know what
574 will be happening.
575
576 Similarly, set how many bits of X are known to be copies of the sign bit
577 at all locations in the function. This is the smallest number implied
578 by any set of X. */
579
580 static void
581 set_significant (x, set)
582 rtx x;
583 rtx set;
584 {
585 int num;
586
587 if (GET_CODE (x) == REG
588 && REGNO (x) >= FIRST_PSEUDO_REGISTER
589 && reg_n_sets[REGNO (x)] > 1
590 && reg_basic_block[REGNO (x)] < 0
591 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
592 {
593 if (GET_CODE (set) == CLOBBER)
594 return;
595
596 /* If this is a complex assignment, see if we can convert it into a
597 simple assignment. */
598 set = expand_field_assignment (set);
599 if (SET_DEST (set) == x)
600 {
601 reg_significant[REGNO (x)]
602 |= significant_bits (SET_SRC (set), significant_mode);
603 num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
604 if (reg_sign_bit_copies[REGNO (x)] == 0
605 || reg_sign_bit_copies[REGNO (x)] > num)
606 reg_sign_bit_copies[REGNO (x)] = num;
607 }
608 else
609 {
610 reg_significant[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
611 reg_sign_bit_copies[REGNO (x)] = 0;
612 }
613 }
614 }
615 \f
616 /* See if INSN can be combined into I3.  PRED and SUCC are, if nonzero,
617 insns that were previously combined into I3 or that will be combined
618 into the merger of INSN and I3.
619
620 Return 0 if the combination is not allowed for any reason.
621
622 If the combination is allowed, *PDEST will be set to the single
623 destination of INSN and *PSRC to the single source, and this function
624 will return 1. */
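
/* A hedged sketch of how try_combine calls this function (see below);
   illustrative only.  */
#if 0
  rtx i2dest, i2src;

  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src))
    return 0;			/* combination is not allowed */
#endif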
625
626 static int
627 can_combine_p (insn, i3, pred, succ, pdest, psrc)
628 rtx insn;
629 rtx i3;
630 rtx pred, succ;
631 rtx *pdest, *psrc;
632 {
633 int i;
634 rtx set = 0, src, dest;
635 rtx p, link;
636 int all_adjacent = (succ ? (next_active_insn (insn) == succ
637 && next_active_insn (succ) == i3)
638 : next_active_insn (insn) == i3);
639
640 /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
641 or a PARALLEL consisting of such a SET and CLOBBERs.
642
643 If INSN has CLOBBER parallel parts, ignore them for our processing.
644 By definition, these happen during the execution of the insn. When it
645 is merged with another insn, all bets are off. If they are, in fact,
646 needed and aren't also supplied in I3, they may be added by
647 recog_for_combine. Otherwise, it won't match.
648
649 We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
650 note.
651
652 Get the source and destination of INSN. If more than one, can't
653 combine. */
654
655 if (GET_CODE (PATTERN (insn)) == SET)
656 set = PATTERN (insn);
657 else if (GET_CODE (PATTERN (insn)) == PARALLEL
658 && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
659 {
660 for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
661 {
662 rtx elt = XVECEXP (PATTERN (insn), 0, i);
663
664 switch (GET_CODE (elt))
665 {
666 /* We can ignore CLOBBERs. */
667 case CLOBBER:
668 break;
669
670 case SET:
671 /* Ignore SETs whose result isn't used but not those that
672 have side-effects. */
673 if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
674 && ! side_effects_p (elt))
675 break;
676
677 /* If we have already found a SET, this is a second one and
678 so we cannot combine with this insn. */
679 if (set)
680 return 0;
681
682 set = elt;
683 break;
684
685 default:
686 /* Anything else means we can't combine. */
687 return 0;
688 }
689 }
690
691 if (set == 0
692 /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
693 so don't do anything with it. */
694 || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
695 return 0;
696 }
697 else
698 return 0;
699
700 if (set == 0)
701 return 0;
702
703 set = expand_field_assignment (set);
704 src = SET_SRC (set), dest = SET_DEST (set);
705
706 /* Don't eliminate a store in the stack pointer. */
707 if (dest == stack_pointer_rtx
708 /* Don't install a subreg involving two modes not tieable.
709 It can worsen register allocation, and can even make invalid reload
710 insns, since the reg inside may need to be copied from in the
711 outside mode, and that may be invalid if it is an fp reg copied in
712 integer mode. As a special exception, we can allow this if
713 I3 is simply copying DEST, a REG, to CC0. */
714 || (GET_CODE (src) == SUBREG
715 && ! MODES_TIEABLE_P (GET_MODE (src), GET_MODE (SUBREG_REG (src)))
716 #ifdef HAVE_cc0
717 && ! (GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
718 && SET_DEST (PATTERN (i3)) == cc0_rtx
719 && GET_CODE (dest) == REG && dest == SET_SRC (PATTERN (i3)))
720 #endif
721 )
722 /* If we couldn't eliminate a field assignment, we can't combine. */
723 || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
724 /* Don't combine with an insn that sets a register to itself if it has
725 a REG_EQUAL note. This may be part of a REG_NO_CONFLICT sequence. */
726 || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
727 /* Can't merge a function call. */
728 || GET_CODE (src) == CALL
729 /* Don't substitute into an incremented register. */
730 || FIND_REG_INC_NOTE (i3, dest)
731 || (succ && FIND_REG_INC_NOTE (succ, dest))
732 /* Don't combine the end of a libcall into anything. */
733 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
734 /* Make sure that DEST is not used after SUCC but before I3. */
735 || (succ && ! all_adjacent
736 && reg_used_between_p (dest, succ, i3))
737 /* Make sure that the value that is to be substituted for the register
738 does not use any registers whose values alter in between. However,
739 if the insns are adjacent, a use can't cross a set even though we
740 think it might (this can happen for a sequence of insns each setting
741 the same destination; reg_last_set of that register might point to
742 a NOTE). Also, don't move a volatile asm across any other insns. */
743 || (! all_adjacent
744 && (use_crosses_set_p (src, INSN_CUID (insn))
745 || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))))
746 /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
747 better register allocation by not doing the combine. */
748 || find_reg_note (i3, REG_NO_CONFLICT, dest)
749 || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
750 /* Don't combine across a CALL_INSN, because that would possibly
751 change whether the life span of some REGs crosses calls or not,
752 and it is a pain to update that information.
753 Exception: if source is a constant, moving it later can't hurt.
754 Accept that special case, because it helps -fforce-addr a lot. */
755 || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
756 return 0;
757
758 /* DEST must either be a REG or CC0. */
759 if (GET_CODE (dest) == REG)
760 {
761 /* If register alignment is being enforced for multi-word items in all
762 cases except for parameters, it is possible to have a register copy
763 insn referencing a hard register that is not allowed to contain the
764 mode being copied and which would not be valid as an operand of most
765 insns. Eliminate this problem by not combining with such an insn.
766
767 Also, on some machines we don't want to extend the life of a hard
768 register. */
769
770 if (GET_CODE (src) == REG
771 && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
772 && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
773 #ifdef SMALL_REGISTER_CLASSES
774 /* Don't extend the life of a hard register. */
775 || REGNO (src) < FIRST_PSEUDO_REGISTER
776 #else
777 || (REGNO (src) < FIRST_PSEUDO_REGISTER
778 && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))
779 #endif
780 ))
781 return 0;
782 }
783 else if (GET_CODE (dest) != CC0)
784 return 0;
785
786 /* Don't substitute for a register intended as a clobberable operand. */
787 if (GET_CODE (PATTERN (i3)) == PARALLEL)
788 for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
789 if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
790 && rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest))
791 return 0;
792
793 /* If INSN contains anything volatile, or is an `asm' (whether volatile
794 or not), reject, unless nothing volatile comes between it and I3,
795 with the exception of SUCC. */
796
797 if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
798 for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
799 if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
800 && p != succ && volatile_refs_p (PATTERN (p)))
801 return 0;
802
803 /* If INSN or I2 contains an autoincrement or autodecrement,
804 make sure that register is not used between there and I3,
805 and not already used in I3 either.
806 Also insist that I3 not be a jump; if it were one
807 and the incremented register were spilled, we would lose. */
808
809 #ifdef AUTO_INC_DEC
810 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
811 if (REG_NOTE_KIND (link) == REG_INC
812 && (GET_CODE (i3) == JUMP_INSN
813 || reg_used_between_p (XEXP (link, 0), insn, i3)
814 || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
815 return 0;
816 #endif
817
818 #ifdef HAVE_cc0
819 /* Don't combine an insn that follows a CC0-setting insn.
820 An insn that uses CC0 must not be separated from the one that sets it.
821 We do, however, allow I2 to follow a CC0-setting insn if that insn
822 is passed as I1; in that case it will be deleted also.
823 We also allow combining in this case if all the insns are adjacent
824 because that would leave the two CC0 insns adjacent as well.
825 It would be more logical to test whether CC0 occurs inside I1 or I2,
826 but that would be much slower, and this ought to be equivalent. */
827
828 p = prev_nonnote_insn (insn);
829 if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
830 && ! all_adjacent)
831 return 0;
832 #endif
833
834 /* If we get here, we have passed all the tests and the combination is
835 to be allowed. */
836
837 *pdest = dest;
838 *psrc = src;
839
840 return 1;
841 }
842 \f
843 /* LOC is the location within I3 that contains its pattern or the component
844 of a PARALLEL of the pattern.  We check that it is valid for combining.
845
846 One problem is that if I3 modifies its output, as opposed to replacing it
847 entirely, we can't allow the output to contain I2DEST or I1DEST, since
848 doing so would produce an insn that is not equivalent to the original insns.
849
850 Consider:
851
852 (set (reg:DI 101) (reg:DI 100))
853 (set (subreg:SI (reg:DI 101) 0) <foo>)
854
855 This is NOT equivalent to:
856
857 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
858 (set (reg:DI 101) (reg:DI 100))])
859
860 Not only does this modify 100 (in which case it might still be valid
861 if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.
862
863 We can also run into a problem if I2 sets a register that I1
864 uses and I1 gets directly substituted into I3 (not via I2). In that
865 case, we would be getting the wrong value of I2DEST into I3, so we
866 must reject the combination. This case occurs when I2 and I1 both
867 feed into I3, rather than when I1 feeds into I2, which feeds into I3.
868 If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
869 of a SET must prevent combination from occurring.
870
871 On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
872 if the destination of a SET is a hard register.
873
874 Before doing the above check, we first try to expand a field assignment
875 into a set of logical operations.
876
877 If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
878 we place a register that is both set and used within I3. If more than one
879 such register is detected, we fail.
880
881 Return 1 if the combination is valid, zero otherwise. */
882
883 static int
884 combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
885 rtx i3;
886 rtx *loc;
887 rtx i2dest;
888 rtx i1dest;
889 int i1_not_in_src;
890 rtx *pi3dest_killed;
891 {
892 rtx x = *loc;
893
894 if (GET_CODE (x) == SET)
895 {
896 rtx set = expand_field_assignment (x);
897 rtx dest = SET_DEST (set);
898 rtx src = SET_SRC (set);
899 rtx inner_dest = dest, inner_src = src;
900
901 SUBST (*loc, set);
902
903 while (GET_CODE (inner_dest) == STRICT_LOW_PART
904 || GET_CODE (inner_dest) == SUBREG
905 || GET_CODE (inner_dest) == ZERO_EXTRACT)
906 inner_dest = XEXP (inner_dest, 0);
907
908 /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
909 was added. */
910 #if 0
911 while (GET_CODE (inner_src) == STRICT_LOW_PART
912 || GET_CODE (inner_src) == SUBREG
913 || GET_CODE (inner_src) == ZERO_EXTRACT)
914 inner_src = XEXP (inner_src, 0);
915
916 /* If it is better that two different modes keep two different pseudos,
917 avoid combining them. This avoids producing the following pattern
918 on a 386:
919 (set (subreg:SI (reg/v:QI 21) 0)
920 (lshiftrt:SI (reg/v:SI 20)
921 (const_int 24)))
922 If that were made, reload could not handle the pair of
923 reg 20/21, since it would try to get any GENERAL_REGS
924 but some of them don't handle QImode. */
925
926 if (rtx_equal_p (inner_src, i2dest)
927 && GET_CODE (inner_dest) == REG
928 && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
929 return 0;
930 #endif
931
932 /* Check for the case where I3 modifies its output, as
933 discussed above. */
934 if ((inner_dest != dest
935 && (reg_overlap_mentioned_p (i2dest, inner_dest)
936 || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
937 /* This is the same test done in can_combine_p except that we
938 allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
939 CALL operation. */
940 || (GET_CODE (inner_dest) == REG
941 && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
942 #ifdef SMALL_REGISTER_CLASSES
943 && GET_CODE (src) != CALL
944 #else
945 && ! HARD_REGNO_MODE_OK (REGNO (inner_dest),
946 GET_MODE (inner_dest))
947 #endif
948 )
949
950 || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
951 return 0;
952
953 /* If DEST is used in I3, it is being killed in this insn,
954 so record that for later. */
955 if (pi3dest_killed && GET_CODE (dest) == REG
956 && reg_referenced_p (dest, PATTERN (i3)))
957 {
958 if (*pi3dest_killed)
959 return 0;
960
961 *pi3dest_killed = dest;
962 }
963 }
964
965 else if (GET_CODE (x) == PARALLEL)
966 {
967 int i;
968
969 for (i = 0; i < XVECLEN (x, 0); i++)
970 if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
971 i1_not_in_src, pi3dest_killed))
972 return 0;
973 }
974
975 return 1;
976 }
977 \f
978 /* Try to combine the insns I1 and I2 into I3.
979 Here I1 and I2 appear earlier than I3.
980 I1 can be zero; then we combine just I2 into I3.
981
982 If we are combining three insns and the resulting insn is not recognized,
983 try splitting it into two insns. If that happens, I2 and I3 are retained
984 and I1 is pseudo-deleted by turning it into a NOTE. Otherwise, I1 and I2
985 are pseudo-deleted.
986
987 If we created two insns, return I2; otherwise return I3.
988 Return 0 if the combination does not work. Then nothing is changed. */
989
990 static rtx
991 try_combine (i3, i2, i1)
992 register rtx i3, i2, i1;
993 {
994 /* New patterns for I3 and I2, respectively. */
995 rtx newpat, newi2pat = 0;
996 /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead. */
997 int added_sets_1, added_sets_2;
998 /* Total number of SETs to put into I3. */
999 int total_sets;
1000 /* Nonzero if I2's body now appears in I3. */
1001 int i2_is_used;
1002 /* INSN_CODEs for new I3, new I2, and user of condition code. */
1003 int insn_code_number, i2_code_number, other_code_number;
1004 /* Contains I3 if the destination of I3 is used in its source, which means
1005 that the old life of I3 is being killed. If that usage is placed into
1006 I2 and not in I3, a REG_DEAD note must be made. */
1007 rtx i3dest_killed = 0;
1008 /* SET_DEST and SET_SRC of I2 and I1. */
1009 rtx i2dest, i2src, i1dest = 0, i1src = 0;
1010 /* PATTERN (I2), or a copy of it in certain cases. */
1011 rtx i2pat;
1012 /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC. */
1013 int i2dest_in_i2src, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
1014 int i1_feeds_i3 = 0;
1015 /* Notes that must be added to REG_NOTES in I3 and I2. */
1016 rtx new_i3_notes, new_i2_notes;
1017
1018 int maxreg;
1019 rtx temp;
1020 register rtx link;
1021 int i;
1022
1023 /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
1024 This can occur when flow deletes an insn that it has merged into an
1025 auto-increment address. We also can't do anything if I3 has a
1026 REG_LIBCALL note since we don't want to disrupt the contiguity of a
1027 libcall. */
1028
1029 if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
1030 || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
1031 || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
1032 || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
1033 return 0;
1034
1035 combine_attempts++;
1036
1037 undobuf.num_undo = previous_num_undos = 0;
1038 undobuf.other_insn = 0;
1039
1040 /* Save the current high-water-mark so we can free storage if we didn't
1041 accept this combination. */
1042 undobuf.storage = (char *) oballoc (0);
1043
1044 /* If I1 and I2 both feed I3, they can be in any order. To simplify the
1045 code below, set I1 to be the earlier of the two insns. */
1046 if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
1047 temp = i1, i1 = i2, i2 = temp;
1048
1049 /* First check for one important special-case that the code below will
1050 not handle. Namely, the case where I1 is zero, I2 has multiple sets,
1051 and I3 is a SET whose SET_SRC is a SET_DEST in I2. In that case,
1052 we may be able to replace that destination with the destination of I3.
1053 This occurs in the common code where we compute both a quotient and
1054 remainder into a structure, in which case we want to do the computation
1055 directly into the structure to avoid register-register copies.
1056
1057 We make very conservative checks below and only try to handle the
1058 most common cases of this. For example, we only handle the case
1059 where I2 and I3 are adjacent to avoid making difficult register
1060 usage tests. */
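
/* A hedged illustration of the case meant above; the register numbers
   are made up.  With

	I2: (parallel [(set (reg:SI 100) (div:SI (reg:SI 98) (reg:SI 99)))
		       (set (reg:SI 101) (mod:SI (reg:SI 98) (reg:SI 99)))])
	I3: (set (mem:SI (reg:SI 70)) (reg:SI 101))

   we replace (reg:SI 101) in I2 with I3's destination, computing the
   remainder directly into the structure.  */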
1061
1062 if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
1063 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1064 && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
1065 #ifdef SMALL_REGISTER_CLASSES
1066 && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
1067 || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER)
1068 #endif
1069 && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
1070 && GET_CODE (PATTERN (i2)) == PARALLEL
1071 && ! side_effects_p (SET_DEST (PATTERN (i3)))
1072 /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
1073 below would need to check what is inside (and reg_overlap_mentioned_p
1074 doesn't support those codes anyway). Don't allow those destinations;
1075 the resulting insn isn't likely to be recognized anyway. */
1076 && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
1077 && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
1078 && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
1079 SET_DEST (PATTERN (i3)))
1080 && next_real_insn (i2) == i3)
1081 {
1082 rtx p2 = PATTERN (i2);
1083
1084 /* Make sure that the destination of I3,
1085 which we are going to substitute into one output of I2,
1086 is not used within another output of I2. We must avoid making this:
1087 (parallel [(set (mem (reg 69)) ...)
1088 (set (reg 69) ...)])
1089 which is not well-defined as to order of actions.
1090 (Besides, reload can't handle output reloads for this.)
1091
1092 The problem can also happen if the dest of I3 is a memory ref,
1093 if another dest in I2 is an indirect memory ref. */
1094 for (i = 0; i < XVECLEN (p2, 0); i++)
1095 if (GET_CODE (XVECEXP (p2, 0, i)) == SET
1096 && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
1097 SET_DEST (XVECEXP (p2, 0, i))))
1098 break;
1099
1100 if (i == XVECLEN (p2, 0))
1101 for (i = 0; i < XVECLEN (p2, 0); i++)
1102 if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
1103 {
1104 combine_merges++;
1105
1106 subst_insn = i3;
1107 subst_low_cuid = INSN_CUID (i2);
1108
1109 added_sets_2 = 0;
1110 i2dest = SET_SRC (PATTERN (i3));
1111
1112 /* Replace the dest in I2 with our dest and make the resulting
1113 insn the new pattern for I3. Then skip to where we
1114 validate the pattern. Everything was set up above. */
1115 SUBST (SET_DEST (XVECEXP (p2, 0, i)),
1116 SET_DEST (PATTERN (i3)));
1117
1118 newpat = p2;
1119 goto validate_replacement;
1120 }
1121 }
1122
1123 #ifndef HAVE_cc0
1124 /* If we have no I1 and I2 looks like:
1125 (parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
1126 (set Y OP)])
1127 make up a dummy I1 that is
1128 (set Y OP)
1129 and change I2 to be
1130 (set (reg:CC X) (compare:CC Y (const_int 0)))
1131
1132 (We can ignore any trailing CLOBBERs.)
1133
1134 This undoes a previous combination and allows us to match a branch-and-
1135 decrement insn. */
1136
1137 if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
1138 && XVECLEN (PATTERN (i2), 0) >= 2
1139 && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
1140 && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
1141 == MODE_CC)
1142 && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
1143 && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
1144 && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
1145 && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
1146 && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
1147 SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
1148 {
1149 for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
1150 if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
1151 break;
1152
1153 if (i == 1)
1154 {
1155 /* We make I1 with the same INSN_UID as I2. This gives it
1156 the same INSN_CUID for value tracking. Our fake I1 will
1157 never appear in the insn stream so giving it the same INSN_UID
1158 as I2 will not cause a problem. */
1159
1160 i1 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
1161 XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);
1162
1163 SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
1164 SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
1165 SET_DEST (PATTERN (i1)));
1166 }
1167 }
1168 #endif
1169
1170 /* Verify that I2 and I1 are valid for combining. */
1171 if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
1172 || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
1173 {
1174 undo_all ();
1175 return 0;
1176 }
1177
1178 /* Record whether I2DEST is used in I2SRC and similarly for the other
1179 cases. Knowing this will help in register status updating below. */
1180 i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
1181 i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
1182 i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);
1183
1184 /* See if I1 directly feeds into I3. It does if I1DEST is not used
1185 in I2SRC. */
1186 i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);
1187
1188 /* Ensure that I3's pattern can be the destination of combines. */
1189 if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
1190 i1 && i2dest_in_i1src && i1_feeds_i3,
1191 &i3dest_killed))
1192 {
1193 undo_all ();
1194 return 0;
1195 }
1196
1197 /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
1198 We used to do this EXCEPT in one case: I3 has a post-inc in an
1199 output operand. However, that exception can give rise to insns like
1200 mov r3,(r3)+
1201 which is a famous insn on the PDP-11 where the value of r3 used as the
1202 source was model-dependent. Avoid this sort of thing. */
1203
1204 #if 0
1205 if (!(GET_CODE (PATTERN (i3)) == SET
1206 && GET_CODE (SET_SRC (PATTERN (i3))) == REG
1207 && GET_CODE (SET_DEST (PATTERN (i3))) == MEM
1208 && (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
1209 || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
1210 /* It's not the exception. */
1211 #endif
1212 #ifdef AUTO_INC_DEC
1213 for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
1214 if (REG_NOTE_KIND (link) == REG_INC
1215 && (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
1216 || (i1 != 0
1217 && reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
1218 {
1219 undo_all ();
1220 return 0;
1221 }
1222 #endif
1223
1224 /* See if the SETs in I1 or I2 need to be kept around in the merged
1225 instruction: whenever the value set there is still needed past I3.
1226 For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.
1227
1228 For the SET in I1, we have two cases: If I1 and I2 independently
1229 feed into I3, the set in I1 needs to be kept around if I1DEST dies
1230 or is set in I3. Otherwise (if I1 feeds I2 which feeds I3), the set
1231 in I1 needs to be kept around unless I1DEST dies or is set in either
1232 I2 or I3. We can distinguish these cases by seeing if I2SRC mentions
1233 I1DEST. If so, we know I1 feeds into I2. */
1234
1235 added_sets_2 = ! dead_or_set_p (i3, i2dest);
1236
1237 added_sets_1
1238 = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
1239 : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));
1240
1241 /* If the set in I2 needs to be kept around, we must make a copy of
1242 PATTERN (I2), so that when we substitute I1SRC for I1DEST in
1243 PATTERN (I2), we are only substituting for the original I1DEST, not into
1244 an already-substituted copy. This also prevents making self-referential
1245 rtx. If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
1246 I2DEST. */
1247
1248 i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
1249 ? gen_rtx (SET, VOIDmode, i2dest, i2src)
1250 : PATTERN (i2));
1251
1252 if (added_sets_2)
1253 i2pat = copy_rtx (i2pat);
1254
1255 combine_merges++;
1256
1257 /* Substitute in the latest insn for the regs set by the earlier ones. */
1258
1259 maxreg = max_reg_num ();
1260
1261 subst_insn = i3;
1262
1263 /* It is possible that the source of I2 or I1 may be performing an
1264 unneeded operation, such as a ZERO_EXTEND of something that is known
1265 to have the high part zero. Handle that case by letting subst look at
1266 the innermost one of them.
1267
1268 Another way to do this would be to have a function that tries to
1269 simplify a single insn instead of merging two or more insns. We don't
1270 do this because of the potential of infinite loops and because
1271 of the potential extra memory required. However, doing it the way
1272 we are is a bit of a kludge and doesn't catch all cases.
1273
1274 But only do this if -fexpensive-optimizations since it slows things down
1275 and doesn't usually win. */
1276
1277 if (flag_expensive_optimizations)
1278 {
1279 /* Pass pc_rtx so no substitutions are done, just simplifications.
1280 The cases that we are interested in here do not involve the few
1281 cases where is_replaced is checked. */
1282 if (i1)
1283 {
1284 subst_low_cuid = INSN_CUID (i1);
1285 i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
1286 }
1287 else
1288 {
1289 subst_low_cuid = INSN_CUID (i2);
1290 i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
1291 }
1292
1293 previous_num_undos = undobuf.num_undo;
1294 }
1295
1296 #ifndef HAVE_cc0
1297 /* Many machines that don't use CC0 have insns that can both perform an
1298 arithmetic operation and set the condition code. These operations will
1299 be represented as a PARALLEL with the first element of the vector
1300 being a COMPARE of an arithmetic operation with the constant zero.
1301 The second element of the vector will set some pseudo to the result
1302 of the same arithmetic operation. If we simplify the COMPARE, we won't
1303 match such a pattern and so will generate an extra insn. Here we test
1304 for this case, where both the comparison and the operation result are
1305 needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
1306 I2SRC. Later we will make the PARALLEL that contains I2. */
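
/* A hedged sketch of the shape described above, with made-up operands:

	(parallel [(set (reg:CC 17)
			(compare:CC (plus:SI (reg:SI 99) (const_int -1))
				    (const_int 0)))
		   (set (reg:SI 99) (plus:SI (reg:SI 99) (const_int -1)))])  */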
1307
1308 if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
1309 && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
1310 && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
1311 && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
1312 {
1313 rtx *cc_use;
1314 enum machine_mode compare_mode;
1315
1316 newpat = PATTERN (i3);
1317 SUBST (XEXP (SET_SRC (newpat), 0), i2src);
1318
1319 i2_is_used = 1;
1320
1321 #ifdef EXTRA_CC_MODES
1322 /* See if a COMPARE with the operand we substituted in should be done
1323 with the mode that is currently being used. If not, do the same
1324 processing we do in `subst' for a SET; namely, if the destination
1325 is used only once, try to replace it with a register of the proper
1326 mode and also replace the COMPARE. */
1327 if (undobuf.other_insn == 0
1328 && (cc_use = find_single_use (SET_DEST (newpat), i3,
1329 &undobuf.other_insn))
1330 && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
1331 i2src, const0_rtx))
1332 != GET_MODE (SET_DEST (newpat))))
1333 {
1334 int regno = REGNO (SET_DEST (newpat));
1335 rtx new_dest = gen_rtx (REG, compare_mode, regno);
1336
1337 if (regno < FIRST_PSEUDO_REGISTER
1338 || (reg_n_sets[regno] == 1 && ! added_sets_2
1339 && ! REG_USERVAR_P (SET_DEST (newpat))))
1340 {
1341 if (regno >= FIRST_PSEUDO_REGISTER)
1342 SUBST (regno_reg_rtx[regno], new_dest);
1343
1344 SUBST (SET_DEST (newpat), new_dest);
1345 SUBST (XEXP (*cc_use, 0), new_dest);
1346 SUBST (SET_SRC (newpat),
1347 gen_rtx_combine (COMPARE, compare_mode,
1348 i2src, const0_rtx));
1349 }
1350 else
1351 undobuf.other_insn = 0;
1352 }
1353 #endif
1354 }
1355 else
1356 #endif
1357 {
1358 n_occurrences = 0; /* `subst' counts here */
1359
1360 /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
1361 need to make a unique copy of I2SRC each time we substitute it
1362 to avoid self-referential rtl. */
1363
1364 subst_low_cuid = INSN_CUID (i2);
1365 newpat = subst (PATTERN (i3), i2dest, i2src, 0,
1366 ! i1_feeds_i3 && i1dest_in_i1src);
1367 previous_num_undos = undobuf.num_undo;
1368
1369 /* Record whether i2's body now appears within i3's body. */
1370 i2_is_used = n_occurrences;
1371 }
1372
1373 /* If we already got a failure, don't try to do more. Otherwise,
1374 try to substitute in I1 if we have it. */
1375
1376 if (i1 && GET_CODE (newpat) != CLOBBER)
1377 {
1378 /* Before we can do this substitution, we must redo the test done
1379 above (see detailed comments there) that ensures that I1DEST
1380 isn't mentioned in any SETs in NEWPAT that are field assignments. */
1381
1382 if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
1383 0, NULL_PTR))
1384 {
1385 undo_all ();
1386 return 0;
1387 }
1388
1389 n_occurrences = 0;
1390 subst_low_cuid = INSN_CUID (i1);
1391 newpat = subst (newpat, i1dest, i1src, 0, 0);
1392 previous_num_undos = undobuf.num_undo;
1393 }
1394
1395 /* Fail if an autoincrement side-effect has been duplicated. Be careful
1396 to count all the ways that I2SRC and I1SRC can be used. */
1397 if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
1398 && i2_is_used + added_sets_2 > 1)
1399 || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
1400 && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
1401 > 1))
1402 /* Fail if we tried to make a new register (we used to abort, but there's
1403 really no reason to). */
1404 || max_reg_num () != maxreg
1405 /* Fail if we couldn't do something and have a CLOBBER. */
1406 || GET_CODE (newpat) == CLOBBER)
1407 {
1408 undo_all ();
1409 return 0;
1410 }
1411
1412 /* If the actions of the earlier insns must be kept
1413 in addition to substituting them into the latest one,
1414 we must make a new PARALLEL for the latest insn
1415 to hold the additional SETs. */
1416
1417 if (added_sets_1 || added_sets_2)
1418 {
1419 combine_extras++;
1420
1421 if (GET_CODE (newpat) == PARALLEL)
1422 {
1423 rtvec old = XVEC (newpat, 0);
1424 total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
1425 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1426 bcopy (&old->elem[0], &XVECEXP (newpat, 0, 0),
1427 sizeof (old->elem[0]) * old->num_elem);
1428 }
1429 else
1430 {
1431 rtx old = newpat;
1432 total_sets = 1 + added_sets_1 + added_sets_2;
1433 newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
1434 XVECEXP (newpat, 0, 0) = old;
1435 }
1436
1437 if (added_sets_1)
1438 XVECEXP (newpat, 0, --total_sets)
1439 = (GET_CODE (PATTERN (i1)) == PARALLEL
1440 ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));
1441
1442 if (added_sets_2)
1443 {
1444 /* If there is no I1, use I2's body as is. We used to also not do
1445 the subst call below if I2 was substituted into I3,
1446 but that could lose a simplification. */
1447 if (i1 == 0)
1448 XVECEXP (newpat, 0, --total_sets) = i2pat;
1449 else
1450 /* See comment where i2pat is assigned. */
1451 XVECEXP (newpat, 0, --total_sets)
1452 = subst (i2pat, i1dest, i1src, 0, 0);
1453 }
1454 }
1455
1456 /* We come here when we are replacing a destination in I2 with the
1457 destination of I3. */
1458 validate_replacement:
1459
1460 /* Is the result of combination a valid instruction? */
1461 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1462
1463 /* If the result isn't valid, see if it is a PARALLEL of two SETs where
1464 the second SET's destination is a register that is unused. In that case,
1465 we just need the first SET. This can occur when simplifying a divmod
1466 insn. We *must* test for this case here because the code below that
1467 splits two independent SETs doesn't handle this case correctly when it
1468 updates the register status. Also check the case where the first
1469 SET's destination is unused. That would not cause incorrect code, but
1470 does cause an unneeded insn to remain. */
1471
1472 if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1473 && XVECLEN (newpat, 0) == 2
1474 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1475 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1476 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
1477 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
1478 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
1479 && asm_noperands (newpat) < 0)
1480 {
1481 newpat = XVECEXP (newpat, 0, 0);
1482 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1483 }
1484
1485 else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
1486 && XVECLEN (newpat, 0) == 2
1487 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1488 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1489 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
1490 && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
1491 && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
1492 && asm_noperands (newpat) < 0)
1493 {
1494 newpat = XVECEXP (newpat, 0, 1);
1495 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1496 }
1497
1498 /* See if this is an XOR. If so, perhaps the problem is that the
1499 constant is out of range. Replace it with a complemented XOR with
1500 a complemented constant; it might be in range. */
1501
1502 else if (insn_code_number < 0 && GET_CODE (newpat) == SET
1503 && GET_CODE (SET_SRC (newpat)) == XOR
1504 && GET_CODE (XEXP (SET_SRC (newpat), 1)) == CONST_INT
1505 && ((temp = simplify_unary_operation (NOT,
1506 GET_MODE (SET_SRC (newpat)),
1507 XEXP (SET_SRC (newpat), 1),
1508 GET_MODE (SET_SRC (newpat))))
1509 != 0))
1510 {
1511 enum machine_mode i_mode = GET_MODE (SET_SRC (newpat));
1512 rtx pat
1513 = gen_rtx_combine (SET, VOIDmode, SET_DEST (newpat),
1514 gen_unary (NOT, i_mode,
1515 gen_binary (XOR, i_mode,
1516 XEXP (SET_SRC (newpat), 0),
1517 temp)));
1518
1519 insn_code_number = recog_for_combine (&pat, i3, &new_i3_notes);
1520 if (insn_code_number >= 0)
1521 newpat = pat;
1522 }
1523
1524 /* If we were combining three insns and the result is a simple SET
1525 with no ASM_OPERANDS that wasn't recognized, try to split it into two
1526 insns. There are two ways to do this. It can be split using a
1527 machine-specific method (like when you have an addition of a large
1528 constant) or by combine itself, in the function find_split_point. */
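/* E.g. (illustrative): an unrecognized

	(set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 65540)))

   might be split into an add of (const_int 65536) followed by an add
   of (const_int 4), each of which matches an immediate-add pattern. */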
1529
1530 if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
1531 && asm_noperands (newpat) < 0)
1532 {
1533 rtx m_split, *split;
1534 rtx ni2dest = i2dest;
1535
1536 /* See if the MD file can split NEWPAT. If it can't, see if letting it
1537 use I2DEST as a scratch register will help. In the latter case,
1538 convert I2DEST to the mode of the source of NEWPAT if we can. */
1539
1540 m_split = split_insns (newpat, i3);
1541 if (m_split == 0)
1542 {
1543 /* If I2DEST is a hard register or the only use of a pseudo,
1544 we can change its mode. */
1545 if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
1546 && GET_MODE (SET_DEST (newpat)) != VOIDmode
1547 && GET_CODE (i2dest) == REG
1548 && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1549 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1550 && ! REG_USERVAR_P (i2dest))))
1551 ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
1552 REGNO (i2dest));
1553
1554 m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
1555 gen_rtvec (2, newpat,
1556 gen_rtx (CLOBBER,
1557 VOIDmode,
1558 ni2dest))),
1559 i3);
1560 }
1561
1562 if (m_split && GET_CODE (m_split) == SEQUENCE
1563 && XVECLEN (m_split, 0) == 2
1564 && (next_real_insn (i2) == i3
1565 || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
1566 INSN_CUID (i2))))
1567 {
1568 rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
1569 newi2pat = PATTERN (XVECEXP (m_split, 0, 0));
1570
1571 /* In case we changed the mode of I2DEST, replace it in the
1572 pseudo-register table here. We can't do it above in case this
1573 code doesn't get executed and we do a split the other way. */
1574
1575 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1576 SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);
1577
1578 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1579 if (i2_code_number >= 0)
1580 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1581
1582 if (insn_code_number >= 0)
1583 newpat = newi3pat;
1584
1585 /* It is possible that both insns now set the destination of I3.
1586 If so, we must show an extra use of it and update
1587 reg_significant. */
1588
1589 if (insn_code_number >= 0 && GET_CODE (SET_DEST (newpat)) == REG
1590 && GET_CODE (SET_DEST (newi2pat)) == REG
1591 && REGNO (SET_DEST (newpat)) == REGNO (SET_DEST (newi2pat)))
1592 {
1593 reg_n_sets[REGNO (SET_DEST (newpat))]++;
1594 set_significant (SET_DEST (newi2pat), newi2pat);
1595 set_significant (SET_DEST (newpat), newpat);
1596 }
1597 }
1598
1599 /* If we can split it and use I2DEST, go ahead and see if that
1600 helps things be recognized. Verify that none of the registers
1601 are set between I2 and I3. */
1602 if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
1603 #ifdef HAVE_cc0
1604 && GET_CODE (i2dest) == REG
1605 #endif
1606 /* We need I2DEST in the proper mode. If it is a hard register
1607 or the only use of a pseudo, we can change its mode. */
1608 && (GET_MODE (*split) == GET_MODE (i2dest)
1609 || GET_MODE (*split) == VOIDmode
1610 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1611 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1612 && ! REG_USERVAR_P (i2dest)))
1613 && (next_real_insn (i2) == i3
1614 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1615 /* We can't overwrite I2DEST if its value is still used by
1616 NEWPAT. */
1617 && ! reg_referenced_p (i2dest, newpat))
1618 {
1619 rtx newdest = i2dest;
1620
1621 /* Get NEWDEST as a register in the proper mode. We have already
1622 validated that we can do this. */
1623 if (GET_MODE (i2dest) != GET_MODE (*split)
1624 && GET_MODE (*split) != VOIDmode)
1625 {
1626 newdest = gen_rtx (REG, GET_MODE (*split), REGNO (i2dest));
1627
1628 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1629 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1630 }
1631
1632 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1633 an ASHIFT. This can occur if it was inside a PLUS and hence
1634 appeared to be a memory address. This is a kludge. */
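/* E.g. (mult:SI (reg:SI 99) (const_int 8)) becomes
   (ashift:SI (reg:SI 99) (const_int 3)); the register number is
   made up. */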
1635 if (GET_CODE (*split) == MULT
1636 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1637 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1638 SUBST (*split, gen_rtx_combine (ASHIFT, GET_MODE (*split),
1639 XEXP (*split, 0), GEN_INT (i)));
1640
1641 #ifdef INSN_SCHEDULING
1642 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1643 be written as a ZERO_EXTEND. */
1644 if (GET_CODE (*split) == SUBREG
1645 && GET_CODE (SUBREG_REG (*split)) == MEM)
1646 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, GET_MODE (*split),
1647 XEXP (*split, 0)));
1648 #endif
1649
1650 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
1651 SUBST (*split, newdest);
1652 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1653 if (i2_code_number >= 0)
1654 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1655 }
1656 }
1657
1658 /* Check for a case where we loaded from memory in a narrow mode and
1659 then sign extended it, but we need both registers. In that case,
1660 we have a PARALLEL with both loads from the same memory location.
1661 We can split this into a load from memory followed by a register-register
1662 copy. This saves at least one insn, more if register allocation can
1663 eliminate the copy. */
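/* Illustrative shape of such a PARALLEL (register numbers made up):

	(parallel [(set (reg:SI 100)
			(sign_extend:SI (mem:HI (reg:SI 99))))
		   (set (reg:HI 101) (mem:HI (reg:SI 99)))])

   The first SET becomes the new I2 (the load), and the second SET's
   source is replaced by the low part of reg 100 via
   gen_lowpart_for_combine, yielding the register-register copy. */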
1664
1665 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1666 && GET_CODE (newpat) == PARALLEL
1667 && XVECLEN (newpat, 0) == 2
1668 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1669 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
1670 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1671 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1672 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
1673 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1674 INSN_CUID (i2))
1675 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1676 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1677 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1678 SET_SRC (XVECEXP (newpat, 0, 1)))
1679 && ! find_reg_note (i3, REG_UNUSED,
1680 SET_DEST (XVECEXP (newpat, 0, 0))))
1681 {
1682 newi2pat = XVECEXP (newpat, 0, 0);
1683 newpat = XVECEXP (newpat, 0, 1);
1684 SUBST (SET_SRC (newpat),
1685 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)),
1686 SET_DEST (newi2pat)));
1687 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1688 if (i2_code_number >= 0)
1689 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1690
1691 if (insn_code_number >= 0)
1692 {
1693 rtx insn;
1694 rtx link;
1695
1696 /* If we will be able to accept this, we have made a change to the
1697 destination of I3. This can invalidate a LOG_LINKS entry pointing
1698 to I3. No other part of combine.c makes such a transformation.
1699
1700 The new I3 will have a destination that was previously the
1701 destination of I1 or I2 and which was used in I2 or I3. Call
1702 distribute_links to make a LOG_LINK from the next use of
1703 that destination. */
1704
1705 PATTERN (i3) = newpat;
1706 distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
1707
1708 /* I3 now uses what used to be its destination and which is
1709 now I2's destination. That means we need a LOG_LINK from
1710 I3 to I2. But we used to have one, so we still will.
1711
1712 However, some later insn might be using I2's dest and have
1713 a LOG_LINK pointing at I3. We must remove this link.
1714 The simplest way to remove the link is to point it at I1,
1715 which we know will be a NOTE. */
1716
1717 for (insn = NEXT_INSN (i3);
1718 insn && GET_CODE (insn) != CODE_LABEL
1719 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN;
1720 insn = NEXT_INSN (insn))
1721 {
1722 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1723 && reg_referenced_p (SET_DEST (newi2pat), PATTERN (insn)))
1724 {
1725 for (link = LOG_LINKS (insn); link;
1726 link = XEXP (link, 1))
1727 if (XEXP (link, 0) == i3)
1728 XEXP (link, 0) = i1;
1729
1730 break;
1731 }
1732 }
1733 }
1734 }
1735
1736 /* Similarly, check for a case where we have a PARALLEL of two independent
1737 SETs but we started with three insns. In this case, we can do the sets
1738 as two separate insns. This case occurs when some SET allows two
1739 other insns to combine, but the destination of that SET is still live. */
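/* E.g. (illustrative): if NEWPAT is

	(parallel [(set (reg:SI 100) (plus:SI (reg:SI 98) (reg:SI 99)))
		   (set (reg:SI 101) (neg:SI (reg:SI 99)))])

   and neither SET references the other's destination, the second SET
   becomes the new I2 and the first becomes the new I3. */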
1740
1741 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1742 && GET_CODE (newpat) == PARALLEL
1743 && XVECLEN (newpat, 0) == 2
1744 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1745 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
1746 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
1747 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1748 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1749 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1750 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1751 INSN_CUID (i2))
1752 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
1753 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
1754 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
1755 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1756 XVECEXP (newpat, 0, 0))
1757 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
1758 XVECEXP (newpat, 0, 1)))
1759 {
1760 newi2pat = XVECEXP (newpat, 0, 1);
1761 newpat = XVECEXP (newpat, 0, 0);
1762
1763 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1764 if (i2_code_number >= 0)
1765 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1766 }
1767
1768 /* If it still isn't recognized, fail and change things back the way they
1769 were. */
1770 if ((insn_code_number < 0
1771 /* Is the result a reasonable ASM_OPERANDS? */
1772 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
1773 {
1774 undo_all ();
1775 return 0;
1776 }
1777
1778 /* If we had to change another insn, make sure it is valid also. */
1779 if (undobuf.other_insn)
1780 {
1781 rtx other_notes = REG_NOTES (undobuf.other_insn);
1782 rtx other_pat = PATTERN (undobuf.other_insn);
1783 rtx new_other_notes;
1784 rtx note, next;
1785
1786 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
1787 &new_other_notes);
1788
1789 if (other_code_number < 0 && ! check_asm_operands (other_pat))
1790 {
1791 undo_all ();
1792 return 0;
1793 }
1794
1795 PATTERN (undobuf.other_insn) = other_pat;
1796
1797 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
1798 are still valid. Then add any non-duplicate notes added by
1799 recog_for_combine. */
1800 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
1801 {
1802 next = XEXP (note, 1);
1803
1804 if (REG_NOTE_KIND (note) == REG_UNUSED
1805 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
1806 remove_note (undobuf.other_insn, note);
1807 }
1808
1809 distribute_notes (new_other_notes, undobuf.other_insn,
1810 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
1811 }
1812
1813 /* We now know that we can do this combination. Merge the insns and
1814 update the status of registers and LOG_LINKS. */
1815
1816 {
1817 rtx i3notes, i2notes, i1notes = 0;
1818 rtx i3links, i2links, i1links = 0;
1819 rtx midnotes = 0;
1820 int all_adjacent = (next_real_insn (i2) == i3
1821 && (i1 == 0 || next_real_insn (i1) == i2));
1822 register int regno;
1823 /* Compute which registers we expect to eliminate. */
1824 rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
1825 ? 0 : i2dest);
1826 rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;
1827
1828 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
1829 clear them. */
1830 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
1831 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
1832 if (i1)
1833 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
1834
1835 /* Ensure that we do not have something that should not be shared but
1836 occurs multiple times in the new insns. Check this by first
1837 resetting all the `used' flags and then copying anything that is shared. */
1838
1839 reset_used_flags (i3notes);
1840 reset_used_flags (i2notes);
1841 reset_used_flags (i1notes);
1842 reset_used_flags (newpat);
1843 reset_used_flags (newi2pat);
1844 if (undobuf.other_insn)
1845 reset_used_flags (PATTERN (undobuf.other_insn));
1846
1847 i3notes = copy_rtx_if_shared (i3notes);
1848 i2notes = copy_rtx_if_shared (i2notes);
1849 i1notes = copy_rtx_if_shared (i1notes);
1850 newpat = copy_rtx_if_shared (newpat);
1851 newi2pat = copy_rtx_if_shared (newi2pat);
1852 if (undobuf.other_insn)
1853 reset_used_flags (PATTERN (undobuf.other_insn));
1854
1855 INSN_CODE (i3) = insn_code_number;
1856 PATTERN (i3) = newpat;
1857 if (undobuf.other_insn)
1858 INSN_CODE (undobuf.other_insn) = other_code_number;
1859
1860 /* We had one special case above where I2 had more than one set and
1861 we replaced a destination of one of those sets with the destination
1862 of I3. In that case, we have to update LOG_LINKS of insns later
1863 in this basic block. Note that this (expensive) case is rare. */
1864
1865 if (GET_CODE (PATTERN (i2)) == PARALLEL)
1866 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
1867 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
1868 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
1869 && ! find_reg_note (i2, REG_UNUSED,
1870 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
1871 {
1872 register rtx insn;
1873
1874 for (insn = NEXT_INSN (i2); insn; insn = NEXT_INSN (insn))
1875 {
1876 if (insn != i3 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1877 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
1878 if (XEXP (link, 0) == i2)
1879 XEXP (link, 0) = i3;
1880
1881 if (GET_CODE (insn) == CODE_LABEL
1882 || GET_CODE (insn) == JUMP_INSN)
1883 break;
1884 }
1885 }
1886
1887 LOG_LINKS (i3) = 0;
1888 REG_NOTES (i3) = 0;
1889 LOG_LINKS (i2) = 0;
1890 REG_NOTES (i2) = 0;
1891
1892 if (newi2pat)
1893 {
1894 INSN_CODE (i2) = i2_code_number;
1895 PATTERN (i2) = newi2pat;
1896 }
1897 else
1898 {
1899 PUT_CODE (i2, NOTE);
1900 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
1901 NOTE_SOURCE_FILE (i2) = 0;
1902 }
1903
1904 if (i1)
1905 {
1906 LOG_LINKS (i1) = 0;
1907 REG_NOTES (i1) = 0;
1908 PUT_CODE (i1, NOTE);
1909 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
1910 NOTE_SOURCE_FILE (i1) = 0;
1911 }
1912
1913 /* Get death notes for everything that is now used in either I3 or
1914 I2 and used to die in a previous insn. */
1915
1916 move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
1917 if (newi2pat)
1918 move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);
1919
1920 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
1921 if (i3notes)
1922 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
1923 elim_i2, elim_i1);
1924 if (i2notes)
1925 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
1926 elim_i2, elim_i1);
1927 if (i1notes)
1928 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
1929 elim_i2, elim_i1);
1930 if (midnotes)
1931 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1932 elim_i2, elim_i1);
1933
1934 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
1935 know these are REG_UNUSED and want them to go to the desired insn,
1936 so we always pass it as i3. */
1937 if (newi2pat && new_i2_notes)
1938 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
1939 if (new_i3_notes)
1940 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
1941
1942 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
1943 put a REG_DEAD note for it somewhere. Similarly for I2 and I1. */
1944
1945 if (i3dest_killed)
1946 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed, NULL_RTX),
1947 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1948 NULL_RTX, NULL_RTX);
1949
1950 /* For I2 and I1, we have to be careful. If NEWI2PAT exists and sets
1951 I2DEST or I1DEST, the death must be somewhere before I2, not I3. If
1952 we passed I3 in that case, it might delete I2. */
1953
1954 if (i2dest_in_i2src)
1955 {
1956 if (newi2pat && reg_set_p (i2dest, newi2pat))
1957 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
1958 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
1959 else
1960 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
1961 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1962 NULL_RTX, NULL_RTX);
1963 }
1964
1965 if (i1dest_in_i1src)
1966 {
1967 if (newi2pat && reg_set_p (i1dest, newi2pat))
1968 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
1969 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
1970 else
1971 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
1972 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1973 NULL_RTX, NULL_RTX);
1974 }
1975
1976 distribute_links (i3links);
1977 distribute_links (i2links);
1978 distribute_links (i1links);
1979
1980 if (GET_CODE (i2dest) == REG)
1981 {
1982 rtx link;
1983 rtx i2_insn = 0, i2_val = 0, set;
1984
1985 /* The insn that used to set this register doesn't exist, and
1986 this life of the register may not exist either. See if one of
1987 I3's links points to an insn that sets I2DEST. If it does,
1988 that is now the last known value for I2DEST. If we don't update
1989 this and I2 set the register to a value that depended on its old
1990 contents, we will get confused. If this insn is used, things
1991 will be set correctly in combine_instructions. */
1992
1993 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
1994 if ((set = single_set (XEXP (link, 0))) != 0
1995 && rtx_equal_p (i2dest, SET_DEST (set)))
1996 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
1997
1998 record_value_for_reg (i2dest, i2_insn, i2_val);
1999
2000 /* If the reg formerly set in I2 died only once and that was in I3,
2001 zero its use count so it won't make `reload' do any work. */
2002 if (! added_sets_2 && newi2pat == 0)
2003 {
2004 regno = REGNO (i2dest);
2005 reg_n_sets[regno]--;
2006 if (reg_n_sets[regno] == 0
2007 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2008 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2009 reg_n_refs[regno] = 0;
2010 }
2011 }
2012
2013 if (i1 && GET_CODE (i1dest) == REG)
2014 {
2015 rtx link;
2016 rtx i1_insn = 0, i1_val = 0, set;
2017
2018 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2019 if ((set = single_set (XEXP (link, 0))) != 0
2020 && rtx_equal_p (i1dest, SET_DEST (set)))
2021 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2022
2023 record_value_for_reg (i1dest, i1_insn, i1_val);
2024
2025 regno = REGNO (i1dest);
2026 if (! added_sets_1)
2027 {
2028 reg_n_sets[regno]--;
2029 if (reg_n_sets[regno] == 0
2030 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2031 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2032 reg_n_refs[regno] = 0;
2033 }
2034 }
2035
2036 /* If I3 is now an unconditional jump, ensure that it has a
2037 BARRIER following it since it may have initially been a
2038 conditional jump. */
2039
2040 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
2041 && GET_CODE (next_nonnote_insn (i3)) != BARRIER)
2042 emit_barrier_after (i3);
2043 }
2044
2045 combine_successes++;
2046
2047 return newi2pat ? i2 : i3;
2048 }
2049 \f
2050 /* Undo all the modifications recorded in undobuf. */
2051
2052 static void
2053 undo_all ()
2054 {
2055 register int i;
2056 if (undobuf.num_undo > MAX_UNDO)
2057 undobuf.num_undo = MAX_UNDO;
2058 for (i = undobuf.num_undo - 1; i >= 0; i--)
2059 *undobuf.undo[i].where = undobuf.undo[i].old_contents;
2060
2061 obfree (undobuf.storage);
2062 undobuf.num_undo = 0;
2063 }
2064 \f
2065 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
2066 where we have an arithmetic expression and return that point. LOC will
2067 be inside INSN.
2068
2069 try_combine will call this function to see if an insn can be split into
2070 two insns. */
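/* For example (illustrative), given *LOC ==

	(set (reg:SI 100) (plus:SI (mult:SI (reg:SI 99) (const_int 4))
				   (reg:SI 98)))

   on a machine with no shift-and-add insn, the inner MULT can be
   returned as the split point; try_combine then computes it in a
   separate insn (the new I2). */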
2071
2072 static rtx *
2073 find_split_point (loc, insn)
2074 rtx *loc;
2075 rtx insn;
2076 {
2077 rtx x = *loc;
2078 enum rtx_code code = GET_CODE (x);
2079 rtx *split;
2080 int len = 0, pos, unsignedp;
2081 rtx inner;
2082
2083 /* First special-case some codes. */
2084 switch (code)
2085 {
2086 case SUBREG:
2087 #ifdef INSN_SCHEDULING
2088 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2089 point. */
2090 if (GET_CODE (SUBREG_REG (x)) == MEM)
2091 return loc;
2092 #endif
2093 return find_split_point (&SUBREG_REG (x), insn);
2094
2095 case MEM:
2096 #ifdef HAVE_lo_sum
2097 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2098 using LO_SUM and HIGH. */
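/* I.e. (mem (symbol_ref "x")) becomes
   (mem (lo_sum (high (symbol_ref "x")) (symbol_ref "x"))),
   and the HIGH part is returned as the split point. */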
2099 if (GET_CODE (XEXP (x, 0)) == CONST
2100 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2101 {
2102 SUBST (XEXP (x, 0),
2103 gen_rtx_combine (LO_SUM, Pmode,
2104 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2105 XEXP (x, 0)));
2106 return &XEXP (XEXP (x, 0), 0);
2107 }
2108 #endif
2109
2110 /* If we have a PLUS whose second operand is a constant and the
2111 address is not valid, perhaps we can split it up using
2112 the machine-specific way to split large constants. We use
2113 the first pseudo-reg (one of the virtual regs) as a placeholder;
2114 it will not remain in the result. */
2115 if (GET_CODE (XEXP (x, 0)) == PLUS
2116 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2117 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2118 {
2119 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2120 rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
2121 subst_insn);
2122
2123 /* This should have produced two insns, each of which sets our
2124 placeholder. If the source of the second is a valid address,
2125 we can put both sources together and make a split point
2126 in the middle. */
2127
2128 if (seq && XVECLEN (seq, 0) == 2
2129 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2130 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2131 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2132 && ! reg_mentioned_p (reg,
2133 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2134 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2135 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2136 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2137 && memory_address_p (GET_MODE (x),
2138 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2139 {
2140 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2141 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2142
2143 /* Replace the placeholder in SRC2 with SRC1. If we can
2144 find where in SRC2 it was placed, that can become our
2145 split point and we can replace this address with SRC2.
2146 Just try two obvious places. */
2147
2148 src2 = replace_rtx (src2, reg, src1);
2149 split = 0;
2150 if (XEXP (src2, 0) == src1)
2151 split = &XEXP (src2, 0);
2152 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2153 && XEXP (XEXP (src2, 0), 0) == src1)
2154 split = &XEXP (XEXP (src2, 0), 0);
2155
2156 if (split)
2157 {
2158 SUBST (XEXP (x, 0), src2);
2159 return split;
2160 }
2161 }
2162 }
2163 break;
2164
2165 case SET:
2166 #ifdef HAVE_cc0
2167 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2168 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2169 we need to put the operand into a register. So split at that
2170 point. */
2171
2172 if (SET_DEST (x) == cc0_rtx
2173 && GET_CODE (SET_SRC (x)) != COMPARE
2174 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2175 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2176 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2177 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2178 return &SET_SRC (x);
2179 #endif
2180
2181 /* See if we can split SET_SRC as it stands. */
2182 split = find_split_point (&SET_SRC (x), insn);
2183 if (split && split != &SET_SRC (x))
2184 return split;
2185
2186 /* See if this is a bitfield assignment with everything constant. If
2187 so, this is an IOR of an AND, so split it into that. */
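/* E.g. (illustrative, assuming little-endian bit numbering): storing
   the constant 5 into a 3-bit field at bit 4 of reg 99,

	(set (zero_extract:SI (reg:SI 99) (const_int 3) (const_int 4))
	     (const_int 5))

   becomes

	(set (reg:SI 99) (ior:SI (and:SI (reg:SI 99) (const_int -113))
				 (const_int 80)))

   since the field mask is 7 << 4 == 112 and 5 << 4 == 80. */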
2188 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2189 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2190 <= HOST_BITS_PER_WIDE_INT)
2191 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2192 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2193 && GET_CODE (SET_SRC (x)) == CONST_INT
2194 && ((INTVAL (XEXP (SET_DEST (x), 1))
2195 + INTVAL (XEXP (SET_DEST (x), 2)))
2196 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2197 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2198 {
2199 int pos = INTVAL (XEXP (SET_DEST (x), 2));
2200 int len = INTVAL (XEXP (SET_DEST (x), 1));
2201 int src = INTVAL (SET_SRC (x));
2202 rtx dest = XEXP (SET_DEST (x), 0);
2203 enum machine_mode mode = GET_MODE (dest);
2204 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
2205
2206 #if BITS_BIG_ENDIAN
2207 pos = GET_MODE_BITSIZE (mode) - len - pos;
2208 #endif
2209
2210 if (src == mask)
2211 SUBST (SET_SRC (x),
2212 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
2213 else
2214 SUBST (SET_SRC (x),
2215 gen_binary (IOR, mode,
2216 gen_binary (AND, mode, dest,
2217 GEN_INT (~ (mask << pos)
2218 & GET_MODE_MASK (mode))),
2219 GEN_INT (src << pos)));
2220
2221 SUBST (SET_DEST (x), dest);
2222
2223 split = find_split_point (&SET_SRC (x), insn);
2224 if (split && split != &SET_SRC (x))
2225 return split;
2226 }
2227
2228 /* Otherwise, see if this is an operation that we can split into two.
2229 If so, try to split that. */
2230 code = GET_CODE (SET_SRC (x));
2231
2232 switch (code)
2233 {
2234 case AND:
2235 /* If we are AND'ing with a large constant that is only a single
2236 bit and the result is only being used in a context where we
2237 need to know if it is zero or non-zero, replace it with a bit
2238 extraction. This will avoid the large constant, which might
2239 have taken more than one insn to make. If the constant were
2240 not a valid argument to the AND but took only one insn to make,
2241 this is no worse, but if it took more than one insn, it will
2242 be better. */
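/* E.g. (illustrative): if (set (reg:SI 100)
	(and:SI (reg:SI 99) (const_int 4096)))
   is used only in a comparison of reg 100 against zero, its source can
   be replaced by a one-bit extraction,
	(zero_extract:SI (reg:SI 99) (const_int 1) (const_int 12)),
   avoiding the 4096 constant. */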
2243
2244 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2245 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2246 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2247 && GET_CODE (SET_DEST (x)) == REG
2248 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2249 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2250 && XEXP (*split, 0) == SET_DEST (x)
2251 && XEXP (*split, 1) == const0_rtx)
2252 {
2253 SUBST (SET_SRC (x),
2254 make_extraction (GET_MODE (SET_DEST (x)),
2255 XEXP (SET_SRC (x), 0),
2256 pos, NULL_RTX, 1, 1, 0, 0));
2257 return find_split_point (loc, insn);
2258 }
2259 break;
2260
2261 case SIGN_EXTEND:
2262 inner = XEXP (SET_SRC (x), 0);
2263 pos = 0;
2264 len = GET_MODE_BITSIZE (GET_MODE (inner));
2265 unsignedp = 0;
2266 break;
2267
2268 case SIGN_EXTRACT:
2269 case ZERO_EXTRACT:
2270 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2271 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2272 {
2273 inner = XEXP (SET_SRC (x), 0);
2274 len = INTVAL (XEXP (SET_SRC (x), 1));
2275 pos = INTVAL (XEXP (SET_SRC (x), 2));
2276
2277 #if BITS_BIG_ENDIAN
2278 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2279 #endif
2280 unsignedp = (code == ZERO_EXTRACT);
2281 }
2282 break;
2283 }
2284
2285 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2286 {
2287 enum machine_mode mode = GET_MODE (SET_SRC (x));
2288
2289 /* For unsigned, we have a choice of a shift followed by an
2290 AND or two shifts. Use two shifts for field sizes where the
2291 constant might be too large. We assume here that we can
2292 always at least get 8-bit constants in an AND insn, which is
2293 true for every current RISC. */
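/* E.g. an 8-bit unsigned field at bit 4 of reg 99 (illustrative) can be

	(and:SI (lshiftrt:SI (reg:SI 99) (const_int 4)) (const_int 255))

   using shift-and-AND, or, with two shifts in SImode,

	(lshiftrt:SI (ashift:SI (reg:SI 99) (const_int 20))
		     (const_int 24)). */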
2294
2295 if (unsignedp && len <= 8)
2296 {
2297 SUBST (SET_SRC (x),
2298 gen_rtx_combine
2299 (AND, mode,
2300 gen_rtx_combine (LSHIFTRT, mode,
2301 gen_lowpart_for_combine (mode, inner),
2302 GEN_INT (pos)),
2303 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
2304
2305 split = find_split_point (&SET_SRC (x), insn);
2306 if (split && split != &SET_SRC (x))
2307 return split;
2308 }
2309 else
2310 {
2311 SUBST (SET_SRC (x),
2312 gen_rtx_combine
2313 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
2314 gen_rtx_combine (ASHIFT, mode,
2315 gen_lowpart_for_combine (mode, inner),
2316 GEN_INT (GET_MODE_BITSIZE (mode)
2317 - len - pos)),
2318 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
2319
2320 split = find_split_point (&SET_SRC (x), insn);
2321 if (split && split != &SET_SRC (x))
2322 return split;
2323 }
2324 }
2325
2326 /* See if this is a simple operation with a constant as the second
2327 operand. It might be that this constant is out of range and hence
2328 could be used as a split point. */
2329 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2330 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2331 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2332 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2333 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2334 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2335 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2336 == 'o'))))
2337 return &XEXP (SET_SRC (x), 1);
2338
2339 /* Finally, see if this is a simple operation with its first operand
2340 not in a register. The operation might require this operand in a
2341 register, so return it as a split point. We can always do this
2342 because if the first operand were another operation, we would have
2343 already found it as a split point. */
2344 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2345 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2346 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2347 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2348 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2349 return &XEXP (SET_SRC (x), 0);
2350
2351 return 0;
2352
2353 case AND:
2354 case IOR:
2355 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2356 it is better to write this as (not (ior A B)) so we can split it.
2357 Similarly for IOR. */
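/* I.e. (and (not A) (not B)) becomes (not (ior A B)), and
   (ior (not A) (not B)) becomes (not (and A B)). */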
2358 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2359 {
2360 SUBST (*loc,
2361 gen_rtx_combine (NOT, GET_MODE (x),
2362 gen_rtx_combine (code == IOR ? AND : IOR,
2363 GET_MODE (x),
2364 XEXP (XEXP (x, 0), 0),
2365 XEXP (XEXP (x, 1), 0))));
2366 return find_split_point (loc, insn);
2367 }
2368
2369 /* Many RISC machines have a large set of logical insns. If the
2370 second operand is a NOT, put it first so we will try to split the
2371 other operand first. */
2372 if (GET_CODE (XEXP (x, 1)) == NOT)
2373 {
2374 rtx tem = XEXP (x, 0);
2375 SUBST (XEXP (x, 0), XEXP (x, 1));
2376 SUBST (XEXP (x, 1), tem);
2377 }
2378 break;
2379 }
2380
2381 /* Otherwise, select our actions depending on our rtx class. */
2382 switch (GET_RTX_CLASS (code))
2383 {
2384 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2385 case '3':
2386 split = find_split_point (&XEXP (x, 2), insn);
2387 if (split)
2388 return split;
2389 /* ... fall through ... */
2390 case '2':
2391 case 'c':
2392 case '<':
2393 split = find_split_point (&XEXP (x, 1), insn);
2394 if (split)
2395 return split;
2396 /* ... fall through ... */
2397 case '1':
2398 /* Some machines have (and (shift ...) ...) insns. If X is not
2399 an AND, but XEXP (X, 0) is, use it as our split point. */
2400 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2401 return &XEXP (x, 0);
2402
2403 split = find_split_point (&XEXP (x, 0), insn);
2404 if (split)
2405 return split;
2406 return loc;
2407 }
2408
2409 /* Otherwise, we don't have a split point. */
2410 return 0;
2411 }
2412 \f
2413 /* Throughout X, replace FROM with TO, and return the result.
2414 The result is TO if X is FROM;
2415 otherwise the result is X, but its contents may have been modified.
2416 If they were modified, a record was made in undobuf so that
2417 undo_all will (among other things) return X to its original state.
2418
2419 If the number of changes necessary is too great to record for undoing,
2420 the excess changes are not made, so the result is invalid.
2421 The changes already made can still be undone.
2422 undobuf.num_undo is incremented for such changes, so by testing that,
2423 the caller can tell whether the result is valid.
2424
2425 `n_occurrences' is incremented each time FROM is replaced.
2426
2427 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2428
2429 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
2430 by copying if `n_occurrences' is non-zero. */
2431
2432 static rtx
2433 subst (x, from, to, in_dest, unique_copy)
2434 register rtx x, from, to;
2435 int in_dest;
2436 int unique_copy;
2437 {
2438 register char *fmt;
2439 register int len, i;
2440 register enum rtx_code code = GET_CODE (x), orig_code = code;
2441 rtx temp;
2442 enum machine_mode mode = GET_MODE (x);
2443 enum machine_mode op0_mode = VOIDmode;
2444 rtx other_insn;
2445 rtx *cc_use;
2446 int n_restarts = 0;
2447
2448 /* FAKE_EXTEND_SAFE_P (MODE, FROM) is 1 if (subreg:MODE FROM 0) is a safe
2449 replacement for (zero_extend:MODE FROM) or (sign_extend:MODE FROM).
2450 If it is 0, that cannot be done. We can now do this for any MEM
2451 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be reloaded.
2452 If not for that, MEMs would very rarely be safe. */
2453
2454 /* Reject MODEs bigger than a word, because we might not be able
2455 to reference a two-register group starting with an arbitrary register
2456 (and currently gen_lowpart might crash for a SUBREG). */
2457
2458 #define FAKE_EXTEND_SAFE_P(MODE, FROM) \
2459 (GET_MODE_SIZE (MODE) <= UNITS_PER_WORD)
2460
2461 /* Two expressions are equal if they are identical copies of a shared
2462 RTX or if they are both registers with the same register number
2463 and mode. */
2464
2465 #define COMBINE_RTX_EQUAL_P(X,Y) \
2466 ((X) == (Y) \
2467 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2468 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2469
2470 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
2471 {
2472 n_occurrences++;
2473 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
2474 }
2475
2476 /* If X and FROM are the same register but different modes, they will
2477 not have been seen as equal above. However, flow.c will make a
2478 LOG_LINKS entry for that case. If we do nothing, we will try to
2479 rerecognize our original insn and, when it succeeds, we will
2480 delete the feeding insn, which is incorrect.
2481
2482 So force this insn not to match in this (rare) case. */
2483 if (! in_dest && code == REG && GET_CODE (from) == REG
2484 && REGNO (x) == REGNO (from))
2485 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
2486
2487 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2488 of which may contain things that can be combined. */
2489 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2490 return x;
2491
2492 /* It is possible to have a subexpression appear twice in the insn.
2493 Suppose that FROM is a register that appears within TO.
2494 Then, after that subexpression has been scanned once by `subst',
2495 the second time it is scanned, TO may be found. If we were
2496 to scan TO here, we would find FROM within it and create a
2497 self-referential rtl structure, which is completely wrong. */
2498 if (COMBINE_RTX_EQUAL_P (x, to))
2499 return to;
2500
2501 len = GET_RTX_LENGTH (code);
2502 fmt = GET_RTX_FORMAT (code);
2503
2504 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2505 set up to skip this common case. All other cases where we want to
2506 suppress replacing something inside a SET_SRC are handled via the
2507 IN_DEST operand. */
2508 if (code == SET
2509 && (GET_CODE (SET_DEST (x)) == REG
2510 || GET_CODE (SET_DEST (x)) == CC0
2511 || GET_CODE (SET_DEST (x)) == PC))
2512 fmt = "ie";
2513
2514 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2515 if (fmt[0] == 'e')
2516 op0_mode = GET_MODE (XEXP (x, 0));
2517
2518 for (i = 0; i < len; i++)
2519 {
2520 if (fmt[i] == 'E')
2521 {
2522 register int j;
2523 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2524 {
2525 register rtx new;
2526 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
2527 {
2528 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2529 n_occurrences++;
2530 }
2531 else
2532 {
2533 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
2534
2535 /* If this substitution failed, this whole thing fails. */
2536 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2537 return new;
2538 }
2539
2540 SUBST (XVECEXP (x, i, j), new);
2541 }
2542 }
2543 else if (fmt[i] == 'e')
2544 {
2545 register rtx new;
2546
2547 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
2548 {
2549 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2550 n_occurrences++;
2551 }
2552 else
2553 /* If we are in a SET_DEST, suppress most cases unless we
2554 have gone inside a MEM, in which case we want to
2555 simplify the address. We assume here that things that
2556 are actually part of the destination have their inner
2557 parts in the first expression. This is true for SUBREG,
2558 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2559 things aside from REG and MEM that should appear in a
2560 SET_DEST. */
2561 new = subst (XEXP (x, i), from, to,
2562 (((in_dest
2563 && (code == SUBREG || code == STRICT_LOW_PART
2564 || code == ZERO_EXTRACT))
2565 || code == SET)
2566 && i == 0), unique_copy);
2567
2568 /* If we found that we will have to reject this combination,
2569 indicate that by returning the CLOBBER ourselves, rather than
2570 an expression containing it. This will speed things up as
2571 well as prevent accidents where two CLOBBERs are considered
2572 to be equal, thus producing an incorrect simplification. */
2573
2574 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2575 return new;
2576
2577 SUBST (XEXP (x, i), new);
2578 }
2579 }
2580
2581 /* We come back to here if we have replaced the expression with one of
2582 a different code and it is likely that further simplification will be
2583 possible. */
2584
2585 restart:
2586
2587 code = GET_CODE (x);
2588
2589 /* If this is a commutative operation, put a constant last and a complex
2590 expression first. We don't need to do this for comparisons here. */
2591 if (GET_RTX_CLASS (code) == 'c'
2592 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2593 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
2594 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
2595 || (GET_CODE (XEXP (x, 0)) == SUBREG
2596 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
2597 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
2598 {
2599 temp = XEXP (x, 0);
2600 SUBST (XEXP (x, 0), XEXP (x, 1));
2601 SUBST (XEXP (x, 1), temp);
2602 }
2603
2604 /* If this is a simple operation applied to an IF_THEN_ELSE, try
2605 applying it to the arms of the IF_THEN_ELSE. This often simplifies
2606 things. Don't deal with operations that change modes here. */
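/* E.g. (illustrative): (plus (if_then_else C A B) K) becomes
   (if_then_else C (plus A K) (plus B K)), after which each arm may
   fold further. */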
2607
2608 if ((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
2609 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE)
2610 {
2611 SUBST (XEXP (XEXP (x, 0), 1),
2612 subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 1),
2613 XEXP (x, 1)),
2614 pc_rtx, pc_rtx, 0));
2615 SUBST (XEXP (XEXP (x, 0), 2),
2616 subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 2),
2617 XEXP (x, 1)),
2618 pc_rtx, pc_rtx, 0));
2619
2620 x = XEXP (x, 0);
2621 goto restart;
2622 }
2623
2624 else if (GET_RTX_CLASS (code) == '1'
2625 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE
2626 && GET_MODE (XEXP (x, 0)) == mode)
2627 {
2628 SUBST (XEXP (XEXP (x, 0), 1),
2629 subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 1)),
2630 pc_rtx, pc_rtx, 0));
2631 SUBST (XEXP (XEXP (x, 0), 2),
2632 subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 2)),
2633 pc_rtx, pc_rtx, 0));
2634
2635 x = XEXP (x, 0);
2636 goto restart;
2637 }
2638
2639 /* Try to fold this expression in case we have constants that weren't
2640 present before. */
2641 temp = 0;
2642 switch (GET_RTX_CLASS (code))
2643 {
2644 case '1':
2645 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
2646 break;
2647 case '<':
2648 temp = simplify_relational_operation (code, op0_mode,
2649 XEXP (x, 0), XEXP (x, 1));
2650 #ifdef FLOAT_STORE_FLAG_VALUE
2651 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2652 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2653 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
2654 #endif
2655 break;
2656 case 'c':
2657 case '2':
2658 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
2659 break;
2660 case 'b':
2661 case '3':
2662 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
2663 XEXP (x, 1), XEXP (x, 2));
2664 break;
2665 }
2666
2667 if (temp)
2668 x = temp, code = GET_CODE (temp);
2669
2670 /* If we have restarted more than 4 times, we are probably looping, so
2671 give up. */
2672 if (++n_restarts > 4)
2673 return x;
2674
2675 /* First see if we can apply the inverse distributive law. */
2676 if (code == PLUS || code == MINUS || code == IOR || code == XOR)
2677 {
2678 x = apply_distributive_law (x);
2679 code = GET_CODE (x);
2680 }
2681
2682 /* If CODE is an associative operation not otherwise handled, see if we
2683 can associate some operands. This can win if they are constants or
2684 if they are logically related (i.e., (a & b) & a). */
2685 if ((code == PLUS || code == MINUS
2686 || code == MULT || code == AND || code == IOR || code == XOR
2687 || code == DIV || code == UDIV
2688 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
2689 && GET_MODE_CLASS (mode) == MODE_INT)
2690 {
2691 if (GET_CODE (XEXP (x, 0)) == code)
2692 {
2693 rtx other = XEXP (XEXP (x, 0), 0);
2694 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
2695 rtx inner_op1 = XEXP (x, 1);
2696 rtx inner;
2697
2698 /* Make sure we pass the constant operand, if any, as the second
2699 one if this is a commutative operation. */
2700 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
2701 {
2702 rtx tem = inner_op0;
2703 inner_op0 = inner_op1;
2704 inner_op1 = tem;
2705 }
2706 inner = simplify_binary_operation (code == MINUS ? PLUS
2707 : code == DIV ? MULT
2708 : code == UDIV ? MULT
2709 : code,
2710 mode, inner_op0, inner_op1);
2711
2712 /* For commutative operations, try the other pair if that one
2713 didn't simplify. */
2714 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
2715 {
2716 other = XEXP (XEXP (x, 0), 1);
2717 inner = simplify_binary_operation (code, mode,
2718 XEXP (XEXP (x, 0), 0),
2719 XEXP (x, 1));
2720 }
2721
2722 if (inner)
2723 {
2724 x = gen_binary (code, mode, other, inner);
2725 goto restart;
2726
2727 }
2728 }
2729 }
2730
2731 /* A little bit of algebraic simplification here. */
2732 switch (code)
2733 {
2734 case MEM:
2735 /* Ensure that our address has any ASHIFTs converted to MULT in case
2736 address-recognizing predicates are called later. */
2737 temp = make_compound_operation (XEXP (x, 0), MEM);
2738 SUBST (XEXP (x, 0), temp);
2739 break;
2740
2741 case SUBREG:
2742 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
2743 is paradoxical. If we can't do that safely, then it becomes
2744 something nonsensical so that this combination won't take place. */
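/* E.g. (illustrative) on a 32-bit big-endian target,
   (subreg:QI (mem:SI ADDR) 0) becomes (mem:QI (plus ADDR (const_int 3)))
   so that the subreg still names the low-order byte. */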
2745
2746 if (GET_CODE (SUBREG_REG (x)) == MEM
2747 && (GET_MODE_SIZE (mode)
2748 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2749 {
2750 rtx inner = SUBREG_REG (x);
2751 int endian_offset = 0;
2752 /* Don't change the mode of the MEM
2753 if that would change the meaning of the address. */
2754 if (MEM_VOLATILE_P (SUBREG_REG (x))
2755 || mode_dependent_address_p (XEXP (inner, 0)))
2756 return gen_rtx (CLOBBER, mode, const0_rtx);
2757
2758 #if BYTES_BIG_ENDIAN
2759 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2760 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
2761 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
2762 endian_offset -= UNITS_PER_WORD - GET_MODE_SIZE (GET_MODE (inner));
2763 #endif
2764 /* Note that if the plus_constant doesn't make a valid address
2765 then this combination won't be accepted. */
2766 x = gen_rtx (MEM, mode,
2767 plus_constant (XEXP (inner, 0),
2768 (SUBREG_WORD (x) * UNITS_PER_WORD
2769 + endian_offset)));
2770 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
2771 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
2772 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
2773 return x;
2774 }
2775
2776 /* If we are in a SET_DEST, these other cases can't apply. */
2777 if (in_dest)
2778 return x;
2779
2780 /* Changing mode twice with SUBREG => just change it once,
2781 or not at all if changing back to starting mode. */
2782 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
2783 {
2784 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
2785 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
2786 return SUBREG_REG (SUBREG_REG (x));
2787
2788 SUBST_INT (SUBREG_WORD (x),
2789 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
2790 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
2791 }
2792
2793 /* SUBREG of a hard register => just change the register number
2794 and/or mode. If the hard register is not valid in that mode,
2795 suppress this combination. */
2796
2797 if (GET_CODE (SUBREG_REG (x)) == REG
2798 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
2799 {
2800 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
2801 mode))
2802 return gen_rtx (REG, mode,
2803 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
2804 else
2805 return gen_rtx (CLOBBER, mode, const0_rtx);
2806 }
2807
2808 /* For a constant, try to pick up the part we want. Handle a full
2809 word and low-order part. Only do this if we are narrowing
2810 the constant; if it is being widened, we have no idea what
2811 the extra bits will have been set to. */
2812
2813 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
2814 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
2815 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
2816 && GET_MODE_CLASS (mode) == MODE_INT)
2817 {
2818 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
2819 0, op0_mode);
2820 if (temp)
2821 return temp;
2822 }
2823
2824 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
2825 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (op0_mode))
2826 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
2827
2828 /* If we are narrowing the object, we need to see if we can simplify
2829 the expression for the object knowing that we only need the
2830 low-order bits. */
2831
2832 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2833 && subreg_lowpart_p (x))
2834 return force_to_mode (SUBREG_REG (x), mode, GET_MODE_BITSIZE (mode),
2835 NULL_RTX);
2836 break;
2837
2838 case NOT:
2839 /* (not (plus X -1)) can become (neg X). */
2840 if (GET_CODE (XEXP (x, 0)) == PLUS
2841 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
2842 {
2843 x = gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
2844 goto restart;
2845 }
2846
2847 /* Similarly, (not (neg X)) is (plus X -1). */
2848 if (GET_CODE (XEXP (x, 0)) == NEG)
2849 {
2850 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
2851 goto restart;
2852 }
2853
2854 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
2855 if (GET_CODE (XEXP (x, 0)) == XOR
2856 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2857 && (temp = simplify_unary_operation (NOT, mode,
2858 XEXP (XEXP (x, 0), 1),
2859 mode)) != 0)
2860 {
2861 SUBST (XEXP (XEXP (x, 0), 1), temp);
2862 return XEXP (x, 0);
2863 }
2864
2865 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
2866 other than 1, but that is not valid. We could do a similar
2867 simplification for (not (lshiftrt C X)) where C is just the sign bit,
2868 but this doesn't seem common enough to bother with. */
2869 if (GET_CODE (XEXP (x, 0)) == ASHIFT
2870 && XEXP (XEXP (x, 0), 0) == const1_rtx)
2871 {
2872 x = gen_rtx (ROTATE, mode, gen_unary (NOT, mode, const1_rtx),
2873 XEXP (XEXP (x, 0), 1));
2874 goto restart;
2875 }
2876
2877 if (GET_CODE (XEXP (x, 0)) == SUBREG
2878 && subreg_lowpart_p (XEXP (x, 0))
2879 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
2880 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
2881 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
2882 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
2883 {
2884 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
2885
2886 x = gen_rtx (ROTATE, inner_mode,
2887 gen_unary (NOT, inner_mode, const1_rtx),
2888 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
2889 x = gen_lowpart_for_combine (mode, x);
2890 goto restart;
2891 }
2892
2893 #if STORE_FLAG_VALUE == -1
2894 /* (not (comparison foo bar)) can be done by reversing the comparison
2895 code if valid. */
2896 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
2897 && reversible_comparison_p (XEXP (x, 0)))
2898 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
2899 mode, XEXP (XEXP (x, 0), 0),
2900 XEXP (XEXP (x, 0), 1));
2901 #endif
2902
2903 /* Apply De Morgan's laws to reduce number of patterns for machines
2904 with negating logical insns (and-not, nand, etc.). If result has
2905 only one NOT, put it first, since that is how the patterns are
2906 coded. */
2907
2908 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
2909 {
2910 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
2911
2912 if (GET_CODE (in1) == NOT)
2913 in1 = XEXP (in1, 0);
2914 else
2915 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
2916
2917 if (GET_CODE (in2) == NOT)
2918 in2 = XEXP (in2, 0);
2919 else if (GET_CODE (in2) == CONST_INT
2920 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
2921 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
2922 else
2923 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
2924
2925 if (GET_CODE (in2) == NOT)
2926 {
2927 rtx tem = in2;
2928 in2 = in1; in1 = tem;
2929 }
2930
2931 x = gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
2932 mode, in1, in2);
2933 goto restart;
2934 }
2935 break;
2936
2937 case NEG:
2938 /* (neg (plus X 1)) can become (not X). */
2939 if (GET_CODE (XEXP (x, 0)) == PLUS
2940 && XEXP (XEXP (x, 0), 1) == const1_rtx)
2941 {
2942 x = gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
2943 goto restart;
2944 }
2945
2946 /* Similarly, (neg (not X)) is (plus X 1). */
2947 if (GET_CODE (XEXP (x, 0)) == NOT)
2948 {
2949 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), const1_rtx);
2950 goto restart;
2951 }
2952
2953 /* (neg (minus X Y)) can become (minus Y X). */
2954 if (GET_CODE (XEXP (x, 0)) == MINUS
2955 && (GET_MODE_CLASS (mode) != MODE_FLOAT
2956 /* x-y != -(y-x) with IEEE floating point. */
2957 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT))
2958 {
2959 x = gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
2960 XEXP (XEXP (x, 0), 0));
2961 goto restart;
2962 }
2963
2964 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
2965 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
2966 && significant_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
2967 {
2968 x = gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
2969 goto restart;
2970 }
2971
2972 /* NEG commutes with ASHIFT since it is multiplication. Only do this
2973 if we can then eliminate the NEG (e.g.,
2974 if the operand is a constant). */
2975
2976 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
2977 {
2978 temp = simplify_unary_operation (NEG, mode,
2979 XEXP (XEXP (x, 0), 0), mode);
2980 if (temp)
2981 {
2982 SUBST (XEXP (XEXP (x, 0), 0), temp);
2983 return XEXP (x, 0);
2984 }
2985 }
2986
2987 temp = expand_compound_operation (XEXP (x, 0));
2988
2989 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
2990 replaced by (lshiftrt X C). This will convert
2991 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
2992
2993 if (GET_CODE (temp) == ASHIFTRT
2994 && GET_CODE (XEXP (temp, 1)) == CONST_INT
2995 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
2996 {
2997 x = simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
2998 INTVAL (XEXP (temp, 1)));
2999 goto restart;
3000 }
3001
3002 /* If X has only a single bit significant, say, bit I, convert
3003 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3004 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3005 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3006 or a SUBREG of one since we'd be making the expression more
3007 complex if it was just a register. */
3008
3009 if (GET_CODE (temp) != REG
3010 && ! (GET_CODE (temp) == SUBREG
3011 && GET_CODE (SUBREG_REG (temp)) == REG)
3012 && (i = exact_log2 (significant_bits (temp, mode))) >= 0)
3013 {
3014 rtx temp1 = simplify_shift_const
3015 (NULL_RTX, ASHIFTRT, mode,
3016 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3017 GET_MODE_BITSIZE (mode) - 1 - i),
3018 GET_MODE_BITSIZE (mode) - 1 - i);
3019
3020 /* If all we did was surround TEMP with the two shifts, we
3021 haven't improved anything, so don't use it. Otherwise,
3022 we are better off with TEMP1. */
3023 if (GET_CODE (temp1) != ASHIFTRT
3024 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3025 || XEXP (XEXP (temp1, 0), 0) != temp)
3026 {
3027 x = temp1;
3028 goto restart;
3029 }
3030 }
3031 break;
3032
3033 case FLOAT_TRUNCATE:
3034 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3035 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3036 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3037 return XEXP (XEXP (x, 0), 0);
3038 break;
3039
3040 #ifdef HAVE_cc0
3041 case COMPARE:
3042 /* Convert (compare FOO (const_int 0)) to FOO unless we aren't
3043 using cc0, in which case we want to leave it as a COMPARE
3044 so we can distinguish it from a register-register-copy. */
3045 if (XEXP (x, 1) == const0_rtx)
3046 return XEXP (x, 0);
3047
3048 /* In IEEE floating point, x-0 is not the same as x. */
3049 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3050 || GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT)
3051 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3052 return XEXP (x, 0);
3053 break;
3054 #endif
3055
3056 case CONST:
3057 /* (const (const X)) can become (const X). Do it this way rather than
3058 returning the inner CONST since CONST can be shared with a
3059 REG_EQUAL note. */
3060 if (GET_CODE (XEXP (x, 0)) == CONST)
3061 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3062 break;
3063
3064 #ifdef HAVE_lo_sum
3065 case LO_SUM:
3066 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3067 can add in an offset. find_split_point will split this address up
3068 again if it doesn't match. */
3069 if (GET_CODE (XEXP (x, 0)) == HIGH
3070 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3071 return XEXP (x, 1);
3072 break;
3073 #endif
3074
3075 case PLUS:
3076 /* If we have (plus (plus A const) B), associate it so that CONST is
3077 outermost. That's because that's the way indexed addresses are
3078 supposed to appear. This code used to check many more cases, but
3079 they are now checked elsewhere. */
3080 if (GET_CODE (XEXP (x, 0)) == PLUS
3081 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3082 return gen_binary (PLUS, mode,
3083 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3084 XEXP (x, 1)),
3085 XEXP (XEXP (x, 0), 1));
3086
3087 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3088 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3089 bit-field and can be replaced by either a sign_extend or a
3090 sign_extract. The `and' may be a zero_extend. */
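/* E.g. (illustrative), sign-extending a 4-bit field in SImode:
   (plus (xor (and X (const_int 15)) (const_int 8)) (const_int -8))
   becomes (ashiftrt (ashift X (const_int 28)) (const_int 28)). */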
3091 if (GET_CODE (XEXP (x, 0)) == XOR
3092 && GET_CODE (XEXP (x, 1)) == CONST_INT
3093 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3094 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3095 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3096 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3097 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3098 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3099 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3100 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3101 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3102 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3103 == i + 1))))
3104 {
3105 x = simplify_shift_const
3106 (NULL_RTX, ASHIFTRT, mode,
3107 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3108 XEXP (XEXP (XEXP (x, 0), 0), 0),
3109 GET_MODE_BITSIZE (mode) - (i + 1)),
3110 GET_MODE_BITSIZE (mode) - (i + 1));
3111 goto restart;
3112 }
3113
3114 /* If only the low-order bit of X is significant, (plus x -1)
3115 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3116 the bitsize of the mode - 1. This allows simplification of
3117 "a = (b & 8) == 0;" */
3118 if (XEXP (x, 1) == constm1_rtx
3119 && GET_CODE (XEXP (x, 0)) != REG
3120 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3121 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
3122 && significant_bits (XEXP (x, 0), mode) == 1)
3123 {
3124 x = simplify_shift_const
3125 (NULL_RTX, ASHIFTRT, mode,
3126 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3127 gen_rtx_combine (XOR, mode,
3128 XEXP (x, 0), const1_rtx),
3129 GET_MODE_BITSIZE (mode) - 1),
3130 GET_MODE_BITSIZE (mode) - 1);
3131 goto restart;
3132 }
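/* Concretely, assuming 32-bit SImode: for "a = (b & 8) == 0" the
   comparison result X has only bit 0 significant, so
   (plus X (const_int -1)) becomes
   (ashiftrt (ashift (xor X (const_int 1)) (const_int 31)) (const_int 31)),
   which is 0 when X is 1 and -1 when X is 0, exactly X - 1.  */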
3133
3134 /* If we are adding two things that have no bits in common, convert
3135 the addition into an IOR. This will often be further simplified,
3136 for example in cases like ((a & 1) + (a & 2)), which can
3137 become a & 3. */
3138
3139 if ((significant_bits (XEXP (x, 0), mode)
3140 & significant_bits (XEXP (x, 1), mode)) == 0)
3141 {
3142 x = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3143 goto restart;
3144 }
3145 break;
3146
3147 case MINUS:
3148 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3149 (and <foo> (const_int pow2-1)) */
3150 if (GET_CODE (XEXP (x, 1)) == AND
3151 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3152 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3153 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3154 {
3155 x = simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3156 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3157 goto restart;
3158 }
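/* For example, with pow2 == 8,
   (minus X (and X (const_int -8))) keeps only the low three bits
   of X and so becomes (and X (const_int 7)); the new mask is
   - (-8) - 1 == 7.  */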
3159 break;
3160
3161 case MULT:
3162 /* If we have (mult (plus A B) C), apply the distributive law and then
3163 the inverse distributive law to see if things simplify. This
3164 occurs mostly in addresses, often when unrolling loops. */
3165
3166 if (GET_CODE (XEXP (x, 0)) == PLUS)
3167 {
3168 x = apply_distributive_law
3169 (gen_binary (PLUS, mode,
3170 gen_binary (MULT, mode,
3171 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3172 gen_binary (MULT, mode,
3173 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3174
3175 if (GET_CODE (x) != MULT)
3176 goto restart;
3177 }
3178
3179 /* If this is multiplication by a power of two and its first operand is
3180 a shift, treat the multiply as a shift to allow the shifts to
3181 possibly combine. */
3182 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3183 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3184 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3185 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3186 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3187 || GET_CODE (XEXP (x, 0)) == ROTATE
3188 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3189 {
3190 x = simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0), i);
3191 goto restart;
3192 }
3193
3194 /* Convert (mult (ashift (const_int 1) A) B) to (ashift B A). */
3195 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3196 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3197 return gen_rtx_combine (ASHIFT, mode, XEXP (x, 1),
3198 XEXP (XEXP (x, 0), 1));
3199 break;
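/* For example, (mult (lshiftrt X (const_int 2)) (const_int 8)) is
   treated as (ashift (lshiftrt X (const_int 2)) (const_int 3)) so
   that simplify_shift_const gets a chance to merge the two shifts.  */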
3200
3201 case UDIV:
3202 /* If this is a divide by a power of two, treat it as a shift if
3203 its first operand is a shift. */
3204 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3205 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3206 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3207 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3208 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3209 || GET_CODE (XEXP (x, 0)) == ROTATE
3210 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3211 {
3212 x = simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
3213 goto restart;
3214 }
3215 break;
3216
3217 case EQ: case NE:
3218 case GT: case GTU: case GE: case GEU:
3219 case LT: case LTU: case LE: case LEU:
3220 /* If the first operand is a condition code, we can't do anything
3221 with it. */
3222 if (GET_CODE (XEXP (x, 0)) == COMPARE
3223 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3224 #ifdef HAVE_cc0
3225 && XEXP (x, 0) != cc0_rtx
3226 #endif
3227 ))
3228 {
3229 rtx op0 = XEXP (x, 0);
3230 rtx op1 = XEXP (x, 1);
3231 enum rtx_code new_code;
3232
3233 if (GET_CODE (op0) == COMPARE)
3234 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3235
3236 /* Simplify our comparison, if possible. */
3237 new_code = simplify_comparison (code, &op0, &op1);
3238
3239 #if STORE_FLAG_VALUE == 1
3240 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3241 if only the low-order bit is significant in X (such as when
3242 X is a ZERO_EXTRACT of one bit). Similarly, we can convert
3243 EQ to (xor X 1). */
3244 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3245 && op1 == const0_rtx
3246 && significant_bits (op0, GET_MODE (op0)) == 1)
3247 return gen_lowpart_for_combine (mode, op0);
3248 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3249 && op1 == const0_rtx
3250 && significant_bits (op0, GET_MODE (op0)) == 1)
3251 return gen_rtx_combine (XOR, mode,
3252 gen_lowpart_for_combine (mode, op0),
3253 const1_rtx);
3254 #endif
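/* For example, when STORE_FLAG_VALUE is 1 and X is
   (zero_extract Y (const_int 1) POS), X can only be 0 or 1, so
   (ne X (const_int 0)) is just X and (eq X (const_int 0)) is
   (xor X (const_int 1)).  */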
3255
3256 #if STORE_FLAG_VALUE == -1
3257 /* If STORE_FLAG_VALUE is -1, we can convert (ne x 0)
3258 to (neg x) if only the low-order bit of X is significant.
3259 This converts (ne (zero_extract X 1 Y) 0) to
3260 (sign_extract X 1 Y). */
3261 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3262 && op1 == const0_rtx
3263 && significant_bits (op0, GET_MODE (op0)) == 1)
3264 {
3265 x = gen_rtx_combine (NEG, mode,
3266 gen_lowpart_for_combine (mode, op0));
3267 goto restart;
3268 }
3269 #endif
3270
3271 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3272 one significant bit, we can convert (ne x 0) to (ashift x c)
3273 where C puts the bit in the sign bit. Remove any AND with
3274 STORE_FLAG_VALUE when we are done, since we are only going to
3275 test the sign bit. */
3276 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3277 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3278 && (STORE_FLAG_VALUE
3279 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3280 && op1 == const0_rtx
3281 && mode == GET_MODE (op0)
3282 && (i = exact_log2 (significant_bits (op0, GET_MODE (op0)))) >= 0)
3283 {
3284 x = simplify_shift_const (NULL_RTX, ASHIFT, mode, op0,
3285 GET_MODE_BITSIZE (mode) - 1 - i);
3286 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3287 return XEXP (x, 0);
3288 else
3289 return x;
3290 }
3291
3292 /* If the code changed, return a whole new comparison. */
3293 if (new_code != code)
3294 return gen_rtx_combine (new_code, mode, op0, op1);
3295
3296 /* Otherwise, keep this operation, but maybe change its operands.
3297 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3298 SUBST (XEXP (x, 0), op0);
3299 SUBST (XEXP (x, 1), op1);
3300 }
3301 break;
3302
3303 case IF_THEN_ELSE:
3304 /* If we are testing a register for equality, see if that register is
3305 used in one of the arms. If so, and we know something about its
3306 value in that arm, try to simplify it. */
3307
3308 if ((GET_CODE (XEXP (x, 0)) == EQ || GET_CODE (XEXP (x, 0)) == NE)
3309 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
3310 {
3311 /* Get the value being compared and the value it has on the equal
3312 branch. */
3313 HOST_WIDE_INT sig;
3314 rtx from = XEXP (XEXP (x, 0), 0);
3315 rtx val_if_eq = XEXP (XEXP (x, 0), 1);
3316 rtx val_if_ne = from;
3317 int is_eq = (GET_CODE (XEXP (x, 0)) == EQ);
3318
3319 /* If we are comparing against zero and the expression being tested
3320 has only a single significant bit, that is its value when it is
3321 not equal to zero. Similarly if it is known to be -1 or 0. */
3322
3323 if (val_if_eq == const0_rtx
3324 && exact_log2 (sig = significant_bits (from,
3325 GET_MODE (from))) >= 0)
3326 val_if_ne = GEN_INT (sig);
3327 else if (val_if_eq == const0_rtx
3328 && (num_sign_bit_copies (from, GET_MODE (from))
3329 == GET_MODE_BITSIZE (GET_MODE (from))))
3330 val_if_ne = constm1_rtx;
3331
3332 /* Now simplify an arm if we know the value of the register
3333 in the branch and it is used in the arm. Be careful due to
3334 the potential of locally-shared RTL. */
3335
3336 if ((is_eq || val_if_ne != from)
3337 && reg_mentioned_p (from, XEXP (x, 1)))
3338 SUBST (XEXP (x, 1), subst (copy_rtx (XEXP (x, 1)), from,
3339 is_eq ? val_if_eq : val_if_ne, 0));
3340
3341 if ((! is_eq || val_if_ne != from)
3342 && reg_mentioned_p (from, XEXP (x, 2)))
3343 SUBST (XEXP (x, 2), subst (XEXP (x, 2), from,
3344 is_eq ? val_if_ne : val_if_eq, 0));
3345 }
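/* For example, if only bit 3 of R is significant, then in
   (if_then_else (eq R (const_int 0)) (plus R A) (minus R A))
   R can be replaced by 0 in the "then" arm and by 8 in the "else"
   arm, since R must be 8 whenever it is nonzero.  */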
3346
3347 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3348 reversed, do so to avoid needing two sets of patterns for
3349 subtract-and-branch insns. Similarly if we have a constant in that
3350 position. */
3351 if ((XEXP (x, 1) == pc_rtx || GET_CODE (XEXP (x, 1)) == CONST_INT)
3352 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3353 && reversible_comparison_p (XEXP (x, 0)))
3354 {
3355 SUBST (XEXP (x, 0),
3356 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3357 GET_MODE (XEXP (x, 0)),
3358 XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 0), 1)));
3359
3360 temp = XEXP (x, 1);
3361 SUBST (XEXP (x, 1), XEXP (x, 2));
3362 SUBST (XEXP (x, 2), temp);
3363 }
3364 break;
3365
3366 case ZERO_EXTRACT:
3367 case SIGN_EXTRACT:
3368 case ZERO_EXTEND:
3369 case SIGN_EXTEND:
3370 /* If we are processing SET_DEST, we are done. */
3371 if (in_dest)
3372 return x;
3373
3374 x = expand_compound_operation (x);
3375 if (GET_CODE (x) != code)
3376 goto restart;
3377 break;
3378
3379 case SET:
3380 /* (set (pc) (return)) gets written as (return). */
3381 if (GET_CODE (SET_DEST (x)) == PC && GET_CODE (SET_SRC (x)) == RETURN)
3382 return SET_SRC (x);
3383
3384 /* Convert this into a field assignment operation, if possible. */
3385 x = make_field_assignment (x);
3386
3387 /* If we are setting CC0 or if the source is a COMPARE, look for the
3388 use of the comparison result and try to simplify it unless we already
3389 have used undobuf.other_insn. */
3390 if ((GET_CODE (SET_SRC (x)) == COMPARE
3391 #ifdef HAVE_cc0
3392 || SET_DEST (x) == cc0_rtx
3393 #endif
3394 )
3395 && (cc_use = find_single_use (SET_DEST (x), subst_insn,
3396 &other_insn)) != 0
3397 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
3398 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
3399 && XEXP (*cc_use, 0) == SET_DEST (x))
3400 {
3401 enum rtx_code old_code = GET_CODE (*cc_use);
3402 enum rtx_code new_code;
3403 rtx op0, op1;
3404 int other_changed = 0;
3405 enum machine_mode compare_mode = GET_MODE (SET_DEST (x));
3406
3407 if (GET_CODE (SET_SRC (x)) == COMPARE)
3408 op0 = XEXP (SET_SRC (x), 0), op1 = XEXP (SET_SRC (x), 1);
3409 else
3410 op0 = SET_SRC (x), op1 = const0_rtx;
3411
3412 /* Simplify our comparison, if possible. */
3413 new_code = simplify_comparison (old_code, &op0, &op1);
3414
3415 #if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
3416 /* If this machine has CC modes other than CCmode, check to see
3417 if we need to use a different CC mode here. */
3418 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
3419
3420 /* If the mode changed, we have to change SET_DEST, the mode
3421 in the compare, and the mode in the place SET_DEST is used.
3422 If SET_DEST is a hard register, just build new versions with
3423 the proper mode. If it is a pseudo, we lose unless it is the only
3424 time we set the pseudo, in which case we can safely change
3425 its mode. */
3426 if (compare_mode != GET_MODE (SET_DEST (x)))
3427 {
3428 int regno = REGNO (SET_DEST (x));
3429 rtx new_dest = gen_rtx (REG, compare_mode, regno);
3430
3431 if (regno < FIRST_PSEUDO_REGISTER
3432 || (reg_n_sets[regno] == 1
3433 && ! REG_USERVAR_P (SET_DEST (x))))
3434 {
3435 if (regno >= FIRST_PSEUDO_REGISTER)
3436 SUBST (regno_reg_rtx[regno], new_dest);
3437
3438 SUBST (SET_DEST (x), new_dest);
3439 SUBST (XEXP (*cc_use, 0), new_dest);
3440 other_changed = 1;
3441 }
3442 }
3443 #endif
3444
3445 /* If the code changed, we have to build a new comparison
3446 in undobuf.other_insn. */
3447 if (new_code != old_code)
3448 {
3449 unsigned mask;
3450
3451 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
3452 SET_DEST (x), const0_rtx));
3453
3454 /* If the only change we made was to change an EQ into an
3455 NE or vice versa, OP0 has only one significant bit,
3456 and OP1 is zero, check if changing the user of the condition
3457 code will produce a valid insn. If it won't, we can keep
3458 the original code in that insn by surrounding our operation
3459 with an XOR. */
3460
3461 if (((old_code == NE && new_code == EQ)
3462 || (old_code == EQ && new_code == NE))
3463 && ! other_changed && op1 == const0_rtx
3464 && (GET_MODE_BITSIZE (GET_MODE (op0))
3465 <= HOST_BITS_PER_WIDE_INT)
3466 && (exact_log2 (mask = significant_bits (op0,
3467 GET_MODE (op0)))
3468 >= 0))
3469 {
3470 rtx pat = PATTERN (other_insn), note = 0;
3471
3472 if ((recog_for_combine (&pat, undobuf.other_insn, &note) < 0
3473 && ! check_asm_operands (pat)))
3474 {
3475 PUT_CODE (*cc_use, old_code);
3476 other_insn = 0;
3477
3478 op0 = gen_binary (XOR, GET_MODE (op0), op0,
3479 GEN_INT (mask));
3480 }
3481 }
3482
3483 other_changed = 1;
3484 }
3485
3486 if (other_changed)
3487 undobuf.other_insn = other_insn;
3488
3489 #ifdef HAVE_cc0
3490 /* If we are now comparing against zero, change our source if
3491 needed. If we do not use cc0, we always have a COMPARE. */
3492 if (op1 == const0_rtx && SET_DEST (x) == cc0_rtx)
3493 SUBST (SET_SRC (x), op0);
3494 else
3495 #endif
3496
3497 /* Otherwise, if we didn't previously have a COMPARE in the
3498 correct mode, we need one. */
3499 if (GET_CODE (SET_SRC (x)) != COMPARE
3500 || GET_MODE (SET_SRC (x)) != compare_mode)
3501 SUBST (SET_SRC (x), gen_rtx_combine (COMPARE, compare_mode,
3502 op0, op1));
3503 else
3504 {
3505 /* Otherwise, update the COMPARE if needed. */
3506 SUBST (XEXP (SET_SRC (x), 0), op0);
3507 SUBST (XEXP (SET_SRC (x), 1), op1);
3508 }
3509 }
3510 else
3511 {
3512 /* Get SET_SRC in a form where we have placed back any
3513 compound expressions. Then do the checks below. */
3514 temp = make_compound_operation (SET_SRC (x), SET);
3515 SUBST (SET_SRC (x), temp);
3516 }
3517
3518 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some
3519 operation, and X being a REG or (subreg (reg)), we may be able to
3520 convert this to (set (subreg:m2 x) (op)).
3521
3522 We can always do this if M1 is narrower than M2 because that
3523 means that we only care about the low bits of the result.
3524
3525 However, on most machines (those with BYTE_LOADS_ZERO_EXTEND
3526 not defined), we cannot perform a narrower operation than
3527 requested since the high-order bits will be undefined. On
3528 machines where BYTE_LOADS_ZERO_EXTEND is defined, however, this
3529 transformation is safe as long as M1 and M2 have the same number
3530 of words. */
3531
3532 if (GET_CODE (SET_SRC (x)) == SUBREG
3533 && subreg_lowpart_p (SET_SRC (x))
3534 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) != 'o'
3535 && (((GET_MODE_SIZE (GET_MODE (SET_SRC (x))) + (UNITS_PER_WORD - 1))
3536 / UNITS_PER_WORD)
3537 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x))))
3538 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
3539 #ifndef BYTE_LOADS_ZERO_EXTEND
3540 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3541 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3542 #endif
3543 && (GET_CODE (SET_DEST (x)) == REG
3544 || (GET_CODE (SET_DEST (x)) == SUBREG
3545 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG)))
3546 {
3547 SUBST (SET_DEST (x),
3548 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_SRC (x))),
3549 SET_DEST (x)));
3550 SUBST (SET_SRC (x), SUBREG_REG (SET_SRC (x)));
3551 }
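/* For example, assuming 32-bit words and BYTE_LOADS_ZERO_EXTEND not
   defined, (set (reg:QI D) (subreg:QI (plus:SI A B) 0)) becomes
   (set (subreg:SI (reg:QI D) 0) (plus:SI A B)); the addition is done
   in SImode and only the low byte of the result is relied upon.  */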
3552
3553 #ifdef BYTE_LOADS_ZERO_EXTEND
3554 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with
3555 M wider than N, this would require a paradoxical subreg.
3556 Replace the subreg with a zero_extend to avoid the reload that
3557 would otherwise be required. */
3558 if (GET_CODE (SET_SRC (x)) == SUBREG
3559 && subreg_lowpart_p (SET_SRC (x))
3560 && SUBREG_WORD (SET_SRC (x)) == 0
3561 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3562 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3563 && GET_CODE (SUBREG_REG (SET_SRC (x))) == MEM)
3564 SUBST (SET_SRC (x), gen_rtx_combine (ZERO_EXTEND,
3565 GET_MODE (SET_SRC (x)),
3566 XEXP (SET_SRC (x), 0)));
3567 #endif
3568
3569 break;
3570
3571 case AND:
3572 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3573 {
3574 x = simplify_and_const_int (x, mode, XEXP (x, 0),
3575 INTVAL (XEXP (x, 1)));
3576
3577 /* If we have (ior (and X C1) C2) and the next restart would be
3578 the last, simplify this by making C1 as small as possible
3579 and then exit. */
3580 if (n_restarts >= 3 && GET_CODE (x) == IOR
3581 && GET_CODE (XEXP (x, 0)) == AND
3582 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3583 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3584 {
3585 temp = gen_binary (AND, mode, XEXP (XEXP (x, 0), 0),
3586 GEN_INT (INTVAL (XEXP (XEXP (x, 0), 1))
3587 & ~ INTVAL (XEXP (x, 1))));
3588 return gen_binary (IOR, mode, temp, XEXP (x, 1));
3589 }
3590
3591 if (GET_CODE (x) != AND)
3592 goto restart;
3593 }
3594
3595 /* Convert (A | B) & A to A. */
3596 if (GET_CODE (XEXP (x, 0)) == IOR
3597 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3598 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
3599 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
3600 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
3601 return XEXP (x, 1);
3602
3603 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
3604 insn (and may simplify more). */
3605 else if (GET_CODE (XEXP (x, 0)) == XOR
3606 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3607 && ! side_effects_p (XEXP (x, 1)))
3608 {
3609 x = gen_binary (AND, mode,
3610 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
3611 XEXP (x, 1));
3612 goto restart;
3613 }
3614 else if (GET_CODE (XEXP (x, 0)) == XOR
3615 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
3616 && ! side_effects_p (XEXP (x, 1)))
3617 {
3618 x = gen_binary (AND, mode,
3619 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
3620 XEXP (x, 1));
3621 goto restart;
3622 }
3623
3624 /* Similarly for (~ (A ^ B)) & A. */
3625 else if (GET_CODE (XEXP (x, 0)) == NOT
3626 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
3627 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 0), XEXP (x, 1))
3628 && ! side_effects_p (XEXP (x, 1)))
3629 {
3630 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 1),
3631 XEXP (x, 1));
3632 goto restart;
3633 }
3634 else if (GET_CODE (XEXP (x, 0)) == NOT
3635 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
3636 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 1), XEXP (x, 1))
3637 && ! side_effects_p (XEXP (x, 1)))
3638 {
3639 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 0),
3640 XEXP (x, 1));
3641 goto restart;
3642 }
3643
3644 /* If we have (and A B) where A is not an object but is known to
3645 be -1 or 0, this is equivalent to the expression
3646 (if_then_else (ne A (const_int 0)) B (const_int 0))
3647 We make this conversion because it may allow further
3648 simplifications and then allow use of conditional move insns. */
3649
3650 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3651 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3652 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o')
3653 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3654 == GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
3655 {
3656 rtx op0 = XEXP (x, 0);
3657 rtx op1 = const0_rtx;
3658 enum rtx_code comp_code
3659 = simplify_comparison (NE, &op0, &op1);
3660
3661 x = gen_rtx_combine (IF_THEN_ELSE, mode,
3662 gen_binary (comp_code, VOIDmode, op0, op1),
3663 XEXP (x, 1), const0_rtx);
3664 goto restart;
3665 }
3666
3667 /* In the following group of tests (and those in case IOR below),
3668 we start with some combination of logical operations and apply
3669 the distributive law followed by the inverse distributive law.
3670 Most of the time, this results in no change. However, if some of
3671 the operands are the same or inverses of each other, simplifications
3672 will result.
3673
3674 For example, (and (ior A B) (not B)) can occur as the result of
3675 expanding a bit field assignment. When we apply the distributive
3676 law to this, we get (ior (and A (not B)) (and B (not B))),
3677 which then simplifies to (and A (not B)). */
3678
3679 /* If we have (and (ior A B) C), apply the distributive law and then
3680 the inverse distributive law to see if things simplify. */
3681
3682 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == XOR)
3683 {
3684 x = apply_distributive_law
3685 (gen_binary (GET_CODE (XEXP (x, 0)), mode,
3686 gen_binary (AND, mode,
3687 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3688 gen_binary (AND, mode,
3689 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3690 if (GET_CODE (x) != AND)
3691 goto restart;
3692 }
3693
3694 if (GET_CODE (XEXP (x, 1)) == IOR || GET_CODE (XEXP (x, 1)) == XOR)
3695 {
3696 x = apply_distributive_law
3697 (gen_binary (GET_CODE (XEXP (x, 1)), mode,
3698 gen_binary (AND, mode,
3699 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
3700 gen_binary (AND, mode,
3701 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
3702 if (GET_CODE (x) != AND)
3703 goto restart;
3704 }
3705
3706 /* Similarly, taking advantage of the fact that
3707 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
3708
3709 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == XOR)
3710 {
3711 x = apply_distributive_law
3712 (gen_binary (XOR, mode,
3713 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
3714 XEXP (XEXP (x, 1), 0)),
3715 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
3716 XEXP (XEXP (x, 1), 1))));
3717 if (GET_CODE (x) != AND)
3718 goto restart;
3719 }
3720
3721 else if (GET_CODE (XEXP (x, 1)) == NOT && GET_CODE (XEXP (x, 0)) == XOR)
3722 {
3723 x = apply_distributive_law
3724 (gen_binary (XOR, mode,
3725 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
3726 XEXP (XEXP (x, 0), 0)),
3727 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
3728 XEXP (XEXP (x, 0), 1))));
3729 if (GET_CODE (x) != AND)
3730 goto restart;
3731 }
3732 break;
3733
3734 case IOR:
3735 /* (ior A C) is C if all significant bits of A are on in C. */
3736 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3737 && (significant_bits (XEXP (x, 0), mode)
3738 & ~ INTVAL (XEXP (x, 1))) == 0)
3739 return XEXP (x, 1);
3740
3741 /* Convert (A & B) | A to A. */
3742 if (GET_CODE (XEXP (x, 0)) == AND
3743 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3744 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
3745 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
3746 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
3747 return XEXP (x, 1);
3748
3749 /* If we have (ior (and A B) C), apply the distributive law and then
3750 the inverse distributive law to see if things simplify. */
3751
3752 if (GET_CODE (XEXP (x, 0)) == AND)
3753 {
3754 x = apply_distributive_law
3755 (gen_binary (AND, mode,
3756 gen_binary (IOR, mode,
3757 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3758 gen_binary (IOR, mode,
3759 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3760
3761 if (GET_CODE (x) != IOR)
3762 goto restart;
3763 }
3764
3765 if (GET_CODE (XEXP (x, 1)) == AND)
3766 {
3767 x = apply_distributive_law
3768 (gen_binary (AND, mode,
3769 gen_binary (IOR, mode,
3770 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
3771 gen_binary (IOR, mode,
3772 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
3773
3774 if (GET_CODE (x) != IOR)
3775 goto restart;
3776 }
3777
3778 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
3779 mode size to (rotate A CX). */
3780
3781 if (((GET_CODE (XEXP (x, 0)) == ASHIFT
3782 && GET_CODE (XEXP (x, 1)) == LSHIFTRT)
3783 || (GET_CODE (XEXP (x, 1)) == ASHIFT
3784 && GET_CODE (XEXP (x, 0)) == LSHIFTRT))
3785 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 1), 0))
3786 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3787 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3788 && (INTVAL (XEXP (XEXP (x, 0), 1)) + INTVAL (XEXP (XEXP (x, 1), 1))
3789 == GET_MODE_BITSIZE (mode)))
3790 {
3791 rtx shift_count;
3792
3793 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3794 shift_count = XEXP (XEXP (x, 0), 1);
3795 else
3796 shift_count = XEXP (XEXP (x, 1), 1);
3797 x = gen_rtx (ROTATE, mode, XEXP (XEXP (x, 0), 0), shift_count);
3798 goto restart;
3799 }
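/* For example, assuming 32-bit SImode,
   (ior (ashift X (const_int 24)) (lshiftrt X (const_int 8)))
   has shift counts summing to the mode size and so becomes
   (rotate X (const_int 24)).  */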
3800 break;
3801
3802 case XOR:
3803 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
3804 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
3805 (NOT y). */
3806 {
3807 int num_negated = 0;
3808 rtx in1 = XEXP (x, 0), in2 = XEXP (x, 1);
3809
3810 if (GET_CODE (in1) == NOT)
3811 num_negated++, in1 = XEXP (in1, 0);
3812 if (GET_CODE (in2) == NOT)
3813 num_negated++, in2 = XEXP (in2, 0);
3814
3815 if (num_negated == 2)
3816 {
3817 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3818 SUBST (XEXP (x, 1), XEXP (XEXP (x, 1), 0));
3819 }
3820 else if (num_negated == 1)
3821 {
3822 x = gen_unary (NOT, mode,
3823 gen_binary (XOR, mode, in1, in2));
3824 goto restart;
3825 }
3826 }
3827
3828 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
3829 correspond to a machine insn or result in further simplifications
3830 if B is a constant. */
3831
3832 if (GET_CODE (XEXP (x, 0)) == AND
3833 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
3834 && ! side_effects_p (XEXP (x, 1)))
3835 {
3836 x = gen_binary (AND, mode,
3837 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
3838 XEXP (x, 1));
3839 goto restart;
3840 }
3841 else if (GET_CODE (XEXP (x, 0)) == AND
3842 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3843 && ! side_effects_p (XEXP (x, 1)))
3844 {
3845 x = gen_binary (AND, mode,
3846 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
3847 XEXP (x, 1));
3848 goto restart;
3849 }
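/* To verify the identity used above: in any bit position where B
   is 1, (xor (and A B) B) yields the complement of the A bit; where
   B is 0 it yields 0.  That is exactly (and (not A) B).  */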
3850
3851
3852 #if STORE_FLAG_VALUE == 1
3853 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
3854 comparison. */
3855 if (XEXP (x, 1) == const1_rtx
3856 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3857 && reversible_comparison_p (XEXP (x, 0)))
3858 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3859 mode, XEXP (XEXP (x, 0), 0),
3860 XEXP (XEXP (x, 0), 1));
3861 #endif
3862
3863 /* (xor (comparison foo bar) (const_int sign-bit))
3864 when STORE_FLAG_VALUE is the sign bit. */
3865 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3866 && (STORE_FLAG_VALUE
3867 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3868 && XEXP (x, 1) == const_true_rtx
3869 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3870 && reversible_comparison_p (XEXP (x, 0)))
3871 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3872 mode, XEXP (XEXP (x, 0), 0),
3873 XEXP (XEXP (x, 0), 1));
3874 break;
3875
3876 case ABS:
3877 /* (abs (neg <foo>)) -> (abs <foo>) */
3878 if (GET_CODE (XEXP (x, 0)) == NEG)
3879 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3880
3881 /* If operand is something known to be positive, ignore the ABS. */
3882 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
3883 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
3884 <= HOST_BITS_PER_WIDE_INT)
3885 && ((significant_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3886 & ((HOST_WIDE_INT) 1
3887 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
3888 == 0)))
3889 return XEXP (x, 0);
3890
3891
3892 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
3893 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
3894 {
3895 x = gen_rtx_combine (NEG, mode, XEXP (x, 0));
3896 goto restart;
3897 }
3898 break;
3899
3900 case FFS:
3901 /* (ffs (*_extend <X>)) = (ffs <X>) */
3902 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
3903 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
3904 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3905 break;
3906
3907 case FLOAT:
3908 /* (float (sign_extend <X>)) = (float <X>). */
3909 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
3910 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3911 break;
3912
3913 case LSHIFT:
3914 case ASHIFT:
3915 case LSHIFTRT:
3916 case ASHIFTRT:
3917 case ROTATE:
3918 case ROTATERT:
3919 /* If this is a shift by a constant amount, simplify it. */
3920 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3921 {
3922 x = simplify_shift_const (x, code, mode, XEXP (x, 0),
3923 INTVAL (XEXP (x, 1)));
3924 if (GET_CODE (x) != code)
3925 goto restart;
3926 }
3927
3928 #ifdef SHIFT_COUNT_TRUNCATED
3929 else if (GET_CODE (XEXP (x, 1)) != REG)
3930 SUBST (XEXP (x, 1),
3931 force_to_mode (XEXP (x, 1), GET_MODE (x),
3932 exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))),
3933 NULL_RTX));
3934 #endif
3935
3936 break;
3937 }
3938
3939 return x;
3940 }
3941 \f
3942 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
3943 operations" because they can be replaced with two more basic operations.
3944 ZERO_EXTEND is also considered "compound" because it can be replaced with
3945 an AND operation, which is simpler, though only one operation.
3946
3947 The function expand_compound_operation is called with an rtx expression
3948 and will convert it to the appropriate shifts and AND operations,
3949 simplifying at each stage.
3950
3951 The function make_compound_operation is called to convert an expression
3952 consisting of shifts and ANDs into the equivalent compound expression.
3953 It is the inverse of this function, loosely speaking. */
3954
3955 static rtx
3956 expand_compound_operation (x)
3957 rtx x;
3958 {
3959 int pos = 0, len;
3960 int unsignedp = 0;
3961 int modewidth;
3962 rtx tem;
3963
3964 switch (GET_CODE (x))
3965 {
3966 case ZERO_EXTEND:
3967 unsignedp = 1;
3968 case SIGN_EXTEND:
3969 /* We can't necessarily use a const_int for a multiword mode;
3970 it depends on implicitly extending the value.
3971 Since we don't know the right way to extend it,
3972 we can't tell whether the implicit way is right.
3973
3974 Even for a mode that is no wider than a const_int,
3975 we can't win, because we need to sign extend one of its bits through
3976 the rest of it, and we don't know which bit. */
3977 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
3978 return x;
3979
3980 if (! FAKE_EXTEND_SAFE_P (GET_MODE (XEXP (x, 0)), XEXP (x, 0)))
3981 return x;
3982
3983 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
3984 /* If the inner object has VOIDmode (the only way this can happen
3985 is if it is an ASM_OPERANDS), we can't do anything since we don't
3986 know how much masking to do. */
3987 if (len == 0)
3988 return x;
3989
3990 break;
3991
3992 case ZERO_EXTRACT:
3993 unsignedp = 1;
3994 case SIGN_EXTRACT:
3995 /* If the operand is a CLOBBER, just return it. */
3996 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
3997 return XEXP (x, 0);
3998
3999 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4000 || GET_CODE (XEXP (x, 2)) != CONST_INT
4001 || GET_MODE (XEXP (x, 0)) == VOIDmode)
4002 return x;
4003
4004 len = INTVAL (XEXP (x, 1));
4005 pos = INTVAL (XEXP (x, 2));
4006
4007 /* If this goes outside the object being extracted, replace the object
4008 with a (use (mem ...)) construct that only combine understands
4009 and is used only for this purpose. */
4010 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4011 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
4012
4013 #if BITS_BIG_ENDIAN
4014 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
4015 #endif
4016 break;
4017
4018 default:
4019 return x;
4020 }
4021
4022 /* If we reach here, we want to return a pair of shifts. The inner
4023 shift is a left shift of BITSIZE - POS - LEN bits. The outer
4024 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
4025 logical depending on the value of UNSIGNEDP.
4026
4027 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
4028 converted into an AND of a shift.
4029
4030 We must check for the case where the left shift would have a negative
4031 count. This can happen in a case like (x >> 31) & 255 on machines
4032 that can't shift by a constant. On those machines, we would first
4033 combine the shift with the AND to produce a variable-position
4034 extraction. Then the constant of 31 would be substituted in to produce
4035 such a position. */
4036
4037 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
4038 if (modewidth >= pos + len)
4039 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
4040 GET_MODE (x),
4041 simplify_shift_const (NULL_RTX, ASHIFT,
4042 GET_MODE (x),
4043 XEXP (x, 0),
4044 modewidth - pos - len),
4045 modewidth - len);
4046
4047 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
4048 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
4049 simplify_shift_const (NULL_RTX, LSHIFTRT,
4050 GET_MODE (x),
4051 XEXP (x, 0), pos),
4052 ((HOST_WIDE_INT) 1 << len) - 1);
4053 else
4054 /* Any other cases we can't handle. */
4055 return x;
4056
4057
4058 /* If we couldn't do this for some reason, return the original
4059 expression. */
4060 if (GET_CODE (tem) == CLOBBER)
4061 return x;
4062
4063 return tem;
4064 }
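/* For example, assuming 32-bit SImode,
   (sign_extend:SI (reg:QI R)) expands, roughly, to
   (ashiftrt:SI (ashift:SI (reg:QI R) (const_int 24)) (const_int 24)),
   while (zero_extend:SI (reg:QI R)) takes the logical-shift path and
   is then simplified into an AND with 255.  */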
4065 \f
4066 /* X is a SET which contains an assignment of one object into
4067 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
4068 or certain SUBREGS). If possible, convert it into a series of
4069 logical operations.
4070
4071 We half-heartedly support variable positions, but do not at all
4072 support variable lengths. */
4073
4074 static rtx
4075 expand_field_assignment (x)
4076 rtx x;
4077 {
4078 rtx inner;
4079 rtx pos; /* Always counts from low bit. */
4080 int len;
4081 rtx mask;
4082 enum machine_mode compute_mode;
4083
4084 /* Loop until we find something we can't simplify. */
4085 while (1)
4086 {
4087 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
4088 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
4089 {
4090 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
4091 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4092 pos = const0_rtx;
4093 }
4094 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4095 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
4096 {
4097 inner = XEXP (SET_DEST (x), 0);
4098 len = INTVAL (XEXP (SET_DEST (x), 1));
4099 pos = XEXP (SET_DEST (x), 2);
4100
4101 /* If the position is constant and the field extends beyond the
4102 width of INNER, surround INNER with a USE to indicate this. */
4103 if (GET_CODE (pos) == CONST_INT
4104 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
4105 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
4106
4107 #if BITS_BIG_ENDIAN
4108 if (GET_CODE (pos) == CONST_INT)
4109 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
4110 - INTVAL (pos));
4111 else if (GET_CODE (pos) == MINUS
4112 && GET_CODE (XEXP (pos, 1)) == CONST_INT
4113 && (INTVAL (XEXP (pos, 1))
4114 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
4115 /* If position is ADJUST - X, new position is X. */
4116 pos = XEXP (pos, 0);
4117 else
4118 pos = gen_binary (MINUS, GET_MODE (pos),
4119 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
4120 - len),
4121 pos);
4122 #endif
4123 }
4124
4125 /* A SUBREG between two modes that occupy the same number of words
4126 can be done by moving the SUBREG to the source. */
4127 else if (GET_CODE (SET_DEST (x)) == SUBREG
4128 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
4129 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
4130 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
4131 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
4132 {
4133 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
4134 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
4135 SET_SRC (x)));
4136 continue;
4137 }
4138 else
4139 break;
4140
4141 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4142 inner = SUBREG_REG (inner);
4143
4144 compute_mode = GET_MODE (inner);
4145
4146 /* Compute a mask of LEN bits, if we can do this on the host machine. */
4147 if (len < HOST_BITS_PER_WIDE_INT)
4148 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
4149 else
4150 break;
4151
4152 /* Now compute the equivalent expression. Make a copy of INNER
4153 for the SET_DEST in case it is a MEM into which we will substitute;
4154 we don't want shared RTL in that case. */
4155 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
4156 gen_binary (IOR, compute_mode,
4157 gen_binary (AND, compute_mode,
4158 gen_unary (NOT, compute_mode,
4159 gen_binary (ASHIFT,
4160 compute_mode,
4161 mask, pos)),
4162 inner),
4163 gen_binary (ASHIFT, compute_mode,
4164 gen_binary (AND, compute_mode,
4165 gen_lowpart_for_combine
4166 (compute_mode,
4167 SET_SRC (x)),
4168 mask),
4169 pos)));
4170 }
4171
4172 return x;
4173 }
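/* The expression built above is the usual read-modify-write form of
   a field store: with MASK == (1 << LEN) - 1,
       inner = (inner & ~(MASK << POS)) | ((SRC & MASK) << POS);
   the old field bits are cleared, then the new value is masked,
   shifted into place, and ORed in.  */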
4174 \f
4175 /* Return an RTX for a reference to LEN bits of INNER. POS is the starting
4176 bit position (counted from the LSB) if >= 0; otherwise POS_RTX represents
4177 the starting bit position.
4178
4179 INNER may be a USE. This will occur when we started with a bitfield
4180 that went outside the boundary of the object in memory, which is
4181 allowed on most machines. To isolate this case, we produce a USE
4182 whose mode is wide enough and surround the MEM with it. The only
4183 code that understands the USE is this routine. If it is not removed,
4184 it will cause the resulting insn not to match.
4185
4186 UNSIGNEDP is non-zero for an unsigned reference and zero for a
4187 signed reference.
4188
4189 IN_DEST is non-zero if this is a reference in the destination of a
4190 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
4191 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
4192 be used.
4193
4194 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
4195 ZERO_EXTRACT should be built even for bits starting at bit 0.
4196
4197 MODE is the desired mode of the result (if IN_DEST == 0). */
4198
4199 static rtx
4200 make_extraction (mode, inner, pos, pos_rtx, len,
4201 unsignedp, in_dest, in_compare)
4202 enum machine_mode mode;
4203 rtx inner;
4204 int pos;
4205 rtx pos_rtx;
4206 int len;
4207 int unsignedp;
4208 int in_dest, in_compare;
4209 {
4210 enum machine_mode is_mode = GET_MODE (inner);
4211 enum machine_mode inner_mode;
4212 enum machine_mode wanted_mem_mode = byte_mode;
4213 enum machine_mode pos_mode = word_mode;
4214 enum machine_mode extraction_mode = word_mode;
4215 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
4216 int spans_byte = 0;
4217 rtx new = 0;
4218
4219 /* Get some information about INNER and get the innermost object. */
4220 if (GET_CODE (inner) == USE)
4221 /* We don't need to adjust the position because we set up the USE
4222 to pretend that it was a full-word object. */
4223 spans_byte = 1, inner = XEXP (inner, 0);
4224 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4225 inner = SUBREG_REG (inner);
4226
4227 inner_mode = GET_MODE (inner);
4228
4229 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
4230 pos = INTVAL (pos_rtx);
4231
4232 /* See if this can be done without an extraction. We never can if the
4233 width of the field is not the same as that of some integer mode. For
4234 registers, we can only avoid the extraction if the position is at the
4235 low-order bit and this is either not in the destination or we have the
4236 appropriate STRICT_LOW_PART operation available.
4237
4238 For MEM, we can avoid an extract if the field starts on an appropriate
4239 boundary and we can change the mode of the memory reference. However,
4240 we cannot directly access the MEM if we have a USE and the underlying
4241 MEM is not TMODE. This combination means that MEM was being used in a
4242 context where bits outside its mode were being referenced; that is only
4243 valid in bit-field insns. */
4244
4245 if (tmode != BLKmode
4246 && ! (spans_byte && inner_mode != tmode)
4247 && ((pos == 0 && GET_CODE (inner) != MEM
4248 && (! in_dest
4249 || (GET_CODE (inner) == REG
4250 && (movstrict_optab->handlers[(int) tmode].insn_code
4251 != CODE_FOR_nothing))))
4252 || (GET_CODE (inner) == MEM && pos >= 0
4253 && (pos
4254 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
4255 : BITS_PER_UNIT)) == 0
4256 /* We can't do this if we are widening INNER_MODE (it
4257 may not be aligned, for one thing). */
4258 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
4259 && (inner_mode == tmode
4260 || (! mode_dependent_address_p (XEXP (inner, 0))
4261 && ! MEM_VOLATILE_P (inner))))))
4262 {
4263 int offset = pos / BITS_PER_UNIT;
4264
4265 /* If INNER is a MEM, make a new MEM that encompasses just the desired
4266 field. If the original and current mode are the same, we need not
4267 adjust the offset. Otherwise, we must adjust it if bytes are big endian.
4268
4269 If INNER is not a MEM, get a piece consisting of just the field
4270 of interest (in this case POS must be 0). */
4271
4272 if (GET_CODE (inner) == MEM)
4273 {
4274 #if BYTES_BIG_ENDIAN
4275 if (inner_mode != tmode)
4276 offset = (GET_MODE_SIZE (inner_mode)
4277 - GET_MODE_SIZE (tmode) - offset);
4278 #endif
4279
4280 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
4281 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
4282 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
4283 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
4284 }
4285 else if (GET_CODE (inner) == REG)
4286 /* We can't call gen_lowpart_for_combine here since we always want
4287 a SUBREG and it would sometimes return a new hard register. */
4288 new = gen_rtx (SUBREG, tmode, inner,
4289 (WORDS_BIG_ENDIAN
4290 && GET_MODE_SIZE (is_mode) > UNITS_PER_WORD)
4291 ? ((GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (tmode))
4292 / UNITS_PER_WORD)
4293 : 0);
4294 else
4295 new = force_to_mode (inner, tmode, len, NULL_RTX);
4296
4297 /* If this extraction is going into the destination of a SET,
4298 make a STRICT_LOW_PART unless we made a MEM. */
4299
4300 if (in_dest)
4301 return (GET_CODE (new) == MEM ? new
4302 : (GET_CODE (new) != SUBREG
4303 ? gen_rtx (CLOBBER, tmode, const0_rtx)
4304 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
4305
4306 /* Otherwise, sign- or zero-extend unless we already are in the
4307 proper mode. */
4308
4309 return (mode == tmode ? new
4310 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
4311 mode, new));
4312 }
4313
4314 /* Unless this is in a COMPARE or we have a funny memory reference,
4315 don't do anything with field extracts starting at the low-order
4316 bit since they are simple AND operations. */
4317 if (pos == 0 && ! in_dest && ! in_compare && ! spans_byte)
4318 return 0;
4319
4320 /* Get the mode to use should INNER be a MEM, the mode for the position,
4321 and the mode for the result. */
4322 #ifdef HAVE_insv
4323 if (in_dest)
4324 {
4325 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
4326 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
4327 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
4328 }
4329 #endif
4330
4331 #ifdef HAVE_extzv
4332 if (! in_dest && unsignedp)
4333 {
4334 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
4335 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
4336 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
4337 }
4338 #endif
4339
4340 #ifdef HAVE_extv
4341 if (! in_dest && ! unsignedp)
4342 {
4343 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
4344 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
4345 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
4346 }
4347 #endif
4348
4349 /* Never narrow an object, since that might not be safe. */
4350
4351 if (mode != VOIDmode
4352 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
4353 extraction_mode = mode;
4354
4355 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
4356 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4357 pos_mode = GET_MODE (pos_rtx);
4358
4359 /* If this is not from memory or we have to change the mode of memory and
4360 cannot, the desired mode is EXTRACTION_MODE. */
4361 if (GET_CODE (inner) != MEM
4362 || (inner_mode != wanted_mem_mode
4363 && (mode_dependent_address_p (XEXP (inner, 0))
4364 || MEM_VOLATILE_P (inner))))
4365 wanted_mem_mode = extraction_mode;
4366
4367 #if BITS_BIG_ENDIAN
4368 /* If position is constant, compute new position. Otherwise, build
4369 subtraction. */
4370 if (pos >= 0)
4371 pos = (MAX (GET_MODE_BITSIZE (is_mode), GET_MODE_BITSIZE (wanted_mem_mode))
4372 - len - pos);
4373 else
4374 pos_rtx
4375 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
4376 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode),
4377 GET_MODE_BITSIZE (wanted_mem_mode))
4378 - len),
4379 pos_rtx);
4380 #endif
4381
4382 /* If INNER has a wider mode, make it smaller. If this is a constant
4383 extract, try to adjust the address to point to the byte containing
4384 the value. */
4385 if (wanted_mem_mode != VOIDmode
4386 && GET_MODE_SIZE (wanted_mem_mode) < GET_MODE_SIZE (is_mode)
4387 && ((GET_CODE (inner) == MEM
4388 && (inner_mode == wanted_mem_mode
4389 || (! mode_dependent_address_p (XEXP (inner, 0))
4390 && ! MEM_VOLATILE_P (inner))))))
4391 {
4392 int offset = 0;
4393
4394 /* The computations below will be correct if the machine is big
4395 endian in both bits and bytes or little endian in bits and bytes.
4396 If it is mixed, we must adjust. */
4397
4398 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
4399 if (! spans_byte && is_mode != wanted_mem_mode)
4400 offset = (GET_MODE_SIZE (is_mode)
4401 - GET_MODE_SIZE (wanted_mem_mode) - offset);
4402 #endif
4403
4404 /* If bytes are big endian and we had a paradoxical SUBREG, we must
4405 adjust OFFSET to compensate. */
4406 #if BYTES_BIG_ENDIAN
4407 if (! spans_byte
4408 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
4409 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
4410 #endif
4411
4412 /* If this is a constant position, we can move to the desired byte. */
4413 if (pos >= 0)
4414 {
4415 offset += pos / BITS_PER_UNIT;
4416 pos %= GET_MODE_BITSIZE (wanted_mem_mode);
4417 }
4418
4419 if (offset != 0 || inner_mode != wanted_mem_mode)
4420 {
4421 rtx newmem = gen_rtx (MEM, wanted_mem_mode,
4422 plus_constant (XEXP (inner, 0), offset));
4423 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
4424 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
4425 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
4426 inner = newmem;
4427 }
4428 }
4429
4430 /* If INNER is not memory, we can always get it into the proper mode. */
4431 else if (GET_CODE (inner) != MEM)
4432 inner = force_to_mode (inner, extraction_mode,
4433 (pos < 0 ? GET_MODE_BITSIZE (extraction_mode)
4434 : len + pos),
4435 NULL_RTX);
4436
4437 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
4438 have to zero extend. Otherwise, we can just use a SUBREG. */
4439 if (pos < 0
4440 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
4441 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
4442 else if (pos < 0
4443 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4444 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
4445
4446 /* Make POS_RTX unless we already have it and it is correct. */
4447 if (pos_rtx == 0 || (pos >= 0 && INTVAL (pos_rtx) != pos))
4448 pos_rtx = GEN_INT (pos);
4449
4450 /* Make the required operation. See if we can use existing rtx. */
4451 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
4452 extraction_mode, inner, GEN_INT (len), pos_rtx);
4453 if (! in_dest)
4454 new = gen_lowpart_for_combine (mode, new);
4455
4456 return new;
4457 }
4458 \f
4459 /* Look at the expression rooted at X. Look for expressions
4460 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
4461 Form these expressions.
4462
4463 Return the new rtx, usually just X.
4464
4465 Also, for machines like the Vax that don't have logical shift insns,
4466 try to convert logical to arithmetic shift operations in cases where
4467 they are equivalent. This undoes the canonicalizations to logical
4468 shifts done elsewhere.
4469
4470 We try, as much as possible, to re-use rtl expressions to save memory.
4471
4472 IN_CODE says what kind of expression we are processing. Normally, it is
4473 SET. In a memory address (inside a MEM, PLUS or minus, the latter two
4474 SET. In a memory address (inside a MEM, PLUS or MINUS, the latter two
4475 or a COMPARE against zero, it is COMPARE. */
4476
4477 static rtx
4478 make_compound_operation (x, in_code)
4479 rtx x;
4480 enum rtx_code in_code;
4481 {
4482 enum rtx_code code = GET_CODE (x);
4483 enum machine_mode mode = GET_MODE (x);
4484 int mode_width = GET_MODE_BITSIZE (mode);
4485 enum rtx_code next_code;
4486 int i, count;
4487 rtx new = 0;
4488 char *fmt;
4489
4490 /* Select the code to be used in recursive calls. Once we are inside an
4491 address, we stay there. If we have a comparison, set to COMPARE,
4492 but once inside, go back to our default of SET. */
4493
4494 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
4495 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
4496 && XEXP (x, 1) == const0_rtx) ? COMPARE
4497 : in_code == COMPARE ? SET : in_code);
4498
4499 /* Process depending on the code of this operation. If NEW is set
4500 non-zero, it will be returned. */
4501
4502 switch (code)
4503 {
4504 case ASHIFT:
4505 case LSHIFT:
4506 /* Convert shifts by constants into multiplications if inside
4507 an address. */
4508 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
4509 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
4510 && INTVAL (XEXP (x, 1)) >= 0)
4511 new = gen_rtx_combine (MULT, mode, XEXP (x, 0),
4512 GEN_INT ((HOST_WIDE_INT) 1
4513 << INTVAL (XEXP (x, 1))));
4514 break;
4515
4516 case AND:
4517 /* If the second operand is not a constant, we can't do anything
4518 with it. */
4519 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4520 break;
4521
4522 /* If the constant is a power of two minus one and the first operand
4523 is a logical right shift, make an extraction. */
4524 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
4525 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4526 new = make_extraction (mode, XEXP (XEXP (x, 0), 0), -1,
4527 XEXP (XEXP (x, 0), 1), i, 1,
4528 0, in_code == COMPARE);
4529
4530 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
4531 else if (GET_CODE (XEXP (x, 0)) == SUBREG
4532 && subreg_lowpart_p (XEXP (x, 0))
4533 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
4534 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4535 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))),
4536 XEXP (SUBREG_REG (XEXP (x, 0)), 0), -1,
4537 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
4538 0, in_code == COMPARE);
4539
4540
4541 /* If we have (and (rotate X C) M) and C is larger than the number
4542 of bits in M, this is an extraction. */
4543
4544 else if (GET_CODE (XEXP (x, 0)) == ROTATE
4545 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4546 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
4547 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
4548 new = make_extraction (mode, XEXP (XEXP (x, 0), 0),
4549 (GET_MODE_BITSIZE (mode)
4550 - INTVAL (XEXP (XEXP (x, 0), 1))),
4551 NULL_RTX, i, 1, 0, in_code == COMPARE);
4552
4553 /* On machines without logical shifts, if the operand of the AND is
4554 a logical shift and our mask turns off all the propagated sign
4555 bits, we can replace the logical shift with an arithmetic shift. */
4556 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
4557 && (lshr_optab->handlers[(int) mode].insn_code
4558 == CODE_FOR_nothing)
4559 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
4560 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4561 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
4562 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
4563 && mode_width <= HOST_BITS_PER_WIDE_INT)
4564 {
4565 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
4566
4567 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
4568 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
4569 SUBST (XEXP (x, 0),
4570 gen_rtx_combine (ASHIFTRT, mode, XEXP (XEXP (x, 0), 0),
4571 XEXP (XEXP (x, 0), 1)));
4572 }
4573
4574 /* If the constant is one less than a power of two, this might be
4575 representable by an extraction even if no shift is present.
4576 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
4577 we are in a COMPARE. */
4578 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4579 new = make_extraction (mode, XEXP (x, 0), 0, NULL_RTX, i, 1,
4580 0, in_code == COMPARE);
4581
4582 /* If we are in a comparison and this is an AND with a power of two,
4583 convert this into the appropriate bit extract. */
4584 else if (in_code == COMPARE
4585 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
4586 new = make_extraction (mode, XEXP (x, 0), i, NULL_RTX, 1, 1, 0, 1);
4587
4588 break;
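/* For example, assuming 32-bit SImode,
   (and (lshiftrt X (const_int 8)) (const_int 255)) selects bits
   8..15 of X, so it is turned into, roughly,
   (zero_extract:SI X (const_int 8) (const_int 8)).  */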
4589
4590 case LSHIFTRT:
4591 /* If the sign bit is known to be zero, replace this with an
4592 arithmetic shift. */
4593 if (ashr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
4594 && lshr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
4595 && mode_width <= HOST_BITS_PER_WIDE_INT
4596 && (significant_bits (XEXP (x, 0), mode)
4597 & (1 << (mode_width - 1))) == 0)
4598 {
4599 new = gen_rtx_combine (ASHIFTRT, mode, XEXP (x, 0), XEXP (x, 1));
4600 break;
4601 }
4602
4603 /* ... fall through ... */
4604
4605 case ASHIFTRT:
4606 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
4607 this is a SIGN_EXTRACT. */
4608 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4609 && GET_CODE (XEXP (x, 0)) == ASHIFT
4610 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4611 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (x, 0), 1)))
4612 new = make_extraction (mode, XEXP (XEXP (x, 0), 0),
4613 (INTVAL (XEXP (x, 1))
4614 - INTVAL (XEXP (XEXP (x, 0), 1))),
4615 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
4616 code == LSHIFTRT, 0, in_code == COMPARE);
4617
4618 /* Similarly if we have (ashiftrt (OP (ashift foo C1) C3) C2). In these
4619 cases, we are better off returning a SIGN_EXTEND of the operation. */
4620
4621 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4622 && (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND
4623 || GET_CODE (XEXP (x, 0)) == XOR
4624 || GET_CODE (XEXP (x, 0)) == PLUS)
4625 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
4626 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4627 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
4628 && INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) < HOST_BITS_PER_WIDE_INT
4629 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4630 && (INTVAL (XEXP (XEXP (x, 0), 1))
4631 & (((HOST_WIDE_INT) 1
4632 << INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))) - 1)) == 0)
4633 {
4634 HOST_WIDE_INT newop1
4635 = (INTVAL (XEXP (XEXP (x, 0), 1))
4636 >> INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)));
4637
4638 new = make_extraction (mode,
4639 gen_binary (GET_CODE (XEXP (x, 0)), mode,
4640 XEXP (XEXP (XEXP (x, 0), 0), 0),
4641 GEN_INT (newop1)),
4642 (INTVAL (XEXP (x, 1))
4643 - INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))),
4644 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
4645 code == LSHIFTRT, 0, in_code == COMPARE);
4646 }
4647
4648 break;
4649 }
4650
4651 if (new)
4652 {
4653 x = gen_lowpart_for_combine (mode, new);
4654 code = GET_CODE (x);
4655 }
4656
4657 /* Now recursively process each operand of this operation. */
4658 fmt = GET_RTX_FORMAT (code);
4659 for (i = 0; i < GET_RTX_LENGTH (code); i++)
4660 if (fmt[i] == 'e')
4661 {
4662 new = make_compound_operation (XEXP (x, i), next_code);
4663 SUBST (XEXP (x, i), new);
4664 }
4665
4666 return x;
4667 }
4668 \f
4669 /* Given M, see if it is a value that would select a field of bits
4670 within an item, but not the entire word. Return -1 if not.
4671 Otherwise, return the starting position of the field, where 0 is the
4672 low-order bit.
4673
4674 *PLEN is set to the length of the field. */
4675
4676 static int
4677 get_pos_from_mask (m, plen)
4678 unsigned HOST_WIDE_INT m;
4679 int *plen;
4680 {
4681 /* Get the bit number of the first 1 bit from the right, -1 if none. */
4682 int pos = exact_log2 (m & - m);
4683
4684 if (pos < 0)
4685 return -1;
4686
4687 /* Now shift off the low-order zero bits and see if we have a power of
4688 two minus 1. */
4689 *plen = exact_log2 ((m >> pos) + 1);
4690
4691 if (*plen <= 0)
4692 return -1;
4693
4694 return pos;
4695 }
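/* For example, m == 0x0ff0: the lowest set bit gives pos == 4, and
   (m >> 4) + 1 == 0x100 == 1 << 8, so *plen is set to 8; the mask
   selects an 8-bit field starting at bit 4.  */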
4696 \f
4697 /* Rewrite X so that it is an expression in MODE. We only care about the
4698 low-order BITS bits so we can ignore AND operations that just clear
4699 higher-order bits.
4700
4701 Also, if REG is non-zero and X is a register equal in value to REG,
4702 replace X with REG. */
4703
4704 static rtx
4705 force_to_mode (x, mode, bits, reg)
4706 rtx x;
4707 enum machine_mode mode;
4708 int bits;
4709 rtx reg;
4710 {
4711 enum rtx_code code = GET_CODE (x);
4712 enum machine_mode op_mode = mode;
4713
4714 /* If X is narrower than MODE or if BITS is larger than the size of MODE,
4715 just get X in the proper mode. */
4716
4717 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
4718 || bits > GET_MODE_BITSIZE (mode))
4719 return gen_lowpart_for_combine (mode, x);
4720
4721 switch (code)
4722 {
4723 case SIGN_EXTEND:
4724 case ZERO_EXTEND:
4725 case ZERO_EXTRACT:
4726 case SIGN_EXTRACT:
4727 x = expand_compound_operation (x);
4728 if (GET_CODE (x) != code)
4729 return force_to_mode (x, mode, bits, reg);
4730 break;
4731
4732 case REG:
4733 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
4734 || rtx_equal_p (reg, get_last_value (x))))
4735 x = reg;
4736 break;
4737
4738 case CONST_INT:
4739 if (bits < HOST_BITS_PER_WIDE_INT)
4740 x = GEN_INT (INTVAL (x) & (((HOST_WIDE_INT) 1 << bits) - 1));
4741 return x;
4742
4743 case SUBREG:
4744 /* Ignore low-order SUBREGs. */
4745 if (subreg_lowpart_p (x))
4746 return force_to_mode (SUBREG_REG (x), mode, bits, reg);
4747 break;
4748
4749 case AND:
4750 /* If this is an AND with a constant, handle it specially. Otherwise,
4751 we fall through to do the general binary case. */
4752
4753 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4754 {
4755 HOST_WIDE_INT mask = INTVAL (XEXP (x, 1));
4756 int len = exact_log2 (mask + 1);
4757 rtx op = XEXP (x, 0);
4758
4759 /* If this is masking some low-order bits, we may be able to
4760 impose a stricter constraint on what bits of the operand are
4761 required. */
4762
4763 op = force_to_mode (op, mode, len > 0 ? MIN (len, bits) : bits,
4764 reg);
4765
4766 if (bits < HOST_BITS_PER_WIDE_INT)
4767 mask &= ((HOST_WIDE_INT) 1 << bits) - 1;
4768
4769 /* If we have no AND in MODE, use the original mode for the
4770 operation. */
4771
4772 if (and_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
4773 op_mode = GET_MODE (x);
4774
4775 x = simplify_and_const_int (x, op_mode, op, mask);
4776
4777 /* If X is still an AND, see if it is an AND with a mask that
4778 is just some low-order bits. If so, and it is BITS wide (it
4779 can't be wider), we don't need it. */
4780
4781 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
4782 && bits < HOST_BITS_PER_WIDE_INT
4783 && INTVAL (XEXP (x, 1)) == ((HOST_WIDE_INT) 1 << bits) - 1)
4784 x = XEXP (x, 0);
4785
4786 break;
4787 }
4788
4789 /* ... fall through ... */
4790
4791 case PLUS:
4792 case MINUS:
4793 case MULT:
4794 case IOR:
4795 case XOR:
4796 /* For most binary operations, just propagate into the operation and
4797 change the mode if we have an operation of that mode. */
4798
4799 if ((code == PLUS
4800 && add_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
4801 || (code == MINUS
4802 && sub_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
4803 || (code == MULT && (smul_optab->handlers[(int) mode].insn_code
4804 == CODE_FOR_nothing))
4805 || (code == IOR
4806 && ior_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
4807 || (code == XOR && (xor_optab->handlers[(int) mode].insn_code
4808 == CODE_FOR_nothing)))
4809 op_mode = GET_MODE (x);
4810
4811 x = gen_binary (code, op_mode,
4812 gen_lowpart_for_combine (op_mode,
4813 force_to_mode (XEXP (x, 0),
4814 mode, bits,
4815 reg)),
4816 gen_lowpart_for_combine (op_mode,
4817 force_to_mode (XEXP (x, 1),
4818 mode, bits,
4819 reg)));
4820 break;
4821
4822 case ASHIFT:
4823 case LSHIFT:
4824 /* For left shifts, do the same, but just for the first operand.
4825 If the shift count is a constant, we need even fewer bits of the
4826 first operand. */
4827
4828 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) < bits)
4829 bits -= INTVAL (XEXP (x, 1));
4830
4831 if ((code == ASHIFT
4832 && ashl_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
4833 || (code == LSHIFT && (lshl_optab->handlers[(int) mode].insn_code
4834 == CODE_FOR_nothing)))
4835 op_mode = GET_MODE (x);
4836
4837 x = gen_binary (code, op_mode,
4838 gen_lowpart_for_combine (op_mode,
4839 force_to_mode (XEXP (x, 0),
4840 mode, bits,
4841 reg)),
4842 XEXP (x, 1));
4843 break;
4844
4845 case LSHIFTRT:
4846 /* Here we can only do something if the shift count is a constant and
4847 the count plus BITS is no larger than the width of MODE; in that
4848 case we can do the shift in MODE. */
4849
4850 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4851 && INTVAL (XEXP (x, 1)) + bits <= GET_MODE_BITSIZE (mode))
4852 {
4853 rtx inner = force_to_mode (XEXP (x, 0), mode,
4854 bits + INTVAL (XEXP (x, 1)), reg);
4855
4856 if (lshr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
4857 op_mode = GET_MODE (x);
4858
4859 x = gen_binary (LSHIFTRT, op_mode,
4860 gen_lowpart_for_combine (op_mode, inner),
4861 XEXP (x, 1));
4862 }
4863 break;
4864
4865 case ASHIFTRT:
4866 /* If this is a sign-extension operation that just affects bits
4867 we don't care about, remove it. */
4868
4869 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4870 && INTVAL (XEXP (x, 1)) >= 0
4871 && INTVAL (XEXP (x, 1)) <= GET_MODE_BITSIZE (GET_MODE (x)) - bits
4872 && GET_CODE (XEXP (x, 0)) == ASHIFT
4873 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4874 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
4875 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, bits, reg);
4876 break;
4877
4878 case NEG:
4879 case NOT:
4880 if ((code == NEG
4881 && neg_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
4882 || (code == NOT && (one_cmpl_optab->handlers[(int) mode].insn_code
4883 == CODE_FOR_nothing)))
4884 op_mode = GET_MODE (x);
4885
4886 /* Handle these similarly to the way we handle most binary operations. */
4887 x = gen_unary (code, op_mode,
4888 gen_lowpart_for_combine (op_mode,
4889 force_to_mode (XEXP (x, 0), mode,
4890 bits, reg)));
4891 break;
4892
4893 case IF_THEN_ELSE:
4894 /* We have no way of knowing if the IF_THEN_ELSE can itself be
4895 written in a narrower mode. We play it safe and do not do so. */
4896
4897 SUBST (XEXP (x, 1),
4898 gen_lowpart_for_combine (GET_MODE (x),
4899 force_to_mode (XEXP (x, 1), mode,
4900 bits, reg)));
4901 SUBST (XEXP (x, 2),
4902 gen_lowpart_for_combine (GET_MODE (x),
4903 force_to_mode (XEXP (x, 2), mode,
4904 bits, reg)));
4905 break;
4906 }
4907
4908 /* Ensure we return a value of the proper mode. */
4909 return gen_lowpart_for_combine (mode, x);
4910 }
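/* Illustrative sketch, not part of the original source: the principle
   behind force_to_mode in plain C.  When only the low BITS bits of a
   value are consumed, masking an operand to those bits cannot change
   the bits that matter, so AND operations that merely clear
   higher-order bits can be dropped.  The names are hypothetical.  */
#if 0
#include <assert.h>

int
main ()
{
  const int bits = 8;
  const unsigned int mask = (1u << bits) - 1;
  unsigned int x = 0x1234;

  /* A consumer of only the low 8 bits cannot tell X from X & MASK,
     even through a PLUS.  */
  assert (((x + 7) & mask) == (((x & mask) + 7) & mask));
  return 0;
}
#endif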
4911 \f
4912 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
4913 Return that assignment if so.
4914
4915 We only handle the most common cases. */
4916
4917 static rtx
4918 make_field_assignment (x)
4919 rtx x;
4920 {
4921 rtx dest = SET_DEST (x);
4922 rtx src = SET_SRC (x);
4923 rtx ourdest;
4924 rtx assign;
4925 HOST_WIDE_INT c1;
4926 int pos, len;
4927 rtx other;
4928 enum machine_mode mode;
4929
4930 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
4931 a clear of a one-bit field. We will have changed it to
4932 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
4933 for a SUBREG. */
4934
4935 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
4936 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
4937 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
4938 && (rtx_equal_p (dest, XEXP (src, 1))
4939 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
4940 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
4941 {
4942 assign = make_extraction (VOIDmode, dest, -1, XEXP (XEXP (src, 0), 1),
4943 1, 1, 1, 0);
4944 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
4945 }
4946
4947 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
4948 && subreg_lowpart_p (XEXP (src, 0))
4949 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
4950 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
4951 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
4952 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
4953 && (rtx_equal_p (dest, XEXP (src, 1))
4954 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
4955 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
4956 {
4957 assign = make_extraction (VOIDmode, dest, -1,
4958 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
4959 1, 1, 1, 0);
4960 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
4961 }
4962
4963 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
4964 one-bit field. */
4965 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
4966 && XEXP (XEXP (src, 0), 0) == const1_rtx
4967 && (rtx_equal_p (dest, XEXP (src, 1))
4968 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
4969 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
4970 {
4971 assign = make_extraction (VOIDmode, dest, -1, XEXP (XEXP (src, 0), 1),
4972 1, 1, 1, 0);
4973 return gen_rtx (SET, VOIDmode, assign, const1_rtx);
4974 }
4975
4976 /* The other case we handle is assignments into a constant-position
4977 field. They look like (ior (and DEST C1) OTHER). If C1 represents
4978 a mask that has all one bits except for a group of zero bits and
4979 OTHER is known to have zeros where C1 has ones, this is such an
4980 assignment. Compute the position and length from C1. Shift OTHER
4981 to the appropriate position, force it to the required mode, and
4982 make the extraction. Check for the AND in both operands. */
4983
4984 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == AND
4985 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
4986 && (rtx_equal_p (XEXP (XEXP (src, 0), 0), dest)
4987 || rtx_equal_p (XEXP (XEXP (src, 0), 0), get_last_value (dest))
4988 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 0), 1)), dest)))
4989 c1 = INTVAL (XEXP (XEXP (src, 0), 1)), other = XEXP (src, 1);
4990 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 1)) == AND
4991 && GET_CODE (XEXP (XEXP (src, 1), 1)) == CONST_INT
4992 && (rtx_equal_p (XEXP (XEXP (src, 1), 0), dest)
4993 || rtx_equal_p (XEXP (XEXP (src, 1), 0), get_last_value (dest))
4994 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 1), 0)),
4995 dest)))
4996 c1 = INTVAL (XEXP (XEXP (src, 1), 1)), other = XEXP (src, 0);
4997 else
4998 return x;
4999
5000 pos = get_pos_from_mask (~c1, &len);
5001 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
5002 || (c1 & significant_bits (other, GET_MODE (other))) != 0)
5003 return x;
5004
5005 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
5006
5007 /* The mode to use for the source is the mode of the assignment, or of
5008 what is inside a possible STRICT_LOW_PART. */
5009 mode = (GET_CODE (assign) == STRICT_LOW_PART
5010 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
5011
5012 /* Shift OTHER right POS places and make it the source, restricting it
5013 to the proper length and mode. */
5014
5015 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
5016 GET_MODE (src), other, pos),
5017 mode, len, dest);
5018
5019 return gen_rtx_combine (SET, VOIDmode, assign, src);
5020 }
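/* Illustrative sketch, not part of the original source: the arithmetic
   behind the constant-position case above.  With C1 == ~0x0ff0, the
   zero bits of C1 name an 8-bit field at position 4, and the form
   (ior (and DEST C1) OTHER) deposits OTHER into that field, provided
   OTHER is zero outside it.  All values are hypothetical.  */
#if 0
#include <assert.h>

int
main ()
{
  unsigned int c1 = ~0x0ff0u;		/* all ones except bits 4..11 */
  unsigned int dest = 0xdeadbeefu;
  unsigned int other = 0xab0u;		/* non-zero only within the field */
  unsigned int pos = 4, len = 8;
  unsigned int field = (other >> pos) & ((1u << len) - 1);

  /* The IOR form and an explicit bit-field deposit agree.  */
  assert (((dest & c1) | other)
	  == ((dest & ~(((1u << len) - 1) << pos)) | (field << pos)));
  return 0;
}
#endif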
5021 \f
5022 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
5023 if so. */
5024
5025 static rtx
5026 apply_distributive_law (x)
5027 rtx x;
5028 {
5029 enum rtx_code code = GET_CODE (x);
5030 rtx lhs, rhs, other;
5031 rtx tem;
5032 enum rtx_code inner_code;
5033
5034 /* The outer operation can only be one of the following: */
5035 if (code != IOR && code != AND && code != XOR
5036 && code != PLUS && code != MINUS)
5037 return x;
5038
5039 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
5040
5041 /* If either operand is a primitive we can't do anything, so get out fast. */
5042 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
5043 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
5044 return x;
5045
5046 lhs = expand_compound_operation (lhs);
5047 rhs = expand_compound_operation (rhs);
5048 inner_code = GET_CODE (lhs);
5049 if (inner_code != GET_CODE (rhs))
5050 return x;
5051
5052 /* See if the inner and outer operations distribute. */
5053 switch (inner_code)
5054 {
5055 case LSHIFTRT:
5056 case ASHIFTRT:
5057 case AND:
5058 case IOR:
5059 /* These all distribute except over PLUS and MINUS. */
5060 if (code == PLUS || code == MINUS)
5061 return x;
5062 break;
5063
5064 case MULT:
5065 if (code != PLUS && code != MINUS)
5066 return x;
5067 break;
5068
5069 case ASHIFT:
5070 case LSHIFT:
5071 /* These are also multiplies, so they distribute over everything. */
5072 break;
5073
5074 case SUBREG:
5075 /* Non-paradoxical SUBREGs distribute over all operations, provided
5076 the inner modes and word numbers are the same, this is an extraction
5077 of a low-order part, we don't convert an fp operation to int or
5078 vice versa, and we would not be converting a single-word
5079 operation into a multi-word operation. The latter test is not
5080 required, but it prevents generating unneeded multi-word operations.
5081 Some of the previous tests are redundant given the latter test, but
5082 are retained because they are required for correctness.
5083
5084 We produce the result slightly differently in this case. */
5085
5086 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
5087 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
5088 || ! subreg_lowpart_p (lhs)
5089 || (GET_MODE_CLASS (GET_MODE (lhs))
5090 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
5091 || (GET_MODE_SIZE (GET_MODE (lhs))
5092 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
5093 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
5094 return x;
5095
5096 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
5097 SUBREG_REG (lhs), SUBREG_REG (rhs));
5098 return gen_lowpart_for_combine (GET_MODE (x), tem);
5099
5100 default:
5101 return x;
5102 }
5103
5104 /* Set LHS and RHS to the inner operands (A and B in the example
5105 above) and set OTHER to the common operand (C in the example).
5106 There is only one way to do this unless the inner operation is
5107 commutative. */
5108 if (GET_RTX_CLASS (inner_code) == 'c'
5109 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
5110 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
5111 else if (GET_RTX_CLASS (inner_code) == 'c'
5112 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
5113 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
5114 else if (GET_RTX_CLASS (inner_code) == 'c'
5115 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
5116 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
5117 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
5118 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
5119 else
5120 return x;
5121
5122 /* Form the new inner operation, seeing if it simplifies first. */
5123 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
5124
5125 /* There is one exception to the general way of distributing:
5126 (a | c) ^ (b | c) -> (a ^ b) & ~c */
5127 if (code == XOR && inner_code == IOR)
5128 {
5129 inner_code = AND;
5130 other = gen_unary (NOT, GET_MODE (x), other);
5131 }
5132
5133 /* We may be able to continue distributing the result, so call
5134 ourselves recursively on the inner operation before forming the
5135 outer operation, which we return. */
5136 return gen_binary (inner_code, GET_MODE (x),
5137 apply_distributive_law (tem), other);
5138 }
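/* Illustrative sketch, not part of the original source: brute-force
   checks of the distributive identities used above, including the
   XOR/IOR exception, over all byte values.  */
#if 0
#include <assert.h>

int
main ()
{
  unsigned int a, b, c;

  for (a = 0; a < 256; a++)
    for (b = 0; b < 256; b++)
      for (c = 0; c < 256; c++)
	{
	  /* MULT distributes over PLUS.  */
	  assert ((a * c) + (b * c) == (a + b) * c);
	  /* The exception: (a | c) ^ (b | c) == (a ^ b) & ~c.  */
	  assert (((a | c) ^ (b | c)) == ((a ^ b) & ~c & 0xff));
	}
  return 0;
}
#endif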
5139 \f
5140 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
5141 in MODE.
5142
5143 Return an equivalent form, if different from X. Otherwise, return X. If
5144 X is zero, we are to always construct the equivalent form. */
5145
5146 static rtx
5147 simplify_and_const_int (x, mode, varop, constop)
5148 rtx x;
5149 enum machine_mode mode;
5150 rtx varop;
5151 unsigned HOST_WIDE_INT constop;
5152 {
5153 register enum machine_mode tmode;
5154 register rtx temp;
5155 unsigned HOST_WIDE_INT significant;
5156
5157 /* There is a large class of optimizations based on the principle that
5158 some operations produce results where certain bits are known to be zero,
5159 and hence are not significant to the AND. For example, if we have just
5160 done a left shift of one bit, the low-order bit is known to be zero and
5161 hence an AND with a mask of ~1 would not do anything.
5162
5163 At the end of the following loop, we set:
5164
5165 VAROP to be the item to be AND'ed with;
5166 CONSTOP to the constant value to AND it with. */
5167
5168 while (1)
5169 {
5170 /* If we ever encounter a mode wider than the host machine's widest
5171 integer size, we can't compute the masks accurately, so give up. */
5172 if (GET_MODE_BITSIZE (GET_MODE (varop)) > HOST_BITS_PER_WIDE_INT)
5173 break;
5174
5175 /* Unless one of the cases below does a `continue',
5176 a `break' will be executed to exit the loop. */
5177
5178 switch (GET_CODE (varop))
5179 {
5180 case CLOBBER:
5181 /* If VAROP is a (clobber (const_int)), return it since we know
5182 we are generating something that won't match. */
5183 return varop;
5184
5185 #if ! BITS_BIG_ENDIAN
5186 case USE:
5187 /* VAROP is a (use (mem ..)) that was made from a bit-field
5188 extraction that spanned the boundary of the MEM. If we are
5189 now masking so it is within that boundary, we don't need the
5190 USE any more. */
5191 if ((constop & ~ GET_MODE_MASK (GET_MODE (XEXP (varop, 0)))) == 0)
5192 {
5193 varop = XEXP (varop, 0);
5194 continue;
5195 }
5196 break;
5197 #endif
5198
5199 case SUBREG:
5200 if (subreg_lowpart_p (varop)
5201 /* We can ignore the effect of this SUBREG if it narrows the mode
5202 or, on machines where byte operations zero extend, if the
5203 constant masks to zero all the bits the mode doesn't have. */
5204 && ((GET_MODE_SIZE (GET_MODE (varop))
5205 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop))))
5206 #ifdef BYTE_LOADS_ZERO_EXTEND
5207 || (0 == (constop
5208 & GET_MODE_MASK (GET_MODE (varop))
5209 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (varop)))))
5210 #endif
5211 ))
5212 {
5213 varop = SUBREG_REG (varop);
5214 continue;
5215 }
5216 break;
5217
5218 case ZERO_EXTRACT:
5219 case SIGN_EXTRACT:
5220 case ZERO_EXTEND:
5221 case SIGN_EXTEND:
5222 /* Try to expand these into a series of shifts and then work
5223 with that result. If we can't, for example, if the extract
5224 isn't at a fixed position, give up. */
5225 temp = expand_compound_operation (varop);
5226 if (temp != varop)
5227 {
5228 varop = temp;
5229 continue;
5230 }
5231 break;
5232
5233 case AND:
5234 if (GET_CODE (XEXP (varop, 1)) == CONST_INT)
5235 {
5236 constop &= INTVAL (XEXP (varop, 1));
5237 varop = XEXP (varop, 0);
5238 continue;
5239 }
5240 break;
5241
5242 case IOR:
5243 case XOR:
5244 /* If VAROP is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
5245 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
5246 operation which may be a bitfield extraction. */
5247
5248 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
5249 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5250 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
5251 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT
5252 && GET_CODE (XEXP (varop, 1)) == CONST_INT
5253 && ((INTVAL (XEXP (varop, 1))
5254 & ~ significant_bits (XEXP (varop, 0),
5255 GET_MODE (varop))) == 0))
5256 {
5257 temp = GEN_INT ((INTVAL (XEXP (varop, 1)) & constop)
5258 << INTVAL (XEXP (XEXP (varop, 0), 1)));
5259 temp = gen_binary (GET_CODE (varop), GET_MODE (varop),
5260 XEXP (XEXP (varop, 0), 0), temp);
5261 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5262 temp, XEXP (XEXP (varop, 0), 1));
5263 continue;
5264 }
5265
5266 /* Apply the AND to both branches of the IOR or XOR, then try to
5267 apply the distributive law. This may eliminate operations
5268 if either branch can be simplified because of the AND.
5269 It may also make some cases more complex, but those cases
5270 probably won't match a pattern either with or without this. */
5271 return
5272 gen_lowpart_for_combine
5273 (mode, apply_distributive_law
5274 (gen_rtx_combine
5275 (GET_CODE (varop), GET_MODE (varop),
5276 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
5277 XEXP (varop, 0), constop),
5278 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
5279 XEXP (varop, 1), constop))));
5280
5281 case NOT:
5282 /* (and (not FOO)) is (and (xor FOO CONST_OP)) so if FOO is an
5283 LSHIFTRT we can do the same as above. */
5284
5285 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
5286 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5287 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
5288 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT)
5289 {
5290 temp = GEN_INT (constop << INTVAL (XEXP (XEXP (varop, 0), 1)));
5291 temp = gen_binary (XOR, GET_MODE (varop),
5292 XEXP (XEXP (varop, 0), 0), temp);
5293 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5294 temp, XEXP (XEXP (varop, 0), 1));
5295 continue;
5296 }
5297 break;
5298
5299 case ASHIFTRT:
5300 /* If we are just looking for the sign bit, we don't need this
5301 shift at all, even if it has a variable count. */
5302 if (constop == ((HOST_WIDE_INT) 1
5303 << (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)))
5304 {
5305 varop = XEXP (varop, 0);
5306 continue;
5307 }
5308
5309 /* If this is a shift by a constant, get a mask that contains
5310 those bits that are not copies of the sign bit. We then have
5311 two cases: If CONSTOP only includes those bits, this can be
5312 a logical shift, which may allow simplifications. If CONSTOP
5313 is a single-bit field not within those bits, we are requesting
5314 a copy of the sign bit and hence can shift the sign bit to
5315 the appropriate location. */
5316 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5317 && INTVAL (XEXP (varop, 1)) >= 0
5318 && INTVAL (XEXP (varop, 1)) < HOST_BITS_PER_WIDE_INT)
5319 {
5320 int i = -1;
5321
5322 significant = GET_MODE_MASK (GET_MODE (varop));
5323 significant >>= INTVAL (XEXP (varop, 1));
5324
5325 if ((constop & ~significant) == 0
5326 || (i = exact_log2 (constop)) >= 0)
5327 {
5328 varop = simplify_shift_const
5329 (varop, LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5330 i < 0 ? INTVAL (XEXP (varop, 1))
5331 : GET_MODE_BITSIZE (GET_MODE (varop)) - 1 - i);
5332 if (GET_CODE (varop) != ASHIFTRT)
5333 continue;
5334 }
5335 }
5336
5337 /* If our mask is 1, convert this to a LSHIFTRT. This can be done
5338 even if the shift count isn't a constant. */
5339 if (constop == 1)
5340 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5341 XEXP (varop, 0), XEXP (varop, 1));
5342 break;
5343
5344 case NE:
5345 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is
5346 included in STORE_FLAG_VALUE and FOO has no significant bits
5347 not in CONST. */
5348 if ((constop & ~ STORE_FLAG_VALUE) == 0
5349 && XEXP (varop, 1) == const0_rtx
5350 && (significant_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
5351 {
5352 varop = XEXP (varop, 0);
5353 continue;
5354 }
5355 break;
5356
5357 case PLUS:
5358 /* In (and (plus FOO C1) M), if M is a mask that just turns off
5359 low-order bits (as in an alignment operation) and FOO is already
5360 aligned to that boundary, we can remove this AND
5361 and possibly the PLUS if it is now adding zero. */
5362 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5363 && exact_log2 (-constop) >= 0
5364 && (significant_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
5365 {
5366 varop = plus_constant (XEXP (varop, 0),
5367 INTVAL (XEXP (varop, 1)) & constop);
5368 constop = ~0;
5369 break;
5370 }
5371
5372 /* ... fall through ... */
5373
5374 case MINUS:
5375 /* In (and (plus (and FOO M1) BAR) M2), if M1 and M2 are one
5376 less than powers of two and M2 is narrower than M1, we can
5377 eliminate the inner AND. This occurs when incrementing
5378 bit fields. */
5379
5380 if (GET_CODE (XEXP (varop, 0)) == ZERO_EXTRACT
5381 || GET_CODE (XEXP (varop, 0)) == ZERO_EXTEND)
5382 SUBST (XEXP (varop, 0),
5383 expand_compound_operation (XEXP (varop, 0)));
5384
5385 if (GET_CODE (XEXP (varop, 0)) == AND
5386 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5387 && exact_log2 (constop + 1) >= 0
5388 && exact_log2 (INTVAL (XEXP (XEXP (varop, 0), 1)) + 1) >= 0
5389 && (~ INTVAL (XEXP (XEXP (varop, 0), 1)) & constop) == 0)
5390 SUBST (XEXP (varop, 0), XEXP (XEXP (varop, 0), 0));
5391 break;
5392 }
5393
5394 break;
5395 }
5396
5397 /* If we have reached a constant, this whole thing is constant. */
5398 if (GET_CODE (varop) == CONST_INT)
5399 return GEN_INT (constop & INTVAL (varop));
5400
5401 /* See what bits are significant in VAROP. */
5402 significant = significant_bits (varop, mode);
5403
5404 /* Turn off all bits in the constant that are known to already be zero.
5405 Thus, if the AND isn't needed at all, we will have CONSTOP == SIGNIFICANT
5406 which is tested below. */
5407
5408 constop &= significant;
5409
5410 /* If we don't have any bits left, return zero. */
5411 if (constop == 0)
5412 return const0_rtx;
5413
5414 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
5415 if we already had one (just check for the simplest cases). */
5416 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
5417 && GET_MODE (XEXP (x, 0)) == mode
5418 && SUBREG_REG (XEXP (x, 0)) == varop)
5419 varop = XEXP (x, 0);
5420 else
5421 varop = gen_lowpart_for_combine (mode, varop);
5422
5423 /* If we can't make the SUBREG, try to return what we were given. */
5424 if (GET_CODE (varop) == CLOBBER)
5425 return x ? x : varop;
5426
5427 /* If we are only masking insignificant bits, return VAROP. */
5428 if (constop == significant)
5429 x = varop;
5430
5431 /* Otherwise, return an AND. See how much, if any, of X we can use. */
5432 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
5433 x = gen_rtx_combine (AND, mode, varop, GEN_INT (constop));
5434
5435 else
5436 {
5437 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5438 || INTVAL (XEXP (x, 1)) != constop)
5439 SUBST (XEXP (x, 1), GEN_INT (constop));
5440
5441 SUBST (XEXP (x, 0), varop);
5442 }
5443
5444 return x;
5445 }
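/* Illustrative sketch, not part of the original source: the principle
   stated at the top of simplify_and_const_int, in plain C.  After a
   left shift by one, the low-order bit is known to be zero, so an AND
   with ~1 does nothing and can be pruned.  */
#if 0
#include <assert.h>

int
main ()
{
  unsigned int x;

  for (x = 0; x < 256; x++)
    assert (((x << 1) & ~1u) == (x << 1));
  return 0;
}
#endif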
5446 \f
5447 /* Given an expression, X, compute which bits in X can be non-zero.
5448 We don't care about bits outside of those defined in MODE.
5449
5450 For most X this is simply GET_MODE_MASK (MODE), but if X is
5451 a shift, AND, or zero_extract, we can do better. */
5452
5453 static unsigned HOST_WIDE_INT
5454 significant_bits (x, mode)
5455 rtx x;
5456 enum machine_mode mode;
5457 {
5458 unsigned HOST_WIDE_INT significant = GET_MODE_MASK (mode);
5459 unsigned HOST_WIDE_INT inner_sig;
5460 enum rtx_code code;
5461 int mode_width = GET_MODE_BITSIZE (mode);
5462 rtx tem;
5463
5464 /* If X is wider than MODE, use its mode instead. */
5465 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
5466 {
5467 mode = GET_MODE (x);
5468 significant = GET_MODE_MASK (mode);
5469 mode_width = GET_MODE_BITSIZE (mode);
5470 }
5471
5472 if (mode_width > HOST_BITS_PER_WIDE_INT)
5473 /* Our only callers in this case look for single bit values. So
5474 just return the mode mask. Those tests will then be false. */
5475 return significant;
5476
5477 code = GET_CODE (x);
5478 switch (code)
5479 {
5480 case REG:
5481 #ifdef STACK_BOUNDARY
5482 /* If this is the stack pointer, we may know something about its
5483 alignment. If PUSH_ROUNDING is defined, it is possible for the
5484 stack to be momentarily aligned only to that amount, so we pick
5485 the least alignment. */
5486
5487 if (x == stack_pointer_rtx)
5488 {
5489 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
5490
5491 #ifdef PUSH_ROUNDING
5492 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
5493 #endif
5494
5495 return significant & ~ (sp_alignment - 1);
5496 }
5497 #endif
5498
5499 /* If X is a register whose value we can find, use that value.
5500 Otherwise, use the previously-computed significant bits for this
5501 register. */
5502
5503 tem = get_last_value (x);
5504 if (tem)
5505 return significant_bits (tem, mode);
5506 else if (significant_valid && reg_significant[REGNO (x)])
5507 return reg_significant[REGNO (x)] & significant;
5508 else
5509 return significant;
5510
5511 case CONST_INT:
5512 return INTVAL (x);
5513
5514 #ifdef BYTE_LOADS_ZERO_EXTEND
5515 case MEM:
5516 /* In many, if not most, RISC machines, reading a byte from memory
5517 zeros the rest of the register. Noticing that fact saves a lot
5518 of extra zero-extends. */
5519 significant &= GET_MODE_MASK (GET_MODE (x));
5520 break;
5521 #endif
5522
5523 #if STORE_FLAG_VALUE == 1
5524 case EQ: case NE:
5525 case GT: case GTU:
5526 case LT: case LTU:
5527 case GE: case GEU:
5528 case LE: case LEU:
5529
5530 if (GET_MODE_CLASS (mode) == MODE_INT)
5531 significant = 1;
5532
5533 /* A comparison operation only sets the bits given by its mode. The
5534 rest are set undefined. */
5535 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
5536 significant |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
5537 break;
5538 #endif
5539
5540 case NEG:
5541 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
5542 == GET_MODE_BITSIZE (GET_MODE (x)))
5543 significant = 1;
5544
5545 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
5546 significant |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
5547 break;
5548
5549 case ABS:
5550 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
5551 == GET_MODE_BITSIZE (GET_MODE (x)))
5552 significant = 1;
5553 break;
5554
5555 case TRUNCATE:
5556 significant &= (significant_bits (XEXP (x, 0), mode)
5557 & GET_MODE_MASK (mode));
5558 break;
5559
5560 case ZERO_EXTEND:
5561 significant &= significant_bits (XEXP (x, 0), mode);
5562 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
5563 significant &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
5564 break;
5565
5566 case SIGN_EXTEND:
5567 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
5568 Otherwise, show that all the bits in the outer mode but not in
5569 the inner mode may be non-zero. */
5570 inner_sig = significant_bits (XEXP (x, 0), mode);
5571 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
5572 {
5573 inner_sig &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
5574 if (inner_sig &
5575 (((HOST_WIDE_INT) 1
5576 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
5577 inner_sig |= (GET_MODE_MASK (mode)
5578 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
5579 }
5580
5581 significant &= inner_sig;
5582 break;
5583
5584 case AND:
5585 significant &= (significant_bits (XEXP (x, 0), mode)
5586 & significant_bits (XEXP (x, 1), mode));
5587 break;
5588
5589 case XOR: case IOR:
5590 case UMIN: case UMAX: case SMIN: case SMAX:
5591 significant &= (significant_bits (XEXP (x, 0), mode)
5592 | significant_bits (XEXP (x, 1), mode));
5593 break;
5594
5595 case PLUS: case MINUS:
5596 case MULT:
5597 case DIV: case UDIV:
5598 case MOD: case UMOD:
5599 /* We can apply the rules of arithmetic to compute the number of
5600 high- and low-order zero bits of these operations. We start by
5601 computing the width (position of the highest-order non-zero bit)
5602 and the number of low-order zero bits for each value. */
5603 {
5604 unsigned HOST_WIDE_INT sig0 = significant_bits (XEXP (x, 0), mode);
5605 unsigned HOST_WIDE_INT sig1 = significant_bits (XEXP (x, 1), mode);
5606 int width0 = floor_log2 (sig0) + 1;
5607 int width1 = floor_log2 (sig1) + 1;
5608 int low0 = floor_log2 (sig0 & -sig0);
5609 int low1 = floor_log2 (sig1 & -sig1);
5610 HOST_WIDE_INT op0_maybe_minusp = (sig0 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
5611 HOST_WIDE_INT op1_maybe_minusp = (sig1 & ((HOST_WIDE_INT) 1 << (mode_width - 1)));
5612 int result_width = mode_width;
5613 int result_low = 0;
5614
5615 switch (code)
5616 {
5617 case PLUS:
5618 result_width = MAX (width0, width1) + 1;
5619 result_low = MIN (low0, low1);
5620 break;
5621 case MINUS:
5622 result_low = MIN (low0, low1);
5623 break;
5624 case MULT:
5625 result_width = width0 + width1;
5626 result_low = low0 + low1;
5627 break;
5628 case DIV:
5629 if (! op0_maybe_minusp && ! op1_maybe_minusp)
5630 result_width = width0;
5631 break;
5632 case UDIV:
5633 result_width = width0;
5634 break;
5635 case MOD:
5636 if (! op0_maybe_minusp && ! op1_maybe_minusp)
5637 result_width = MIN (width0, width1);
5638 result_low = MIN (low0, low1);
5639 break;
5640 case UMOD:
5641 result_width = MIN (width0, width1);
5642 result_low = MIN (low0, low1);
5643 break;
5644 }
5645
5646 if (result_width < mode_width)
5647 significant &= ((HOST_WIDE_INT) 1 << result_width) - 1;
5648
5649 if (result_low > 0)
5650 significant &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
5651 }
5652 break;
5653
5654 case ZERO_EXTRACT:
5655 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5656 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
5657 significant &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
5658 break;
5659
5660 case SUBREG:
5661 /* If the inner mode is a single word for both the host and target
5662 machines, we can compute this from which bits of the inner
5663 object are known significant. */
5664 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
5665 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
5666 <= HOST_BITS_PER_WIDE_INT))
5667 {
5668 significant &= significant_bits (SUBREG_REG (x), mode);
5669 #ifndef BYTE_LOADS_ZERO_EXTEND
5670 /* On many CISC machines, accessing an object in a wider mode
5671 causes the high-order bits to become undefined. So they are
5672 not known to be zero. */
5673 if (GET_MODE_SIZE (GET_MODE (x))
5674 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
5675 significant |= (GET_MODE_MASK (GET_MODE (x))
5676 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
5677 #endif
5678 }
5679 break;
5680
5681 case ASHIFTRT:
5682 case LSHIFTRT:
5683 case ASHIFT:
5684 case LSHIFT:
5685 case ROTATE:
5686 /* The significant bits are in two classes: any bits within MODE
5687 that aren't in GET_MODE (x) are always significant. The rest of the
5688 significant bits are those that are significant in the operand of
5689 the shift when shifted the appropriate number of bits. This
5690 shows that high-order bits are cleared by the right shift and
5691 low-order bits by left shifts. */
5692 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5693 && INTVAL (XEXP (x, 1)) >= 0
5694 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
5695 {
5696 enum machine_mode inner_mode = GET_MODE (x);
5697 int width = GET_MODE_BITSIZE (inner_mode);
5698 int count = INTVAL (XEXP (x, 1));
5699 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
5700 unsigned HOST_WIDE_INT op_significant
5701 = significant_bits (XEXP (x, 0), mode);
5702 unsigned HOST_WIDE_INT inner = op_significant & mode_mask;
5703 unsigned HOST_WIDE_INT outer = 0;
5704
5705 if (mode_width > width)
5706 outer = (op_significant & significant & ~ mode_mask);
5707
5708 if (code == LSHIFTRT)
5709 inner >>= count;
5710 else if (code == ASHIFTRT)
5711 {
5712 inner >>= count;
5713
5714 /* If the sign bit may have been non-zero before the shift, we
5715 need to mark as significant all the places it could have
5716 been copied to by the shift. */
5717 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
5718 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
5719 }
5720 else if (code == LSHIFT || code == ASHIFT)
5721 inner <<= count;
5722 else
5723 inner = ((inner << (count % width)
5724 | (inner >> (width - (count % width)))) & mode_mask);
5725
5726 significant &= (outer | inner);
5727 }
5728 break;
5729
5730 case FFS:
5731 /* This is at most the number of bits in the mode. */
5732 significant = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
5733 break;
5734
5735 case IF_THEN_ELSE:
5736 significant &= (significant_bits (XEXP (x, 1), mode)
5737 | significant_bits (XEXP (x, 2), mode));
5738 break;
5739 }
5740
5741 return significant;
5742 }
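/* Illustrative sketch, not part of the original source: the PLUS rule
   from the arithmetic block above.  If both operands fit in 8 bits and
   both have their low 2 bits zero, the sum fits in 9 bits and keeps
   its low 2 bits zero; that is exactly what RESULT_WIDTH and
   RESULT_LOW record.  */
#if 0
#include <assert.h>

int
main ()
{
  unsigned int a, b;

  for (a = 0; a < 256; a += 4)		/* width 8, low 2 bits zero */
    for (b = 0; b < 256; b += 4)
      {
	unsigned int sum = a + b;

	assert (sum < (1u << 9));	/* at most 9 bits wide */
	assert ((sum & 3) == 0);	/* low 2 bits still zero */
      }
  return 0;
}
#endif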
5743 \f
5744 /* Return the number of bits at the high-order end of X that are known to
5745 be equal to the sign bit. This number will always be between 1 and
5746 the number of bits in the mode of X. MODE is the mode to be used
5747 if X is VOIDmode. */
5748
5749 static int
5750 num_sign_bit_copies (x, mode)
5751 rtx x;
5752 enum machine_mode mode;
5753 {
5754 enum rtx_code code = GET_CODE (x);
5755 int bitwidth;
5756 int num0, num1, result;
5757 unsigned HOST_WIDE_INT sig;
5758 rtx tem;
5759
5760 /* If we weren't given a mode, use the mode of X. If the mode is still
5761 VOIDmode, we don't know anything. */
5762
5763 if (mode == VOIDmode)
5764 mode = GET_MODE (x);
5765
5766 if (mode == VOIDmode)
5767 return 0;
5768
5769 bitwidth = GET_MODE_BITSIZE (mode);
5770
5771 switch (code)
5772 {
5773 case REG:
5774 if (significant_valid && reg_sign_bit_copies[REGNO (x)] != 0)
5775 return reg_sign_bit_copies[REGNO (x)];
5776
5777 tem = get_last_value (x);
5778 if (tem != 0)
5779 return num_sign_bit_copies (tem, mode);
5780 break;
5781
5782 case CONST_INT:
5783 /* If the constant is negative, take its 1's complement and remask.
5784 Then see how many zero bits we have. */
5785 sig = INTVAL (x) & GET_MODE_MASK (mode);
5786 if (sig & ((HOST_WIDE_INT) 1 << (bitwidth - 1)))
5787 sig = (~ sig) & GET_MODE_MASK (mode);
5788
5789 return (sig == 0 ? bitwidth : bitwidth - floor_log2 (sig) - 1);
5790
5791 case SUBREG:
5792 /* For a smaller object, just ignore the high bits. */
5793 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
5794 {
5795 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
5796 return MAX (1, (num0
5797 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
5798 - bitwidth)));
5799 }
5800 break;
5801
5802 case SIGN_EXTRACT:
5803 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
5804 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
5805 break;
5806
5807 case SIGN_EXTEND:
5808 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5809 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
5810
5811 case TRUNCATE:
5812 /* For a smaller object, just ignore the high bits. */
5813 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
5814 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
5815 - bitwidth)));
5816
5817 case NOT:
5818 return num_sign_bit_copies (XEXP (x, 0), mode);
5819
5820 case ROTATE: case ROTATERT:
5821 /* If we are rotating left by a number of bits less than the number
5822 of sign bit copies, we can just subtract that amount from the
5823 number. */
5824 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5825 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
5826 {
5827 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
5828 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
5829 : bitwidth - INTVAL (XEXP (x, 1))));
5830 }
5831 break;
5832
5833 case NEG:
5834 /* In general, this subtracts one sign bit copy. But if the value
5835 is known to be positive, the number of sign bit copies is the
5836 same as that of the input. Finally, if the input has just one
5837 significant bit, all the bits are copies of the sign bit. */
5838 sig = significant_bits (XEXP (x, 0), mode);
5839 if (sig == 1)
5840 return bitwidth;
5841
5842 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
5843 if (num0 > 1
5844 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & sig))
5845 num0--;
5846
5847 return num0;
5848
5849 case IOR: case AND: case XOR:
5850 case SMIN: case SMAX: case UMIN: case UMAX:
5851 /* Logical operations will preserve the number of sign-bit copies.
5852 MIN and MAX operations always return one of the operands. */
5853 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
5854 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
5855 return MIN (num0, num1);
5856
5857 case PLUS: case MINUS:
5858 /* For addition and subtraction, we can have a 1-bit carry. However,
5859 if we are subtracting 1 from a positive number, there will not
5860 be such a carry. Furthermore, if the positive number is known to
5861 be 0 or 1, we know the result is either -1 or 0. */
5862
5863 if (code == PLUS && XEXP (x, 1) == constm1_rtx)
5864 {
5865 sig = significant_bits (XEXP (x, 0), mode);
5866 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & sig) == 0)
5867 return (sig == 1 || sig == 0 ? bitwidth
5868 : bitwidth - floor_log2 (sig));
5869 }
5870
5871 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
5872 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
5873 return MAX (1, MIN (num0, num1) - 1);
5874
5875 case MULT:
5876 /* The number of bits of the product is the sum of the number of
5877 bits of both terms. However, unless one of the terms is known
5878 to be positive, we must allow for an additional bit since negating
5879 a negative number can remove one sign bit copy. */
5880
5881 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
5882 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
5883
5884 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
5885 if (result > 0
5886 && ((significant_bits (XEXP (x, 0), mode)
5887 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
5888 && ((significant_bits (XEXP (x, 1), mode)
5889 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
5890 result--;
5891
5892 return MAX (1, result);
5893
5894 case UDIV:
5895 /* The result must be <= the first operand. */
5896 return num_sign_bit_copies (XEXP (x, 0), mode);
5897
5898 case UMOD:
5899 /* The result must be <= the second operand. */
5900 return num_sign_bit_copies (XEXP (x, 1), mode);
5901
5902 case DIV:
5903 /* Similar to unsigned division, except that we have to worry about
5904 the case where the divisor is negative, in which case we have
5905 to add 1. */
5906 result = num_sign_bit_copies (XEXP (x, 0), mode);
5907 if (result > 1
5908 && (significant_bits (XEXP (x, 1), mode)
5909 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
5910 result--;
5911
5912 return result;
5913
5914 case MOD:
5915 result = num_sign_bit_copies (XEXP (x, 1), mode);
5916 if (result > 1
5917 && (significant_bits (XEXP (x, 1), mode)
5918 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
5919 result--;
5920
5921 return result;
5922
5923 case ASHIFTRT:
5924 /* Shifts by a constant add to the number of bits equal to the
5925 sign bit. */
5926 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
5927 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5928 && INTVAL (XEXP (x, 1)) > 0)
5929 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
5930
5931 return num0;
5932
5933 case ASHIFT:
5934 case LSHIFT:
5935 /* Left shifts destroy copies. */
5936 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5937 || INTVAL (XEXP (x, 1)) < 0
5938 || INTVAL (XEXP (x, 1)) >= bitwidth)
5939 return 1;
5940
5941 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
5942 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
5943
5944 case IF_THEN_ELSE:
5945 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
5946 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
5947 return MIN (num0, num1);
5948
5949 #if STORE_FLAG_VALUE == -1
5950 case EQ: case NE: case GE: case GT: case LE: case LT:
5951 case GEU: case GTU: case LEU: case LTU:
5952 return bitwidth;
5953 #endif
5954 }
5955
5956 /* If we haven't been able to figure it out by one of the above rules,
5957 see if some of the high-order bits are known to be zero. If so,
5958 count those bits and return one less than that amount. */
5959
5960 sig = significant_bits (x, mode);
5961 return sig == GET_MODE_MASK (mode) ? 1 : bitwidth - floor_log2 (sig) - 1;
5962 }
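/* Illustrative sketch, not part of the original source: the CONST_INT
   rule above, specialized to a 16-bit mode.  The hypothetical helper
   sign_bit_copies_16 complements negative values and counts the zero
   high-order bits, exactly as the CONST_INT case does.  */
#if 0
#include <assert.h>

static int
sign_bit_copies_16 (int v)
{
  unsigned int sig = v & 0xffff;
  int n = 16;

  if (sig & 0x8000)
    sig = ~sig & 0xffff;
  while (sig)			/* computes 16 - floor_log2 (sig) - 1 */
    sig >>= 1, n--;
  return n;
}

int
main ()
{
  assert (sign_bit_copies_16 (-4) == 14);	/* 0xfffc: bits 15..2 set */
  assert (sign_bit_copies_16 (1) == 15);
  assert (sign_bit_copies_16 (0) == 16);
  return 0;
}
#endif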
5963 \f
5964 /* This function is called from `simplify_shift_const' to merge two
5965 outer operations. Specifically, we have already found that we need
5966 to perform operation *POP0 with constant *PCONST0 at the outermost
5967 position. We would now like to also perform OP1 with constant CONST1
5968 (with *POP0 being done last).
5969
5970 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
5971 the resulting operation. *PCOMP_P is set to 1 if we would need to
5972 complement the innermost operand, otherwise it is unchanged.
5973
5974 MODE is the mode in which the operation will be done. No bits outside
5975 the width of this mode matter. It is assumed that the width of this mode
5976 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
5977
5978 If *POP0 or OP1 are NIL, it means no operation is required. Only NEG, PLUS,
5979 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
5980 result is simply *PCONST0.
5981
5982 If the resulting operation cannot be expressed as one operation, we
5983 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
5984
5985 static int
5986 merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
5987 enum rtx_code *pop0;
5988 HOST_WIDE_INT *pconst0;
5989 enum rtx_code op1;
5990 HOST_WIDE_INT const1;
5991 enum machine_mode mode;
5992 int *pcomp_p;
5993 {
5994 enum rtx_code op0 = *pop0;
5995 HOST_WIDE_INT const0 = *pconst0;
5996
5997 const0 &= GET_MODE_MASK (mode);
5998 const1 &= GET_MODE_MASK (mode);
5999
6000 /* If OP0 is an AND, clear unimportant bits in CONST1. */
6001 if (op0 == AND)
6002 const1 &= const0;
6003
6004 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
6005 if OP0 is SET. */
6006
6007 if (op1 == NIL || op0 == SET)
6008 return 1;
6009
6010 else if (op0 == NIL)
6011 op0 = op1, const0 = const1;
6012
6013 else if (op0 == op1)
6014 {
6015 switch (op0)
6016 {
6017 case AND:
6018 const0 &= const1;
6019 break;
6020 case IOR:
6021 const0 |= const1;
6022 break;
6023 case XOR:
6024 const0 ^= const1;
6025 break;
6026 case PLUS:
6027 const0 += const1;
6028 break;
6029 case NEG:
6030 op0 = NIL;
6031 break;
6032 }
6033 }
6034
6035 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
6036 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
6037 return 0;
6038
6039 /* If the two constants aren't the same, we can't do anything. The
6040 remaining six cases can all be done. */
6041 else if (const0 != const1)
6042 return 0;
6043
6044 else
6045 switch (op0)
6046 {
6047 case IOR:
6048 if (op1 == AND)
6049 /* (a & b) | b == b */
6050 op0 = SET;
6051 else /* op1 == XOR */
6052 /* (a ^ b) | b == a | b */
6053 ;
6054 break;
6055
6056 case XOR:
6057 if (op1 == AND)
6058 /* (a & b) ^ b == (~a) & b */
6059 op0 = AND, *pcomp_p = 1;
6060 else /* op1 == IOR */
6061 /* (a | b) ^ b == a & ~b */
6062 op0 = AND, *pconst0 = ~ const0;
6063 break;
6064
6065 case AND:
6066 if (op1 == IOR)
6067 /* (a | b) & b == b */
6068 op0 = SET;
6069 else /* op1 == XOR */
6070 /* (a ^ b) & b == (~a) & b */
6071 *pcomp_p = 1;
6072 break;
6073 }
6074
6075 /* Check for NO-OP cases. */
6076 const0 &= GET_MODE_MASK (mode);
6077 if (const0 == 0
6078 && (op0 == IOR || op0 == XOR || op0 == PLUS))
6079 op0 = NIL;
6080 else if (const0 == 0 && op0 == AND)
6081 op0 = SET;
6082 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
6083 op0 = NIL;
6084
6085 *pop0 = op0;
6086 *pconst0 = const0;
6087
6088 return 1;
6089 }
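/* Illustrative sketch, not part of the original source: brute-force
   checks of two of the merge identities above, over all byte values.  */
#if 0
#include <assert.h>

int
main ()
{
  unsigned int a, b;

  for (a = 0; a < 256; a++)
    for (b = 0; b < 256; b++)
      {
	assert (((a & b) ^ b) == (~a & b & 0xff));	/* XOR after AND */
	assert (((a | b) ^ b) == (a & ~b & 0xff));	/* XOR after IOR */
      }
  return 0;
}
#endif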
6090 \f
6091 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
6092 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
6093 that we started with.
6094
6095 The shift is normally computed in the widest mode we find in VAROP, as
6096 long as it isn't a different number of words than RESULT_MODE. Exceptions
6097 are ASHIFTRT and ROTATE, which are always done in their original mode. */
6098
6099 static rtx
6100 simplify_shift_const (x, code, result_mode, varop, count)
6101 rtx x;
6102 enum rtx_code code;
6103 enum machine_mode result_mode;
6104 rtx varop;
6105 int count;
6106 {
6107 enum rtx_code orig_code = code;
6108 int orig_count = count;
6109 enum machine_mode mode = result_mode;
6110 enum machine_mode shift_mode, tmode;
6111 int mode_words
6112 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
6113 /* We form (outer_op (code varop count) (outer_const)). */
6114 enum rtx_code outer_op = NIL;
6115 HOST_WIDE_INT outer_const;
6116 rtx const_rtx;
6117 int complement_p = 0;
6118 rtx new;
6119
6120 /* If we were given an invalid count, don't do anything except exactly
6121 what was requested. */
6122
6123 if (count < 0 || count > GET_MODE_BITSIZE (mode))
6124 {
6125 if (x)
6126 return x;
6127
6128 return gen_rtx (code, mode, varop, GEN_INT (count));
6129 }
6130
6131 /* Unless one of the branches of the `if' in this loop does a `continue',
6132 we will `break' the loop after the `if'. */
6133
6134 while (count != 0)
6135 {
6136 /* If we have an operand of (clobber (const_int 0)), just return that
6137 value. */
6138 if (GET_CODE (varop) == CLOBBER)
6139 return varop;
6140
6141 /* If we discovered we had to complement VAROP, leave. Making a NOT
6142 here would cause an infinite loop. */
6143 if (complement_p)
6144 break;
6145
6146 /* Convert ROTATERT to ROTATE. */
6147 if (code == ROTATERT)
6148 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
6149
6150 /* Canonicalize LSHIFT to ASHIFT. */
6151 if (code == LSHIFT)
6152 code = ASHIFT;
6153
6154 /* We need to determine what mode we will do the shift in. If the
6155 shift is an ASHIFTRT or ROTATE, we must always do it in the mode it
6156 was originally done in. Otherwise, we can do it in MODE, the widest
6157 mode encountered. */
6158 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
6159
6160 /* Handle cases where the count is greater than the size of the mode
6161 minus 1. For ASHIFTRT, use the size minus one as the count (this can
6162 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
6163 take the count modulo the size. For other shifts, the result is
6164 zero.
6165
6166 Since these shifts are being produced by the compiler by combining
6167 multiple operations, each of which are defined, we know what the
6168 result is supposed to be. */
6169
6170 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
6171 {
6172 if (code == ASHIFTRT)
6173 count = GET_MODE_BITSIZE (shift_mode) - 1;
6174 else if (code == ROTATE || code == ROTATERT)
6175 count %= GET_MODE_BITSIZE (shift_mode);
6176 else
6177 {
6178 /* We can't simply return zero because there may be an
6179 outer op. */
6180 varop = const0_rtx;
6181 count = 0;
6182 break;
6183 }
6184 }
6185
6186 /* Negative counts are invalid and should not have been made (a
6187 programmer-specified negative count should have been handled
6188 above). */
6189 else if (count < 0)
6190 abort ();
6191
6192 /* An arithmetic right shift of a quantity known to be -1 or 0
6193 is a no-op. */
6194 if (code == ASHIFTRT
6195 && (num_sign_bit_copies (varop, shift_mode)
6196 == GET_MODE_BITSIZE (shift_mode)))
6197 {
6198 count = 0;
6199 break;
6200 }
6201
6202 /* We simplify the tests below and elsewhere by converting
6203 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
6204 `make_compound_operation' will convert it to an ASHIFTRT for
6205 those machines (such as Vax) that don't have a LSHIFTRT. */
6206 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
6207 && code == ASHIFTRT
6208 && ((significant_bits (varop, shift_mode)
6209 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
6210 == 0))
6211 code = LSHIFTRT;
6212
6213 switch (GET_CODE (varop))
6214 {
6215 case SIGN_EXTEND:
6216 case ZERO_EXTEND:
6217 case SIGN_EXTRACT:
6218 case ZERO_EXTRACT:
6219 new = expand_compound_operation (varop);
6220 if (new != varop)
6221 {
6222 varop = new;
6223 continue;
6224 }
6225 break;
6226
6227 case MEM:
6228 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
6229 minus the width of a smaller mode, we can do this with a
6230 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
6231 if ((code == ASHIFTRT || code == LSHIFTRT)
6232 && ! mode_dependent_address_p (XEXP (varop, 0))
6233 && ! MEM_VOLATILE_P (varop)
6234 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
6235 MODE_INT, 1)) != BLKmode)
6236 {
6237 #if BYTES_BIG_ENDIAN
6238 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
6239 #else
6240 new = gen_rtx (MEM, tmode,
6241 plus_constant (XEXP (varop, 0),
6242 count / BITS_PER_UNIT));
6243 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
6244 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
6245 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
6246 #endif
6247 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
6248 : ZERO_EXTEND, mode, new);
6249 count = 0;
6250 continue;
6251 }
6252 break;
6253
6254 case USE:
6255 /* Similar to the case above, except that we can only do this if
6256 the resulting mode is the same as that of the underlying MEM;
6257 we adjust the address depending on the *bits* endianness
6258 because of the way that bit-field extract insns are defined. */
6259 if ((code == ASHIFTRT || code == LSHIFTRT)
6260 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
6261 MODE_INT, 1)) != BLKmode
6262 && tmode == GET_MODE (XEXP (varop, 0)))
6263 {
6264 #if BITS_BIG_ENDIAN
6265 new = XEXP (varop, 0);
6266 #else
6267 new = copy_rtx (XEXP (varop, 0));
6268 SUBST (XEXP (new, 0),
6269 plus_constant (XEXP (new, 0),
6270 count / BITS_PER_UNIT));
6271 #endif
6272
6273 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
6274 : ZERO_EXTEND, mode, new);
6275 count = 0;
6276 continue;
6277 }
6278 break;
6279
6280 case SUBREG:
6281 /* If VAROP is a SUBREG, strip it as long as the inner operand has
6282 the same number of words as what we've seen so far. Then store
6283 the widest mode in MODE. */
6284 if (SUBREG_WORD (varop) == 0
6285 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
6286 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
6287 == mode_words))
6288 {
6289 varop = SUBREG_REG (varop);
6290 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
6291 mode = GET_MODE (varop);
6292 continue;
6293 }
6294 break;
6295
6296 case MULT:
6297 /* Some machines use MULT instead of ASHIFT because MULT
6298 is cheaper. But it is still better on those machines to
6299 merge two shifts into one. */
6300 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6301 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
6302 {
6303 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
6304 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
6305 continue;
6306 }
6307 break;
6308
6309 case UDIV:
6310 /* Similar, for when divides are cheaper. */
6311 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6312 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
6313 {
6314 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
6315 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
6316 continue;
6317 }
6318 break;
6319
6320 case ASHIFTRT:
6321 /* If we are extracting just the sign bit of an arithmetic right
6322 shift, that shift is not needed. */
6323 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
6324 {
6325 varop = XEXP (varop, 0);
6326 continue;
6327 }
6328
6329 /* ... fall through ... */
6330
6331 case LSHIFTRT:
6332 case ASHIFT:
6333 case LSHIFT:
6334 case ROTATE:
6335 /* Here we have two nested shifts. The result is usually the
6336 AND of a new shift with a mask. We compute the result below. */
6337 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6338 && INTVAL (XEXP (varop, 1)) >= 0
6339 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
6340 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
6341 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6342 {
6343 enum rtx_code first_code = GET_CODE (varop);
6344 int first_count = INTVAL (XEXP (varop, 1));
6345 unsigned HOST_WIDE_INT mask;
6346 rtx mask_rtx;
6347 rtx inner;
6348
6349 if (first_code == LSHIFT)
6350 first_code = ASHIFT;
6351
6352 /* We have one common special case. We can't do any merging if
6353 the inner code is an ASHIFTRT of a smaller mode. However, if
6354 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
6355 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
6356 we can convert it to
6357 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
6358 This simplifies certain SIGN_EXTEND operations. */
6359 if (code == ASHIFT && first_code == ASHIFTRT
6360 && (GET_MODE_BITSIZE (result_mode)
6361 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
6362 {
6363 /* C3 has the low-order C1 bits zero. */
6364
6365 mask = (GET_MODE_MASK (mode)
6366 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
6367
6368 varop = simplify_and_const_int (NULL_RTX, result_mode,
6369 XEXP (varop, 0), mask);
6370 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
6371 varop, count);
6372 count = first_count;
6373 code = ASHIFTRT;
6374 continue;
6375 }
6376
6377 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
6378 than C1 high-order bits equal to the sign bit, we can convert
6379 this to either an ASHIFT or an ASHIFTRT depending on the
6380 two counts.
6381
6382 We cannot do this if VAROP's mode is not SHIFT_MODE. */
6383
6384 if (code == ASHIFTRT && first_code == ASHIFT
6385 && GET_MODE (varop) == shift_mode
6386 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
6387 > first_count))
6388 {
6389 count -= first_count;
6390 if (count < 0)
6391 count = - count, code = ASHIFT;
6392 varop = XEXP (varop, 0);
6393 continue;
6394 }
6395
6396 /* There are some cases we can't do. If CODE is ASHIFTRT,
6397 we can only do this if FIRST_CODE is also ASHIFTRT.
6398
6399 We can't do the case when CODE is ROTATE and FIRST_CODE is
6400 ASHIFTRT.
6401
6402 If the mode of this shift is not the mode of the outer shift,
6403 we can't do this if either shift is ASHIFTRT or ROTATE.
6404
6405 Finally, we can't do any of these if the mode is too wide
6406 unless the codes are the same.
6407
6408 Handle the case where the shift codes are the same
6409 first. */
6410
6411 if (code == first_code)
6412 {
6413 if (GET_MODE (varop) != result_mode
6414 && (code == ASHIFTRT || code == ROTATE))
6415 break;
6416
6417 count += first_count;
6418 varop = XEXP (varop, 0);
6419 continue;
6420 }
6421
6422 if (code == ASHIFTRT
6423 || (code == ROTATE && first_code == ASHIFTRT)
6424 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
6425 || (GET_MODE (varop) != result_mode
6426 && (first_code == ASHIFTRT || first_code == ROTATE
6427 || code == ROTATE)))
6428 break;
6429
6430 /* To compute the mask to apply after the shift, shift the
6431 significant bits of the inner shift the same way the
6432 outer shift will. */
6433
6434 mask_rtx = GEN_INT (significant_bits (varop, GET_MODE (varop)));
6435
6436 mask_rtx
6437 = simplify_binary_operation (code, result_mode, mask_rtx,
6438 GEN_INT (count));
6439
6440 /* Give up if we can't compute an outer operation to use. */
6441 if (mask_rtx == 0
6442 || GET_CODE (mask_rtx) != CONST_INT
6443 || ! merge_outer_ops (&outer_op, &outer_const, AND,
6444 INTVAL (mask_rtx),
6445 result_mode, &complement_p))
6446 break;
6447
6448 /* If the shifts are in the same direction, we add the
6449 counts. Otherwise, we subtract them. */
6450 if ((code == ASHIFTRT || code == LSHIFTRT)
6451 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
6452 count += first_count;
6453 else
6454 count -= first_count;
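     /* For example (a sketch; SImode assumed, X hypothetical):
        (ashift:SI (lshiftrt:SI X 3) 5) reaches this point with an AND
        mask of 0xffffffe0 already merged into the outer operations;
        the opposite directions give count == 5 - 3 == 2, yielding
        (and:SI (ashift:SI X 2) 0xffffffe0).  */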
6455
6456 /* If COUNT is positive, the new shift is usually CODE,
6457 except for the two exceptions below, in which case it is
6458 FIRST_CODE. If the count is negative, FIRST_CODE should
6459 always be used. */
6460 if (count > 0
6461 && ((first_code == ROTATE && code == ASHIFT)
6462 || (first_code == ASHIFTRT && code == LSHIFTRT)))
6463 code = first_code;
6464 else if (count < 0)
6465 code = first_code, count = - count;
6466
6467 varop = XEXP (varop, 0);
6468 continue;
6469 }
6470
6471 /* If we have (A << B << C) for any shift, we can convert this to
6472 (A << C << B). This wins if A is a constant. Only try this if
6473 B is not a constant. */
6474
6475 else if (GET_CODE (varop) == code
6476 && GET_CODE (XEXP (varop, 1)) != CONST_INT
6477 && 0 != (new
6478 = simplify_binary_operation (code, mode,
6479 XEXP (varop, 0),
6480 GEN_INT (count))))
6481 {
6482 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
6483 count = 0;
6484 continue;
6485 }
6486 break;
6487
6488 case NOT:
6489 /* Make this fit the case below. */
6490 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
6491 GEN_INT (GET_MODE_MASK (mode)));
6492 continue;
6493
6494 case IOR:
6495 case AND:
6496 case XOR:
6497 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
6498 with C the size of VAROP - 1 and the shift is logical if
6499 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
6500 we have an (le X 0) operation. If we have an arithmetic shift
6501 and STORE_FLAG_VALUE is 1 or we have a logical shift with
6502 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
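     /* For instance, with STORE_FLAG_VALUE == 1 in SImode,
        (lshiftrt:SI (ior:SI (plus:SI X -1) X) 31) is (le X 0):
        the sign bit of (X - 1) | X is set exactly when X <= 0.  */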
6503
6504 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
6505 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
6506 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6507 && (code == LSHIFTRT || code == ASHIFTRT)
6508 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
6509 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
6510 {
6511 count = 0;
6512 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
6513 const0_rtx);
6514
6515 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
6516 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
6517
6518 continue;
6519 }
6520
6521 /* If we have (shift (logical)), move the logical to the outside
6522 to allow it to possibly combine with another logical and the
6523 shift to combine with another shift. This also canonicalizes to
6524 what a ZERO_EXTRACT looks like. Also, some machines have
6525 (and (shift)) insns. */
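     /* E.g., (lshiftrt:SI (and:SI X 0xff00) 8) becomes
        (and:SI (lshiftrt:SI X 8) 0xff); the AND is now outermost,
        where it can merge with other logical operations.  */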
6526
6527 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6528 && (new = simplify_binary_operation (code, result_mode,
6529 XEXP (varop, 1),
6530 GEN_INT (count))) != 0
6531 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
6532 INTVAL (new), result_mode, &complement_p))
6533 {
6534 varop = XEXP (varop, 0);
6535 continue;
6536 }
6537
6538 /* If we can't do that, try to simplify the shift in each arm of the
6539 logical expression, make a new logical expression, and apply
6540 the inverse distributive law. */
6541 {
6542 rtx lhs = simplify_shift_const (NULL_RTX, code, result_mode,
6543 XEXP (varop, 0), count);
6544 rtx rhs = simplify_shift_const (NULL_RTX, code, result_mode,
6545 XEXP (varop, 1), count);
6546
6547 varop = gen_binary (GET_CODE (varop), result_mode, lhs, rhs);
6548 varop = apply_distributive_law (varop);
6549
6550 count = 0;
6551 }
6552 break;
6553
6554 case EQ:
6555 /* Convert (lshift (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
6556 says that the sign bit can be tested, FOO has mode MODE, C is
6557 GET_MODE_BITSIZE (MODE) - 1, and FOO has only the low-order bit
6558 significant. */
6559 if (code == LSHIFT
6560 && XEXP (varop, 1) == const0_rtx
6561 && GET_MODE (XEXP (varop, 0)) == result_mode
6562 && count == GET_MODE_BITSIZE (result_mode) - 1
6563 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
6564 && ((STORE_FLAG_VALUE
6565 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
6566 && significant_bits (XEXP (varop, 0), result_mode) == 1
6567 && merge_outer_ops (&outer_op, &outer_const, XOR,
6568 (HOST_WIDE_INT) 1, result_mode,
6569 &complement_p))
6570 {
6571 varop = XEXP (varop, 0);
6572 count = 0;
6573 continue;
6574 }
6575 break;
6576
6577 case NEG:
6578 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
6579 than the number of bits in the mode is equivalent to A. */
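     /* In SImode: if A is 0, (lshiftrt (neg 0) 31) is 0; if A is 1,
        (neg 1) is all ones and shifting it right logically by 31
        leaves 1.  Either way the result is A itself.  */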
6580 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
6581 && significant_bits (XEXP (varop, 0), result_mode) == 1)
6582 {
6583 varop = XEXP (varop, 0);
6584 count = 0;
6585 continue;
6586 }
6587
6588 /* NEG commutes with ASHIFT since it is multiplication. Move the
6589 NEG outside to allow shifts to combine. */
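     /* That is, (ashift (neg X) C) == (neg (ashift X C)), since both
        compute -X * (1 << C) modulo the mode.  */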
6590 if (code == ASHIFT
6591 && merge_outer_ops (&outer_op, &outer_const, NEG,
6592 (HOST_WIDE_INT) 0, result_mode,
6593 &complement_p))
6594 {
6595 varop = XEXP (varop, 0);
6596 continue;
6597 }
6598 break;
6599
6600 case PLUS:
6601 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
6602 is one less than the number of bits in the mode is
6603 equivalent to (xor A 1). */
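     /* In SImode: A == 0 gives (lshiftrt -1 31) == 1 == (xor 0 1),
        and A == 1 gives (lshiftrt 0 31) == 0 == (xor 1 1).  */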
6604 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
6605 && XEXP (varop, 1) == constm1_rtx
6606 && significant_bits (XEXP (varop, 0), result_mode) == 1
6607 && merge_outer_ops (&outer_op, &outer_const, XOR,
6608 (HOST_WIDE_INT) 1, result_mode,
6609 &complement_p))
6610 {
6611 count = 0;
6612 varop = XEXP (varop, 0);
6613 continue;
6614 }
6615
6616 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
6617 significant in BAR are those being shifted out and those
6618 bits are known zero in FOO, we can replace the PLUS with FOO.
6619 Similarly in the other operand order. This code occurs when
6620 we are computing the size of a variable-size array. */
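     /* For example, with count == 2: if BAR's significant bits lie
        within the low two bits and FOO is known zero there, no carry
        can propagate out of those bits, so
        (lshiftrt (plus FOO BAR) 2) == (lshiftrt FOO 2).  */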
6621
6622 if ((code == ASHIFTRT || code == LSHIFTRT)
6623 && count < HOST_BITS_PER_WIDE_INT
6624 && significant_bits (XEXP (varop, 1), result_mode) >> count == 0
6625 && (significant_bits (XEXP (varop, 1), result_mode)
6626 & significant_bits (XEXP (varop, 0), result_mode)) == 0)
6627 {
6628 varop = XEXP (varop, 0);
6629 continue;
6630 }
6631 else if ((code == ASHIFTRT || code == LSHIFTRT)
6632 && count < HOST_BITS_PER_WIDE_INT
6633 && 0 == (significant_bits (XEXP (varop, 0), result_mode)
6634 >> count)
6635 && 0 == (significant_bits (XEXP (varop, 0), result_mode)
6636 & significant_bits (XEXP (varop, 1),
6637 result_mode)))
6638 {
6639 varop = XEXP (varop, 1);
6640 continue;
6641 }
6642
6643 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
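     /* Here C' == C << N; e.g., ((foo + 5) << 2) == (foo << 2) + 20,
        provided the shifted constant still simplifies to a CONST_INT.  */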
6644 if (code == ASHIFT
6645 && GET_CODE (XEXP (varop, 1)) == CONST_INT
6646 && (new = simplify_binary_operation (ASHIFT, result_mode,
6647 XEXP (varop, 1),
6648 GEN_INT (count))) != 0
6649 && merge_outer_ops (&outer_op, &outer_const, PLUS,
6650 INTVAL (new), result_mode, &complement_p))
6651 {
6652 varop = XEXP (varop, 0);
6653 continue;
6654 }
6655 break;
6656
6657 case MINUS:
6658 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
6659 with C the size of VAROP - 1 and the shift is logical if
6660 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
6661 we have a (gt X 0) operation. If the shift is arithmetic with
6662 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
6663 we have a (neg (gt X 0)) operation. */
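     /* As a check, take STORE_FLAG_VALUE == 1 in SImode:
        (lshiftrt:SI (minus:SI (ashiftrt:SI X 31) X) 31) is 1 when
        X > 0 (the minus is then -X, which is negative) and 0 when
        X <= 0, i.e., exactly (gt X 0).  */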
6664
6665 if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
6666 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
6667 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
6668 && (code == LSHIFTRT || code == ASHIFTRT)
6669 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
6670 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
6671 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
6672 {
6673 count = 0;
6674 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
6675 const0_rtx);
6676
6677 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
6678 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
6679
6680 continue;
6681 }
6682 break;
6683 }
6684
6685 break;
6686 }
6687
6688 /* We need to determine what mode to do the shift in. If the shift is
6689 an ASHIFTRT or ROTATE, we must always do it in the mode it was originally
6690 done in. Otherwise, we can do it in MODE, the widest mode encountered.
6691 The code we care about is that of the shift that will actually be done,
6692 not the shift that was originally requested. */
6693 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
6694
6695 /* We have now finished analyzing the shift. The result should be
6696 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
6697 OUTER_OP is non-NIL, it is an operation that needs to be applied
6698 to the result of the shift. OUTER_CONST is the relevant constant,
6699 but we must turn off all bits turned off in the shift.
6700
6701 If we were passed a value for X, see if we can use any pieces of
6702 it. If not, make new rtx. */
6703
6704 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
6705 && GET_CODE (XEXP (x, 1)) == CONST_INT
6706 && INTVAL (XEXP (x, 1)) == count)
6707 const_rtx = XEXP (x, 1);
6708 else
6709 const_rtx = GEN_INT (count);
6710
6711 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
6712 && GET_MODE (XEXP (x, 0)) == shift_mode
6713 && SUBREG_REG (XEXP (x, 0)) == varop)
6714 varop = XEXP (x, 0);
6715 else if (GET_MODE (varop) != shift_mode)
6716 varop = gen_lowpart_for_combine (shift_mode, varop);
6717
6718 /* If we can't make the SUBREG, try to return what we were given. */
6719 if (GET_CODE (varop) == CLOBBER)
6720 return x ? x : varop;
6721
6722 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
6723 if (new != 0)
6724 x = new;
6725 else
6726 {
6727 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
6728 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
6729
6730 SUBST (XEXP (x, 0), varop);
6731 SUBST (XEXP (x, 1), const_rtx);
6732 }
6733
6734 /* If we were doing an LSHIFTRT in a wider mode than it was originally,
6735 turn off all the bits that the shift would have turned off. */
6736 if (orig_code == LSHIFTRT && result_mode != shift_mode)
6737 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
6738 GET_MODE_MASK (result_mode) >> orig_count);
6739
6740 /* Do the remainder of the processing in RESULT_MODE. */
6741 x = gen_lowpart_for_combine (result_mode, x);
6742
6743 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
6744 operation. */
6745 if (complement_p)
6746 x = gen_unary (NOT, result_mode, x);
6747
6748 if (outer_op != NIL)
6749 {
6750 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
6751 outer_const &= GET_MODE_MASK (result_mode);
6752
6753 if (outer_op == AND)
6754 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
6755 else if (outer_op == SET)
6756 /* This means that we have determined that the result is
6757 equivalent to a constant. This should be rare. */
6758 x = GEN_INT (outer_const);
6759 else if (GET_RTX_CLASS (outer_op) == '1')
6760 x = gen_unary (outer_op, result_mode, x);
6761 else
6762 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
6763 }
6764
6765 return x;
6766 }
6767 \f
6768 /* Like recog, but we receive the address of a pointer to a new pattern.
6769 We try to match the rtx that the pointer points to.
6770 If that fails, we may try to modify or replace the pattern,
6771 storing the replacement into the same pointer object.
6772
6773 Modifications include deletion or addition of CLOBBERs.
6774
6775 PNOTES is a pointer to a location where any REG_UNUSED notes added for
6776 the CLOBBERs are placed.
6777
6778 The value is the final insn code from the pattern ultimately matched,
6779 or -1. */
6780
6781 static int
6782 recog_for_combine (pnewpat, insn, pnotes)
6783 rtx *pnewpat;
6784 rtx insn;
6785 rtx *pnotes;
6786 {
6787 register rtx pat = *pnewpat;
6788 int insn_code_number;
6789 int num_clobbers_to_add = 0;
6790 int i;
6791 rtx notes = 0;
6792
6793 /* Is the result of combination a valid instruction? */
6794 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
6795
6796 /* If it isn't, there is the possibility that we previously had an insn
6797 that clobbered some register as a side effect, but the combined
6798 insn doesn't need to do that. So try once more without the clobbers
6799 unless this represents an ASM insn. */
6800
6801 if (insn_code_number < 0 && ! check_asm_operands (pat)
6802 && GET_CODE (pat) == PARALLEL)
6803 {
6804 int pos;
6805
6806 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
6807 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
6808 {
6809 if (i != pos)
6810 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
6811 pos++;
6812 }
6813
6814 SUBST_INT (XVECLEN (pat, 0), pos);
6815
6816 if (pos == 1)
6817 pat = XVECEXP (pat, 0, 0);
6818
6819 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
6820 }
6821
6822 /* If we had any clobbers to add, make a new pattern that contains
6823 them. Then check to make sure that all of them are dead. */
6824 if (num_clobbers_to_add)
6825 {
6826 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
6827 gen_rtvec (GET_CODE (pat) == PARALLEL
6828 ? XVECLEN (pat, 0) + num_clobbers_to_add
6829 : num_clobbers_to_add + 1));
6830
6831 if (GET_CODE (pat) == PARALLEL)
6832 for (i = 0; i < XVECLEN (pat, 0); i++)
6833 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
6834 else
6835 XVECEXP (newpat, 0, 0) = pat;
6836
6837 add_clobbers (newpat, insn_code_number);
6838
6839 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
6840 i < XVECLEN (newpat, 0); i++)
6841 {
6842 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
6843 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
6844 return -1;
6845 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
6846 XEXP (XVECEXP (newpat, 0, i), 0), notes);
6847 }
6848 pat = newpat;
6849 }
6850
6851 *pnewpat = pat;
6852 *pnotes = notes;
6853
6854 return insn_code_number;
6855 }
6856 \f
6857 /* Like gen_lowpart but for use by combine. In combine it is not possible
6858 to create any new pseudoregs. However, it is safe to create
6859 invalid memory addresses, because combine will try to recognize
6860 them and all they will do is make the combine attempt fail.
6861
6862 If for some reason this cannot do its job, an rtx
6863 (clobber (const_int 0)) is returned.
6864 An insn containing that will not be recognized. */
6865
6866 #undef gen_lowpart
6867
6868 static rtx
6869 gen_lowpart_for_combine (mode, x)
6870 enum machine_mode mode;
6871 register rtx x;
6872 {
6873 rtx result;
6874
6875 if (GET_MODE (x) == mode)
6876 return x;
6877
6878 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6879 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
6880
6881 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
6882 won't know what to do. So we will strip off the SUBREG here and
6883 process normally. */
6884 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
6885 {
6886 x = SUBREG_REG (x);
6887 if (GET_MODE (x) == mode)
6888 return x;
6889 }
6890
6891 result = gen_lowpart_common (mode, x);
6892 if (result)
6893 return result;
6894
6895 if (GET_CODE (x) == MEM)
6896 {
6897 register int offset = 0;
6898 rtx new;
6899
6900 /* Refuse to work on a volatile memory ref or one with a mode-dependent
6901 address. */
6902 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
6903 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
6904
6905 /* If we want to refer to something bigger than the original memref,
6906 generate a perverse subreg instead. That will force a reload
6907 of the original memref X. */
6908 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
6909 return gen_rtx (SUBREG, mode, x, 0);
6910
6911 #if WORDS_BIG_ENDIAN
6912 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
6913 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
6914 #endif
6915 #if BYTES_BIG_ENDIAN
6916 /* Adjust the address so that the address-after-the-data
6917 is unchanged. */
6918 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
6919 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
6920 #endif
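     /* As an arithmetic check (a sketch; 4-byte words with big-endian
        words and bytes assumed), narrowing an 8-byte DImode memref to
        SImode: the first adjustment gives offset = 8 - 4 = 4 and the
        second subtracts 4 - 4 = 0, so we address the low-order word at
        byte offset 4, keeping the address-after-the-data unchanged.  */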
6921 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
6922 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
6923 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
6924 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
6925 return new;
6926 }
6927
6928 /* If X is a comparison operator, rewrite it in a new mode. This
6929 probably won't match, but may allow further simplifications. */
6930 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
6931 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
6932
6933 /* If we couldn't simplify X any other way, just enclose it in a
6934 SUBREG. Normally, this SUBREG won't match, but some patterns may
6935 include an explicit SUBREG or we may simplify it further in combine. */
6936 else
6937 {
6938 int word = 0;
6939
6940 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
6941 word = ((GET_MODE_SIZE (GET_MODE (x))
6942 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
6943 / UNITS_PER_WORD);
6944 return gen_rtx (SUBREG, mode, x, word);
6945 }
6946 }
6947 \f
6948 /* Make an rtx expression. This is a subset of gen_rtx and only supports
6949 expressions of 1, 2, or 3 operands, each of which is an rtx expression.
6950
6951 If the identical expression was previously in the insn (in the undobuf),
6952 it will be returned. Only if it is not found will a new expression
6953 be made. */
6954
6955 /*VARARGS2*/
6956 static rtx
6957 gen_rtx_combine (va_alist)
6958 va_dcl
6959 {
6960 va_list p;
6961 enum rtx_code code;
6962 enum machine_mode mode;
6963 int n_args;
6964 rtx args[3];
6965 int i, j;
6966 char *fmt;
6967 rtx rt;
6968
6969 va_start (p);
6970 code = va_arg (p, enum rtx_code);
6971 mode = va_arg (p, enum machine_mode);
6972 n_args = GET_RTX_LENGTH (code);
6973 fmt = GET_RTX_FORMAT (code);
6974
6975 if (n_args == 0 || n_args > 3)
6976 abort ();
6977
6978 /* Get each arg and verify that it is supposed to be an expression. */
6979 for (j = 0; j < n_args; j++)
6980 {
6981 if (*fmt++ != 'e')
6982 abort ();
6983
6984 args[j] = va_arg (p, rtx);
6985 }
6986
6987 /* See if this is in undobuf. Be sure we don't use objects that came
6988 from another insn; this could produce circular rtl structures. */
6989
6990 for (i = previous_num_undos; i < undobuf.num_undo; i++)
6991 if (!undobuf.undo[i].is_int
6992 && GET_CODE (undobuf.undo[i].old_contents) == code
6993 && GET_MODE (undobuf.undo[i].old_contents) == mode)
6994 {
6995 for (j = 0; j < n_args; j++)
6996 if (XEXP (undobuf.undo[i].old_contents, j) != args[j])
6997 break;
6998
6999 if (j == n_args)
7000 return undobuf.undo[i].old_contents;
7001 }
7002
7003 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
7004 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
7005 rt = rtx_alloc (code);
7006 PUT_MODE (rt, mode);
7007 XEXP (rt, 0) = args[0];
7008 if (n_args > 1)
7009 {
7010 XEXP (rt, 1) = args[1];
7011 if (n_args > 2)
7012 XEXP (rt, 2) = args[2];
7013 }
7014 return rt;
7015 }
7016
7017 /* These routines make binary and unary operations by first seeing if they
7018 fold; if not, a new expression is allocated. */
7019
7020 static rtx
7021 gen_binary (code, mode, op0, op1)
7022 enum rtx_code code;
7023 enum machine_mode mode;
7024 rtx op0, op1;
7025 {
7026 rtx result;
7027
7028 if (GET_RTX_CLASS (code) == '<')
7029 {
7030 enum machine_mode op_mode = GET_MODE (op0);
7031 if (op_mode == VOIDmode)
7032 op_mode = GET_MODE (op1);
7033 result = simplify_relational_operation (code, op_mode, op0, op1);
7034 }
7035 else
7036 result = simplify_binary_operation (code, mode, op0, op1);
7037
7038 if (result)
7039 return result;
7040
7041 /* Put complex operands first and constants second. */
7042 if (GET_RTX_CLASS (code) == 'c'
7043 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
7044 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
7045 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
7046 || (GET_CODE (op0) == SUBREG
7047 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
7048 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
7049 return gen_rtx_combine (code, mode, op1, op0);
7050
7051 return gen_rtx_combine (code, mode, op0, op1);
7052 }
7053
7054 static rtx
7055 gen_unary (code, mode, op0)
7056 enum rtx_code code;
7057 enum machine_mode mode;
7058 rtx op0;
7059 {
7060 rtx result = simplify_unary_operation (code, mode, op0, mode);
7061
7062 if (result)
7063 return result;
7064
7065 return gen_rtx_combine (code, mode, op0);
7066 }
7067 \f
7068 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
7069 comparison code that will be tested.
7070
7071 The result is a possibly different comparison code to use. *POP0 and
7072 *POP1 may be updated.
7073
7074 It is possible that we might detect that a comparison is either always
7075 true or always false. However, we do not perform general constant
7076 folding in combine, so this knowledge isn't useful. Such tautologies
7077 should have been detected earlier. Hence we ignore all such cases. */
7078
7079 static enum rtx_code
7080 simplify_comparison (code, pop0, pop1)
7081 enum rtx_code code;
7082 rtx *pop0;
7083 rtx *pop1;
7084 {
7085 rtx op0 = *pop0;
7086 rtx op1 = *pop1;
7087 rtx tem, tem1;
7088 int i;
7089 enum machine_mode mode, tmode;
7090
7091 /* Try a few ways of applying the same transformation to both operands. */
7092 while (1)
7093 {
7094 /* If both operands are the same constant shift, see if we can ignore the
7095 shift. We can if the shift is a rotate or if the bits shifted out of
7096 this shift are not significant for either input and if the type of
7097 comparison is compatible with the shift. */
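      /* For instance, (eq (ashift:SI X 3) (ashift:SI Y 3)) can become
         (eq X Y) when neither X nor Y has significant bits among the
         top three positions, since nothing is then shifted out.  */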
7098 if (GET_CODE (op0) == GET_CODE (op1)
7099 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
7100 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
7101 || ((GET_CODE (op0) == LSHIFTRT
7102 || GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
7103 && (code != GT && code != LT && code != GE && code != LE))
7104 || (GET_CODE (op0) == ASHIFTRT
7105 && (code != GTU && code != LTU
7106 && code != GEU && code != LEU)))
7107 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7108 && INTVAL (XEXP (op0, 1)) >= 0
7109 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
7110 && XEXP (op0, 1) == XEXP (op1, 1))
7111 {
7112 enum machine_mode mode = GET_MODE (op0);
7113 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
7114 int shift_count = INTVAL (XEXP (op0, 1));
7115
7116 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
7117 mask &= (mask >> shift_count) << shift_count;
7118 else if (GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
7119 mask = (mask & (mask << shift_count)) >> shift_count;
7120
7121 if ((significant_bits (XEXP (op0, 0), mode) & ~ mask) == 0
7122 && (significant_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
7123 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
7124 else
7125 break;
7126 }
7127
7128 /* If both operands are AND's of a paradoxical SUBREG by constant, the
7129 SUBREGs are of the same mode, and, in both cases, the AND would
7130 be redundant if the comparison was done in the narrower mode,
7131 do the comparison in the narrower mode (e.g., we are AND'ing with 1
7132 and the operand's significant bits are 0xffffff01; in that case if
7133 we only care about QImode, we don't need the AND). This case occurs
7134 if the output mode of an scc insn is not SImode and
7135 STORE_FLAG_VALUE == 1 (e.g., the 386). */
7136
7137 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
7138 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7139 && GET_CODE (XEXP (op1, 1)) == CONST_INT
7140 && GET_CODE (XEXP (op0, 0)) == SUBREG
7141 && GET_CODE (XEXP (op1, 0)) == SUBREG
7142 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
7143 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
7144 && (GET_MODE (SUBREG_REG (XEXP (op0, 0)))
7145 == GET_MODE (SUBREG_REG (XEXP (op1, 0))))
7146 && (significant_bits (SUBREG_REG (XEXP (op0, 0)),
7147 GET_MODE (SUBREG_REG (XEXP (op0, 0))))
7148 & ~ INTVAL (XEXP (op0, 1))) == 0
7149 && (significant_bits (SUBREG_REG (XEXP (op1, 0)),
7150 GET_MODE (SUBREG_REG (XEXP (op1, 0))))
7151 & ~ INTVAL (XEXP (op1, 1))) == 0)
7152 {
7153 op0 = SUBREG_REG (XEXP (op0, 0));
7154 op1 = SUBREG_REG (XEXP (op1, 0));
7155
7156 /* The resulting comparison is always unsigned since we masked off
7157 the original sign bit. */
7158 code = unsigned_condition (code);
7159 }
7160 else
7161 break;
7162 }
7163
7164 /* If the first operand is a constant, swap the operands and adjust the
7165 comparison code appropriately. */
7166 if (CONSTANT_P (op0))
7167 {
7168 tem = op0, op0 = op1, op1 = tem;
7169 code = swap_condition (code);
7170 }
7171
7172 /* We now enter a loop during which we will try to simplify the comparison.
7173 For the most part, we are only concerned with comparisons with zero,
7174 but some things may really be comparisons with zero but not start
7175 out looking that way. */
7176
7177 while (GET_CODE (op1) == CONST_INT)
7178 {
7179 enum machine_mode mode = GET_MODE (op0);
7180 int mode_width = GET_MODE_BITSIZE (mode);
7181 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
7182 int equality_comparison_p;
7183 int sign_bit_comparison_p;
7184 int unsigned_comparison_p;
7185 HOST_WIDE_INT const_op;
7186
7187 /* We only want to handle integral modes. This catches VOIDmode,
7188 CCmode, and the floating-point modes. An exception is that we
7189 can handle VOIDmode if OP0 is a COMPARE or a comparison
7190 operation. */
7191
7192 if (GET_MODE_CLASS (mode) != MODE_INT
7193 && ! (mode == VOIDmode
7194 && (GET_CODE (op0) == COMPARE
7195 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
7196 break;
7197
7198 /* Get the constant we are comparing against and turn off all bits
7199 not on in our mode. */
7200 const_op = INTVAL (op1);
7201 if (mode_width <= HOST_BITS_PER_WIDE_INT)
7202 const_op &= mask;
7203
7204 /* If we are comparing against a constant power of two and the value
7205 being compared has only that single significant bit (e.g., it was
7206 `and'ed with that bit), we can replace this with a comparison
7207 with zero. */
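      /* E.g., (eq (and X 8) 8) becomes (ne (and X 8) 0): when the
         only possibly nonzero bit is the one being tested, equality
         with that power of two just says the bit is set.  */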
7208 if (const_op
7209 && (code == EQ || code == NE || code == GE || code == GEU
7210 || code == LT || code == LTU)
7211 && mode_width <= HOST_BITS_PER_WIDE_INT
7212 && exact_log2 (const_op) >= 0
7213 && significant_bits (op0, mode) == const_op)
7214 {
7215 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
7216 op1 = const0_rtx, const_op = 0;
7217 }
7218
7219 /* Similarly, if we are comparing a value known to be either -1 or
7220 0 with -1, change it to the opposite comparison against zero. */
7221
7222 if (const_op == -1
7223 && (code == EQ || code == NE || code == GT || code == LE
7224 || code == GEU || code == LTU)
7225 && num_sign_bit_copies (op0, mode) == mode_width)
7226 {
7227 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
7228 op1 = const0_rtx, const_op = 0;
7229 }
7230
7231 /* Do some canonicalizations based on the comparison code. We prefer
7232 comparisons against zero and then prefer equality comparisons.
7233 If we can reduce the size of a constant, we will do that too. */
7234
7235 switch (code)
7236 {
7237 case LT:
7238 /* < C is equivalent to <= (C - 1) */
7239 if (const_op > 0)
7240 {
7241 const_op -= 1;
7242 op1 = GEN_INT (const_op);
7243 code = LE;
7244 /* ... fall through to LE case below. */
7245 }
7246 else
7247 break;
7248
7249 case LE:
7250 /* <= C is equivalent to < (C + 1); we do this for C < 0 */
7251 if (const_op < 0)
7252 {
7253 const_op += 1;
7254 op1 = GEN_INT (const_op);
7255 code = LT;
7256 }
7257
7258 /* If we are doing a <= 0 comparison on a value known to have
7259 a zero sign bit, we can replace this with == 0. */
7260 else if (const_op == 0
7261 && mode_width <= HOST_BITS_PER_WIDE_INT
7262 && (significant_bits (op0, mode)
7263 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
7264 code = EQ;
7265 break;
7266
7267 case GE:
7268 /* >= C is equivalent to > (C - 1). */
7269 if (const_op > 0)
7270 {
7271 const_op -= 1;
7272 op1 = GEN_INT (const_op);
7273 code = GT;
7274 /* ... fall through to GT below. */
7275 }
7276 else
7277 break;
7278
7279 case GT:
7280 /* > C is equivalent to >= (C + 1); we do this for C < 0 */
7281 if (const_op < 0)
7282 {
7283 const_op += 1;
7284 op1 = GEN_INT (const_op);
7285 code = GE;
7286 }
7287
7288 /* If we are doing a > 0 comparison on a value known to have
7289 a zero sign bit, we can replace this with != 0. */
7290 else if (const_op == 0
7291 && mode_width <= HOST_BITS_PER_WIDE_INT
7292 && (significant_bits (op0, mode)
7293 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
7294 code = NE;
7295 break;
7296
7297 case LTU:
7298 /* < C is equivalent to <= (C - 1). */
7299 if (const_op > 0)
7300 {
7301 const_op -= 1;
7302 op1 = GEN_INT (const_op);
7303 code = LEU;
7304 /* ... fall through ... */
7305 }
7306
7307 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
7308 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
7309 {
7310 const_op = 0, op1 = const0_rtx;
7311 code = GE;
7312 break;
7313 }
7314 else
7315 break;
7316
7317 case LEU:
7318 /* unsigned <= 0 is equivalent to == 0 */
7319 if (const_op == 0)
7320 code = EQ;
7321
7322 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
7323 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
7324 {
7325 const_op = 0, op1 = const0_rtx;
7326 code = GE;
7327 }
7328 break;
7329
7330 case GEU:
7331 /* >= C is equivalent to > (C - 1). */
7332 if (const_op > 1)
7333 {
7334 const_op -= 1;
7335 op1 = GEN_INT (const_op);
7336 code = GTU;
7337 /* ... fall through ... */
7338 }
7339
7340 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
7341 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
7342 {
7343 const_op = 0, op1 = const0_rtx;
7344 code = LT;
7345 }
7346 else
7347 break;
7348
7349 case GTU:
7350 /* unsigned > 0 is equivalent to != 0 */
7351 if (const_op == 0)
7352 code = NE;
7353
7354 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
7355 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
7356 {
7357 const_op = 0, op1 = const0_rtx;
7358 code = LT;
7359 }
7360 break;
7361 }
7362
7363 /* Compute some predicates to simplify code below. */
7364
7365 equality_comparison_p = (code == EQ || code == NE);
7366 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
7367 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
7368 || code == LEU);
7369
7370 /* Now try cases based on the opcode of OP0. If none of the cases
7371 does a "continue", we exit this loop immediately after the
7372 switch. */
7373
7374 switch (GET_CODE (op0))
7375 {
7376 case ZERO_EXTRACT:
7377 /* If we are extracting a single bit from a variable position in
7378 a constant that has only a single bit set and are comparing it
7379 with zero, we can convert this into an equality comparison
7380 between the position and the location of the single bit. We can't
7381 do this if the bits are big-endian and we don't have an extzv, since we then
7382 can't know what mode to use for the endianness adjustment. */
7383
7384 #if ! BITS_BIG_ENDIAN || defined (HAVE_extzv)
7385 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
7386 && XEXP (op0, 1) == const1_rtx
7387 && equality_comparison_p && const_op == 0
7388 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
7389 {
7390 #if BITS_BIG_ENDIAN
7391 i = (GET_MODE_BITSIZE
7392 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
7393 #endif
7394
7395 op0 = XEXP (op0, 2);
7396 op1 = GEN_INT (i);
7397 const_op = i;
7398
7399 /* Result is nonzero iff shift count is equal to I. */
7400 code = reverse_condition (code);
7401 continue;
7402 }
7403 #endif
7404
7405 /* ... fall through ... */
7406
7407 case SIGN_EXTRACT:
7408 tem = expand_compound_operation (op0);
7409 if (tem != op0)
7410 {
7411 op0 = tem;
7412 continue;
7413 }
7414 break;
7415
7416 case NOT:
7417 /* If testing for equality, we can take the NOT of the constant. */
7418 if (equality_comparison_p
7419 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
7420 {
7421 op0 = XEXP (op0, 0);
7422 op1 = tem;
7423 continue;
7424 }
7425
7426 /* If just looking at the sign bit, reverse the sense of the
7427 comparison. */
7428 if (sign_bit_comparison_p)
7429 {
7430 op0 = XEXP (op0, 0);
7431 code = (code == GE ? LT : GE);
7432 continue;
7433 }
7434 break;
7435
7436 case NEG:
7437 /* If testing for equality, we can take the NEG of the constant. */
7438 if (equality_comparison_p
7439 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
7440 {
7441 op0 = XEXP (op0, 0);
7442 op1 = tem;
7443 continue;
7444 }
7445
7446 /* The remaining cases only apply to comparisons with zero. */
7447 if (const_op != 0)
7448 break;
7449
7450 /* When X is ABS or is known positive,
7451 (neg X) is < 0 if and only if X != 0. */
7452
7453 if (sign_bit_comparison_p
7454 && (GET_CODE (XEXP (op0, 0)) == ABS
7455 || (mode_width <= HOST_BITS_PER_WIDE_INT
7456 && (significant_bits (XEXP (op0, 0), mode)
7457 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
7458 {
7459 op0 = XEXP (op0, 0);
7460 code = (code == LT ? NE : EQ);
7461 continue;
7462 }
7463
7464 /* If we have NEG of something that is the result of a
7465 SIGN_EXTEND, SIGN_EXTRACT, or ASHIFTRT, we know that the
7466 two high-order bits must be the same and hence that
7467 "(-a) < 0" is equivalent to "a > 0". Otherwise, we can't
7468 do this. */
7469 if (GET_CODE (XEXP (op0, 0)) == SIGN_EXTEND
7470 || (GET_CODE (XEXP (op0, 0)) == SIGN_EXTRACT
7471 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
7472 && (INTVAL (XEXP (XEXP (op0, 0), 1))
7473 < GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (op0, 0), 0)))))
7474 || (GET_CODE (XEXP (op0, 0)) == ASHIFTRT
7475 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
7476 && XEXP (XEXP (op0, 0), 1) != const0_rtx)
7477 || ((tem = get_last_value (XEXP (op0, 0))) != 0
7478 && (GET_CODE (tem) == SIGN_EXTEND
7479 || (GET_CODE (tem) == SIGN_EXTRACT
7480 && GET_CODE (XEXP (tem, 1)) == CONST_INT
7481 && (INTVAL (XEXP (tem, 1))
7482 < GET_MODE_BITSIZE (GET_MODE (XEXP (tem, 0)))))
7483 || (GET_CODE (tem) == ASHIFTRT
7484 && GET_CODE (XEXP (tem, 1)) == CONST_INT
7485 && XEXP (tem, 1) != const0_rtx))))
7486 {
7487 op0 = XEXP (op0, 0);
7488 code = swap_condition (code);
7489 continue;
7490 }
7491 break;
7492
7493 case ROTATE:
7494 /* If we are testing equality and our count is a constant, we
7495 can perform the inverse operation on our RHS. */
7496 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
7497 && (tem = simplify_binary_operation (ROTATERT, mode,
7498 op1, XEXP (op0, 1))) != 0)
7499 {
7500 op0 = XEXP (op0, 0);
7501 op1 = tem;
7502 continue;
7503 }
7504
7505 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
7506 a particular bit. Convert it to an AND of a constant of that
7507 bit. This will be converted into a ZERO_EXTRACT. */
7508 if (const_op == 0 && sign_bit_comparison_p
7509 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7510 && mode_width <= HOST_BITS_PER_WIDE_INT)
7511 {
7512 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
7513 ((HOST_WIDE_INT) 1
7514 << (mode_width - 1
7515 - INTVAL (XEXP (op0, 1)))));
7516 code = (code == LT ? NE : EQ);
7517 continue;
7518 }
7519
7520 /* ... fall through ... */
7521
7522 case ABS:
7523 /* ABS is ignorable inside an equality comparison with zero. */
7524 if (const_op == 0 && equality_comparison_p)
7525 {
7526 op0 = XEXP (op0, 0);
7527 continue;
7528 }
7529 break;
7530
7531
7532 case SIGN_EXTEND:
7533 /* Can simplify (compare (zero/sign_extend FOO) CONST)
7534 to (compare FOO CONST) if CONST fits in FOO's mode and we
7535 are either testing inequality or have an unsigned comparison
7536 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
7537 if (! unsigned_comparison_p
7538 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
7539 <= HOST_BITS_PER_WIDE_INT)
7540 && ((unsigned HOST_WIDE_INT) const_op
7541 < (((HOST_WIDE_INT) 1
7542 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
7543 {
7544 op0 = XEXP (op0, 0);
7545 continue;
7546 }
7547 break;
7548
7549 case SUBREG:
7550 /* If the inner mode is smaller and we are extracting the low
7551 part, we can treat the SUBREG as if it were a ZERO_EXTEND. */
7552 if (! subreg_lowpart_p (op0)
7553 || GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) >= mode_width)
7554 break;
7555
7556 /* ... fall through ... */
7557
7558 case ZERO_EXTEND:
7559 if ((unsigned_comparison_p || equality_comparison_p)
7560 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
7561 <= HOST_BITS_PER_WIDE_INT)
7562 && ((unsigned HOST_WIDE_INT) const_op
7563 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
7564 {
7565 op0 = XEXP (op0, 0);
7566 continue;
7567 }
7568 break;
7569
7570 case PLUS:
7571 /* (eq (plus X C1) C2) -> (eq X (minus C2 C1)). We can only do
7572 this for equality comparisons due to pathological cases involving
7573 overflows. */
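	  /* E.g., (eq (plus X 3) 7) becomes (eq X 4); this is exact for
	     equality even when the addition could wrap.  */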
7574 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
7575 && (tem = simplify_binary_operation (MINUS, mode, op1,
7576 XEXP (op0, 1))) != 0)
7577 {
7578 op0 = XEXP (op0, 0);
7579 op1 = tem;
7580 continue;
7581 }
7582
7583 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
7584 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
7585 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
7586 {
7587 op0 = XEXP (XEXP (op0, 0), 0);
7588 code = (code == LT ? EQ : NE);
7589 continue;
7590 }
7591 break;
7592
7593 case MINUS:
7594 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
7595 of bits in X minus 1, is one iff X > 0. */
7596 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
7597 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
7598 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
7599 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
7600 {
7601 op0 = XEXP (op0, 1);
7602 code = (code == GE ? LE : GT);
7603 continue;
7604 }
7605 break;
7606
7607 case XOR:
7608 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
7609 if C is zero or B is a constant. */
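	  /* E.g., (eq (xor A 5) 9) becomes (eq A 12), since 5 ^ 9 == 12
	     and XOR is its own inverse.  */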
7610 if (equality_comparison_p
7611 && 0 != (tem = simplify_binary_operation (XOR, mode,
7612 XEXP (op0, 1), op1)))
7613 {
7614 op0 = XEXP (op0, 0);
7615 op1 = tem;
7616 continue;
7617 }
7618 break;
7619
7620 case EQ: case NE:
7621 case LT: case LTU: case LE: case LEU:
7622 case GT: case GTU: case GE: case GEU:
7623 /* We can't do anything if OP0 is a condition code value, rather
7624 than an actual data value. */
7625 if (const_op != 0
7626 #ifdef HAVE_cc0
7627 || XEXP (op0, 0) == cc0_rtx
7628 #endif
7629 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
7630 break;
7631
7632 /* Get the two operands being compared. */
7633 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
7634 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
7635 else
7636 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
7637
7638 /* Check for the cases where we simply want the result of the
7639 earlier test or the opposite of that result. */
7640 if (code == NE
7641 || (code == EQ && reversible_comparison_p (op0))
7642 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
7643 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7644 && (STORE_FLAG_VALUE
7645 & (((HOST_WIDE_INT) 1
7646 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
7647 && (code == LT
7648 || (code == GE && reversible_comparison_p (op0)))))
7649 {
7650 code = (code == LT || code == NE
7651 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
7652 op0 = tem, op1 = tem1;
7653 continue;
7654 }
7655 break;
7656
7657 case IOR:
7658 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
7659 iff X <= 0. */
7660 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
7661 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
7662 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
7663 {
7664 op0 = XEXP (op0, 1);
7665 code = (code == GE ? GT : LE);
7666 continue;
7667 }
7668 break;
7669
7670 case AND:
7671 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
7672 will be converted to a ZERO_EXTRACT later. */
7673 if (const_op == 0 && equality_comparison_p
7674 && (GET_CODE (XEXP (op0, 0)) == ASHIFT
7675 || GET_CODE (XEXP (op0, 0)) == LSHIFT)
7676 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
7677 {
7678 op0 = simplify_and_const_int
7679 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
7680 XEXP (op0, 1),
7681 XEXP (XEXP (op0, 0), 1)),
7682 (HOST_WIDE_INT) 1);
7683 continue;
7684 }
7685
7686 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
7687 zero and X is a comparison and C1 and C2 describe only bits set
7688 in STORE_FLAG_VALUE, we can compare with X. */
7689 if (const_op == 0 && equality_comparison_p
7690 && mode_width <= HOST_BITS_PER_WIDE_INT
7691 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7692 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
7693 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
7694 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
7695 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
7696 {
7697 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
7698 << INTVAL (XEXP (XEXP (op0, 0), 1)));
7699 if ((~ STORE_FLAG_VALUE & mask) == 0
7700 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
7701 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
7702 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
7703 {
7704 op0 = XEXP (XEXP (op0, 0), 0);
7705 continue;
7706 }
7707 }
7708
7709 /* If we are doing an equality comparison of an AND of a bit equal
7710 to the sign bit, replace this with a LT or GE comparison of
7711 the underlying value. */
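	  /* In SImode this turns (eq (and X 0x80000000) 0) into
	     (ge X 0) and the NE form into (lt X 0).  */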
7712 if (equality_comparison_p
7713 && const_op == 0
7714 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7715 && mode_width <= HOST_BITS_PER_WIDE_INT
7716 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
7717 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
7718 {
7719 op0 = XEXP (op0, 0);
7720 code = (code == EQ ? GE : LT);
7721 continue;
7722 }
7723
7724 /* If this AND operation is really a ZERO_EXTEND from a narrower
7725 mode, the constant fits within that mode, and this is either an
7726 equality or unsigned comparison, try to do this comparison in
7727 the narrower mode. */
7728 if ((equality_comparison_p || unsigned_comparison_p)
7729 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7730 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
7731 & GET_MODE_MASK (mode))
7732 + 1)) >= 0
7733 && const_op >> i == 0
7734 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
7735 {
7736 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
7737 continue;
7738 }
7739 break;
7740
7741 case ASHIFT:
7742 case LSHIFT:
7743 /* If we have (compare (xshift FOO N) (const_int C)) and
7744 the high order N bits of FOO (N+1 if an inequality comparison)
7745 are not significant, we can do this by comparing FOO with C
7746 shifted right N bits so long as the low-order N bits of C are
7747 zero. */
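	  /* For example, (eq (ashift:SI X 2) (const_int 20)) becomes
	     (eq X 5) when the top two bits of X are not significant:
	     20 has its low two bits zero, so we may compare unshifted.  */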
7748 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
7749 && INTVAL (XEXP (op0, 1)) >= 0
7750 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
7751 < HOST_BITS_PER_WIDE_INT)
7752 && ((const_op
7753 & ~ (((HOST_WIDE_INT) 1
7754 << INTVAL (XEXP (op0, 1))) - 1)) == 0)
7755 && mode_width <= HOST_BITS_PER_WIDE_INT
7756 && (significant_bits (XEXP (op0, 0), mode)
7757 & ~ (mask >> (INTVAL (XEXP (op0, 1))
7758 + ! equality_comparison_p))) == 0)
7759 {
7760 const_op >>= INTVAL (XEXP (op0, 1));
7761 op1 = GEN_INT (const_op);
7762 op0 = XEXP (op0, 0);
7763 continue;
7764 }
7765
7766 /* If we are doing a sign bit comparison, it means we are testing
7767 a particular bit. Convert it to the appropriate AND. */
7768 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
7769 && mode_width <= HOST_BITS_PER_WIDE_INT)
7770 {
7771 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
7772 ((HOST_WIDE_INT) 1
7773 << (mode_width - 1
7774 - INTVAL (XEXP (op0, 1)))));
7775 code = (code == LT ? NE : EQ);
7776 continue;
7777 }
7778
7779 /* If this is an equality comparison with zero and we are shifting
7780 the low bit to the sign bit, we can convert this to an AND of the
7781 low-order bit. */
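	  /* E.g., (eq (ashift:SI X 31) 0) becomes (eq (and:SI X 1) 0),
	     since only bit 0 of X survives the shift.  */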
7782 if (const_op == 0 && equality_comparison_p
7783 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7784 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
7785 {
7786 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
7787 (HOST_WIDE_INT) 1);
7788 continue;
7789 }
7790 break;
7791
7792 case ASHIFTRT:
7793 /* If this is an equality comparison with zero, we can do this
7794 as a logical shift, which might be much simpler. */
7795 if (equality_comparison_p && const_op == 0
7796 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7797 {
7798 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
7799 XEXP (op0, 0),
7800 INTVAL (XEXP (op0, 1)));
7801 continue;
7802 }
7803
7804 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
7805 do the comparison in a narrower mode. */
7806 if (! unsigned_comparison_p
7807 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7808 && GET_CODE (XEXP (op0, 0)) == ASHIFT
7809 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
7810 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
7811 MODE_INT, 1)) != BLKmode
7812 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
7813 || ((unsigned HOST_WIDE_INT) - const_op
7814 <= GET_MODE_MASK (tmode))))
7815 {
7816 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
7817 continue;
7818 }
7819
7820 /* ... fall through ... */
7821 case LSHIFTRT:
7822 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
7823 the low order N bits of FOO are not significant, we can do this
7824 by comparing FOO with C shifted left N bits so long as no
7825 overflow occurs. */
7826 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
7827 && INTVAL (XEXP (op0, 1)) >= 0
7828 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
7829 && mode_width <= HOST_BITS_PER_WIDE_INT
7830 && (significant_bits (XEXP (op0, 0), mode)
7831 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
7832 && (const_op == 0
7833 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
7834 < mode_width)))
7835 {
7836 const_op <<= INTVAL (XEXP (op0, 1));
7837 op1 = GEN_INT (const_op);
7838 op0 = XEXP (op0, 0);
7839 continue;
7840 }
7841
7842 /* If we are using this shift to extract just the sign bit, we
7843 can replace this with an LT or GE comparison. */
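	  /* E.g., (ne (lshiftrt:SI X 31) 0) becomes (lt X 0): the
	     shifted value is just the sign bit of X.  */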
7844 if (const_op == 0
7845 && (equality_comparison_p || sign_bit_comparison_p)
7846 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7847 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
7848 {
7849 op0 = XEXP (op0, 0);
7850 code = (code == NE || code == GT ? LT : GE);
7851 continue;
7852 }
7853 break;
7854 }
7855
7856 break;
7857 }
7858
7859 /* Now make any compound operations involved in this comparison. Then,
7860 check for an outermost SUBREG on OP0 that isn't doing anything or is
7861 paradoxical. The latter case can only occur when it is known that the
7862 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
7863 We can never remove a SUBREG for a non-equality comparison because the
7864 sign bit is in a different place in the underlying object. */
7865
7866 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
7867 op1 = make_compound_operation (op1, SET);
7868
7869 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
7870 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7871 && (code == NE || code == EQ)
7872 && ((GET_MODE_SIZE (GET_MODE (op0))
7873 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
7874 {
7875 op0 = SUBREG_REG (op0);
7876 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
7877 }
7878
7879 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
7880 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7881 && (code == NE || code == EQ)
7882 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
7883 && (significant_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
7884 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
7885 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
7886 op1),
7887 (significant_bits (tem, GET_MODE (SUBREG_REG (op0)))
7888 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
7889 op0 = SUBREG_REG (op0), op1 = tem;
7890
7891 /* We now do the opposite procedure: Some machines don't have compare
7892 insns in all modes. If OP0's mode is an integer mode smaller than a
7893 word and we can't do a compare in that mode, see if there is a larger
7894 mode for which we can do the compare and where the only significant
7895 bits in OP0 and OP1 are those in the narrower mode. We can do
7896 this if this is an equality comparison, in which case we can
7897 merely widen the operation, or if we are testing the sign bit, in
7898 which case we can explicitly put in the test. */
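     /* For instance, if there is no HImode compare but an SImode one,
        an equality test of two values whose significant bits fit in
        HImode can simply be widened to SImode; a sign-bit test of the
        HImode value becomes, in SImode, a test of (and X 0x8000)
        against zero.  */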
7899
7900 mode = GET_MODE (op0);
7901 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
7902 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
7903 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
7904 for (tmode = GET_MODE_WIDER_MODE (mode);
7905 (tmode != VOIDmode
7906 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
7907 tmode = GET_MODE_WIDER_MODE (tmode))
7908 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing
7909 && (significant_bits (op0, tmode) & ~ GET_MODE_MASK (mode)) == 0
7910 && (significant_bits (op1, tmode) & ~ GET_MODE_MASK (mode)) == 0
7911 && (code == EQ || code == NE
7912 || (op1 == const0_rtx && (code == LT || code == GE)
7913 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)))
7914 {
7915 op0 = gen_lowpart_for_combine (tmode, op0);
7916 op1 = gen_lowpart_for_combine (tmode, op1);
7917
7918 if (code == LT || code == GE)
7919 {
7920 op0 = gen_binary (AND, tmode, op0,
7921 GEN_INT ((HOST_WIDE_INT) 1
7922 << (GET_MODE_BITSIZE (mode) - 1)));
7923 code = (code == LT) ? NE : EQ;
7924 }
7925
7926 break;
7927 }
7928
7929 *pop0 = op0;
7930 *pop1 = op1;
7931
7932 return code;
7933 }
7934 \f
7935 /* Return 1 if we know that X, a comparison operation, is not operating
7936 on a floating-point value or is EQ or NE, meaning that we can safely
7937 reverse it. */
7938
7939 static int
7940 reversible_comparison_p (x)
7941 rtx x;
7942 {
7943 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
7944 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
7945 return 1;
7946
7947 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
7948 {
7949 case MODE_INT:
7950 return 1;
7951
7952 case MODE_CC:
7953 x = get_last_value (XEXP (x, 0));
7954 return (x && GET_CODE (x) == COMPARE
7955 && GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT);
7956 }
7957
7958 return 0;
7959 }
7960 \f
7961 /* Utility function for the following routine. Called when X is part of a value
7962 being stored into reg_last_set_value. Sets reg_last_set_table_tick
7963 for each register mentioned. Similar to mention_regs in cse.c */
7964
7965 static void
7966 update_table_tick (x)
7967 rtx x;
7968 {
7969 register enum rtx_code code = GET_CODE (x);
7970 register char *fmt = GET_RTX_FORMAT (code);
7971 register int i;
7972
7973 if (code == REG)
7974 {
7975 int regno = REGNO (x);
7976 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
7977 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
7978
7979 for (i = regno; i < endregno; i++)
7980 reg_last_set_table_tick[i] = label_tick;
7981
7982 return;
7983 }
7984
7985 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7986 /* Note that we can't have an "E" in values stored; see
7987 get_last_value_validate. */
7988 if (fmt[i] == 'e')
7989 update_table_tick (XEXP (x, i));
7990 }
7991
7992 /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
7993 are saying that the register is clobbered and we no longer know its
7994 value. If INSN is zero, don't update reg_last_set; this call is normally
7995 done with VALUE also zero to invalidate the register. */
7996
7997 static void
7998 record_value_for_reg (reg, insn, value)
7999 rtx reg;
8000 rtx insn;
8001 rtx value;
8002 {
8003 int regno = REGNO (reg);
8004 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8005 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
8006 int i;
8007
8008 /* If VALUE contains REG and we have a previous value for REG, substitute
8009 the previous value. */
8010 if (value && insn && reg_overlap_mentioned_p (reg, value))
8011 {
8012 rtx tem;
8013
8014 /* Set things up so get_last_value is allowed to see anything set up to
8015 our insn. */
8016 subst_low_cuid = INSN_CUID (insn);
8017 tem = get_last_value (reg);
8018
8019 if (tem)
8020 value = replace_rtx (copy_rtx (value), reg, tem);
8021 }
8022
8023 /* For each register modified, show we don't know its value, that
8024 its value has been updated, and that we don't know the location of
8025 the death of the register. */
8026 for (i = regno; i < endregno; i ++)
8027 {
8028 if (insn)
8029 reg_last_set[i] = insn;
8030 reg_last_set_value[i] = 0;
8031 reg_last_death[i] = 0;
8032 }
8033
8034 /* Mark registers that are being referenced in this value. */
8035 if (value)
8036 update_table_tick (value);
8037
8038 /* Now update the status of each register being set.
8039 If someone is using this register in this block, set this register
8040 to invalid since we will get confused between the two lives in this
8041 basic block. This makes using this register always invalid. In cse, we
8042 scan the table to invalidate all entries using this register, but this
8043 is too much work for us. */
8044
8045 for (i = regno; i < endregno; i++)
8046 {
8047 reg_last_set_label[i] = label_tick;
8048 if (value && reg_last_set_table_tick[i] == label_tick)
8049 reg_last_set_invalid[i] = 1;
8050 else
8051 reg_last_set_invalid[i] = 0;
8052 }
8053
8054 /* The value being assigned might refer to X (like in "x++;"). In that
8055 case, we must replace it with (clobber (const_int 0)) to prevent
8056 infinite loops. */
8057 if (value && ! get_last_value_validate (&value,
8058 reg_last_set_label[regno], 0))
8059 {
8060 value = copy_rtx (value);
8061 if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
8062 value = 0;
8063 }
8064
8065 /* For the main register being modified, update the value. */
8066 reg_last_set_value[regno] = value;
8067
8068 }
8069
8070 /* Used for communication between the following two routines. */
8071 static rtx record_dead_insn;
8072
8073 /* Called via note_stores from record_dead_and_set_regs to handle one
8074 SET or CLOBBER in an insn. */
8075
8076 static void
8077 record_dead_and_set_regs_1 (dest, setter)
8078 rtx dest, setter;
8079 {
8080 if (GET_CODE (dest) == REG)
8081 {
8082 /* If we are setting the whole register, we know its value. Otherwise
8083 show that we don't know the value. We can handle SUBREG in
8084 some cases. */
8085 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
8086 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
8087 else if (GET_CODE (setter) == SET
8088 && GET_CODE (SET_DEST (setter)) == SUBREG
8089 && SUBREG_REG (SET_DEST (setter)) == dest
8090 && subreg_lowpart_p (SET_DEST (setter)))
8091 record_value_for_reg (dest, record_dead_insn,
8092 gen_lowpart_for_combine (GET_MODE (dest),
8093 SET_SRC (setter)));
8094 else
8095 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
8096 }
8097 else if (GET_CODE (dest) == MEM
8098 /* Ignore pushes, they clobber nothing. */
8099 && ! push_operand (dest, GET_MODE (dest)))
8100 mem_last_set = INSN_CUID (record_dead_insn);
8101 }
8102
8103 /* Update the records of when each REG was most recently set or killed
8104 for the things done by INSN. This is the last thing done in processing
8105 INSN in the combiner loop.
8106
8107 We update reg_last_set, reg_last_set_value, reg_last_death, and also the
8108 similar information mem_last_set (which insn most recently modified memory)
8109 and last_call_cuid (which insn was the most recent subroutine call). */
8110
8111 static void
8112 record_dead_and_set_regs (insn)
8113 rtx insn;
8114 {
8115 register rtx link;
8116 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
8117 {
8118 if (REG_NOTE_KIND (link) == REG_DEAD)
8119 reg_last_death[REGNO (XEXP (link, 0))] = insn;
8120 else if (REG_NOTE_KIND (link) == REG_INC)
8121 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
8122 }
8123
8124 if (GET_CODE (insn) == CALL_INSN)
8125 last_call_cuid = mem_last_set = INSN_CUID (insn);
8126
8127 record_dead_insn = insn;
8128 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
8129 }
8130 \f
8131 /* Utility routine for the following function. Verify that all the registers
8132 mentioned in *LOC are valid when *LOC was part of a value set when
8133 label_tick == TICK. Return 0 if some are not.
8134
8135 If REPLACE is non-zero, replace the invalid reference with
8136 (clobber (const_int 0)) and return 1. This replacement is useful because
8137 we often can get useful information about the form of a value (e.g., if
8138 it was produced by a shift that always produces -1 or 0) even though
8139 we don't know exactly what registers it was produced from. */
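/* For instance, if a recorded value was
(ashiftrt:SI (reg:SI 100) (const_int 31)) and register 100 is no longer
valid, replacing the register yields
(ashiftrt:SI (clobber (const_int 0)) (const_int 31)), which still shows
that the value must be -1 or 0. (Illustrative register number.) */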
8140
8141 static int
8142 get_last_value_validate (loc, tick, replace)
8143 rtx *loc;
8144 int tick;
8145 int replace;
8146 {
8147 rtx x = *loc;
8148 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
8149 int len = GET_RTX_LENGTH (GET_CODE (x));
8150 int i;
8151
8152 if (GET_CODE (x) == REG)
8153 {
8154 int regno = REGNO (x);
8155 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8156 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
8157 int j;
8158
8159 for (j = regno; j < endregno; j++)
8160 if (reg_last_set_invalid[j]
8161 /* If this is a pseudo-register that was only set once, it is
8162 always valid. */
8163 || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
8164 && reg_last_set_label[j] > tick))
8165 {
8166 if (replace)
8167 *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8168 return replace;
8169 }
8170
8171 return 1;
8172 }
8173
8174 for (i = 0; i < len; i++)
8175 if ((fmt[i] == 'e'
8176 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
8177 /* Don't bother with these. They shouldn't occur anyway. */
8178 || fmt[i] == 'E')
8179 return 0;
8180
8181 /* If we haven't found a reason for it to be invalid, it is valid. */
8182 return 1;
8183 }
8184
8185 /* Get the last value assigned to X, if known. Some registers
8186 in the value may be replaced with (clobber (const_int 0)) if their value
8187 is no longer known reliably. */
8188
8189 static rtx
8190 get_last_value (x)
8191 rtx x;
8192 {
8193 int regno;
8194 rtx value;
8195
8196 /* If this is a non-paradoxical SUBREG, get the value of its operand and
8197 then convert it to the desired mode. If this is a paradoxical SUBREG,
8198 we cannot predict what values the "extra" bits might have. */
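/* For example, asking for the last value of (subreg:QI (reg:SI 100) 0)
when register 100 was last set to (const_int 300) would, assuming an
8-bit QImode, return (const_int 44), the low byte of 300. (The register
number and value are hypothetical.) */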
8199 if (GET_CODE (x) == SUBREG
8200 && subreg_lowpart_p (x)
8201 && (GET_MODE_SIZE (GET_MODE (x))
8202 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8203 && (value = get_last_value (SUBREG_REG (x))) != 0)
8204 return gen_lowpart_for_combine (GET_MODE (x), value);
8205
8206 if (GET_CODE (x) != REG)
8207 return 0;
8208
8209 regno = REGNO (x);
8210 value = reg_last_set_value[regno];
8211
8212 /* If we don't have a value or if it isn't for this basic block, return 0. */
8213
8214 if (value == 0
8215 || (reg_n_sets[regno] != 1
8216 && (reg_last_set_label[regno] != label_tick)))
8217 return 0;
8218
8219 /* If the value was set in a later insn than the ones we are processing,
8220 we can't use it, but make a quick check to see if the previous insn
8221 set it to something. This is commonly the case when the same pseudo
8222 is used by repeated insns. */
8223
8224 if (reg_n_sets[regno] != 1
8225 && INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
8226 {
8227 rtx insn, set;
8228
8229 for (insn = prev_nonnote_insn (subst_insn);
8230 insn && INSN_CUID (insn) >= subst_low_cuid;
8231 insn = prev_nonnote_insn (insn))
8232 ;
8233
8234 if (insn
8235 && (set = single_set (insn)) != 0
8236 && rtx_equal_p (SET_DEST (set), x))
8237 {
8238 value = SET_SRC (set);
8239
8240 /* Make sure that VALUE doesn't reference X. Replace any
8241 explicit references with a CLOBBER. If there are any remaining
8242 references (rare), don't use the value. */
8243
8244 if (reg_mentioned_p (x, value))
8245 value = replace_rtx (copy_rtx (value), x,
8246 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));
8247
8248 if (reg_overlap_mentioned_p (x, value))
8249 return 0;
8250 }
8251 else
8252 return 0;
8253 }
8254
8255 /* If the value has all its registers valid, return it. */
8256 if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
8257 return value;
8258
8259 /* Otherwise, make a copy and replace any invalid register with
8260 (clobber (const_int 0)). If that fails for some reason, return 0. */
8261
8262 value = copy_rtx (value);
8263 if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
8264 return value;
8265
8266 return 0;
8267 }
8268 \f
8269 /* Return nonzero if expression X refers to a REG or to memory
8270 that is set in an instruction more recent than FROM_CUID. */
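/* For example, suppose I1 is (set (reg:SI 100) (plus:SI (reg:SI 101)
(const_int 4))) and some insn between I1 and I3 sets register 101.
Then use_crosses_set_p returns 1 for the PLUS expression and the
combination is rejected, since evaluating it at I3 would see the new
value of register 101. (Register numbers are hypothetical.) */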
8271
8272 static int
8273 use_crosses_set_p (x, from_cuid)
8274 register rtx x;
8275 int from_cuid;
8276 {
8277 register char *fmt;
8278 register int i;
8279 register enum rtx_code code = GET_CODE (x);
8280
8281 if (code == REG)
8282 {
8283 register int regno = REGNO (x);
8284 #ifdef PUSH_ROUNDING
8285 /* Don't allow uses of the stack pointer to be moved,
8286 because we don't know whether the move crosses a push insn. */
8287 if (regno == STACK_POINTER_REGNUM)
8288 return 1;
8289 #endif
8290 return (reg_last_set[regno]
8291 && INSN_CUID (reg_last_set[regno]) > from_cuid);
8292 }
8293
8294 if (code == MEM && mem_last_set > from_cuid)
8295 return 1;
8296
8297 fmt = GET_RTX_FORMAT (code);
8298
8299 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8300 {
8301 if (fmt[i] == 'E')
8302 {
8303 register int j;
8304 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8305 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
8306 return 1;
8307 }
8308 else if (fmt[i] == 'e'
8309 && use_crosses_set_p (XEXP (x, i), from_cuid))
8310 return 1;
8311 }
8312 return 0;
8313 }
8314 \f
8315 /* Define three variables used for communication between the following
8316 routines. */
8317
8318 static int reg_dead_regno, reg_dead_endregno;
8319 static int reg_dead_flag;
8320
8321 /* Function called via note_stores from reg_dead_at_p.
8322
8323 If DEST is within [reg_dead_regno, reg_dead_endregno), set
8324 reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
8325
8326 static void
8327 reg_dead_at_p_1 (dest, x)
8328 rtx dest;
8329 rtx x;
8330 {
8331 int regno, endregno;
8332
8333 if (GET_CODE (dest) != REG)
8334 return;
8335
8336 regno = REGNO (dest);
8337 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8338 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
8339
8340 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
8341 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
8342 }
8343
8344 /* Return non-zero if REG is known to be dead at INSN.
8345
8346 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
8347 referencing REG, it is dead. If we hit a SET referencing REG, it is
8348 live. Otherwise, see if it is live or dead at the start of the basic
8349 block we are in. */
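/* Sketch of the backward scan, with hypothetical insns:
(set (reg:SI 1) ...) -- reg 1 is live here; return 0.
(clobber (reg:SI 1)) -- reg 1 is dead here; return 1.
a REG_DEAD note for reg 1 -- return 1.
(code_label) -- stop and consult basic_block_live_at_start. */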
8350
8351 static int
8352 reg_dead_at_p (reg, insn)
8353 rtx reg;
8354 rtx insn;
8355 {
8356 int block, i;
8357
8358 /* Set variables for reg_dead_at_p_1. */
8359 reg_dead_regno = REGNO (reg);
8360 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
8361 ? HARD_REGNO_NREGS (reg_dead_regno,
8362 GET_MODE (reg))
8363 : 1);
8364
8365 reg_dead_flag = 0;
8366
8367 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
8368 beginning of function. */
8369 for (; insn && GET_CODE (insn) != CODE_LABEL;
8370 insn = prev_nonnote_insn (insn))
8371 {
8372 note_stores (PATTERN (insn), reg_dead_at_p_1);
8373 if (reg_dead_flag)
8374 return reg_dead_flag == 1 ? 1 : 0;
8375
8376 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
8377 return 1;
8378 }
8379
8380 /* Get the basic block number that we were in. */
8381 if (insn == 0)
8382 block = 0;
8383 else
8384 {
8385 for (block = 0; block < n_basic_blocks; block++)
8386 if (insn == basic_block_head[block])
8387 break;
8388
8389 if (block == n_basic_blocks)
8390 return 0;
8391 }
8392
8393 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
8394 if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
8395 & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
8396 return 0;
8397
8398 return 1;
8399 }
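/* The regset test above uses one bit per register. With, say, 32-bit
regset elements (REGSET_ELT_BITS == 32), register 70 corresponds to bit
70 % 32 == 6 of element 70 / 32 == 2. */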
8400 \f
8401 /* Remove register number REGNO from the dead registers list of INSN.
8402
8403 Return the note used to record the death, if there was one. */
8404
8405 rtx
8406 remove_death (regno, insn)
8407 int regno;
8408 rtx insn;
8409 {
8410 register rtx note = find_regno_note (insn, REG_DEAD, regno);
8411
8412 if (note)
8413 remove_note (insn, note);
8414
8415 return note;
8416 }
8417
8418 /* For each register (hardware or pseudo) used within expression X, if its
8419 death is in an instruction with cuid between FROM_CUID (inclusive) and
8420 the cuid of TO_INSN (exclusive), put a REG_DEAD note for that register in the
8421 list headed by PNOTES.
8422
8423 This is done when X is being merged by combination into TO_INSN. These
8424 notes will then be distributed as needed. */
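/* For example, if (reg:SI 100) appears in X and its REG_DEAD note
currently sits on an insn whose cuid lies in that range, the note is
removed from that insn and queued on *PNOTES, so that distribute_notes
can later attach it to the combined insn. (The register number is
hypothetical.) */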
8425
8426 static void
8427 move_deaths (x, from_cuid, to_insn, pnotes)
8428 rtx x;
8429 int from_cuid;
8430 rtx to_insn;
8431 rtx *pnotes;
8432 {
8433 register char *fmt;
8434 register int len, i;
8435 register enum rtx_code code = GET_CODE (x);
8436
8437 if (code == REG)
8438 {
8439 register int regno = REGNO (x);
8440 register rtx where_dead = reg_last_death[regno];
8441
8442 if (where_dead && INSN_CUID (where_dead) >= from_cuid
8443 && INSN_CUID (where_dead) < INSN_CUID (to_insn))
8444 {
8445 rtx note = remove_death (regno, reg_last_death[regno]);
8446
8447 /* It is possible for the call above to return 0. This can occur
8448 when reg_last_death points to I2 or I1 that we combined with.
8449 In that case make a new note. */
8450
8451 if (note)
8452 {
8453 XEXP (note, 1) = *pnotes;
8454 *pnotes = note;
8455 }
8456 else
8457 *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);
8458 }
8459
8460 return;
8461 }
8462
8463 else if (GET_CODE (x) == SET)
8464 {
8465 rtx dest = SET_DEST (x);
8466
8467 move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);
8468
8469 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
8470 that accesses one word of a multi-word item, some
8471 piece of every register in the expression is used by
8472 this insn, so remove any old death. */
8473
8474 if (GET_CODE (dest) == ZERO_EXTRACT
8475 || GET_CODE (dest) == STRICT_LOW_PART
8476 || (GET_CODE (dest) == SUBREG
8477 && (((GET_MODE_SIZE (GET_MODE (dest))
8478 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
8479 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
8480 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
8481 {
8482 move_deaths (dest, from_cuid, to_insn, pnotes);
8483 return;
8484 }
8485
8486 /* If this is some other SUBREG, we know it replaces the entire
8487 value, so use that as the destination. */
8488 if (GET_CODE (dest) == SUBREG)
8489 dest = SUBREG_REG (dest);
8490
8491 /* If this is a MEM, adjust deaths of anything used in the address.
8492 For a REG (the only other possibility), the entire value is
8493 being replaced so the old value is not used in this insn. */
8494
8495 if (GET_CODE (dest) == MEM)
8496 move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
8497 return;
8498 }
8499
8500 else if (GET_CODE (x) == CLOBBER)
8501 return;
8502
8503 len = GET_RTX_LENGTH (code);
8504 fmt = GET_RTX_FORMAT (code);
8505
8506 for (i = 0; i < len; i++)
8507 {
8508 if (fmt[i] == 'E')
8509 {
8510 register int j;
8511 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8512 move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
8513 }
8514 else if (fmt[i] == 'e')
8515 move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
8516 }
8517 }
8518 \f
8519 /* Return 1 if X is the target of a bit-field assignment in BODY, the
8520 pattern of an insn. X must be a REG. */
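/* E.g., with BODY
(set (zero_extract:SI (reg:SI 100) (const_int 4) (const_int 2)) ...),
register 100 is the target of a bit-field assignment, so this returns
1 for X == (reg:SI 100). For hard registers, any overlap between X and
the target counts. (The register number is hypothetical.) */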
8521
8522 static int
8523 reg_bitfield_target_p (x, body)
8524 rtx x;
8525 rtx body;
8526 {
8527 int i;
8528
8529 if (GET_CODE (body) == SET)
8530 {
8531 rtx dest = SET_DEST (body);
8532 rtx target;
8533 int regno, tregno, endregno, endtregno;
8534
8535 if (GET_CODE (dest) == ZERO_EXTRACT)
8536 target = XEXP (dest, 0);
8537 else if (GET_CODE (dest) == STRICT_LOW_PART)
8538 target = SUBREG_REG (XEXP (dest, 0));
8539 else
8540 return 0;
8541
8542 if (GET_CODE (target) == SUBREG)
8543 target = SUBREG_REG (target);
8544
8545 if (GET_CODE (target) != REG)
8546 return 0;
8547
8548 tregno = REGNO (target), regno = REGNO (x);
8549 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
8550 return target == x;
8551
8552 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
8553 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
8554
8555 return endregno > tregno && regno < endtregno;
8556 }
8557
8558 else if (GET_CODE (body) == PARALLEL)
8559 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
8560 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
8561 return 1;
8562
8563 return 0;
8564 }
8565 \f
8566 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
8567 as appropriate. I3 and I2 are the insns resulting from combining the
8568 insns that included FROM_INSN (I2 may be zero).
8569
8570 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
8571 not need REG_DEAD notes because they are being substituted for. This
8572 saves searching in the most common cases.
8573
8574 Each note in the list is either ignored or placed on some insns, depending
8575 on the type of note. */
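/* For example, a REG_DEAD note from I2 for a register that is still
used as an input by the combined I3 is moved to I3, while a REG_WAS_0
note is simply dropped (see the cases below). */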
8576
8577 static void
8578 distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
8579 rtx notes;
8580 rtx from_insn;
8581 rtx i3, i2;
8582 rtx elim_i2, elim_i1;
8583 {
8584 rtx note, next_note;
8585 rtx tem;
8586
8587 for (note = notes; note; note = next_note)
8588 {
8589 rtx place = 0, place2 = 0;
8590
8591 /* If this NOTE references a pseudo register, ensure it references
8592 the latest copy of that register. */
8593 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
8594 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
8595 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
8596
8597 next_note = XEXP (note, 1);
8598 switch (REG_NOTE_KIND (note))
8599 {
8600 case REG_UNUSED:
8601 /* If this register is set or clobbered in I3, put the note there
8602 unless there is one already. */
8603 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
8604 {
8605 if (! (GET_CODE (XEXP (note, 0)) == REG
8606 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
8607 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
8608 place = i3;
8609 }
8610 /* Otherwise, if this register is used by I3, then this register
8611 now dies here, so we must put a REG_DEAD note here unless there
8612 is one already. */
8613 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
8614 && ! (GET_CODE (XEXP (note, 0)) == REG
8615 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
8616 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
8617 {
8618 PUT_REG_NOTE_KIND (note, REG_DEAD);
8619 place = i3;
8620 }
8621 break;
8622
8623 case REG_EQUAL:
8624 case REG_EQUIV:
8625 case REG_NONNEG:
8626 /* These notes say something about results of an insn. We can
8627 only support them if they used to be on I3 in which case they
8628 remain on I3. Otherwise they are ignored. */
8629 if (from_insn == i3)
8630 place = i3;
8631 break;
8632
8633 case REG_INC:
8634 case REG_NO_CONFLICT:
8635 case REG_LABEL:
8636 /* These notes say something about how a register is used. They must
8637 be present on any use of the register in I2 or I3. */
8638 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
8639 place = i3;
8640
8641 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
8642 {
8643 if (place)
8644 place2 = i2;
8645 else
8646 place = i2;
8647 }
8648 break;
8649
8650 case REG_WAS_0:
8651 /* It is too much trouble to try to see if this note is still
8652 correct in all situations. It is better to simply delete it. */
8653 break;
8654
8655 case REG_RETVAL:
8656 /* If the insn previously containing this note still exists,
8657 put it back where it was. Otherwise move it to the previous
8658 insn. Adjust the corresponding REG_LIBCALL note. */
8659 if (GET_CODE (from_insn) != NOTE)
8660 place = from_insn;
8661 else
8662 {
8663 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
8664 place = prev_real_insn (from_insn);
8665 if (tem && place)
8666 XEXP (tem, 0) = place;
8667 }
8668 break;
8669
8670 case REG_LIBCALL:
8671 /* This is handled similarly to REG_RETVAL. */
8672 if (GET_CODE (from_insn) != NOTE)
8673 place = from_insn;
8674 else
8675 {
8676 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
8677 place = next_real_insn (from_insn);
8678 if (tem && place)
8679 XEXP (tem, 0) = place;
8680 }
8681 break;
8682
8683 case REG_DEAD:
8684 /* If the register is used as an input in I3, it dies there.
8685 Similarly for I2, if it is non-zero and adjacent to I3.
8686
8687 If the register is not used as an input in either I3 or I2
8688 and it is not one of the registers we were supposed to eliminate,
8689 there are two possibilities. We might have a non-adjacent I2
8690 or we might have somehow eliminated an additional register
8691 from a computation. For example, we might have had A & B where
8692 we discover that B will always be zero. In this case we will
8693 eliminate the reference to A.
8694
8695 In both cases, we must search to see if we can find a previous
8696 use of A and put the death note there. */
8697
8698 if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
8699 place = i3;
8700 else if (i2 != 0 && next_nonnote_insn (i2) == i3
8701 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
8702 place = i2;
8703
8704 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
8705 break;
8706
8707 /* If the register is used in both I2 and I3 and it dies in I3,
8708 we might have added another reference to it. If reg_n_refs
8709 was 2, bump it to 3. This has to be correct since the
8710 register must have been set somewhere. The reason this is
8711 done is because local-alloc.c treats 2 references as a
8712 special case. */
8713
8714 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
8715 && reg_n_refs[REGNO (XEXP (note, 0))] == 2
8716 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
8717 reg_n_refs[REGNO (XEXP (note, 0))] = 3;
8718
8719 if (place == 0)
8720 for (tem = prev_nonnote_insn (i3);
8721 tem && (GET_CODE (tem) == INSN
8722 || GET_CODE (tem) == CALL_INSN);
8723 tem = prev_nonnote_insn (tem))
8724 {
8725 /* If the register is being set at TEM, see if that is all
8726 TEM is doing. If so, delete TEM. Otherwise, make this
8727 into a REG_UNUSED note instead. */
8728 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
8729 {
8730 rtx set = single_set (tem);
8731
8732 /* Verify that it was the set, and not a clobber that
8733 modified the register. */
8734
8735 if (set != 0 && ! side_effects_p (SET_SRC (set))
8736 && rtx_equal_p (XEXP (note, 0), SET_DEST (set)))
8737 {
8738 /* Move the notes and links of TEM elsewhere.
8739 This might delete other dead insns recursively.
8740 First set the pattern to something that won't use
8741 any register. */
8742
8743 PATTERN (tem) = pc_rtx;
8744
8745 distribute_notes (REG_NOTES (tem), tem, tem,
8746 NULL_RTX, NULL_RTX, NULL_RTX);
8747 distribute_links (LOG_LINKS (tem));
8748
8749 PUT_CODE (tem, NOTE);
8750 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
8751 NOTE_SOURCE_FILE (tem) = 0;
8752 }
8753 else
8754 {
8755 PUT_REG_NOTE_KIND (note, REG_UNUSED);
8756
8757 /* If there isn't already a REG_UNUSED note, put one
8758 here. */
8759 if (! find_regno_note (tem, REG_UNUSED,
8760 REGNO (XEXP (note, 0))))
8761 place = tem;
8762 break;
8763 }
8764 }
8765 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem)))
8766 {
8767 place = tem;
8768 break;
8769 }
8770 }
8771
8772 /* If the register is set or already dead at PLACE, we needn't do
8773 anything with this note if it is still a REG_DEAD note.
8774
8775 Note that we cannot use just `dead_or_set_p' here since we can
8776 convert an assignment to a register into a bit-field assignment.
8777 Therefore, we must also omit the note if the register is the
8778 target of a bitfield assignment. */
8779
8780 if (place && REG_NOTE_KIND (note) == REG_DEAD)
8781 {
8782 int regno = REGNO (XEXP (note, 0));
8783
8784 if (dead_or_set_p (place, XEXP (note, 0))
8785 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
8786 {
8787 /* Unless the register previously died in PLACE, clear
8788 reg_last_death. [I no longer understand why this is
8789 being done.] */
8790 if (reg_last_death[regno] != place)
8791 reg_last_death[regno] = 0;
8792 place = 0;
8793 }
8794 else
8795 reg_last_death[regno] = place;
8796
8797 /* If this is a death note for a hard reg that is occupying
8798 multiple registers, ensure that we are still using all
8799 parts of the object. If we find a piece of the object
8800 that is unused, we must add a USE for that piece before
8801 PLACE and put the appropriate REG_DEAD note on it.
8802
8803 An alternative would be to put a REG_UNUSED for the pieces
8804 on the insn that set the register, but that can't be done if
8805 it is not in the same block. It is simpler, though less
8806 efficient, to add the USE insns. */
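/* E.g., suppose the note is for (reg:DI 2) and DImode occupies hard
registers 2 and 3. If PLACE references only register 2, a
(use (reg:SI 3)) insn carrying its own REG_DEAD note is emitted before
PLACE, and register 2 gets a separate REG_DEAD note on PLACE. (The
register numbers and modes are hypothetical.) */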
8807
8808 if (place && regno < FIRST_PSEUDO_REGISTER
8809 && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
8810 {
8811 int endregno
8812 = regno + HARD_REGNO_NREGS (regno,
8813 GET_MODE (XEXP (note, 0)));
8814 int all_used = 1;
8815 int i;
8816
8817 for (i = regno; i < endregno; i++)
8818 if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0))
8819 {
8820 rtx piece = gen_rtx (REG, word_mode, i);
8821 rtx p;
8822
8823 /* See if we already placed a USE note for this
8824 register in front of PLACE. */
8825 for (p = place;
8826 GET_CODE (PREV_INSN (p)) == INSN
8827 && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
8828 p = PREV_INSN (p))
8829 if (rtx_equal_p (piece,
8830 XEXP (PATTERN (PREV_INSN (p)), 0)))
8831 {
8832 p = 0;
8833 break;
8834 }
8835
8836 if (p)
8837 {
8838 rtx use_insn
8839 = emit_insn_before (gen_rtx (USE, VOIDmode,
8840 piece),
8841 p);
8842 REG_NOTES (use_insn)
8843 = gen_rtx (EXPR_LIST, REG_DEAD, piece,
8844 REG_NOTES (use_insn));
8845 }
8846
8847 all_used = 0;
8848 }
8849
8850 if (! all_used)
8851 {
8852 /* Put only REG_DEAD notes for pieces that are
8853 still used and that are not already dead or set. */
8854
8855 for (i = regno; i < endregno; i++)
8856 {
8857 rtx piece = gen_rtx (REG, word_mode, i);
8858
8859 if (reg_referenced_p (piece, PATTERN (place))
8860 && ! dead_or_set_p (place, piece)
8861 && ! reg_bitfield_target_p (piece,
8862 PATTERN (place)))
8863 REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
8864 piece,
8865 REG_NOTES (place));
8866 }
8867
8868 place = 0;
8869 }
8870 }
8871 }
8872 break;
8873
8874 default:
8875 /* Any other notes should not be present at this point in the
8876 compilation. */
8877 abort ();
8878 }
8879
8880 if (place)
8881 {
8882 XEXP (note, 1) = REG_NOTES (place);
8883 REG_NOTES (place) = note;
8884 }
8885
8886 if (place2)
8887 REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
8888 XEXP (note, 0), REG_NOTES (place2));
8889 }
8890 }
8891 \f
8892 /* Similarly to above, distribute the LOG_LINKS that used to be present on
8893 I3, I2, and I1 to new locations. This is also called in one case to
8894 add a link pointing at I3 when I3's destination is changed. */
8895
8896 static void
8897 distribute_links (links)
8898 rtx links;
8899 {
8900 rtx link, next_link;
8901
8902 for (link = links; link; link = next_link)
8903 {
8904 rtx place = 0;
8905 rtx insn;
8906 rtx set, reg;
8907
8908 next_link = XEXP (link, 1);
8909
8910 /* If the insn that this link points to is a NOTE or isn't a single
8911 set, ignore it. In the latter case, it isn't clear what we
8912 can do other than ignore the link, since we can't tell which
8913 register it was for. Such links wouldn't be used by combine
8914 anyway.
8915
8916 It is not possible for the destination of the target of the link to
8917 have been changed by combine. The only way that could happen is if
8918 we replace I3, I2, and I1 by I3 and I2. But in that case the
8919 destination of I2 also remains unchanged. */
8920
8921 if (GET_CODE (XEXP (link, 0)) == NOTE
8922 || (set = single_set (XEXP (link, 0))) == 0)
8923 continue;
8924
8925 reg = SET_DEST (set);
8926 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
8927 || GET_CODE (reg) == SIGN_EXTRACT
8928 || GET_CODE (reg) == STRICT_LOW_PART)
8929 reg = XEXP (reg, 0);
8930
8931 /* A LOG_LINK is defined as being placed on the first insn that uses
8932 a register and points to the insn that sets the register. Start
8933 searching at the next insn after the target of the link and stop
8934 when we reach a set of the register or the end of the basic block.
8935
8936 Note that this correctly handles the link that used to point from
8937 I3 to I2. Also note that not much searching is typically done here
8938 since most links don't point very far away. */
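/* E.g., if the link points at insn 20, (set (reg:SI 100) ...), and
insn 23 is the first following insn in the block that references
register 100, the link is moved to insn 23's LOG_LINKS. (Insn and
register numbers are hypothetical.) */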
8939
8940 for (insn = NEXT_INSN (XEXP (link, 0));
8941 (insn && GET_CODE (insn) != CODE_LABEL
8942 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN);
8943 insn = NEXT_INSN (insn))
8944 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
8945 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
8946 {
8947 if (reg_referenced_p (reg, PATTERN (insn)))
8948 place = insn;
8949 break;
8950 }
8951
8952 /* If we found a place to put the link, place it there unless there
8953 is already a link to the same insn as LINK at that point. */
8954
8955 if (place)
8956 {
8957 rtx link2;
8958
8959 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
8960 if (XEXP (link2, 0) == XEXP (link, 0))
8961 break;
8962
8963 if (link2 == 0)
8964 {
8965 XEXP (link, 1) = LOG_LINKS (place);
8966 LOG_LINKS (place) = link;
8967 }
8968 }
8969 }
8970 }
8971 \f
8972 void
8973 dump_combine_stats (file)
8974 FILE *file;
8975 {
8976 fprintf
8977 (file,
8978 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
8979 combine_attempts, combine_merges, combine_extras, combine_successes);
8980 }
8981
8982 void
8983 dump_combine_total_stats (file)
8984 FILE *file;
8985 {
8986 fprintf
8987 (file,
8988 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
8989 total_attempts, total_merges, total_extras, total_successes);
8990 }