/* Optimize by combining instructions for GNU compiler.
   Copyright (C) 1987, 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


/* This module is essentially the "combiner" phase of the U. of Arizona
   Portable Optimizer, but redone to work on our list-structured
   representation for RTL instead of their string representation.

   The LOG_LINKS of each insn identify the most recent assignment
   to each REG used in the insn.  It is a list of previous insns,
   each of which contains a SET for a REG that is used in this insn
   and not used or set in between.  LOG_LINKs never cross basic blocks.
   They were set up by the preceding pass (lifetime analysis).

   We try to combine each pair of insns joined by a logical link.
   We also try to combine triples of insns A, B and C when
   C has a link back to B and B has a link back to A.

   LOG_LINKS does not have links for use of the CC0.  They don't
   need to, because the insn that sets the CC0 is always immediately
   before the insn that tests it.  So we always regard a branch
   insn as having a logical link to the preceding insn.  The same is true
   for an insn explicitly using CC0.

   We check (with use_crosses_set_p) to avoid combining in such a way
   as to move a computation to a place where its value would be different.

   Combination is done by mathematically substituting the previous
   insn(s) values for the regs they set into the expressions in
   the later insns that refer to these regs.  If the result is a valid insn
   for our target machine, according to the machine description,
   we install it, delete the earlier insns, and update the data flow
   information (LOG_LINKS and REG_NOTES) for what we did.

   There are a few exceptions where the dataflow information created by
   flow.c isn't completely updated:

   - reg_live_length is not updated
   - reg_n_refs is not adjusted in the rare case when a register is
     no longer required in a computation
   - there are extremely rare cases (see distribute_regnotes) when a
     REG_DEAD note is lost
   - a LOG_LINKS entry that refers to an insn with multiple SETs may be
     removed because there is no way to know which register it was
     linking

   To simplify substitution, we combine only when the earlier insn(s)
   consist of only a single assignment.  To simplify updating afterward,
   we never combine when a subroutine call appears in the middle.

   Since we do not represent assignments to CC0 explicitly except when that
   is all an insn does, there is no LOG_LINKS entry in an insn that uses
   the condition code for the insn that set the condition code.
   Fortunately, these two insns must be consecutive.
   Therefore, every JUMP_INSN is taken to have an implicit logical link
   to the preceding insn.  This is not quite right, since non-jumps can
   also use the condition code; but in practice such insns would not
   combine anyway.  */
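
/* As a concrete illustration (hypothetical RTL, not from any particular
   target): given the linked pair

	(set (reg:SI 100) (plus:SI (reg:SI 99) (const_int 4)))
	(set (mem:SI (reg:SI 100)) (reg:SI 98))

   substituting the first insn's source for reg 100 in the second yields

	(set (mem:SI (plus:SI (reg:SI 99) (const_int 4))) (reg:SI 98))

   If the target's machine description recognizes register+offset
   addressing, this single insn replaces both originals, and the
   LOG_LINKS and REG_NOTES involving reg 100 are updated accordingly.  */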

#include "config.h"
#include "gvarargs.h"
#include "rtl.h"
#include "flags.h"
#include "regs.h"
#include "expr.h"
#include "basic-block.h"
#include "insn-config.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-attr.h"
#include "recog.h"
#include "real.h"
#include <stdio.h>

/* It is not safe to use ordinary gen_lowpart in combine.
   Use gen_lowpart_for_combine instead.  See comments there.  */
#define gen_lowpart dont_use_gen_lowpart_you_dummy

/* Number of attempts to combine instructions in this function.  */

static int combine_attempts;

/* Number of attempts that got as far as substitution in this function.  */

static int combine_merges;

/* Number of instructions combined with added SETs in this function.  */

static int combine_extras;

/* Number of instructions combined in this function.  */

static int combine_successes;

/* Totals over entire compilation.  */

static int total_attempts, total_merges, total_extras, total_successes;
\f
/* Vector mapping INSN_UIDs to cuids.
   The cuids are like uids but increase monotonically always.
   Combine always uses cuids so that it can compare them.
   But actually renumbering the uids, which we used to do,
   proves to be a bad idea because it makes it hard to compare
   the dumps produced by earlier passes with those from later passes.  */

static int *uid_cuid;

/* Get the cuid of an insn.  */

#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Maximum register number, which is the size of the tables below.  */

static int combine_max_regno;

/* Record last point of death of (hard or pseudo) register n.  */

static rtx *reg_last_death;

/* Record last point of modification of (hard or pseudo) register n.  */

static rtx *reg_last_set;

/* Record the cuid of the last insn that invalidated memory
   (anything that writes memory, and subroutine calls, but not pushes).  */

static int mem_last_set;

/* Record the cuid of the last CALL_INSN
   so we can tell whether a potential combination crosses any calls.  */

static int last_call_cuid;

/* When `subst' is called, this is the insn that is being modified
   (by combining in a previous insn).  The PATTERN of this insn
   is still the old pattern partially modified and it should not be
   looked at, but this may be used to examine the successors of the insn
   to judge whether a simplification is valid.  */

static rtx subst_insn;

/* This is the lowest CUID that `subst' is currently dealing with.
   get_last_value will not return a value if the register was set at or
   after this CUID.  If not for this mechanism, we could get confused if
   I2 or I1 in try_combine were an insn that used the old value of a register
   to obtain a new value.  In that case, we might erroneously get the
   new value of the register when we wanted the old one.  */

static int subst_low_cuid;

/* This is the value of undobuf.num_undo when we started processing this
   substitution.  This will prevent gen_rtx_combine from re-using a piece
   from the previous expression.  Doing so can produce circular rtl
   structures.  */

static int previous_num_undos;
\f
/* The next group of arrays allows the recording of the last value assigned
   to (hard or pseudo) register n.  We use this information to see if an
   operation being processed is redundant given a prior operation performed
   on the register.  For example, an `and' with a constant is redundant if
   all the zero bits are already known to be turned off.

   We use an approach similar to that used by cse, but change it in the
   following ways:

   (1) We do not want to reinitialize at each label.
   (2) It is useful, but not critical, to know the actual value assigned
       to a register.  Often just its form is helpful.

   Therefore, we maintain the following arrays:

   reg_last_set_value		the last value assigned
   reg_last_set_label		records the value of label_tick when the
				register was assigned
   reg_last_set_table_tick	records the value of label_tick when a
				value using the register is assigned
   reg_last_set_invalid		set to non-zero when it is not valid
				to use the value of this register in some
				register's value

   To understand the usage of these tables, it is important to understand
   the distinction between the value in reg_last_set_value being valid
   and the register being validly contained in some other expression in the
   table.

   Entry I in reg_last_set_value is valid if it is non-zero, and either
   reg_n_sets[i] is 1 or reg_last_set_label[i] == label_tick.

   Register I may validly appear in any expression returned for the value
   of another register if reg_n_sets[i] is 1.  It may also appear in the
   value for register J if reg_last_set_label[i] < reg_last_set_label[j] or
   reg_last_set_invalid[j] is zero.

   If an expression is found in the table containing a register which may
   not validly appear in an expression, the register is replaced by
   something that won't match, (clobber (const_int 0)).

   reg_last_set_invalid[i] is set non-zero when register I is being assigned
   to and reg_last_set_table_tick[i] == label_tick.  */

/* Record last value assigned to (hard or pseudo) register n.  */

static rtx *reg_last_set_value;

/* Record the value of label_tick when the value for register n is placed in
   reg_last_set_value[n].  */

static short *reg_last_set_label;

/* Record the value of label_tick when an expression involving register n
   is placed in reg_last_set_value.  */

static short *reg_last_set_table_tick;

/* Set non-zero if references to register n in expressions should not be
   used.  */

static char *reg_last_set_invalid;

/* Incremented for each label.  */

static short label_tick;

/* Some registers that are set more than once and used in more than one
   basic block are nevertheless always set in similar ways.  For example,
   a QImode register may be loaded from memory in two places on a machine
   where byte loads zero extend.

   We record in the following array what we know about the significant
   bits of a register, specifically which bits are known to be zero.

   If an entry is zero, it means that we don't know anything special.  */

static HOST_WIDE_INT *reg_significant;
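
/* For instance (a sketch, assuming a machine whose byte loads zero-extend):
   if every SET of pseudo 100 has the form

	(set (reg:SI 100) (zero_extend:SI (mem:QI ...)))

   then reg_significant[100] is 0xff, and a later
   (and:SI (reg:SI 100) (const_int 255)) can be simplified away as
   redundant.  */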

/* Mode used to compute significance in reg_significant.  It is the largest
   integer mode that can fit in HOST_BITS_PER_WIDE_INT.  */

static enum machine_mode significant_mode;

/* Nonzero if we know that a register has some leading bits that are always
   equal to the sign bit.  */

static char *reg_sign_bit_copies;

/* Nonzero when reg_significant and reg_sign_bit_copies can be safely used.
   It is zero while computing them and after combine has completed.  The
   former test prevents propagating values based on previously set values,
   which can be incorrect if a variable is modified in a loop.  */

static int significant_valid;
\f
/* Record one modification to rtl structure
   to be undone by storing old_contents into *where.
   is_int is 1 if the contents are an int.  */

struct undo
{
  int is_int;
  union {rtx rtx; int i;} old_contents;
  union {rtx *rtx; int *i;} where;
};

/* Record a bunch of changes to be undone, up to MAX_UNDO of them.
   num_undo says how many are currently recorded.

   storage is nonzero if we must undo the allocation of new storage.
   The value of storage is what to pass to obfree.

   other_insn is nonzero if we have modified some other insn in the process
   of working on subst_insn.  It must be verified too.  */

#define MAX_UNDO 50

struct undobuf
{
  int num_undo;
  char *storage;
  struct undo undo[MAX_UNDO];
  rtx other_insn;
};

static struct undobuf undobuf;

/* Substitute NEWVAL, an rtx expression, into INTO, a place in some
   insn.  The substitution can be undone by undo_all.  If INTO is already
   set to NEWVAL, do not record this change.  Because computing NEWVAL might
   also call SUBST, we have to compute it before we put anything into
   the undo table.  */

#define SUBST(INTO, NEWVAL)  \
 do { rtx _new = (NEWVAL);                                      \
      if (undobuf.num_undo < MAX_UNDO)                          \
        {                                                       \
          undobuf.undo[undobuf.num_undo].is_int = 0;            \
          undobuf.undo[undobuf.num_undo].where.rtx = &INTO;     \
          undobuf.undo[undobuf.num_undo].old_contents.rtx = INTO; \
          INTO = _new;                                          \
          if (undobuf.undo[undobuf.num_undo].old_contents.rtx != INTO) \
            undobuf.num_undo++;                                 \
        }                                                       \
    } while (0)

/* Similar to SUBST, but NEWVAL is an int.  INTO will normally be an XINT
   expression.
   Note that substitution for the value of a CONST_INT is not safe.  */

#define SUBST_INT(INTO, NEWVAL)  \
 do { if (undobuf.num_undo < MAX_UNDO)                          \
        {                                                       \
          undobuf.undo[undobuf.num_undo].is_int = 1;            \
          undobuf.undo[undobuf.num_undo].where.i = (int *) &INTO; \
          undobuf.undo[undobuf.num_undo].old_contents.i = INTO; \
          INTO = NEWVAL;                                        \
          if (undobuf.undo[undobuf.num_undo].old_contents.i != INTO) \
            undobuf.num_undo++;                                 \
        }                                                       \
    } while (0)
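
/* A minimal usage sketch (illustrative only; PAT, NEW_SRC, INSN and NOTES
   are hypothetical locals, not part of this file):

	SUBST (SET_SRC (pat), new_src);
	if (recog_for_combine (&pat, insn, &notes) < 0)
	  undo_all ();

   SUBST records the old contents in undobuf, so undo_all restores the
   original SET_SRC; every change made since undobuf.num_undo was last
   reset is reversed in one shot.  */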

/* Number of times the pseudo being substituted for
   was found and replaced.  */

static int n_occurrences;

static void set_significant ();
static void move_deaths ();
rtx remove_death ();
static void record_value_for_reg ();
static void record_dead_and_set_regs ();
static int use_crosses_set_p ();
static rtx try_combine ();
static rtx *find_split_point ();
static rtx subst ();
static void undo_all ();
static int reg_dead_at_p ();
static rtx expand_compound_operation ();
static rtx expand_field_assignment ();
static rtx make_extraction ();
static int get_pos_from_mask ();
static rtx force_to_mode ();
static rtx known_cond ();
static rtx make_field_assignment ();
static rtx make_compound_operation ();
static rtx apply_distributive_law ();
static rtx simplify_and_const_int ();
static unsigned HOST_WIDE_INT significant_bits ();
static int num_sign_bit_copies ();
static int merge_outer_ops ();
static rtx simplify_shift_const ();
static int recog_for_combine ();
static rtx gen_lowpart_for_combine ();
static rtx gen_rtx_combine ();
static rtx gen_binary ();
static rtx gen_unary ();
static enum rtx_code simplify_comparison ();
static int reversible_comparison_p ();
static int get_last_value_validate ();
static rtx get_last_value ();
static void distribute_notes ();
static void distribute_links ();
\f
/* Main entry point for combiner.  F is the first insn of the function.
   NREGS is the first unused pseudo-reg number.  */

void
combine_instructions (f, nregs)
     rtx f;
     int nregs;
{
  register rtx insn, next, prev;
  register int i;
  register rtx links, nextlinks;

  combine_attempts = 0;
  combine_merges = 0;
  combine_extras = 0;
  combine_successes = 0;

  combine_max_regno = nregs;

  reg_last_death = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_value = (rtx *) alloca (nregs * sizeof (rtx));
  reg_last_set_table_tick = (short *) alloca (nregs * sizeof (short));
  reg_last_set_label = (short *) alloca (nregs * sizeof (short));
  reg_last_set_invalid = (char *) alloca (nregs * sizeof (char));
  reg_significant = (HOST_WIDE_INT *) alloca (nregs * sizeof (HOST_WIDE_INT));
  reg_sign_bit_copies = (char *) alloca (nregs * sizeof (char));

  bzero (reg_last_death, nregs * sizeof (rtx));
  bzero (reg_last_set, nregs * sizeof (rtx));
  bzero (reg_last_set_value, nregs * sizeof (rtx));
  bzero (reg_last_set_table_tick, nregs * sizeof (short));
  bzero (reg_last_set_invalid, nregs * sizeof (char));
  bzero (reg_significant, nregs * sizeof (HOST_WIDE_INT));
  bzero (reg_sign_bit_copies, nregs * sizeof (char));

  init_recog_no_volatile ();

  /* Compute maximum uid value so uid_cuid can be allocated.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_UID (insn) > i)
      i = INSN_UID (insn);

  uid_cuid = (int *) alloca ((i + 1) * sizeof (int));

  significant_mode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);

  /* Don't use reg_significant when computing it.  This can cause problems
     when, for example, we have j <<= 1 in a loop.  */

  significant_valid = 0;

  /* Compute the mapping from uids to cuids.
     Cuids are numbers assigned to insns, like uids,
     except that cuids increase monotonically through the code.

     Scan all SETs and see if we can deduce anything about what
     bits are significant for some registers.  */

  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      INSN_CUID (insn) = ++i;
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	note_stores (PATTERN (insn), set_significant);
    }

  significant_valid = 1;

  /* Now scan all the insns in forward order.  */

  label_tick = 1;
  last_call_cuid = 0;
  mem_last_set = 0;

  for (insn = f; insn; insn = next ? next : NEXT_INSN (insn))
    {
      next = 0;

      if (GET_CODE (insn) == CODE_LABEL)
	label_tick++;

      else if (GET_CODE (insn) == INSN
	       || GET_CODE (insn) == CALL_INSN
	       || GET_CODE (insn) == JUMP_INSN)
	{
	  /* Try this insn with each insn it links back to.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if ((next = try_combine (insn, XEXP (links, 0), NULL_RTX)) != 0)
	      goto retry;

	  /* Try each sequence of three linked insns ending with this one.  */

	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = LOG_LINKS (XEXP (links, 0)); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

#ifdef HAVE_cc0
	  /* Try to combine a jump insn that uses CC0
	     with a preceding insn that sets CC0, and maybe with its
	     logical predecessor as well.
	     This is how we make decrement-and-branch insns.
	     We need this special code because data flow connections
	     via CC0 do not get entered in LOG_LINKS.  */

	  if (GET_CODE (insn) == JUMP_INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev)))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Do the same for an insn that explicitly references CC0.  */
	  if (GET_CODE (insn) == INSN
	      && (prev = prev_nonnote_insn (insn)) != 0
	      && GET_CODE (prev) == INSN
	      && sets_cc0_p (PATTERN (prev))
	      && GET_CODE (PATTERN (insn)) == SET
	      && reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (insn))))
	    {
	      if ((next = try_combine (insn, prev, NULL_RTX)) != 0)
		goto retry;

	      for (nextlinks = LOG_LINKS (prev); nextlinks;
		   nextlinks = XEXP (nextlinks, 1))
		if ((next = try_combine (insn, prev,
					 XEXP (nextlinks, 0))) != 0)
		  goto retry;
	    }

	  /* Finally, see if any of the insns that this insn links to
	     explicitly references CC0.  If so, try this insn, that insn,
	     and its predecessor if it sets CC0.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    if (GET_CODE (XEXP (links, 0)) == INSN
		&& GET_CODE (PATTERN (XEXP (links, 0))) == SET
		&& reg_mentioned_p (cc0_rtx, SET_SRC (PATTERN (XEXP (links, 0))))
		&& (prev = prev_nonnote_insn (XEXP (links, 0))) != 0
		&& GET_CODE (prev) == INSN
		&& sets_cc0_p (PATTERN (prev))
		&& (next = try_combine (insn, XEXP (links, 0), prev)) != 0)
	      goto retry;
#endif

	  /* Try combining an insn with two different insns whose results it
	     uses.  */
	  for (links = LOG_LINKS (insn); links; links = XEXP (links, 1))
	    for (nextlinks = XEXP (links, 1); nextlinks;
		 nextlinks = XEXP (nextlinks, 1))
	      if ((next = try_combine (insn, XEXP (links, 0),
				       XEXP (nextlinks, 0))) != 0)
		goto retry;

	  if (GET_CODE (insn) != NOTE)
	    record_dead_and_set_regs (insn);

	retry:
	  ;
	}
    }

  total_attempts += combine_attempts;
  total_merges += combine_merges;
  total_extras += combine_extras;
  total_successes += combine_successes;

  significant_valid = 0;
}
\f
/* Called via note_stores.  If X is a pseudo that is used in more than
   one basic block, is narrower than HOST_BITS_PER_WIDE_INT, and is being
   set, record what bits are significant.  If we are clobbering X,
   ignore this "set" because the clobbered value won't be used.

   If we are setting only a portion of X and we can't figure out what
   portion, assume all bits will be used since we don't know what will
   be happening.

   Similarly, set how many bits of X are known to be copies of the sign bit
   at all locations in the function.  This is the smallest number implied
   by any set of X.  */

static void
set_significant (x, set)
     rtx x;
     rtx set;
{
  int num;

  if (GET_CODE (x) == REG
      && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && reg_n_sets[REGNO (x)] > 1
      && reg_basic_block[REGNO (x)] < 0
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (set) == CLOBBER)
	return;

      /* If this is a complex assignment, see if we can convert it into a
	 simple assignment.  */
      set = expand_field_assignment (set);
      if (SET_DEST (set) == x)
	{
	  reg_significant[REGNO (x)]
	    |= significant_bits (SET_SRC (set), significant_mode);
	  num = num_sign_bit_copies (SET_SRC (set), GET_MODE (x));
	  if (reg_sign_bit_copies[REGNO (x)] == 0
	      || reg_sign_bit_copies[REGNO (x)] > num)
	    reg_sign_bit_copies[REGNO (x)] = num;
	}
      else
	{
	  reg_significant[REGNO (x)] = GET_MODE_MASK (GET_MODE (x));
	  reg_sign_bit_copies[REGNO (x)] = 0;
	}
    }
}
\f
/* See if INSN can be combined into I3.  PRED and SUCC are optionally
   insns that were previously combined into I3 or that will be combined
   into the merger of INSN and I3.

   Return 0 if the combination is not allowed for any reason.

   If the combination is allowed, *PDEST will be set to the single
   destination of INSN and *PSRC to the single source, and this function
   will return 1.  */

static int
can_combine_p (insn, i3, pred, succ, pdest, psrc)
     rtx insn;
     rtx i3;
     rtx pred, succ;
     rtx *pdest, *psrc;
{
  int i;
  rtx set = 0, src, dest;
  rtx p, link;
  int all_adjacent = (succ ? (next_active_insn (insn) == succ
			      && next_active_insn (succ) == i3)
		      : next_active_insn (insn) == i3);

  /* Can combine only if previous insn is a SET of a REG, a SUBREG or CC0,
     or a PARALLEL consisting of such a SET and CLOBBERs.

     If INSN has CLOBBER parallel parts, ignore them for our processing.
     By definition, these happen during the execution of the insn.  When it
     is merged with another insn, all bets are off.  If they are, in fact,
     needed and aren't also supplied in I3, they may be added by
     recog_for_combine.  Otherwise, it won't match.

     We can also ignore a SET whose SET_DEST is mentioned in a REG_UNUSED
     note.

     Get the source and destination of INSN.  If more than one, can't
     combine.  */

  if (GET_CODE (PATTERN (insn)) == SET)
    set = PATTERN (insn);
  else if (GET_CODE (PATTERN (insn)) == PARALLEL
	   && GET_CODE (XVECEXP (PATTERN (insn), 0, 0)) == SET)
    {
      for (i = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	{
	  rtx elt = XVECEXP (PATTERN (insn), 0, i);

	  switch (GET_CODE (elt))
	    {
	      /* We can ignore CLOBBERs.  */
	    case CLOBBER:
	      break;

	    case SET:
	      /* Ignore SETs whose result isn't used but not those that
		 have side-effects.  */
	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (elt))
		  && ! side_effects_p (elt))
		break;

	      /* If we have already found a SET, this is a second one and
		 so we cannot combine with this insn.  */
	      if (set)
		return 0;

	      set = elt;
	      break;

	    default:
	      /* Anything else means we can't combine.  */
	      return 0;
	    }
	}

      if (set == 0
	  /* If SET_SRC is an ASM_OPERANDS we can't throw away these CLOBBERs,
	     so don't do anything with it.  */
	  || GET_CODE (SET_SRC (set)) == ASM_OPERANDS)
	return 0;
    }
  else
    return 0;

  if (set == 0)
    return 0;

  set = expand_field_assignment (set);
  src = SET_SRC (set), dest = SET_DEST (set);

  /* Don't eliminate a store in the stack pointer.  */
  if (dest == stack_pointer_rtx
      /* Don't install a subreg involving two modes not tieable.
	 It can worsen register allocation, and can even make invalid reload
	 insns, since the reg inside may need to be copied from in the
	 outside mode, and that may be invalid if it is an fp reg copied in
	 integer mode.  As a special exception, we can allow this if
	 I3 is simply copying DEST, a REG, to CC0.  */
      || (GET_CODE (src) == SUBREG
	  && ! MODES_TIEABLE_P (GET_MODE (src), GET_MODE (SUBREG_REG (src)))
#ifdef HAVE_cc0
	  && ! (GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
		&& SET_DEST (PATTERN (i3)) == cc0_rtx
		&& GET_CODE (dest) == REG && dest == SET_SRC (PATTERN (i3)))
#endif
	  )
      /* If we couldn't eliminate a field assignment, we can't combine.  */
      || GET_CODE (dest) == ZERO_EXTRACT || GET_CODE (dest) == STRICT_LOW_PART
      /* Don't combine with an insn that sets a register to itself if it has
	 a REG_EQUAL note.  This may be part of a REG_NO_CONFLICT sequence.  */
      || (rtx_equal_p (src, dest) && find_reg_note (insn, REG_EQUAL, NULL_RTX))
      /* Can't merge a function call.  */
      || GET_CODE (src) == CALL
      /* Don't substitute into an incremented register.  */
      || FIND_REG_INC_NOTE (i3, dest)
      || (succ && FIND_REG_INC_NOTE (succ, dest))
      /* Don't combine the end of a libcall into anything.  */
      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
      /* Make sure that DEST is not used after SUCC but before I3.  */
      || (succ && ! all_adjacent
	  && reg_used_between_p (dest, succ, i3))
      /* Make sure that the value that is to be substituted for the register
	 does not use any registers whose values alter in between.  However,
	 if the insns are adjacent, a use can't cross a set even though we
	 think it might (this can happen for a sequence of insns each setting
	 the same destination; reg_last_set of that register might point to
	 a NOTE).  Also, don't move a volatile asm across any other insns.  */
      || (! all_adjacent
	  && (use_crosses_set_p (src, INSN_CUID (insn))
	      || (GET_CODE (src) == ASM_OPERANDS && MEM_VOLATILE_P (src))))
      /* If there is a REG_NO_CONFLICT note for DEST in I3 or SUCC, we get
	 better register allocation by not doing the combine.  */
      || find_reg_note (i3, REG_NO_CONFLICT, dest)
      || (succ && find_reg_note (succ, REG_NO_CONFLICT, dest))
      /* Don't combine across a CALL_INSN, because that would possibly
	 change whether the life span of some REGs crosses calls or not,
	 and it is a pain to update that information.
	 Exception: if source is a constant, moving it later can't hurt.
	 Accept that special case, because it helps -fforce-addr a lot.  */
      || (INSN_CUID (insn) < last_call_cuid && ! CONSTANT_P (src)))
    return 0;

  /* DEST must either be a REG or CC0.  */
  if (GET_CODE (dest) == REG)
    {
      /* If register alignment is being enforced for multi-word items in all
	 cases except for parameters, it is possible to have a register copy
	 insn referencing a hard register that is not allowed to contain the
	 mode being copied and which would not be valid as an operand of most
	 insns.  Eliminate this problem by not combining with such an insn.

	 Also, on some machines we don't want to extend the life of a hard
	 register.  */

      if (GET_CODE (src) == REG
	  && ((REGNO (dest) < FIRST_PSEUDO_REGISTER
	       && ! HARD_REGNO_MODE_OK (REGNO (dest), GET_MODE (dest)))
#ifdef SMALL_REGISTER_CLASSES
	      /* Don't extend the life of a hard register.  */
	      || REGNO (src) < FIRST_PSEUDO_REGISTER
#else
	      || (REGNO (src) < FIRST_PSEUDO_REGISTER
		  && ! HARD_REGNO_MODE_OK (REGNO (src), GET_MODE (src)))
#endif
	      ))
	return 0;
    }
  else if (GET_CODE (dest) != CC0)
    return 0;

  /* Don't substitute for a register intended as a clobberable operand.
     Similarly, don't substitute an expression containing a register that
     will be clobbered in I3.  */
  if (GET_CODE (PATTERN (i3)) == PARALLEL)
    for (i = XVECLEN (PATTERN (i3), 0) - 1; i >= 0; i--)
      if (GET_CODE (XVECEXP (PATTERN (i3), 0, i)) == CLOBBER
	  && (reg_overlap_mentioned_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0),
				       src)
	      || rtx_equal_p (XEXP (XVECEXP (PATTERN (i3), 0, i), 0), dest)))
	return 0;

  /* If INSN contains anything volatile, or is an `asm' (whether volatile
     or not), reject, unless nothing volatile comes between it and I3,
     with the exception of SUCC.  */

  if (GET_CODE (src) == ASM_OPERANDS || volatile_refs_p (src))
    for (p = NEXT_INSN (insn); p != i3; p = NEXT_INSN (p))
      if (GET_RTX_CLASS (GET_CODE (p)) == 'i'
	  && p != succ && volatile_refs_p (PATTERN (p)))
	return 0;

  /* If INSN or I2 contains an autoincrement or autodecrement,
     make sure that register is not used between there and I3,
     and not already used in I3 either.
     Also insist that I3 not be a jump; if it were one
     and the incremented register were spilled, we would lose.  */

#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (GET_CODE (i3) == JUMP_INSN
	    || reg_used_between_p (XEXP (link, 0), insn, i3)
	    || reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i3))))
      return 0;
#endif

#ifdef HAVE_cc0
  /* Don't combine an insn that follows a CC0-setting insn.
     An insn that uses CC0 must not be separated from the one that sets it.
     We do, however, allow I2 to follow a CC0-setting insn if that insn
     is passed as I1; in that case it will be deleted also.
     We also allow combining in this case if all the insns are adjacent
     because that would leave the two CC0 insns adjacent as well.
     It would be more logical to test whether CC0 occurs inside I1 or I2,
     but that would be much slower, and this ought to be equivalent.  */

  p = prev_nonnote_insn (insn);
  if (p && p != pred && GET_CODE (p) == INSN && sets_cc0_p (PATTERN (p))
      && ! all_adjacent)
    return 0;
#endif

  /* If we get here, we have passed all the tests and the combination is
     to be allowed.  */

  *pdest = dest;
  *psrc = src;

  return 1;
}
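
/* An example of a rejection (hypothetical): if INSN is
   (set (reg:SI 100) (mem:SI (reg:SI 99))) and a CALL_INSN occurs between
   INSN and I3, the INSN_CUID (insn) < last_call_cuid test above refuses
   the combination, since moving the load past the call could change
   whether some register's life span crosses a call, and that information
   is painful to update.  */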
\f
/* LOC is the location within I3 that contains its pattern or the component
   of a PARALLEL of the pattern.  We validate that it is valid for combining.

   One problem is if I3 modifies its output, as opposed to replacing it
   entirely, we can't allow the output to contain I2DEST or I1DEST as doing
   so would produce an insn that is not equivalent to the original insns.

   Consider:

	 (set (reg:DI 101) (reg:DI 100))
	 (set (subreg:SI (reg:DI 101) 0) <foo>)

   This is NOT equivalent to:

	 (parallel [(set (subreg:SI (reg:DI 100) 0) <foo>)
		    (set (reg:DI 101) (reg:DI 100))])

   Not only does this modify 100 (in which case it might still be valid
   if 100 were dead in I2), it sets 101 to the ORIGINAL value of 100.

   We can also run into a problem if I2 sets a register that I1
   uses and I1 gets directly substituted into I3 (not via I2).  In that
   case, we would be getting the wrong value of I2DEST into I3, so we
   must reject the combination.  This case occurs when I2 and I1 both
   feed into I3, rather than when I1 feeds into I2, which feeds into I3.
   If I1_NOT_IN_SRC is non-zero, it means that finding I1 in the source
   of a SET must prevent combination from occurring.

   On machines where SMALL_REGISTER_CLASSES is defined, we don't combine
   if the destination of a SET is a hard register.

   Before doing the above check, we first try to expand a field assignment
   into a set of logical operations.

   If PI3_DEST_KILLED is non-zero, it is a pointer to a location in which
   we place a register that is both set and used within I3.  If more than one
   such register is detected, we fail.

   Return 1 if the combination is valid, zero otherwise.  */

static int
combinable_i3pat (i3, loc, i2dest, i1dest, i1_not_in_src, pi3dest_killed)
     rtx i3;
     rtx *loc;
     rtx i2dest;
     rtx i1dest;
     int i1_not_in_src;
     rtx *pi3dest_killed;
{
  rtx x = *loc;

  if (GET_CODE (x) == SET)
    {
      rtx set = expand_field_assignment (x);
      rtx dest = SET_DEST (set);
      rtx src = SET_SRC (set);
      rtx inner_dest = dest, inner_src = src;

      SUBST (*loc, set);

      while (GET_CODE (inner_dest) == STRICT_LOW_PART
	     || GET_CODE (inner_dest) == SUBREG
	     || GET_CODE (inner_dest) == ZERO_EXTRACT)
	inner_dest = XEXP (inner_dest, 0);

      /* We probably don't need this any more now that LIMIT_RELOAD_CLASS
	 was added.  */
#if 0
      while (GET_CODE (inner_src) == STRICT_LOW_PART
	     || GET_CODE (inner_src) == SUBREG
	     || GET_CODE (inner_src) == ZERO_EXTRACT)
	inner_src = XEXP (inner_src, 0);

      /* If it is better that two different modes keep two different pseudos,
	 avoid combining them.  This avoids producing the following pattern
	 on a 386:
	  (set (subreg:SI (reg/v:QI 21) 0)
	       (lshiftrt:SI (reg/v:SI 20)
		   (const_int 24)))
	 If that were made, reload could not handle the pair of
	 reg 20/21, since it would try to get any GENERAL_REGS
	 but some of them don't handle QImode.  */

      if (rtx_equal_p (inner_src, i2dest)
	  && GET_CODE (inner_dest) == REG
	  && ! MODES_TIEABLE_P (GET_MODE (i2dest), GET_MODE (inner_dest)))
	return 0;
#endif

      /* Check for the case where I3 modifies its output, as
	 discussed above.  */
      if ((inner_dest != dest
	   && (reg_overlap_mentioned_p (i2dest, inner_dest)
	       || (i1dest && reg_overlap_mentioned_p (i1dest, inner_dest))))
	  /* This is the same test done in can_combine_p except that we
	     allow a hard register with SMALL_REGISTER_CLASSES if SRC is a
	     CALL operation.  */
	  || (GET_CODE (inner_dest) == REG
	      && REGNO (inner_dest) < FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
	      && GET_CODE (src) != CALL
#else
	      && ! HARD_REGNO_MODE_OK (REGNO (inner_dest),
				       GET_MODE (inner_dest))
#endif
	      )

	  || (i1_not_in_src && reg_overlap_mentioned_p (i1dest, src)))
	return 0;

      /* If DEST is used in I3, it is being killed in this insn,
	 so record that for later.  */
      if (pi3dest_killed && GET_CODE (dest) == REG
	  && reg_referenced_p (dest, PATTERN (i3)))
	{
	  if (*pi3dest_killed)
	    return 0;

	  *pi3dest_killed = dest;
	}
    }

  else if (GET_CODE (x) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (x, 0); i++)
	if (! combinable_i3pat (i3, &XVECEXP (x, 0, i), i2dest, i1dest,
				i1_not_in_src, pi3dest_killed))
	  return 0;
    }

  return 1;
}
\f
/* Try to combine the insns I1 and I2 into I3.
   Here I1 and I2 appear earlier than I3.
   I1 can be zero; then we combine just I2 into I3.

   If we are combining three insns and the resulting insn is not recognized,
   try splitting it into two insns.  If that happens, I2 and I3 are retained
   and I1 is pseudo-deleted by turning it into a NOTE.  Otherwise, I1 and I2
   are pseudo-deleted.

   If we created two insns, return I2; otherwise return I3.
   Return 0 if the combination does not work.  Then nothing is changed.  */

static rtx
try_combine (i3, i2, i1)
     register rtx i3, i2, i1;
{
  /* New patterns for I3 and I2, respectively.  */
  rtx newpat, newi2pat = 0;
  /* Indicates need to preserve SET in I1 or I2 in I3 if it is not dead.  */
  int added_sets_1, added_sets_2;
  /* Total number of SETs to put into I3.  */
  int total_sets;
  /* Nonzero if I2's body now appears in I3.  */
  int i2_is_used;
  /* INSN_CODEs for new I3, new I2, and user of condition code.  */
  int insn_code_number, i2_code_number, other_code_number;
  /* Contains I3 if the destination of I3 is used in its source, which means
     that the old life of I3 is being killed.  If that usage is placed into
     I2 and not in I3, a REG_DEAD note must be made.  */
  rtx i3dest_killed = 0;
  /* SET_DEST and SET_SRC of I2 and I1.  */
  rtx i2dest, i2src, i1dest = 0, i1src = 0;
  /* PATTERN (I2), or a copy of it in certain cases.  */
  rtx i2pat;
  /* Indicates if I2DEST or I1DEST is in I2SRC or I1SRC.  */
  int i2dest_in_i2src, i1dest_in_i1src = 0, i2dest_in_i1src = 0;
  int i1_feeds_i3 = 0;
  /* Notes that must be added to REG_NOTES in I3 and I2.  */
  rtx new_i3_notes, new_i2_notes;

  int maxreg;
  rtx temp;
  register rtx link;
  int i;

  /* If any of I1, I2, and I3 isn't really an insn, we can't do anything.
     This can occur when flow deletes an insn that it has merged into an
     auto-increment address.  We also can't do anything if I3 has a
     REG_LIBCALL note since we don't want to disrupt the contiguity of a
     libcall.  */

  if (GET_RTX_CLASS (GET_CODE (i3)) != 'i'
      || GET_RTX_CLASS (GET_CODE (i2)) != 'i'
      || (i1 && GET_RTX_CLASS (GET_CODE (i1)) != 'i')
      || find_reg_note (i3, REG_LIBCALL, NULL_RTX))
    return 0;

  combine_attempts++;

  undobuf.num_undo = previous_num_undos = 0;
  undobuf.other_insn = 0;

  /* Save the current high-water-mark so we can free storage if we didn't
     accept this combination.  */
  undobuf.storage = (char *) oballoc (0);

  /* If I1 and I2 both feed I3, they can be in any order.  To simplify the
     code below, set I1 to be the earlier of the two insns.  */
  if (i1 && INSN_CUID (i1) > INSN_CUID (i2))
    temp = i1, i1 = i2, i2 = temp;

  /* First check for one important special-case that the code below will
     not handle.  Namely, the case where I1 is zero, I2 has multiple sets,
     and I3 is a SET whose SET_SRC is a SET_DEST in I2.  In that case,
     we may be able to replace that destination with the destination of I3.
     This occurs in the common code where we compute both a quotient and
     remainder into a structure, in which case we want to do the computation
     directly into the structure to avoid register-register copies.

     We make very conservative checks below and only try to handle the
     most common cases of this.  For example, we only handle the case
     where I2 and I3 are adjacent to avoid making difficult register
     usage tests.  */
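
/* Sketch of the situation (hypothetical divmod RTL): I2 is

	(parallel [(set (reg:SI 100) (div:SI (reg:SI 98) (reg:SI 99)))
		   (set (reg:SI 101) (mod:SI (reg:SI 98) (reg:SI 99)))])

   and I3 is (set (mem:SI ...) (reg:SI 100)) with reg 100 dying in I3.
   Substituting the memory location for reg 100 in I2's first SET lets the
   division store its result directly into the structure slot.  */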

  if (i1 == 0 && GET_CODE (i3) == INSN && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == REG
      && REGNO (SET_SRC (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER
#ifdef SMALL_REGISTER_CLASSES
      && (GET_CODE (SET_DEST (PATTERN (i3))) != REG
	  || REGNO (SET_DEST (PATTERN (i3))) >= FIRST_PSEUDO_REGISTER)
#endif
      && find_reg_note (i3, REG_DEAD, SET_SRC (PATTERN (i3)))
      && GET_CODE (PATTERN (i2)) == PARALLEL
      && ! side_effects_p (SET_DEST (PATTERN (i3)))
      /* If the dest of I3 is a ZERO_EXTRACT or STRICT_LOW_PART, the code
	 below would need to check what is inside (and reg_overlap_mentioned_p
	 doesn't support those codes anyway).  Don't allow those destinations;
	 the resulting insn isn't likely to be recognized anyway.  */
      && GET_CODE (SET_DEST (PATTERN (i3))) != ZERO_EXTRACT
      && GET_CODE (SET_DEST (PATTERN (i3))) != STRICT_LOW_PART
      && ! reg_overlap_mentioned_p (SET_SRC (PATTERN (i3)),
				    SET_DEST (PATTERN (i3)))
      && next_real_insn (i2) == i3)
    {
      rtx p2 = PATTERN (i2);

      /* Make sure that the destination of I3,
	 which we are going to substitute into one output of I2,
	 is not used within another output of I2.  We must avoid making this:
	 (parallel [(set (mem (reg 69)) ...)
		    (set (reg 69) ...)])
	 which is not well-defined as to order of actions.
	 (Besides, reload can't handle output reloads for this.)

	 The problem can also happen if the dest of I3 is a memory ref,
	 if another dest in I2 is an indirect memory ref.  */
      for (i = 0; i < XVECLEN (p2, 0); i++)
	if (GET_CODE (XVECEXP (p2, 0, i)) == SET
	    && reg_overlap_mentioned_p (SET_DEST (PATTERN (i3)),
					SET_DEST (XVECEXP (p2, 0, i))))
	  break;

      if (i == XVECLEN (p2, 0))
	for (i = 0; i < XVECLEN (p2, 0); i++)
	  if (SET_DEST (XVECEXP (p2, 0, i)) == SET_SRC (PATTERN (i3)))
	    {
	      combine_merges++;

	      subst_insn = i3;
	      subst_low_cuid = INSN_CUID (i2);

	      added_sets_2 = 0;
	      i2dest = SET_SRC (PATTERN (i3));

	      /* Replace the dest in I2 with our dest and make the resulting
		 insn the new pattern for I3.  Then skip to where we
		 validate the pattern.  Everything was set up above.  */
	      SUBST (SET_DEST (XVECEXP (p2, 0, i)),
		     SET_DEST (PATTERN (i3)));

	      newpat = p2;
	      goto validate_replacement;
	    }
    }

#ifndef HAVE_cc0
  /* If we have no I1 and I2 looks like:
	(parallel [(set (reg:CC X) (compare:CC OP (const_int 0)))
		   (set Y OP)])
     make up a dummy I1 that is
	(set Y OP)
     and change I2 to be
	(set (reg:CC X) (compare:CC Y (const_int 0)))

     (We can ignore any trailing CLOBBERs.)

     This undoes a previous combination and allows us to match a branch-and-
     decrement insn.  */

  if (i1 == 0 && GET_CODE (PATTERN (i2)) == PARALLEL
      && XVECLEN (PATTERN (i2), 0) >= 2
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 0)) == SET
      && (GET_MODE_CLASS (GET_MODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 0))))
	  == MODE_CC)
      && GET_CODE (SET_SRC (XVECEXP (PATTERN (i2), 0, 0))) == COMPARE
      && XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 1) == const0_rtx
      && GET_CODE (XVECEXP (PATTERN (i2), 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, 1))) == REG
      && rtx_equal_p (XEXP (SET_SRC (XVECEXP (PATTERN (i2), 0, 0)), 0),
		      SET_SRC (XVECEXP (PATTERN (i2), 0, 1))))
    {
      for (i = XVECLEN (PATTERN (i2), 0) - 1; i >= 2; i--)
	if (GET_CODE (XVECEXP (PATTERN (i2), 0, i)) != CLOBBER)
	  break;

      if (i == 1)
	{
	  /* We make I1 with the same INSN_UID as I2.  This gives it
	     the same INSN_CUID for value tracking.  Our fake I1 will
	     never appear in the insn stream so giving it the same INSN_UID
	     as I2 will not cause a problem.  */

	  i1 = gen_rtx (INSN, VOIDmode, INSN_UID (i2), 0, i2,
			XVECEXP (PATTERN (i2), 0, 1), -1, 0, 0);

	  SUBST (PATTERN (i2), XVECEXP (PATTERN (i2), 0, 0));
	  SUBST (XEXP (SET_SRC (PATTERN (i2)), 0),
		 SET_DEST (PATTERN (i1)));
	}
    }
#endif

  /* Verify that I2 and I1 are valid for combining.  */
  if (! can_combine_p (i2, i3, i1, NULL_RTX, &i2dest, &i2src)
      || (i1 && ! can_combine_p (i1, i3, NULL_RTX, i2, &i1dest, &i1src)))
    {
      undo_all ();
      return 0;
    }

  /* Record whether I2DEST is used in I2SRC and similarly for the other
     cases.  Knowing this will help in register status updating below.  */
  i2dest_in_i2src = reg_overlap_mentioned_p (i2dest, i2src);
  i1dest_in_i1src = i1 && reg_overlap_mentioned_p (i1dest, i1src);
  i2dest_in_i1src = i1 && reg_overlap_mentioned_p (i2dest, i1src);

  /* See if I1 directly feeds into I3.  It does if I1DEST is not used
     in I2SRC.  */
  i1_feeds_i3 = i1 && ! reg_overlap_mentioned_p (i1dest, i2src);

  /* Ensure that I3's pattern can be the destination of combines.  */
  if (! combinable_i3pat (i3, &PATTERN (i3), i2dest, i1dest,
			  i1 && i2dest_in_i1src && i1_feeds_i3,
			  &i3dest_killed))
    {
      undo_all ();
      return 0;
    }

  /* If I3 has an inc, then give up if I1 or I2 uses the reg that is inc'd.
     We used to do this EXCEPT in one case: I3 has a post-inc in an
     output operand.  However, that exception can give rise to insns like
	mov r3,(r3)+
     which is a famous insn on the PDP-11 where the value of r3 used as the
     source was model-dependent.  Avoid this sort of thing.  */

#if 0
  if (!(GET_CODE (PATTERN (i3)) == SET
	&& GET_CODE (SET_SRC (PATTERN (i3))) == REG
	&& GET_CODE (SET_DEST (PATTERN (i3))) == MEM
	&& (GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_INC
	    || GET_CODE (XEXP (SET_DEST (PATTERN (i3)), 0)) == POST_DEC)))
    /* It's not the exception.  */
#endif
#ifdef AUTO_INC_DEC
  for (link = REG_NOTES (i3); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC
	&& (reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i2))
	    || (i1 != 0
		&& reg_overlap_mentioned_p (XEXP (link, 0), PATTERN (i1)))))
      {
	undo_all ();
	return 0;
      }
#endif

  /* See if the SETs in I1 or I2 need to be kept around in the merged
     instruction: whenever the value set there is still needed past I3.
     For the SETs in I2, this is easy: we see if I2DEST dies or is set in I3.

     For the SET in I1, we have two cases:  If I1 and I2 independently
     feed into I3, the set in I1 needs to be kept around if I1DEST dies
     or is set in I3.  Otherwise (if I1 feeds I2 which feeds I3), the set
     in I1 needs to be kept around unless I1DEST dies or is set in either
     I2 or I3.  We can distinguish these cases by seeing if I2SRC mentions
     I1DEST.  If so, we know I1 feeds into I2.  */

  added_sets_2 = ! dead_or_set_p (i3, i2dest);

  added_sets_1
    = i1 && ! (i1_feeds_i3 ? dead_or_set_p (i3, i1dest)
	       : (dead_or_set_p (i3, i1dest) || dead_or_set_p (i2, i1dest)));

  /* If the set in I2 needs to be kept around, we must make a copy of
     PATTERN (I2), so that when we substitute I1SRC for I1DEST in
     PATTERN (I2), we are only substituting for the original I1DEST, not into
     an already-substituted copy.  This also prevents making self-referential
     rtx.  If I2 is a PARALLEL, we just need the piece that assigns I2SRC to
     I2DEST.  */
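
/* To see the danger (hypothetical pseudos): if I1 is
   (set (reg 101) (plus (reg 100) (const_int 1))) and I2SRC also mentions
   reg 101, substituting I1SRC into a shared, already-substituted I2 body
   would make two uses point at the same rtx, so a later in-place change
   to one would silently rewrite the other, possibly creating circular
   rtl.  Copying I2PAT keeps the kept-around SET independent.  */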

  i2pat = (GET_CODE (PATTERN (i2)) == PARALLEL
	   ? gen_rtx (SET, VOIDmode, i2dest, i2src)
	   : PATTERN (i2));

  if (added_sets_2)
    i2pat = copy_rtx (i2pat);

  combine_merges++;

  /* Substitute in the latest insn for the regs set by the earlier ones.  */

  maxreg = max_reg_num ();

  subst_insn = i3;

  /* It is possible that the source of I2 or I1 may be performing an
     unneeded operation, such as a ZERO_EXTEND of something that is known
     to have the high part zero.  Handle that case by letting subst look at
     the innermost one of them.

     Another way to do this would be to have a function that tries to
     simplify a single insn instead of merging two or more insns.  We don't
     do this because of the potential of infinite loops and because
     of the potential extra memory required.  However, doing it the way
     we are is a bit of a kludge and doesn't catch all cases.

     But only do this if -fexpensive-optimizations since it slows things down
     and doesn't usually win.  */

  if (flag_expensive_optimizations)
    {
      /* Pass pc_rtx so no substitutions are done, just simplifications.
	 The cases that we are interested in here do not involve the few
	 cases where is_replaced is checked.  */
      if (i1)
	{
	  subst_low_cuid = INSN_CUID (i1);
	  i1src = subst (i1src, pc_rtx, pc_rtx, 0, 0);
	}
      else
	{
	  subst_low_cuid = INSN_CUID (i2);
	  i2src = subst (i2src, pc_rtx, pc_rtx, 0, 0);
	}

      previous_num_undos = undobuf.num_undo;
    }

#ifndef HAVE_cc0
  /* Many machines that don't use CC0 have insns that can both perform an
     arithmetic operation and set the condition code.  These operations will
     be represented as a PARALLEL with the first element of the vector
     being a COMPARE of an arithmetic operation with the constant zero.
     The second element of the vector will set some pseudo to the result
     of the same arithmetic operation.  If we simplify the COMPARE, we won't
     match such a pattern and so will generate an extra insn.  Here we test
     for this case, where both the comparison and the operation result are
     needed, and make the PARALLEL by just replacing I2DEST in I3SRC with
     I2SRC.  Later we will make the PARALLEL that contains I2.  */

  if (i1 == 0 && added_sets_2 && GET_CODE (PATTERN (i3)) == SET
      && GET_CODE (SET_SRC (PATTERN (i3))) == COMPARE
      && XEXP (SET_SRC (PATTERN (i3)), 1) == const0_rtx
      && rtx_equal_p (XEXP (SET_SRC (PATTERN (i3)), 0), i2dest))
    {
      rtx *cc_use;
      enum machine_mode compare_mode;

      newpat = PATTERN (i3);
      SUBST (XEXP (SET_SRC (newpat), 0), i2src);

      i2_is_used = 1;

#ifdef EXTRA_CC_MODES
      /* See if a COMPARE with the operand we substituted in should be done
	 with the mode that is currently being used.  If not, do the same
	 processing we do in `subst' for a SET; namely, if the destination
	 is used only once, try to replace it with a register of the proper
	 mode and also replace the COMPARE.  */
      if (undobuf.other_insn == 0
	  && (cc_use = find_single_use (SET_DEST (newpat), i3,
					&undobuf.other_insn))
	  && ((compare_mode = SELECT_CC_MODE (GET_CODE (*cc_use),
					      i2src, const0_rtx))
	      != GET_MODE (SET_DEST (newpat))))
	{
	  int regno = REGNO (SET_DEST (newpat));
	  rtx new_dest = gen_rtx (REG, compare_mode, regno);

	  if (regno < FIRST_PSEUDO_REGISTER
	      || (reg_n_sets[regno] == 1 && ! added_sets_2
		  && ! REG_USERVAR_P (SET_DEST (newpat))))
	    {
	      if (regno >= FIRST_PSEUDO_REGISTER)
		SUBST (regno_reg_rtx[regno], new_dest);

	      SUBST (SET_DEST (newpat), new_dest);
	      SUBST (XEXP (*cc_use, 0), new_dest);
	      SUBST (SET_SRC (newpat),
		     gen_rtx_combine (COMPARE, compare_mode,
				      i2src, const0_rtx));
	    }
	  else
	    undobuf.other_insn = 0;
	}
#endif
    }
  else
#endif
    {
      n_occurrences = 0;		/* `subst' counts here */

      /* If I1 feeds into I2 (not into I3) and I1DEST is in I1SRC, we
	 need to make a unique copy of I2SRC each time we substitute it
	 to avoid self-referential rtl.  */

      subst_low_cuid = INSN_CUID (i2);
      newpat = subst (PATTERN (i3), i2dest, i2src, 0,
		      ! i1_feeds_i3 && i1dest_in_i1src);
      previous_num_undos = undobuf.num_undo;

      /* Record whether i2's body now appears within i3's body.  */
      i2_is_used = n_occurrences;
    }

  /* If we already got a failure, don't try to do more.  Otherwise,
     try to substitute in I1 if we have it.  */

  if (i1 && GET_CODE (newpat) != CLOBBER)
    {
      /* Before we can do this substitution, we must redo the test done
	 above (see detailed comments there) that ensures that I1DEST
	 isn't mentioned in any SETs in NEWPAT that are field assignments.  */

      if (! combinable_i3pat (NULL_RTX, &newpat, i1dest, NULL_RTX,
			      0, NULL_PTR))
	{
	  undo_all ();
	  return 0;
	}

      n_occurrences = 0;
      subst_low_cuid = INSN_CUID (i1);
      newpat = subst (newpat, i1dest, i1src, 0, 0);
      previous_num_undos = undobuf.num_undo;
    }

  /* Fail if an autoincrement side-effect has been duplicated.  Be careful
     to count all the ways that I2SRC and I1SRC can be used.  */
  if ((FIND_REG_INC_NOTE (i2, NULL_RTX) != 0
       && i2_is_used + added_sets_2 > 1)
      || (i1 != 0 && FIND_REG_INC_NOTE (i1, NULL_RTX) != 0
	  && (n_occurrences + added_sets_1 + (added_sets_2 && ! i1_feeds_i3)
	      > 1))
      /* Fail if we tried to make a new register (we used to abort, but there's
	 really no reason to).  */
      || max_reg_num () != maxreg
      /* Fail if we couldn't do something and have a CLOBBER.  */
      || GET_CODE (newpat) == CLOBBER)
    {
      undo_all ();
      return 0;
    }

  /* If the actions of the earlier insns must be kept
     in addition to substituting them into the latest one,
     we must make a new PARALLEL for the latest insn
     to hold the additional SETs.  */

  if (added_sets_1 || added_sets_2)
    {
      combine_extras++;

      if (GET_CODE (newpat) == PARALLEL)
	{
	  rtvec old = XVEC (newpat, 0);
	  total_sets = XVECLEN (newpat, 0) + added_sets_1 + added_sets_2;
	  newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
	  bcopy (&old->elem[0], &XVECEXP (newpat, 0, 0),
		 sizeof (old->elem[0]) * old->num_elem);
	}
      else
	{
	  rtx old = newpat;
	  total_sets = 1 + added_sets_1 + added_sets_2;
	  newpat = gen_rtx (PARALLEL, VOIDmode, rtvec_alloc (total_sets));
	  XVECEXP (newpat, 0, 0) = old;
	}

      if (added_sets_1)
	XVECEXP (newpat, 0, --total_sets)
	  = (GET_CODE (PATTERN (i1)) == PARALLEL
	     ? gen_rtx (SET, VOIDmode, i1dest, i1src) : PATTERN (i1));

      if (added_sets_2)
	{
	  /* If there is no I1, use I2's body as is.  We used to also not do
	     the subst call below if I2 was substituted into I3,
	     but that could lose a simplification.  */
	  if (i1 == 0)
	    XVECEXP (newpat, 0, --total_sets) = i2pat;
	  else
	    /* See comment where i2pat is assigned.  */
	    XVECEXP (newpat, 0, --total_sets)
	      = subst (i2pat, i1dest, i1src, 0, 0);
	}
    }
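
/* At this point (an illustrative shape, not a guaranteed one) NEWPAT may
   look like

	(parallel [(set <dest of I3> <combined source>)
		   (set <I2DEST> <I2SRC>)])

   where the second SET was kept because I2DEST is still live past I3.
   Whether this PARALLEL is usable is decided by recog_for_combine below.  */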

  /* We come here when we are replacing a destination in I2 with the
     destination of I3.  */
 validate_replacement:

  /* Is the result of combination a valid instruction?  */
  insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);

  /* If the result isn't valid, see if it is a PARALLEL of two SETs where
     the second SET's destination is a register that is unused.  In that case,
     we just need the first SET.  This can occur when simplifying a divmod
     insn.  We *must* test for this case here because the code below that
     splits two independent SETs doesn't handle this case correctly when it
     updates the register status.  Also check the case where the first
     SET's destination is unused.  That would not cause incorrect code, but
     does cause an unneeded insn to remain.  */
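
/* For instance (hypothetical divmod again): if NEWPAT is

	(parallel [(set (reg 100) (div (reg 98) (reg 99)))
		   (set (reg 101) (mod (reg 98) (reg 99)))])

   and I3 carries a REG_UNUSED note for reg 101, the PARALLEL is reduced
   to its first SET and rerecognized as a plain division.  */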

  if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
      && XVECLEN (newpat, 0) == 2
      && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
      && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
      && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) == REG
      && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 1)))
      && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 1)))
      && asm_noperands (newpat) < 0)
    {
      newpat = XVECEXP (newpat, 0, 0);
      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }

  else if (insn_code_number < 0 && GET_CODE (newpat) == PARALLEL
	   && XVECLEN (newpat, 0) == 2
	   && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
	   && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
	   && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) == REG
	   && find_reg_note (i3, REG_UNUSED, SET_DEST (XVECEXP (newpat, 0, 0)))
	   && ! side_effects_p (SET_SRC (XVECEXP (newpat, 0, 0)))
	   && asm_noperands (newpat) < 0)
    {
      newpat = XVECEXP (newpat, 0, 1);
      insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
    }

  /* See if this is an XOR.  If so, perhaps the problem is that the
     constant is out of range.  Replace it with a complemented XOR with
     a complemented constant; it might be in range.  */
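
/* A worked example (assuming 32-bit SImode): (xor:SI X (const_int
   0x80000000)) may fail to match if the target only accepts small
   immediates, but since X ^ C == ~(X ^ ~C), it can be rewritten as
   (not:SI (xor:SI X (const_int 0x7fffffff))), whose constant might be
   acceptable.  */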

  else if (insn_code_number < 0 && GET_CODE (newpat) == SET
	   && GET_CODE (SET_SRC (newpat)) == XOR
	   && GET_CODE (XEXP (SET_SRC (newpat), 1)) == CONST_INT
	   && ((temp = simplify_unary_operation (NOT,
						 GET_MODE (SET_SRC (newpat)),
						 XEXP (SET_SRC (newpat), 1),
						 GET_MODE (SET_SRC (newpat))))
	       != 0))
    {
      enum machine_mode i_mode = GET_MODE (SET_SRC (newpat));
      rtx pat
	= gen_rtx_combine (SET, VOIDmode, SET_DEST (newpat),
			   gen_unary (NOT, i_mode,
				      gen_binary (XOR, i_mode,
						  XEXP (SET_SRC (newpat), 0),
						  temp)));

      insn_code_number = recog_for_combine (&pat, i3, &new_i3_notes);
      if (insn_code_number >= 0)
	newpat = pat;
    }

  /* If we were combining three insns and the result is a simple SET
     with no ASM_OPERANDS that wasn't recognized, try to split it into two
     insns.  There are two ways to do this.  It can be split using a
     machine-specific method (like when you have an addition of a large
     constant) or by combine in the function find_split_point.  */

  if (i1 && insn_code_number < 0 && GET_CODE (newpat) == SET
      && asm_noperands (newpat) < 0)
    {
      rtx m_split, *split;
      rtx ni2dest = i2dest;

      /* See if the MD file can split NEWPAT.  If it can't, see if letting it
	 use I2DEST as a scratch register will help.  In the latter case,
	 convert I2DEST to the mode of the source of NEWPAT if we can.  */

      m_split = split_insns (newpat, i3);

      /* We can only use I2DEST as a scratch reg if it doesn't overlap any
	 inputs of NEWPAT.  */

      /* ??? If I2DEST is not safe, and I1DEST exists, then it would be
	 possible to try that as a scratch reg.  This would require adding
	 more code to make it work though.  */

      if (m_split == 0 && ! reg_overlap_mentioned_p (ni2dest, newpat))
	{
	  /* If I2DEST is a hard register or the only use of a pseudo,
	     we can change its mode.  */
	  if (GET_MODE (SET_DEST (newpat)) != GET_MODE (i2dest)
	      && GET_MODE (SET_DEST (newpat)) != VOIDmode
	      && GET_CODE (i2dest) == REG
	      && (REGNO (i2dest) < FIRST_PSEUDO_REGISTER
		  || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
		      && ! REG_USERVAR_P (i2dest))))
	    ni2dest = gen_rtx (REG, GET_MODE (SET_DEST (newpat)),
			       REGNO (i2dest));

	  m_split = split_insns (gen_rtx (PARALLEL, VOIDmode,
					  gen_rtvec (2, newpat,
						     gen_rtx (CLOBBER,
							      VOIDmode,
							      ni2dest))),
				 i3);
	}

      if (m_split && GET_CODE (m_split) == SEQUENCE
	  && XVECLEN (m_split, 0) == 2
	  && (next_real_insn (i2) == i3
	      || ! use_crosses_set_p (PATTERN (XVECEXP (m_split, 0, 0)),
				      INSN_CUID (i2))))
	{
	  rtx i2set, i3set;
	  rtx newi3pat = PATTERN (XVECEXP (m_split, 0, 1));
	  newi2pat = PATTERN (XVECEXP (m_split, 0, 0));

	  i3set = single_set (XVECEXP (m_split, 0, 1));
	  i2set = single_set (XVECEXP (m_split, 0, 0));

	  /* In case we changed the mode of I2DEST, replace it in the
	     pseudo-register table here.  We can't do it above in case this
	     code doesn't get executed and we do a split the other way.  */

	  if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
	    SUBST (regno_reg_rtx[REGNO (i2dest)], ni2dest);

	  i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);

	  /* If I2 or I3 has multiple SETs, we won't know how to track
	     register status, so don't use these insns.  */

	  if (i2_code_number >= 0 && i2set && i3set)
	    insn_code_number = recog_for_combine (&newi3pat, i3,
						  &new_i3_notes);

	  if (insn_code_number >= 0)
	    newpat = newi3pat;

	  /* It is possible that both insns now set the destination of I3.
	     If so, we must show an extra use of it.  */

	  if (insn_code_number >= 0 && GET_CODE (SET_DEST (i3set)) == REG
	      && GET_CODE (SET_DEST (i2set)) == REG
	      && REGNO (SET_DEST (i3set)) == REGNO (SET_DEST (i2set)))
	    reg_n_sets[REGNO (SET_DEST (i2set))]++;
	}

      /* If we can split it and use I2DEST, go ahead and see if that
	 helps things be recognized.  Verify that none of the registers
	 are set between I2 and I3.  */
      if (insn_code_number < 0 && (split = find_split_point (&newpat, i3)) != 0
1612 #ifdef HAVE_cc0
1613 && GET_CODE (i2dest) == REG
1614 #endif
1615 /* We need I2DEST in the proper mode. If it is a hard register
1616 or the only use of a pseudo, we can change its mode. */
1617 && (GET_MODE (*split) == GET_MODE (i2dest)
1618 || GET_MODE (*split) == VOIDmode
1619 || REGNO (i2dest) < FIRST_PSEUDO_REGISTER
1620 || (reg_n_sets[REGNO (i2dest)] == 1 && ! added_sets_2
1621 && ! REG_USERVAR_P (i2dest)))
1622 && (next_real_insn (i2) == i3
1623 || ! use_crosses_set_p (*split, INSN_CUID (i2)))
1624 /* We can't overwrite I2DEST if its value is still used by
1625 NEWPAT. */
1626 && ! reg_referenced_p (i2dest, newpat))
1627 {
1628 rtx newdest = i2dest;
1629
1630 /* Get NEWDEST as a register in the proper mode. We have already
1631 validated that we can do this. */
1632 if (GET_MODE (i2dest) != GET_MODE (*split)
1633 && GET_MODE (*split) != VOIDmode)
1634 {
1635 newdest = gen_rtx (REG, GET_MODE (*split), REGNO (i2dest));
1636
1637 if (REGNO (i2dest) >= FIRST_PSEUDO_REGISTER)
1638 SUBST (regno_reg_rtx[REGNO (i2dest)], newdest);
1639 }
1640
1641 /* If *SPLIT is a (mult FOO (const_int pow2)), convert it to
1642 an ASHIFT. This can occur if it was inside a PLUS and hence
1643 appeared to be a memory address. This is a kludge. */
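	  /* For example, (plus (mult X (const_int 4)) Y) would leave
	     *SPLIT as (mult X (const_int 4)), which is rewritten here as
	     (ashift X (const_int 2)) since exact_log2 (4) == 2.  */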
1644 if (GET_CODE (*split) == MULT
1645 && GET_CODE (XEXP (*split, 1)) == CONST_INT
1646 && (i = exact_log2 (INTVAL (XEXP (*split, 1)))) >= 0)
1647 SUBST (*split, gen_rtx_combine (ASHIFT, GET_MODE (*split),
1648 XEXP (*split, 0), GEN_INT (i)));
1649
1650 #ifdef INSN_SCHEDULING
1651 /* If *SPLIT is a paradoxical SUBREG, when we split it, it should
1652 be written as a ZERO_EXTEND. */
1653 if (GET_CODE (*split) == SUBREG
1654 && GET_CODE (SUBREG_REG (*split)) == MEM)
1655 SUBST (*split, gen_rtx_combine (ZERO_EXTEND, GET_MODE (*split),
1656 XEXP (*split, 0)));
1657 #endif
1658
1659 newi2pat = gen_rtx_combine (SET, VOIDmode, newdest, *split);
1660 SUBST (*split, newdest);
1661 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1662 if (i2_code_number >= 0)
1663 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1664 }
1665 }
1666
1667 /* Check for a case where we loaded from memory in a narrow mode and
1668 then sign extended it, but we need both registers. In that case,
1669 we have a PARALLEL with both loads from the same memory location.
1670 We can split this into a load from memory followed by a register-register
1671 copy. This saves at least one insn, more if register allocation can
1672 eliminate the copy. */
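   /* Shape handled here (a sketch):
	(parallel [(set R1 (sign_extend:SI (mem:HI ADDR)))
		   (set R2 (mem:HI ADDR))])
      becomes I2: (set R1 (sign_extend:SI (mem:HI ADDR)))
      and I3: (set R2 (lowpart of R1)), a register-register copy.  */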
1673
1674 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1675 && GET_CODE (newpat) == PARALLEL
1676 && XVECLEN (newpat, 0) == 2
1677 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1678 && GET_CODE (SET_SRC (XVECEXP (newpat, 0, 0))) == SIGN_EXTEND
1679 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1680 && rtx_equal_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1681 XEXP (SET_SRC (XVECEXP (newpat, 0, 0)), 0))
1682 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1683 INSN_CUID (i2))
1684 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1685 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1686 && ! reg_overlap_mentioned_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1687 SET_SRC (XVECEXP (newpat, 0, 1)))
1688 && ! find_reg_note (i3, REG_UNUSED,
1689 SET_DEST (XVECEXP (newpat, 0, 0))))
1690 {
1691 rtx ni2dest;
1692
1693 newi2pat = XVECEXP (newpat, 0, 0);
1694 ni2dest = SET_DEST (XVECEXP (newpat, 0, 0));
1695 newpat = XVECEXP (newpat, 0, 1);
1696 SUBST (SET_SRC (newpat),
1697 gen_lowpart_for_combine (GET_MODE (SET_SRC (newpat)), ni2dest));
1698 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1699 if (i2_code_number >= 0)
1700 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1701
1702 if (insn_code_number >= 0)
1703 {
1704 rtx insn;
1705 rtx link;
1706
1707 /* If we will be able to accept this, we have made a change to the
1708 destination of I3. This can invalidate a LOG_LINKS pointing
1709 to I3. No other part of combine.c makes such a transformation.
1710
1711 The new I3 will have a destination that was previously the
1712 destination of I1 or I2 and which was used in I2 or I3. Call
1713 distribute_links to make a LOG_LINK from the next use of
1714 that destination. */
1715
1716 PATTERN (i3) = newpat;
1717 distribute_links (gen_rtx (INSN_LIST, VOIDmode, i3, NULL_RTX));
1718
1719 /* I3 now uses what used to be its destination and which is
1720 now I2's destination. That means we need a LOG_LINK from
1721 I3 to I2. But we used to have one, so we still will.
1722
1723 However, some later insn might be using I2's dest and have
1724 a LOG_LINK pointing at I3. We must remove this link.
1725 The simplest way to remove the link is to point it at I1,
1726 which we know will be a NOTE. */
1727
1728 for (insn = NEXT_INSN (i3);
1729 insn && GET_CODE (insn) != CODE_LABEL
1730 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN;
1731 insn = NEXT_INSN (insn))
1732 {
1733 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
1734 && reg_referenced_p (ni2dest, PATTERN (insn)))
1735 {
1736 for (link = LOG_LINKS (insn); link;
1737 link = XEXP (link, 1))
1738 if (XEXP (link, 0) == i3)
1739 XEXP (link, 0) = i1;
1740
1741 break;
1742 }
1743 }
1744 }
1745 }
1746
1747 /* Similarly, check for a case where we have a PARALLEL of two independent
1748 SETs but we started with three insns. In this case, we can do the sets
1749 as two separate insns. This case occurs when some SET allows two
1750 other insns to combine, but the destination of that SET is still live. */
1751
1752 else if (i1 && insn_code_number < 0 && asm_noperands (newpat) < 0
1753 && GET_CODE (newpat) == PARALLEL
1754 && XVECLEN (newpat, 0) == 2
1755 && GET_CODE (XVECEXP (newpat, 0, 0)) == SET
1756 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != ZERO_EXTRACT
1757 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != STRICT_LOW_PART
1758 && GET_CODE (XVECEXP (newpat, 0, 1)) == SET
1759 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != ZERO_EXTRACT
1760 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != STRICT_LOW_PART
1761 && ! use_crosses_set_p (SET_SRC (XVECEXP (newpat, 0, 1)),
1762 INSN_CUID (i2))
1763 /* Don't pass sets with (USE (MEM ...)) dests to the following. */
1764 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 1))) != USE
1765 && GET_CODE (SET_DEST (XVECEXP (newpat, 0, 0))) != USE
1766 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 1)),
1767 XVECEXP (newpat, 0, 0))
1768 && ! reg_referenced_p (SET_DEST (XVECEXP (newpat, 0, 0)),
1769 XVECEXP (newpat, 0, 1)))
1770 {
1771 newi2pat = XVECEXP (newpat, 0, 1);
1772 newpat = XVECEXP (newpat, 0, 0);
1773
1774 i2_code_number = recog_for_combine (&newi2pat, i2, &new_i2_notes);
1775 if (i2_code_number >= 0)
1776 insn_code_number = recog_for_combine (&newpat, i3, &new_i3_notes);
1777 }
1778
1779 /* If it still isn't recognized, fail and change things back the way they
1780 were. */
1781 if ((insn_code_number < 0
1782 /* Is the result a reasonable ASM_OPERANDS? */
1783 && (! check_asm_operands (newpat) || added_sets_1 || added_sets_2)))
1784 {
1785 undo_all ();
1786 return 0;
1787 }
1788
1789 /* If we had to change another insn, make sure it is valid also. */
1790 if (undobuf.other_insn)
1791 {
1792 rtx other_notes = REG_NOTES (undobuf.other_insn);
1793 rtx other_pat = PATTERN (undobuf.other_insn);
1794 rtx new_other_notes;
1795 rtx note, next;
1796
1797 other_code_number = recog_for_combine (&other_pat, undobuf.other_insn,
1798 &new_other_notes);
1799
1800 if (other_code_number < 0 && ! check_asm_operands (other_pat))
1801 {
1802 undo_all ();
1803 return 0;
1804 }
1805
1806 PATTERN (undobuf.other_insn) = other_pat;
1807
1808 /* If any of the notes in OTHER_INSN were REG_UNUSED, ensure that they
1809 are still valid. Then add any non-duplicate notes added by
1810 recog_for_combine. */
1811 for (note = REG_NOTES (undobuf.other_insn); note; note = next)
1812 {
1813 next = XEXP (note, 1);
1814
1815 if (REG_NOTE_KIND (note) == REG_UNUSED
1816 && ! reg_set_p (XEXP (note, 0), PATTERN (undobuf.other_insn)))
1817 {
1818 if (GET_CODE (XEXP (note, 0)) == REG)
1819 reg_n_deaths[REGNO (XEXP (note, 0))]--;
1820
1821 remove_note (undobuf.other_insn, note);
1822 }
1823 }
1824
1825 for (note = new_other_notes; note; note = XEXP (note, 1))
1826 if (GET_CODE (XEXP (note, 0)) == REG)
1827 reg_n_deaths[REGNO (XEXP (note, 0))]++;
1828
1829 distribute_notes (new_other_notes, undobuf.other_insn,
1830 undobuf.other_insn, NULL_RTX, NULL_RTX, NULL_RTX);
1831 }
1832
1833 /* We now know that we can do this combination. Merge the insns and
1834 update the status of registers and LOG_LINKS. */
1835
1836 {
1837 rtx i3notes, i2notes, i1notes = 0;
1838 rtx i3links, i2links, i1links = 0;
1839 rtx midnotes = 0;
1840 int all_adjacent = (next_real_insn (i2) == i3
1841 && (i1 == 0 || next_real_insn (i1) == i2));
1842 register int regno;
1843 /* Compute which registers we expect to eliminate. */
1844 rtx elim_i2 = (newi2pat || i2dest_in_i2src || i2dest_in_i1src
1845 ? 0 : i2dest);
1846 rtx elim_i1 = i1 == 0 || i1dest_in_i1src ? 0 : i1dest;
1847
1848 /* Get the old REG_NOTES and LOG_LINKS from all our insns and
1849 clear them. */
1850 i3notes = REG_NOTES (i3), i3links = LOG_LINKS (i3);
1851 i2notes = REG_NOTES (i2), i2links = LOG_LINKS (i2);
1852 if (i1)
1853 i1notes = REG_NOTES (i1), i1links = LOG_LINKS (i1);
1854
1855 /* Ensure that we do not have something that should not be shared but
1856 occurs multiple times in the new insns. Check this by first
1857 resetting all the `used' flags and then copying anything that is shared. */
1858
1859 reset_used_flags (i3notes);
1860 reset_used_flags (i2notes);
1861 reset_used_flags (i1notes);
1862 reset_used_flags (newpat);
1863 reset_used_flags (newi2pat);
1864 if (undobuf.other_insn)
1865 reset_used_flags (PATTERN (undobuf.other_insn));
1866
1867 i3notes = copy_rtx_if_shared (i3notes);
1868 i2notes = copy_rtx_if_shared (i2notes);
1869 i1notes = copy_rtx_if_shared (i1notes);
1870 newpat = copy_rtx_if_shared (newpat);
1871 newi2pat = copy_rtx_if_shared (newi2pat);
1872 if (undobuf.other_insn)
1873 reset_used_flags (PATTERN (undobuf.other_insn));
1874
1875 INSN_CODE (i3) = insn_code_number;
1876 PATTERN (i3) = newpat;
1877 if (undobuf.other_insn)
1878 INSN_CODE (undobuf.other_insn) = other_code_number;
1879
1880 /* We had one special case above where I2 had more than one set and
1881 we replaced a destination of one of those sets with the destination
1882 of I3. In that case, we have to update LOG_LINKS of insns later
1883 in this basic block. Note that this (expensive) case is rare. */
1884
1885 if (GET_CODE (PATTERN (i2)) == PARALLEL)
1886 for (i = 0; i < XVECLEN (PATTERN (i2), 0); i++)
1887 if (GET_CODE (SET_DEST (XVECEXP (PATTERN (i2), 0, i))) == REG
1888 && SET_DEST (XVECEXP (PATTERN (i2), 0, i)) != i2dest
1889 && ! find_reg_note (i2, REG_UNUSED,
1890 SET_DEST (XVECEXP (PATTERN (i2), 0, i))))
1891 {
1892 register rtx insn;
1893
1894 for (insn = NEXT_INSN (i2); insn; insn = NEXT_INSN (insn))
1895 {
1896 if (insn != i3 && GET_RTX_CLASS (GET_CODE (insn)) == 'i')
1897 for (link = LOG_LINKS (insn); link; link = XEXP (link, 1))
1898 if (XEXP (link, 0) == i2)
1899 XEXP (link, 0) = i3;
1900
1901 if (GET_CODE (insn) == CODE_LABEL
1902 || GET_CODE (insn) == JUMP_INSN)
1903 break;
1904 }
1905 }
1906
1907 LOG_LINKS (i3) = 0;
1908 REG_NOTES (i3) = 0;
1909 LOG_LINKS (i2) = 0;
1910 REG_NOTES (i2) = 0;
1911
1912 if (newi2pat)
1913 {
1914 INSN_CODE (i2) = i2_code_number;
1915 PATTERN (i2) = newi2pat;
1916 }
1917 else
1918 {
1919 PUT_CODE (i2, NOTE);
1920 NOTE_LINE_NUMBER (i2) = NOTE_INSN_DELETED;
1921 NOTE_SOURCE_FILE (i2) = 0;
1922 }
1923
1924 if (i1)
1925 {
1926 LOG_LINKS (i1) = 0;
1927 REG_NOTES (i1) = 0;
1928 PUT_CODE (i1, NOTE);
1929 NOTE_LINE_NUMBER (i1) = NOTE_INSN_DELETED;
1930 NOTE_SOURCE_FILE (i1) = 0;
1931 }
1932
1933 /* Get death notes for everything that is now used in either I3 or
1934 I2 and used to die in a previous insn. */
1935
1936 move_deaths (newpat, i1 ? INSN_CUID (i1) : INSN_CUID (i2), i3, &midnotes);
1937 if (newi2pat)
1938 move_deaths (newi2pat, INSN_CUID (i1), i2, &midnotes);
1939
1940 /* Distribute all the LOG_LINKS and REG_NOTES from I1, I2, and I3. */
1941 if (i3notes)
1942 distribute_notes (i3notes, i3, i3, newi2pat ? i2 : NULL_RTX,
1943 elim_i2, elim_i1);
1944 if (i2notes)
1945 distribute_notes (i2notes, i2, i3, newi2pat ? i2 : NULL_RTX,
1946 elim_i2, elim_i1);
1947 if (i1notes)
1948 distribute_notes (i1notes, i1, i3, newi2pat ? i2 : NULL_RTX,
1949 elim_i2, elim_i1);
1950 if (midnotes)
1951 distribute_notes (midnotes, NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1952 elim_i2, elim_i1);
1953
1954 /* Distribute any notes added to I2 or I3 by recog_for_combine. We
1955 know these are REG_UNUSED and want them to go to the desired insn,
1956 so we always pass it as i3. We have not counted the notes in
1957 reg_n_deaths yet, so we need to do so now. */
1958
1959 if (newi2pat && new_i2_notes)
1960 {
1961 for (temp = new_i2_notes; temp; temp = XEXP (temp, 1))
1962 if (GET_CODE (XEXP (temp, 0)) == REG)
1963 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
1964
1965 distribute_notes (new_i2_notes, i2, i2, NULL_RTX, NULL_RTX, NULL_RTX);
1966 }
1967
1968 if (new_i3_notes)
1969 {
1970 for (temp = new_i3_notes; temp; temp = XEXP (temp, 1))
1971 if (GET_CODE (XEXP (temp, 0)) == REG)
1972 reg_n_deaths[REGNO (XEXP (temp, 0))]++;
1973
1974 distribute_notes (new_i3_notes, i3, i3, NULL_RTX, NULL_RTX, NULL_RTX);
1975 }
1976
1977 /* If I3DEST was used in I3SRC, it really died in I3. We may need to
1978 put a REG_DEAD note for it somewhere. Similarly for I2 and I1.
1979 Show an additional death due to the REG_DEAD note we make here. If
1980 we discard it in distribute_notes, we will decrement it again. */
1981
1982 if (i3dest_killed)
1983 {
1984 if (GET_CODE (i3dest_killed) == REG)
1985 reg_n_deaths[REGNO (i3dest_killed)]++;
1986
1987 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i3dest_killed,
1988 NULL_RTX),
1989 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
1990 NULL_RTX, NULL_RTX);
1991 }
1992
1993 /* For I2 and I1, we have to be careful. If NEWI2PAT exists and sets
1994 I2DEST or I1DEST, the death must be somewhere before I2, not I3. If
1995 we passed I3 in that case, it might delete I2. */
1996
1997 if (i2dest_in_i2src)
1998 {
1999 if (GET_CODE (i2dest) == REG)
2000 reg_n_deaths[REGNO (i2dest)]++;
2001
2002 if (newi2pat && reg_set_p (i2dest, newi2pat))
2003 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2004 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2005 else
2006 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i2dest, NULL_RTX),
2007 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2008 NULL_RTX, NULL_RTX);
2009 }
2010
2011 if (i1dest_in_i1src)
2012 {
2013 if (GET_CODE (i1dest) == REG)
2014 reg_n_deaths[REGNO (i1dest)]++;
2015
2016 if (newi2pat && reg_set_p (i1dest, newi2pat))
2017 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2018 NULL_RTX, i2, NULL_RTX, NULL_RTX, NULL_RTX);
2019 else
2020 distribute_notes (gen_rtx (EXPR_LIST, REG_DEAD, i1dest, NULL_RTX),
2021 NULL_RTX, i3, newi2pat ? i2 : NULL_RTX,
2022 NULL_RTX, NULL_RTX);
2023 }
2024
2025 distribute_links (i3links);
2026 distribute_links (i2links);
2027 distribute_links (i1links);
2028
2029 if (GET_CODE (i2dest) == REG)
2030 {
2031 rtx link;
2032 rtx i2_insn = 0, i2_val = 0, set;
2033
2034 /* The insn that used to set this register doesn't exist, and
2035 this life of the register may not exist either. See if one of
2036 I3's links points to an insn that sets I2DEST. If it does,
2037 that is now the last known value for I2DEST. If we don't update
2038 this and I2 set the register to a value that depended on its old
2039 contents, we will get confused. If this insn is used, things
2040 will be set correctly in combine_instructions. */
2041
2042 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2043 if ((set = single_set (XEXP (link, 0))) != 0
2044 && rtx_equal_p (i2dest, SET_DEST (set)))
2045 i2_insn = XEXP (link, 0), i2_val = SET_SRC (set);
2046
2047 record_value_for_reg (i2dest, i2_insn, i2_val);
2048
2049 /* If the reg formerly set in I2 died only once and that was in I3,
2050 zero its use count so it won't make `reload' do any work. */
2051 if (! added_sets_2 && newi2pat == 0)
2052 {
2053 regno = REGNO (i2dest);
2054 reg_n_sets[regno]--;
2055 if (reg_n_sets[regno] == 0
2056 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2057 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2058 reg_n_refs[regno] = 0;
2059 }
2060 }
2061
2062 if (i1 && GET_CODE (i1dest) == REG)
2063 {
2064 rtx link;
2065 rtx i1_insn = 0, i1_val = 0, set;
2066
2067 for (link = LOG_LINKS (i3); link; link = XEXP (link, 1))
2068 if ((set = single_set (XEXP (link, 0))) != 0
2069 && rtx_equal_p (i1dest, SET_DEST (set)))
2070 i1_insn = XEXP (link, 0), i1_val = SET_SRC (set);
2071
2072 record_value_for_reg (i1dest, i1_insn, i1_val);
2073
2074 regno = REGNO (i1dest);
2075 if (! added_sets_1)
2076 {
2077 reg_n_sets[regno]--;
2078 if (reg_n_sets[regno] == 0
2079 && ! (basic_block_live_at_start[0][regno / REGSET_ELT_BITS]
2080 & ((REGSET_ELT_TYPE) 1 << (regno % REGSET_ELT_BITS))))
2081 reg_n_refs[regno] = 0;
2082 }
2083 }
2084
2085 /* Update reg_significant et al for any changes that may have been made
2086 to this insn. */
2087
2088 note_stores (newpat, set_significant);
2089 if (newi2pat)
2090 note_stores (newi2pat, set_significant);
2091
2092 /* If I3 is now an unconditional jump, ensure that it has a
2093 BARRIER following it since it may have initially been a
2094 conditional jump. */
2095
2096 if ((GET_CODE (newpat) == RETURN || simplejump_p (i3))
2097 && GET_CODE (next_nonnote_insn (i3)) != BARRIER)
2098 emit_barrier_after (i3);
2099 }
2100
2101 combine_successes++;
2102
2103 return newi2pat ? i2 : i3;
2104 }
2105 \f
2106 /* Undo all the modifications recorded in undobuf. */
2107
2108 static void
2109 undo_all ()
2110 {
2111 register int i;
2112 if (undobuf.num_undo > MAX_UNDO)
2113 undobuf.num_undo = MAX_UNDO;
2114 for (i = undobuf.num_undo - 1; i >= 0; i--)
2115 {
2116 if (undobuf.undo[i].is_int)
2117 *undobuf.undo[i].where.i = undobuf.undo[i].old_contents.i;
2118 else
2119 *undobuf.undo[i].where.rtx = undobuf.undo[i].old_contents.rtx;
2120
2121 }
2122
2123 obfree (undobuf.storage);
2124 undobuf.num_undo = 0;
2125 }
2126 \f
2127 /* Find the innermost point within the rtx at LOC, possibly LOC itself,
2128 where we have an arithmetic expression and return that point. LOC will
2129 be inside INSN.
2130
2131 try_combine will call this function to see if an insn can be split into
2132 two insns. */
2133
2134 static rtx *
2135 find_split_point (loc, insn)
2136 rtx *loc;
2137 rtx insn;
2138 {
2139 rtx x = *loc;
2140 enum rtx_code code = GET_CODE (x);
2141 rtx *split;
2142 int len = 0, pos, unsignedp;
2143 rtx inner;
2144
2145 /* First special-case some codes. */
2146 switch (code)
2147 {
2148 case SUBREG:
2149 #ifdef INSN_SCHEDULING
2150 /* If we are making a paradoxical SUBREG invalid, it becomes a split
2151 point. */
2152 if (GET_CODE (SUBREG_REG (x)) == MEM)
2153 return loc;
2154 #endif
2155 return find_split_point (&SUBREG_REG (x), insn);
2156
2157 case MEM:
2158 #ifdef HAVE_lo_sum
2159 /* If we have (mem (const ..)) or (mem (symbol_ref ...)), split it
2160 using LO_SUM and HIGH. */
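	 /* For example, (mem (symbol_ref "foo")) becomes
	    (mem (lo_sum (high (symbol_ref "foo")) (symbol_ref "foo")))
	    and the inner HIGH is returned as the split point, so the
	    address can be formed by two insns.  */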
2161 if (GET_CODE (XEXP (x, 0)) == CONST
2162 || GET_CODE (XEXP (x, 0)) == SYMBOL_REF)
2163 {
2164 SUBST (XEXP (x, 0),
2165 gen_rtx_combine (LO_SUM, Pmode,
2166 gen_rtx_combine (HIGH, Pmode, XEXP (x, 0)),
2167 XEXP (x, 0)));
2168 return &XEXP (XEXP (x, 0), 0);
2169 }
2170 #endif
2171
2172 /* If we have a PLUS whose second operand is a constant and the
2173 address is not valid, perhaps we can split it up using
2174 the machine-specific way to split large constants. We use
2175 the first pseudo-reg (one of the virtual regs) as a placeholder;
2176 it will not remain in the result. */
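      /* For example, on a machine that only accepts small address offsets,
	 (mem:SI (plus R (const_int 100000))) is not a valid address; the
	 target splitter may build the constant in two steps, and the
	 split point falls between those two computations.  */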
2177 if (GET_CODE (XEXP (x, 0)) == PLUS
2178 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2179 && ! memory_address_p (GET_MODE (x), XEXP (x, 0)))
2180 {
2181 rtx reg = regno_reg_rtx[FIRST_PSEUDO_REGISTER];
2182 rtx seq = split_insns (gen_rtx (SET, VOIDmode, reg, XEXP (x, 0)),
2183 subst_insn);
2184
2185 /* This should have produced two insns, each of which sets our
2186 placeholder. If the source of the second is a valid address,
2187 we can put both sources together and make a split point
2188 in the middle. */
2189
2190 if (seq && XVECLEN (seq, 0) == 2
2191 && GET_CODE (XVECEXP (seq, 0, 0)) == INSN
2192 && GET_CODE (PATTERN (XVECEXP (seq, 0, 0))) == SET
2193 && SET_DEST (PATTERN (XVECEXP (seq, 0, 0))) == reg
2194 && ! reg_mentioned_p (reg,
2195 SET_SRC (PATTERN (XVECEXP (seq, 0, 0))))
2196 && GET_CODE (XVECEXP (seq, 0, 1)) == INSN
2197 && GET_CODE (PATTERN (XVECEXP (seq, 0, 1))) == SET
2198 && SET_DEST (PATTERN (XVECEXP (seq, 0, 1))) == reg
2199 && memory_address_p (GET_MODE (x),
2200 SET_SRC (PATTERN (XVECEXP (seq, 0, 1)))))
2201 {
2202 rtx src1 = SET_SRC (PATTERN (XVECEXP (seq, 0, 0)));
2203 rtx src2 = SET_SRC (PATTERN (XVECEXP (seq, 0, 1)));
2204
2205 /* Replace the placeholder in SRC2 with SRC1. If we can
2206 find where in SRC2 it was placed, that can become our
2207 split point and we can replace this address with SRC2.
2208 Just try two obvious places. */
2209
2210 src2 = replace_rtx (src2, reg, src1);
2211 split = 0;
2212 if (XEXP (src2, 0) == src1)
2213 split = &XEXP (src2, 0);
2214 else if (GET_RTX_FORMAT (GET_CODE (XEXP (src2, 0)))[0] == 'e'
2215 && XEXP (XEXP (src2, 0), 0) == src1)
2216 split = &XEXP (XEXP (src2, 0), 0);
2217
2218 if (split)
2219 {
2220 SUBST (XEXP (x, 0), src2);
2221 return split;
2222 }
2223 }
2224
2225 /* If that didn't work, perhaps the first operand is complex and
2226 needs to be computed separately, so make a split point there.
2227 This will occur on machines that just support REG + CONST
2228 and have a constant moved through some previous computation. */
2229
2230 else if (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (x, 0), 0))) != 'o'
2231 && ! (GET_CODE (XEXP (XEXP (x, 0), 0)) == SUBREG
2232 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (XEXP (x, 0), 0))))
2233 == 'o')))
2234 return &XEXP (XEXP (x, 0), 0);
2235 }
2236 break;
2237
2238 case SET:
2239 #ifdef HAVE_cc0
2240 /* If SET_DEST is CC0 and SET_SRC is not an operand, a COMPARE, or a
2241 ZERO_EXTRACT, the most likely reason why this doesn't match is that
2242 we need to put the operand into a register. So split at that
2243 point. */
2244
2245 if (SET_DEST (x) == cc0_rtx
2246 && GET_CODE (SET_SRC (x)) != COMPARE
2247 && GET_CODE (SET_SRC (x)) != ZERO_EXTRACT
2248 && GET_RTX_CLASS (GET_CODE (SET_SRC (x))) != 'o'
2249 && ! (GET_CODE (SET_SRC (x)) == SUBREG
2250 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) == 'o'))
2251 return &SET_SRC (x);
2252 #endif
2253
2254 /* See if we can split SET_SRC as it stands. */
2255 split = find_split_point (&SET_SRC (x), insn);
2256 if (split && split != &SET_SRC (x))
2257 return split;
2258
2259 /* See if this is a bitfield assignment with everything constant. If
2260 so, this is an IOR of an AND, so split it into that. */
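      /* For example, with len == 8, pos == 8, and BITS_BIG_ENDIAN == 0,
	 (set (zero_extract:SI R (const_int 8) (const_int 8)) (const_int 33))
	 becomes (set R (ior (and R (const_int -65281)) (const_int 8448))),
	 since 33 << 8 == 8448 and ~(255 << 8) == -65281.  */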
2261 if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
2262 && (GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)))
2263 <= HOST_BITS_PER_WIDE_INT)
2264 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT
2265 && GET_CODE (XEXP (SET_DEST (x), 2)) == CONST_INT
2266 && GET_CODE (SET_SRC (x)) == CONST_INT
2267 && ((INTVAL (XEXP (SET_DEST (x), 1))
2268 + INTVAL (XEXP (SET_DEST (x), 2)))
2269 <= GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0))))
2270 && ! side_effects_p (XEXP (SET_DEST (x), 0)))
2271 {
2272 int pos = INTVAL (XEXP (SET_DEST (x), 2));
2273 int len = INTVAL (XEXP (SET_DEST (x), 1));
2274 int src = INTVAL (SET_SRC (x));
2275 rtx dest = XEXP (SET_DEST (x), 0);
2276 enum machine_mode mode = GET_MODE (dest);
2277 unsigned HOST_WIDE_INT mask = ((HOST_WIDE_INT) 1 << len) - 1;
2278
2279 #if BITS_BIG_ENDIAN
2280 pos = GET_MODE_BITSIZE (mode) - len - pos;
2281 #endif
2282
2283 if (src == mask)
2284 SUBST (SET_SRC (x),
2285 gen_binary (IOR, mode, dest, GEN_INT (src << pos)));
2286 else
2287 SUBST (SET_SRC (x),
2288 gen_binary (IOR, mode,
2289 gen_binary (AND, mode, dest,
2290 GEN_INT (~ (mask << pos)
2291 & GET_MODE_MASK (mode))),
2292 GEN_INT (src << pos)));
2293
2294 SUBST (SET_DEST (x), dest);
2295
2296 split = find_split_point (&SET_SRC (x), insn);
2297 if (split && split != &SET_SRC (x))
2298 return split;
2299 }
2300
2301 /* Otherwise, see if this is an operation that we can split into two.
2302 If so, try to split that. */
2303 code = GET_CODE (SET_SRC (x));
2304
2305 switch (code)
2306 {
2307 case AND:
2308 /* If we are AND'ing with a large constant that is only a single
2309 bit and the result is only being used in a context where we
2310 need to know if it is zero or non-zero, replace it with a bit
2311 extraction. This will avoid the large constant, which might
2312 have taken more than one insn to make. If the constant were
2313 not a valid argument to the AND but took only one insn to make,
2314 this is no worse, but if it took more than one insn, it will
2315 be better. */
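	  /* For example, if (set R (and:SI S (const_int 32768))) is used
	     only in (ne R (const_int 0)), the AND is replaced by a one-bit
	     ZERO_EXTRACT of bit 15 of S, avoiding the 0x8000 constant.  */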
2316
2317 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2318 && GET_CODE (XEXP (SET_SRC (x), 0)) == REG
2319 && (pos = exact_log2 (INTVAL (XEXP (SET_SRC (x), 1)))) >= 7
2320 && GET_CODE (SET_DEST (x)) == REG
2321 && (split = find_single_use (SET_DEST (x), insn, NULL_PTR)) != 0
2322 && (GET_CODE (*split) == EQ || GET_CODE (*split) == NE)
2323 && XEXP (*split, 0) == SET_DEST (x)
2324 && XEXP (*split, 1) == const0_rtx)
2325 {
2326 SUBST (SET_SRC (x),
2327 make_extraction (GET_MODE (SET_DEST (x)),
2328 XEXP (SET_SRC (x), 0),
2329 pos, NULL_RTX, 1, 1, 0, 0));
2330 return find_split_point (loc, insn);
2331 }
2332 break;
2333
2334 case SIGN_EXTEND:
2335 inner = XEXP (SET_SRC (x), 0);
2336 pos = 0;
2337 len = GET_MODE_BITSIZE (GET_MODE (inner));
2338 unsignedp = 0;
2339 break;
2340
2341 case SIGN_EXTRACT:
2342 case ZERO_EXTRACT:
2343 if (GET_CODE (XEXP (SET_SRC (x), 1)) == CONST_INT
2344 && GET_CODE (XEXP (SET_SRC (x), 2)) == CONST_INT)
2345 {
2346 inner = XEXP (SET_SRC (x), 0);
2347 len = INTVAL (XEXP (SET_SRC (x), 1));
2348 pos = INTVAL (XEXP (SET_SRC (x), 2));
2349
2350 #if BITS_BIG_ENDIAN
2351 pos = GET_MODE_BITSIZE (GET_MODE (inner)) - len - pos;
2352 #endif
2353 unsignedp = (code == ZERO_EXTRACT);
2354 }
2355 break;
2356 }
2357
2358 if (len && pos >= 0 && pos + len <= GET_MODE_BITSIZE (GET_MODE (inner)))
2359 {
2360 enum machine_mode mode = GET_MODE (SET_SRC (x));
2361
2362 /* For unsigned, we have a choice of a shift followed by an
2363 AND or two shifts. Use two shifts for field sizes where the
2364 constant might be too large. We assume here that we can
2365 always at least get 8-bit constants in an AND insn, which is
2366 true for every current RISC. */
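	  /* For example, an unsigned 4-bit field at bit 3 becomes
	     (and (lshiftrt X (const_int 3)) (const_int 15)), while a
	     signed or wider field uses the two-shift form
	     (ashiftrt (ashift X C1) C2).  */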
2367
2368 if (unsignedp && len <= 8)
2369 {
2370 SUBST (SET_SRC (x),
2371 gen_rtx_combine
2372 (AND, mode,
2373 gen_rtx_combine (LSHIFTRT, mode,
2374 gen_lowpart_for_combine (mode, inner),
2375 GEN_INT (pos)),
2376 GEN_INT (((HOST_WIDE_INT) 1 << len) - 1)));
2377
2378 split = find_split_point (&SET_SRC (x), insn);
2379 if (split && split != &SET_SRC (x))
2380 return split;
2381 }
2382 else
2383 {
2384 SUBST (SET_SRC (x),
2385 gen_rtx_combine
2386 (unsignedp ? LSHIFTRT : ASHIFTRT, mode,
2387 gen_rtx_combine (ASHIFT, mode,
2388 gen_lowpart_for_combine (mode, inner),
2389 GEN_INT (GET_MODE_BITSIZE (mode)
2390 - len - pos)),
2391 GEN_INT (GET_MODE_BITSIZE (mode) - len)));
2392
2393 split = find_split_point (&SET_SRC (x), insn);
2394 if (split && split != &SET_SRC (x))
2395 return split;
2396 }
2397 }
2398
2399 /* See if this is a simple operation with a constant as the second
2400 operand. It might be that this constant is out of range and hence
2401 could be used as a split point. */
2402 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2403 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2404 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<')
2405 && CONSTANT_P (XEXP (SET_SRC (x), 1))
2406 && (GET_RTX_CLASS (GET_CODE (XEXP (SET_SRC (x), 0))) == 'o'
2407 || (GET_CODE (XEXP (SET_SRC (x), 0)) == SUBREG
2408 && (GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (SET_SRC (x), 0))))
2409 == 'o'))))
2410 return &XEXP (SET_SRC (x), 1);
2411
2412 /* Finally, see if this is a simple operation with its first operand
2413 not in a register. The operation might require this operand in a
2414 register, so return it as a split point. We can always do this
2415 because if the first operand were another operation, we would have
2416 already found it as a split point. */
2417 if ((GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '2'
2418 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == 'c'
2419 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '<'
2420 || GET_RTX_CLASS (GET_CODE (SET_SRC (x))) == '1')
2421 && ! register_operand (XEXP (SET_SRC (x), 0), VOIDmode))
2422 return &XEXP (SET_SRC (x), 0);
2423
2424 return 0;
2425
2426 case AND:
2427 case IOR:
2428 /* We write NOR as (and (not A) (not B)), but if we don't have a NOR,
2429 it is better to write this as (not (ior A B)) so we can split it.
2430 Similarly for IOR. */
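      /* For example, (and (not A) (not B)) becomes (not (ior A B)), and the
	 recursive call below can then split out the inner (ior A B) as a
	 separate insn on a machine with no NOR.  */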
2431 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == NOT)
2432 {
2433 SUBST (*loc,
2434 gen_rtx_combine (NOT, GET_MODE (x),
2435 gen_rtx_combine (code == IOR ? AND : IOR,
2436 GET_MODE (x),
2437 XEXP (XEXP (x, 0), 0),
2438 XEXP (XEXP (x, 1), 0))));
2439 return find_split_point (loc, insn);
2440 }
2441
2442 /* Many RISC machines have a large set of logical insns. If the
2443 second operand is a NOT, put it first so we will try to split the
2444 other operand first. */
2445 if (GET_CODE (XEXP (x, 1)) == NOT)
2446 {
2447 rtx tem = XEXP (x, 0);
2448 SUBST (XEXP (x, 0), XEXP (x, 1));
2449 SUBST (XEXP (x, 1), tem);
2450 }
2451 break;
2452 }
2453
2454 /* Otherwise, select our actions depending on our rtx class. */
2455 switch (GET_RTX_CLASS (code))
2456 {
2457 case 'b': /* This is ZERO_EXTRACT and SIGN_EXTRACT. */
2458 case '3':
2459 split = find_split_point (&XEXP (x, 2), insn);
2460 if (split)
2461 return split;
2462 /* ... fall through ... */
2463 case '2':
2464 case 'c':
2465 case '<':
2466 split = find_split_point (&XEXP (x, 1), insn);
2467 if (split)
2468 return split;
2469 /* ... fall through ... */
2470 case '1':
2471 /* Some machines have (and (shift ...) ...) insns. If X is not
2472 an AND, but XEXP (X, 0) is, use it as our split point. */
2473 if (GET_CODE (x) != AND && GET_CODE (XEXP (x, 0)) == AND)
2474 return &XEXP (x, 0);
2475
2476 split = find_split_point (&XEXP (x, 0), insn);
2477 if (split)
2478 return split;
2479 return loc;
2480 }
2481
2482 /* Otherwise, we don't have a split point. */
2483 return 0;
2484 }
2485 \f
2486 /* Throughout X, replace FROM with TO, and return the result.
2487 The result is TO if X is FROM;
2488 otherwise the result is X, but its contents may have been modified.
2489 If they were modified, a record was made in undobuf so that
2490 undo_all will (among other things) return X to its original state.
2491
2492 If the number of changes necessary is too great to record for undoing,
2493 the excess changes are not made, so the result is invalid.
2494 The changes already made can still be undone.
2495 undobuf.num_undo is incremented even for such changes, so by testing it
2496 the caller can tell whether the result is valid.
2497
2498 `n_occurrences' is incremented each time FROM is replaced.
2499
2500 IN_DEST is non-zero if we are processing the SET_DEST of a SET.
2501
2502 UNIQUE_COPY is non-zero if each substitution must be unique. We do this
2503 by copying if `n_occurrences' is non-zero. */
2504
2505 static rtx
2506 subst (x, from, to, in_dest, unique_copy)
2507 register rtx x, from, to;
2508 int in_dest;
2509 int unique_copy;
2510 {
2511 register char *fmt;
2512 register int len, i;
2513 register enum rtx_code code = GET_CODE (x), orig_code = code;
2514 rtx temp;
2515 enum machine_mode mode = GET_MODE (x);
2516 enum machine_mode op0_mode = VOIDmode;
2517 rtx other_insn;
2518 rtx *cc_use;
2519 int n_restarts = 0;
2520
2521 /* FAKE_EXTEND_SAFE_P (MODE, FROM) is 1 if (subreg:MODE FROM 0) is a safe
2522 replacement for (zero_extend:MODE FROM) or (sign_extend:MODE FROM).
2523 If it is 0, that cannot be done. We can now do this for any MEM
2524 because (SUBREG (MEM...)) is guaranteed to cause the MEM to be reloaded.
2525 If not for that, MEM's would very rarely be safe. */
2526
2527 /* Reject MODEs bigger than a word, because we might not be able
2528 to reference a two-register group starting with an arbitrary register
2529 (and currently gen_lowpart might crash for a SUBREG). */
2530
2531 #define FAKE_EXTEND_SAFE_P(MODE, FROM) \
2532 (GET_MODE_SIZE (MODE) <= UNITS_PER_WORD)
2533
2534 /* Two expressions are equal if they are identical copies of a shared
2535 RTX or if they are both registers with the same register number
2536 and mode. */
2537
2538 #define COMBINE_RTX_EQUAL_P(X,Y) \
2539 ((X) == (Y) \
2540 || (GET_CODE (X) == REG && GET_CODE (Y) == REG \
2541 && REGNO (X) == REGNO (Y) && GET_MODE (X) == GET_MODE (Y)))
2542
2543 if (! in_dest && COMBINE_RTX_EQUAL_P (x, from))
2544 {
2545 n_occurrences++;
2546 return (unique_copy && n_occurrences > 1 ? copy_rtx (to) : to);
2547 }
2548
2549 /* If X and FROM are the same register but different modes, they will
2550 not have been seen as equal above. However, flow.c will make a
2551 LOG_LINKS entry for that case. If we do nothing, we will try to
2552 rerecognize our original insn and, when it succeeds, we will
2553 delete the feeding insn, which is incorrect.
2554
2555 So force this insn not to match in this (rare) case. */
2556 if (! in_dest && code == REG && GET_CODE (from) == REG
2557 && REGNO (x) == REGNO (from))
2558 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
2559
2560 /* If this is an object, we are done unless it is a MEM or LO_SUM, both
2561 of which may contain things that can be combined. */
2562 if (code != MEM && code != LO_SUM && GET_RTX_CLASS (code) == 'o')
2563 return x;
2564
2565 /* It is possible to have a subexpression appear twice in the insn.
2566 Suppose that FROM is a register that appears within TO.
2567 Then, after that subexpression has been scanned once by `subst',
2568 the second time it is scanned, TO may be found. If we were
2569 to scan TO here, we would find FROM within it and create a
2570 self-referent rtl structure which is completely wrong. */
2571 if (COMBINE_RTX_EQUAL_P (x, to))
2572 return to;
2573
2574 len = GET_RTX_LENGTH (code);
2575 fmt = GET_RTX_FORMAT (code);
2576
2577 /* We don't need to process a SET_DEST that is a register, CC0, or PC, so
2578 set up to skip this common case. All other cases where we want to
2579 suppress replacing something inside a SET_SRC are handled via the
2580 IN_DEST operand. */
2581 if (code == SET
2582 && (GET_CODE (SET_DEST (x)) == REG
2583 || GET_CODE (SET_DEST (x)) == CC0
2584 || GET_CODE (SET_DEST (x)) == PC))
2585 fmt = "ie";
2586
2587 /* Get the mode of operand 0 in case X is now a SIGN_EXTEND of a constant. */
2588 if (fmt[0] == 'e')
2589 op0_mode = GET_MODE (XEXP (x, 0));
2590
2591 for (i = 0; i < len; i++)
2592 {
2593 if (fmt[i] == 'E')
2594 {
2595 register int j;
2596 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2597 {
2598 register rtx new;
2599 if (COMBINE_RTX_EQUAL_P (XVECEXP (x, i, j), from))
2600 {
2601 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2602 n_occurrences++;
2603 }
2604 else
2605 {
2606 new = subst (XVECEXP (x, i, j), from, to, 0, unique_copy);
2607
2608 /* If this substitution failed, this whole thing fails. */
2609 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2610 return new;
2611 }
2612
2613 SUBST (XVECEXP (x, i, j), new);
2614 }
2615 }
2616 else if (fmt[i] == 'e')
2617 {
2618 register rtx new;
2619
2620 if (COMBINE_RTX_EQUAL_P (XEXP (x, i), from))
2621 {
2622 new = (unique_copy && n_occurrences ? copy_rtx (to) : to);
2623 n_occurrences++;
2624 }
2625 else
2626 /* If we are in a SET_DEST, suppress most cases unless we
2627 have gone inside a MEM, in which case we want to
2628 simplify the address. We assume here that things that
2629 are actually part of the destination have their inner
2630 parts in the first expression. This is true for SUBREG,
2631 STRICT_LOW_PART, and ZERO_EXTRACT, which are the only
2632 things aside from REG and MEM that should appear in a
2633 SET_DEST. */
2634 new = subst (XEXP (x, i), from, to,
2635 (((in_dest
2636 && (code == SUBREG || code == STRICT_LOW_PART
2637 || code == ZERO_EXTRACT))
2638 || code == SET)
2639 && i == 0), unique_copy);
2640
2641 /* If we found that we will have to reject this combination,
2642 indicate that by returning the CLOBBER ourselves, rather than
2643 an expression containing it. This will speed things up as
2644 well as prevent accidents where two CLOBBERs are considered
2645 to be equal, thus producing an incorrect simplification. */
2646
2647 if (GET_CODE (new) == CLOBBER && XEXP (new, 0) == const0_rtx)
2648 return new;
2649
2650 SUBST (XEXP (x, i), new);
2651 }
2652 }
2653
2654 /* We come back to here if we have replaced the expression with one of
2655 a different code and it is likely that further simplification will be
2656 possible. */
2657
2658 restart:
2659
2660 /* If we have restarted more than 4 times, we are probably looping, so
2661 give up. */
2662 if (++n_restarts > 4)
2663 return x;
2664
2665 /* If we are restarting at all, it means that we no longer know the
2666 original mode of operand 0 (since we have probably changed the
2667 form of X). */
2668
2669 if (n_restarts > 1)
2670 op0_mode = VOIDmode;
2671
2672 code = GET_CODE (x);
2673
2674 /* If this is a commutative operation, put a constant last and a complex
2675 expression first. We don't need to do this for comparisons here. */
2676 if (GET_RTX_CLASS (code) == 'c'
2677 && ((CONSTANT_P (XEXP (x, 0)) && GET_CODE (XEXP (x, 1)) != CONST_INT)
2678 || (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == 'o'
2679 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')
2680 || (GET_CODE (XEXP (x, 0)) == SUBREG
2681 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o'
2682 && GET_RTX_CLASS (GET_CODE (XEXP (x, 1))) != 'o')))
2683 {
2684 temp = XEXP (x, 0);
2685 SUBST (XEXP (x, 0), XEXP (x, 1));
2686 SUBST (XEXP (x, 1), temp);
2687 }
2688
2689 /* If this is a PLUS, MINUS, or MULT, and the first operand is the
2690 sign extension of a PLUS with a constant, reverse the order of the sign
2691 extension and the addition. Note that this is not the same as the original
2692 code, but overflow is undefined for signed values. Also note that the
2693 PLUS will have been partially moved "inside" the sign-extension, so that
2694 the first operand of X will really look like:
2695 (ashiftrt (plus (ashift A C4) C5) C4).
2696 We convert this to
2697 (plus (ashiftrt (ashift A C4) C4) (ashiftrt C5 C4))
2698 and replace the first operand of X with that expression. Later parts
2699 of this function may simplify the expression further.
2700
2701 For example, if we start with (mult (sign_extend (plus A C1)) C2),
2702 we swap the SIGN_EXTEND and PLUS. Later code will apply the
2703 distributive law to produce (plus (mult (sign_extend A) C2) C3),
2704 where C3 is C1 times C2.
2705 We do this to simplify address expressions. */
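     /* A concrete instance of the shape above: if A is a QImode value
	widened to SImode, C4 is 24 and C5 is C1 << 24, so
	(ashiftrt (plus (ashift A 24) C5) 24) becomes
	(plus (ashiftrt (ashift A 24) 24) (ashiftrt C5 24)),
	i.e. (plus (sign_extend A) C1).  */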
2706
2707 if ((code == PLUS || code == MINUS || code == MULT)
2708 && GET_CODE (XEXP (x, 0)) == ASHIFTRT
2709 && GET_CODE (XEXP (XEXP (x, 0), 0)) == PLUS
2710 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 0)) == ASHIFT
2711 && GET_CODE (XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1)) == CONST_INT
2712 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2713 && XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 1) == XEXP (XEXP (x, 0), 1)
2714 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
2715 && (temp = simplify_binary_operation (ASHIFTRT, mode,
2716 XEXP (XEXP (XEXP (x, 0), 0), 1),
2717 XEXP (XEXP (x, 0), 1))) != 0)
2718 {
2719 rtx new
2720 = simplify_shift_const (NULL_RTX, ASHIFT, mode,
2721 XEXP (XEXP (XEXP (XEXP (x, 0), 0), 0), 0),
2722 INTVAL (XEXP (XEXP (x, 0), 1)));
2723
2724 new = simplify_shift_const (NULL_RTX, ASHIFTRT, mode, new,
2725 INTVAL (XEXP (XEXP (x, 0), 1)));
2726
2727 SUBST (XEXP (x, 0), gen_binary (PLUS, mode, new, temp));
2728 }
2729
2730 /* If this is a simple operation applied to an IF_THEN_ELSE, try
2731 applying it to the arms of the IF_THEN_ELSE. This often simplifies
2732 things. Don't deal with operations that change modes here. */
2733
2734 if ((GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
2735 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE)
2736 {
2737 /* Don't do this by using SUBST inside X since we might be messing
2738 up a shared expression. */
2739 rtx cond = XEXP (XEXP (x, 0), 0);
2740 rtx t_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 1),
2741 XEXP (x, 1)),
2742 pc_rtx, pc_rtx, 0, 0);
2743 rtx f_arm = subst (gen_binary (code, mode, XEXP (XEXP (x, 0), 2),
2744 XEXP (x, 1)),
2745 pc_rtx, pc_rtx, 0, 0);
2746
2747
2748 x = gen_rtx (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
2749 goto restart;
2750 }
2751
2752 else if (GET_RTX_CLASS (code) == '1'
2753 && GET_CODE (XEXP (x, 0)) == IF_THEN_ELSE
2754 && GET_MODE (XEXP (x, 0)) == mode)
2755 {
2756 rtx cond = XEXP (XEXP (x, 0), 0);
2757 rtx t_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 1)),
2758 pc_rtx, pc_rtx, 0, 0);
2759 rtx f_arm = subst (gen_unary (code, mode, XEXP (XEXP (x, 0), 2)),
2760 pc_rtx, pc_rtx, 0, 0);
2761
2762 x = gen_rtx_combine (IF_THEN_ELSE, mode, cond, t_arm, f_arm);
2763 goto restart;
2764 }
2765
2766 /* Try to fold this expression in case we have constants that weren't
2767 present before. */
2768 temp = 0;
2769 switch (GET_RTX_CLASS (code))
2770 {
2771 case '1':
2772 temp = simplify_unary_operation (code, mode, XEXP (x, 0), op0_mode);
2773 break;
2774 case '<':
2775 temp = simplify_relational_operation (code, op0_mode,
2776 XEXP (x, 0), XEXP (x, 1));
2777 #ifdef FLOAT_STORE_FLAG_VALUE
2778 if (temp != 0 && GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2779 temp = ((temp == const0_rtx) ? CONST0_RTX (GET_MODE (x))
2780 : immed_real_const_1 (FLOAT_STORE_FLAG_VALUE, GET_MODE (x)));
2781 #endif
2782 break;
2783 case 'c':
2784 case '2':
2785 temp = simplify_binary_operation (code, mode, XEXP (x, 0), XEXP (x, 1));
2786 break;
2787 case 'b':
2788 case '3':
2789 temp = simplify_ternary_operation (code, mode, op0_mode, XEXP (x, 0),
2790 XEXP (x, 1), XEXP (x, 2));
2791 break;
2792 }
2793
2794 if (temp)
2795 x = temp, code = GET_CODE (temp);
2796
2797 /* First see if we can apply the inverse distributive law. */
2798 if (code == PLUS || code == MINUS || code == IOR || code == XOR)
2799 {
2800 x = apply_distributive_law (x);
2801 code = GET_CODE (x);
2802 }
2803
2804 /* If CODE is an associative operation not otherwise handled, see if we
2805 can associate some operands. This can win if they are constants or
2806 if they are logically related (e.g., (a & b) & a). */
2807 if ((code == PLUS || code == MINUS
2808 || code == MULT || code == AND || code == IOR || code == XOR
2809 || code == DIV || code == UDIV
2810 || code == SMAX || code == SMIN || code == UMAX || code == UMIN)
2811 && GET_MODE_CLASS (mode) == MODE_INT)
2812 {
2813 if (GET_CODE (XEXP (x, 0)) == code)
2814 {
2815 rtx other = XEXP (XEXP (x, 0), 0);
2816 rtx inner_op0 = XEXP (XEXP (x, 0), 1);
2817 rtx inner_op1 = XEXP (x, 1);
2818 rtx inner;
2819
2820 /* Make sure we pass the constant operand if any as the second
2821 one if this is a commutative operation. */
2822 if (CONSTANT_P (inner_op0) && GET_RTX_CLASS (code) == 'c')
2823 {
2824 rtx tem = inner_op0;
2825 inner_op0 = inner_op1;
2826 inner_op1 = tem;
2827 }
2828 inner = simplify_binary_operation (code == MINUS ? PLUS
2829 : code == DIV ? MULT
2830 : code == UDIV ? MULT
2831 : code,
2832 mode, inner_op0, inner_op1);
2833
2834 /* For commutative operations, try the other pair if that one
2835 didn't simplify. */
2836 if (inner == 0 && GET_RTX_CLASS (code) == 'c')
2837 {
2838 other = XEXP (XEXP (x, 0), 1);
2839 inner = simplify_binary_operation (code, mode,
2840 XEXP (XEXP (x, 0), 0),
2841 XEXP (x, 1));
2842 }
2843
2844 if (inner)
2845 {
2846 x = gen_binary (code, mode, other, inner);
2847 goto restart;
2848
2849 }
2850 }
2851 }
2852
2853 /* A little bit of algebraic simplification here. */
2854 switch (code)
2855 {
2856 case MEM:
2857 /* Ensure that our address has any ASHIFTs converted to MULT in case
2858 address-recognizing predicates are called later. */
2859 temp = make_compound_operation (XEXP (x, 0), MEM);
2860 SUBST (XEXP (x, 0), temp);
2861 break;
2862
2863 case SUBREG:
2864 /* (subreg:A (mem:B X) N) becomes a modified MEM unless the SUBREG
2865 is paradoxical. If we can't do that safely, then it becomes
2866 something nonsensical so that this combination won't take place. */
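  /* For example, on a little-endian machine (subreg:QI (mem:SI A) 0)
     becomes (mem:QI A); with BYTES_BIG_ENDIAN the address is instead
     offset so that the same low-order byte is still the one referenced.  */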
2867
2868 if (GET_CODE (SUBREG_REG (x)) == MEM
2869 && (GET_MODE_SIZE (mode)
2870 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))))
2871 {
2872 rtx inner = SUBREG_REG (x);
2873 int endian_offset = 0;
2874 /* Don't change the mode of the MEM
2875 if that would change the meaning of the address. */
2876 if (MEM_VOLATILE_P (SUBREG_REG (x))
2877 || mode_dependent_address_p (XEXP (inner, 0)))
2878 return gen_rtx (CLOBBER, mode, const0_rtx);
2879
2880 #if BYTES_BIG_ENDIAN
2881 if (GET_MODE_SIZE (mode) < UNITS_PER_WORD)
2882 endian_offset += UNITS_PER_WORD - GET_MODE_SIZE (mode);
2883 if (GET_MODE_SIZE (GET_MODE (inner)) < UNITS_PER_WORD)
2884 endian_offset -= UNITS_PER_WORD - GET_MODE_SIZE (GET_MODE (inner));
2885 #endif
2886 /* Note that if the plus_constant doesn't make a valid address,
2887 this combination won't be accepted. */
2888 x = gen_rtx (MEM, mode,
2889 plus_constant (XEXP (inner, 0),
2890 (SUBREG_WORD (x) * UNITS_PER_WORD
2891 + endian_offset)));
2892 MEM_VOLATILE_P (x) = MEM_VOLATILE_P (inner);
2893 RTX_UNCHANGING_P (x) = RTX_UNCHANGING_P (inner);
2894 MEM_IN_STRUCT_P (x) = MEM_IN_STRUCT_P (inner);
2895 return x;
2896 }
2897
2898 /* If we are in a SET_DEST, these other cases can't apply. */
2899 if (in_dest)
2900 return x;
2901
2902 /* Changing mode twice with SUBREG => just change it once,
2903 or not at all if changing back to starting mode. */
2904 if (GET_CODE (SUBREG_REG (x)) == SUBREG)
2905 {
2906 if (mode == GET_MODE (SUBREG_REG (SUBREG_REG (x)))
2907 && SUBREG_WORD (x) == 0 && SUBREG_WORD (SUBREG_REG (x)) == 0)
2908 return SUBREG_REG (SUBREG_REG (x));
2909
2910 SUBST_INT (SUBREG_WORD (x),
2911 SUBREG_WORD (x) + SUBREG_WORD (SUBREG_REG (x)));
2912 SUBST (SUBREG_REG (x), SUBREG_REG (SUBREG_REG (x)));
2913 }
2914
2915 /* SUBREG of a hard register => just change the register number
2916 and/or mode. If the hard register is not valid in that mode,
2917 suppress this combination. If the hard register is the stack,
2918 frame, or argument pointer, leave this as a SUBREG. */
2919
2920 if (GET_CODE (SUBREG_REG (x)) == REG
2921 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
2922 && REGNO (SUBREG_REG (x)) != FRAME_POINTER_REGNUM
2923 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
2924 && REGNO (SUBREG_REG (x)) != ARG_POINTER_REGNUM
2925 #endif
2926 && REGNO (SUBREG_REG (x)) != STACK_POINTER_REGNUM)
2927 {
2928 if (HARD_REGNO_MODE_OK (REGNO (SUBREG_REG (x)) + SUBREG_WORD (x),
2929 mode))
2930 return gen_rtx (REG, mode,
2931 REGNO (SUBREG_REG (x)) + SUBREG_WORD (x));
2932 else
2933 return gen_rtx (CLOBBER, mode, const0_rtx);
2934 }
2935
2936 /* For a constant, try to pick up the part we want. Handle a full
2937 word and low-order part. Only do this if we are narrowing
2938 the constant; if it is being widened, we have no idea what
2939 the extra bits will have been set to. */
2940
2941 if (CONSTANT_P (SUBREG_REG (x)) && op0_mode != VOIDmode
2942 && GET_MODE_SIZE (mode) == UNITS_PER_WORD
2943 && GET_MODE_SIZE (op0_mode) < UNITS_PER_WORD
2944 && GET_MODE_CLASS (mode) == MODE_INT)
2945 {
2946 temp = operand_subword (SUBREG_REG (x), SUBREG_WORD (x),
2947 0, op0_mode);
2948 if (temp)
2949 return temp;
2950 }
2951
2952 if (CONSTANT_P (SUBREG_REG (x)) && subreg_lowpart_p (x)
2953 && GET_MODE_SIZE (mode) < GET_MODE_SIZE (op0_mode))
2954 return gen_lowpart_for_combine (mode, SUBREG_REG (x));
2955
2956 /* If we are narrowing the object, we need to see if we can simplify
2957 the expression for the object knowing that we only need the
2958 low-order bits. */
2959
2960 if (GET_MODE_SIZE (mode) < GET_MODE_SIZE (GET_MODE (SUBREG_REG (x)))
2961 && subreg_lowpart_p (x))
2962 return force_to_mode (SUBREG_REG (x), mode, GET_MODE_BITSIZE (mode),
2963 NULL_RTX);
2964 break;
2965
2966 case NOT:
2967 /* (not (plus X -1)) can become (neg X). */
2968 if (GET_CODE (XEXP (x, 0)) == PLUS
2969 && XEXP (XEXP (x, 0), 1) == constm1_rtx)
2970 {
2971 x = gen_rtx_combine (NEG, mode, XEXP (XEXP (x, 0), 0));
2972 goto restart;
2973 }
2974
2975 /* Similarly, (not (neg X)) is (plus X -1). */
2976 if (GET_CODE (XEXP (x, 0)) == NEG)
2977 {
2978 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
2979 goto restart;
2980 }
2981
2982 /* (not (xor X C)) for C constant is (xor X D) with D = ~ C. */
2983 if (GET_CODE (XEXP (x, 0)) == XOR
2984 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
2985 && (temp = simplify_unary_operation (NOT, mode,
2986 XEXP (XEXP (x, 0), 1),
2987 mode)) != 0)
2988 {
2989 SUBST (XEXP (XEXP (x, 0), 1), temp);
2990 return XEXP (x, 0);
2991 }
2992
2993 /* (not (ashift 1 X)) is (rotate ~1 X). We used to do this for operands
2994 other than 1, but that is not valid. We could do a similar
2995 simplification for (not (lshiftrt C X)) where C is just the sign bit,
2996 but this doesn't seem common enough to bother with. */
2997 if (GET_CODE (XEXP (x, 0)) == ASHIFT
2998 && XEXP (XEXP (x, 0), 0) == const1_rtx)
2999 {
3000 x = gen_rtx (ROTATE, mode, gen_unary (NOT, mode, const1_rtx),
3001 XEXP (XEXP (x, 0), 1));
3002 goto restart;
3003 }
3004
3005 if (GET_CODE (XEXP (x, 0)) == SUBREG
3006 && subreg_lowpart_p (XEXP (x, 0))
3007 && (GET_MODE_SIZE (GET_MODE (XEXP (x, 0)))
3008 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (x, 0)))))
3009 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == ASHIFT
3010 && XEXP (SUBREG_REG (XEXP (x, 0)), 0) == const1_rtx)
3011 {
3012 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (XEXP (x, 0)));
3013
3014 x = gen_rtx (ROTATE, inner_mode,
3015 gen_unary (NOT, inner_mode, const1_rtx),
3016 XEXP (SUBREG_REG (XEXP (x, 0)), 1));
3017 x = gen_lowpart_for_combine (mode, x);
3018 goto restart;
3019 }
3020
3021 #if STORE_FLAG_VALUE == -1
3022 /* (not (comparison foo bar)) can be done by reversing the comparison
3023 code if valid. */
3024 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3025 && reversible_comparison_p (XEXP (x, 0)))
3026 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
3027 mode, XEXP (XEXP (x, 0), 0),
3028 XEXP (XEXP (x, 0), 1));
3029 #endif
3030
3031 /* Apply De Morgan's laws to reduce number of patterns for machines
3032 with negating logical insns (and-not, nand, etc.). If result has
3033 only one NOT, put it first, since that is how the patterns are
3034 coded. */
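      /* For example, (not (and A (not B))) becomes (ior (not A) B), which
	 a machine with an or-not insn can match directly.  */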
3035
3036 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND)
3037 {
3038 rtx in1 = XEXP (XEXP (x, 0), 0), in2 = XEXP (XEXP (x, 0), 1);
3039
3040 if (GET_CODE (in1) == NOT)
3041 in1 = XEXP (in1, 0);
3042 else
3043 in1 = gen_rtx_combine (NOT, GET_MODE (in1), in1);
3044
3045 if (GET_CODE (in2) == NOT)
3046 in2 = XEXP (in2, 0);
3047 else if (GET_CODE (in2) == CONST_INT
3048 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
3049 in2 = GEN_INT (GET_MODE_MASK (mode) & ~ INTVAL (in2));
3050 else
3051 in2 = gen_rtx_combine (NOT, GET_MODE (in2), in2);
3052
3053 if (GET_CODE (in2) == NOT)
3054 {
3055 rtx tem = in2;
3056 in2 = in1; in1 = tem;
3057 }
3058
3059 x = gen_rtx_combine (GET_CODE (XEXP (x, 0)) == IOR ? AND : IOR,
3060 mode, in1, in2);
3061 goto restart;
3062 }
3063 break;
3064
3065 case NEG:
3066 /* (neg (plus X 1)) can become (not X). */
3067 if (GET_CODE (XEXP (x, 0)) == PLUS
3068 && XEXP (XEXP (x, 0), 1) == const1_rtx)
3069 {
3070 x = gen_rtx_combine (NOT, mode, XEXP (XEXP (x, 0), 0));
3071 goto restart;
3072 }
3073
3074 /* Similarly, (neg (not X)) is (plus X 1). */
3075 if (GET_CODE (XEXP (x, 0)) == NOT)
3076 {
3077 x = gen_rtx_combine (PLUS, mode, XEXP (XEXP (x, 0), 0), const1_rtx);
3078 goto restart;
3079 }
3080
3081 /* (neg (minus X Y)) can become (minus Y X). */
3082 if (GET_CODE (XEXP (x, 0)) == MINUS
3083 && (GET_MODE_CLASS (mode) != MODE_FLOAT
3084 /* x-y != -(y-x) with IEEE floating point. */
3085 || TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT))
3086 {
3087 x = gen_binary (MINUS, mode, XEXP (XEXP (x, 0), 1),
3088 XEXP (XEXP (x, 0), 0));
3089 goto restart;
3090 }
3091
3092 /* (neg (xor A 1)) is (plus A -1) if A is known to be either 0 or 1. */
3093 if (GET_CODE (XEXP (x, 0)) == XOR && XEXP (XEXP (x, 0), 1) == const1_rtx
3094 && significant_bits (XEXP (XEXP (x, 0), 0), mode) == 1)
3095 {
3096 x = gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0), constm1_rtx);
3097 goto restart;
3098 }
3099
3100 /* NEG commutes with ASHIFT since it is multiplication. Only do this
3101 if we can then eliminate the NEG (e.g.,
3102 if the operand is a constant). */
3103
3104 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
3105 {
3106 temp = simplify_unary_operation (NEG, mode,
3107 XEXP (XEXP (x, 0), 0), mode);
3108 if (temp)
3109 {
3110 SUBST (XEXP (XEXP (x, 0), 0), temp);
3111 return XEXP (x, 0);
3112 }
3113 }
3114
3115 temp = expand_compound_operation (XEXP (x, 0));
3116
3117 /* For C equal to the width of MODE minus 1, (neg (ashiftrt X C)) can be
3118 replaced by (lshiftrt X C). This will convert
3119 (neg (sign_extract X 1 Y)) to (zero_extract X 1 Y). */
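/* E.g. in SImode, (ashiftrt X 31) is 0 or -1 according to the sign
   bit; negating that gives 0 or 1, which is exactly
   (lshiftrt X 31).  */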
3120
3121 if (GET_CODE (temp) == ASHIFTRT
3122 && GET_CODE (XEXP (temp, 1)) == CONST_INT
3123 && INTVAL (XEXP (temp, 1)) == GET_MODE_BITSIZE (mode) - 1)
3124 {
3125 x = simplify_shift_const (temp, LSHIFTRT, mode, XEXP (temp, 0),
3126 INTVAL (XEXP (temp, 1)));
3127 goto restart;
3128 }
3129
3130 /* If X has only a single bit significant, say, bit I, convert
3131 (neg X) to (ashiftrt (ashift X C-I) C-I) where C is the bitsize of
3132 MODE minus 1. This will convert (neg (zero_extract X 1 Y)) to
3133 (sign_extract X 1 Y). But only do this if TEMP isn't a register
3134 or a SUBREG of one since we'd be making the expression more
3135 complex if it was just a register. */
3136
3137 if (GET_CODE (temp) != REG
3138 && ! (GET_CODE (temp) == SUBREG
3139 && GET_CODE (SUBREG_REG (temp)) == REG)
3140 && (i = exact_log2 (significant_bits (temp, mode))) >= 0)
3141 {
3142 rtx temp1 = simplify_shift_const
3143 (NULL_RTX, ASHIFTRT, mode,
3144 simplify_shift_const (NULL_RTX, ASHIFT, mode, temp,
3145 GET_MODE_BITSIZE (mode) - 1 - i),
3146 GET_MODE_BITSIZE (mode) - 1 - i);
3147
3148 /* If all we did was surround TEMP with the two shifts, we
3149 haven't improved anything, so don't use it. Otherwise,
3150 we are better off with TEMP1. */
3151 if (GET_CODE (temp1) != ASHIFTRT
3152 || GET_CODE (XEXP (temp1, 0)) != ASHIFT
3153 || XEXP (XEXP (temp1, 0), 0) != temp)
3154 {
3155 x = temp1;
3156 goto restart;
3157 }
3158 }
3159 break;
3160
3161 case FLOAT_TRUNCATE:
3162 /* (float_truncate:SF (float_extend:DF foo:SF)) = foo:SF. */
3163 if (GET_CODE (XEXP (x, 0)) == FLOAT_EXTEND
3164 && GET_MODE (XEXP (XEXP (x, 0), 0)) == mode)
3165 return XEXP (XEXP (x, 0), 0);
3166 break;
3167
3168 #ifdef HAVE_cc0
3169 case COMPARE:
3170 /* Convert (compare FOO (const_int 0)) to FOO. When cc0 is not in
3171 use, we instead want to leave this as a COMPARE so we can
3172 distinguish it from a register-register copy. */
3173 if (XEXP (x, 1) == const0_rtx)
3174 return XEXP (x, 0);
3175
3176 /* In IEEE floating point, x-0 is not the same as x. */
3177 if ((TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
3178 || GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT)
3179 && XEXP (x, 1) == CONST0_RTX (GET_MODE (XEXP (x, 0))))
3180 return XEXP (x, 0);
3181 break;
3182 #endif
3183
3184 case CONST:
3185 /* (const (const X)) can become (const X). Do it this way rather than
3186 returning the inner CONST since CONST can be shared with a
3187 REG_EQUAL note. */
3188 if (GET_CODE (XEXP (x, 0)) == CONST)
3189 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
3190 break;
3191
3192 #ifdef HAVE_lo_sum
3193 case LO_SUM:
3194 /* Convert (lo_sum (high FOO) FOO) to FOO. This is necessary so we
3195 can add in an offset. find_split_point will split this address up
3196 again if it doesn't match. */
3197 if (GET_CODE (XEXP (x, 0)) == HIGH
3198 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1)))
3199 return XEXP (x, 1);
3200 break;
3201 #endif
3202
3203 case PLUS:
3204 /* If we have (plus (plus A const) B), associate it so that CONST is
3205 outermost. That's because that's the way indexed addresses are
3206 supposed to appear. This code used to check many more cases, but
3207 they are now checked elsewhere. */
3208 if (GET_CODE (XEXP (x, 0)) == PLUS
3209 && CONSTANT_ADDRESS_P (XEXP (XEXP (x, 0), 1)))
3210 return gen_binary (PLUS, mode,
3211 gen_binary (PLUS, mode, XEXP (XEXP (x, 0), 0),
3212 XEXP (x, 1)),
3213 XEXP (XEXP (x, 0), 1));
3214
3215 /* (plus (xor (and <foo> (const_int pow2 - 1)) <c>) <-c>)
3216 when c is (const_int (pow2 + 1) / 2) is a sign extension of a
3217 bit-field and can be replaced by either a sign_extend or a
3218 sign_extract. The `and' may be a zero_extend. */
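/* Worked 8-bit instance with i = 2, i.e. a 3-bit field: the AND mask
   is 7 and c is 4. For a field value of 5 (101 binary),
   (5 ^ 4) - 4 = -3, which is 101 sign-extended from 3 bits.  */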
3219 if (GET_CODE (XEXP (x, 0)) == XOR
3220 && GET_CODE (XEXP (x, 1)) == CONST_INT
3221 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3222 && INTVAL (XEXP (x, 1)) == - INTVAL (XEXP (XEXP (x, 0), 1))
3223 && (i = exact_log2 (INTVAL (XEXP (XEXP (x, 0), 1)))) >= 0
3224 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3225 && ((GET_CODE (XEXP (XEXP (x, 0), 0)) == AND
3226 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
3227 && (INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
3228 == ((HOST_WIDE_INT) 1 << (i + 1)) - 1))
3229 || (GET_CODE (XEXP (XEXP (x, 0), 0)) == ZERO_EXTEND
3230 && (GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (XEXP (x, 0), 0), 0)))
3231 == i + 1))))
3232 {
3233 x = simplify_shift_const
3234 (NULL_RTX, ASHIFTRT, mode,
3235 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3236 XEXP (XEXP (XEXP (x, 0), 0), 0),
3237 GET_MODE_BITSIZE (mode) - (i + 1)),
3238 GET_MODE_BITSIZE (mode) - (i + 1));
3239 goto restart;
3240 }
3241
3242 /* If only the low-order bit of X is significant, (plus x -1)
3243 can become (ashiftrt (ashift (xor x 1) C) C) where C is
3244 the bitsize of the mode - 1. This allows simplification of
3245 "a = (b & 8) == 0;" */
3246 if (XEXP (x, 1) == constm1_rtx
3247 && GET_CODE (XEXP (x, 0)) != REG
3248 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3249 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == REG)
3250 && significant_bits (XEXP (x, 0), mode) == 1)
3251 {
3252 x = simplify_shift_const
3253 (NULL_RTX, ASHIFTRT, mode,
3254 simplify_shift_const (NULL_RTX, ASHIFT, mode,
3255 gen_rtx_combine (XOR, mode,
3256 XEXP (x, 0), const1_rtx),
3257 GET_MODE_BITSIZE (mode) - 1),
3258 GET_MODE_BITSIZE (mode) - 1);
3259 goto restart;
3260 }
3261
3262 /* If we are adding two things that have no bits in common, convert
3263 the addition into an IOR. This will often be further simplified,
3264 for example in cases like ((a & 1) + (a & 2)), which can
3265 become a & 3. */
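/* No carries can propagate between disjoint significant bits, so
   PLUS and IOR agree; e.g. for a = 3, (a & 1) + (a & 2) = 3 =
   (a & 1) | (a & 2).  */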
3266
3267 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3268 && (significant_bits (XEXP (x, 0), mode)
3269 & significant_bits (XEXP (x, 1), mode)) == 0)
3270 {
3271 x = gen_binary (IOR, mode, XEXP (x, 0), XEXP (x, 1));
3272 goto restart;
3273 }
3274 break;
3275
3276 case MINUS:
3277 /* (minus <foo> (and <foo> (const_int -pow2))) becomes
3278 (and <foo> (const_int pow2-1)) */
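/* E.g. with pow2 = 8, x - (x & -8) keeps only the low three bits
   of x, i.e. x & 7; for x = 29, 29 - 24 = 5 = 29 & 7.  */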
3279 if (GET_CODE (XEXP (x, 1)) == AND
3280 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
3281 && exact_log2 (- INTVAL (XEXP (XEXP (x, 1), 1))) >= 0
3282 && rtx_equal_p (XEXP (XEXP (x, 1), 0), XEXP (x, 0)))
3283 {
3284 x = simplify_and_const_int (NULL_RTX, mode, XEXP (x, 0),
3285 - INTVAL (XEXP (XEXP (x, 1), 1)) - 1);
3286 goto restart;
3287 }
3288 break;
3289
3290 case MULT:
3291 /* If we have (mult (plus A B) C), apply the distributive law and then
3292 the inverse distributive law to see if things simplify. This
3293 occurs mostly in addresses, often when unrolling loops. */
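/* E.g. (mult (plus reg (const_int 4)) (const_int 8)) becomes
   (plus (mult reg (const_int 8)) (const_int 32)), the canonical
   shape for an indexed address.  */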
3294
3295 if (GET_CODE (XEXP (x, 0)) == PLUS)
3296 {
3297 x = apply_distributive_law
3298 (gen_binary (PLUS, mode,
3299 gen_binary (MULT, mode,
3300 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
3301 gen_binary (MULT, mode,
3302 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
3303
3304 if (GET_CODE (x) != MULT)
3305 goto restart;
3306 }
3307
3308 /* If this is multiplication by a power of two and its first operand is
3309 a shift, treat the multiply as a shift to allow the shifts to
3310 possibly combine. */
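/* E.g. (mult (ashift X (const_int 2)) (const_int 4)) becomes
   (ashift X (const_int 4)), folding the multiply, viewed as a shift
   by log2(4) = 2, into the existing shift.  */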
3311 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3312 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3313 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3314 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3315 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3316 || GET_CODE (XEXP (x, 0)) == ROTATE
3317 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3318 {
3319 x = simplify_shift_const (NULL_RTX, ASHIFT, mode, XEXP (x, 0), i);
3320 goto restart;
3321 }
3322
3323 /* Convert (mult (ashift (const_int 1) A) B) to (ashift B A). */
3324 if (GET_CODE (XEXP (x, 0)) == ASHIFT
3325 && XEXP (XEXP (x, 0), 0) == const1_rtx)
3326 return gen_rtx_combine (ASHIFT, mode, XEXP (x, 1),
3327 XEXP (XEXP (x, 0), 1));
3328 break;
3329
3330 case UDIV:
3331 /* If this is a divide by a power of two, treat it as a shift if
3332 its first operand is a shift. */
3333 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3334 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0
3335 && (GET_CODE (XEXP (x, 0)) == ASHIFT
3336 || GET_CODE (XEXP (x, 0)) == LSHIFTRT
3337 || GET_CODE (XEXP (x, 0)) == ASHIFTRT
3338 || GET_CODE (XEXP (x, 0)) == ROTATE
3339 || GET_CODE (XEXP (x, 0)) == ROTATERT))
3340 {
3341 x = simplify_shift_const (NULL_RTX, LSHIFTRT, mode, XEXP (x, 0), i);
3342 goto restart;
3343 }
3344 break;
3345
3346 case EQ: case NE:
3347 case GT: case GTU: case GE: case GEU:
3348 case LT: case LTU: case LE: case LEU:
3349 /* If the first operand is a condition code, we can't do anything
3350 with it. */
3351 if (GET_CODE (XEXP (x, 0)) == COMPARE
3352 || (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) != MODE_CC
3353 #ifdef HAVE_cc0
3354 && XEXP (x, 0) != cc0_rtx
3355 #endif
3356 ))
3357 {
3358 rtx op0 = XEXP (x, 0);
3359 rtx op1 = XEXP (x, 1);
3360 enum rtx_code new_code;
3361
3362 if (GET_CODE (op0) == COMPARE)
3363 op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
3364
3365 /* Simplify our comparison, if possible. */
3366 new_code = simplify_comparison (code, &op0, &op1);
3367
3368 #if STORE_FLAG_VALUE == 1
3369 /* If STORE_FLAG_VALUE is 1, we can convert (ne x 0) to simply X
3370 if only the low-order bit is significant in X (such as when
3371 X is a ZERO_EXTRACT of one bit). Similarly, we can convert
3372 EQ to (xor X 1). */
3373 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3374 && op1 == const0_rtx
3375 && significant_bits (op0, GET_MODE (op0)) == 1)
3376 return gen_lowpart_for_combine (mode, op0);
3377 else if (new_code == EQ && GET_MODE_CLASS (mode) == MODE_INT
3378 && op1 == const0_rtx
3379 && significant_bits (op0, GET_MODE (op0)) == 1)
3380 return gen_rtx_combine (XOR, mode,
3381 gen_lowpart_for_combine (mode, op0),
3382 const1_rtx);
3383 #endif
3384
3385 #if STORE_FLAG_VALUE == -1
3386 /* If STORE_FLAG_VALUE is -1, we can convert (ne x 0)
3387 to (neg x) if only the low-order bit of X is significant.
3388 This converts (ne (zero_extract X 1 Y) 0) to
3389 (sign_extract X 1 Y). */
3390 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3391 && op1 == const0_rtx
3392 && significant_bits (op0, GET_MODE (op0)) == 1)
3393 {
3394 x = gen_rtx_combine (NEG, mode,
3395 gen_lowpart_for_combine (mode, op0));
3396 goto restart;
3397 }
3398 #endif
3399
3400 /* If STORE_FLAG_VALUE says to just test the sign bit and X has just
3401 one significant bit, we can convert (ne x 0) to (ashift x c)
3402 where C puts the bit in the sign bit. Remove any AND with
3403 STORE_FLAG_VALUE when we are done, since we are only going to
3404 test the sign bit. */
3405 if (new_code == NE && GET_MODE_CLASS (mode) == MODE_INT
3406 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3407 && (STORE_FLAG_VALUE
3408 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
3409 && op1 == const0_rtx
3410 && mode == GET_MODE (op0)
3411 && (i = exact_log2 (significant_bits (op0, GET_MODE (op0)))) >= 0)
3412 {
3413 x = simplify_shift_const (NULL_RTX, ASHIFT, mode, op0,
3414 GET_MODE_BITSIZE (mode) - 1 - i);
3415 if (GET_CODE (x) == AND && XEXP (x, 1) == const_true_rtx)
3416 return XEXP (x, 0);
3417 else
3418 return x;
3419 }
3420
3421 /* If the code changed, return a whole new comparison. */
3422 if (new_code != code)
3423 return gen_rtx_combine (new_code, mode, op0, op1);
3424
3425 /* Otherwise, keep this operation, but maybe change its operands.
3426 This also converts (ne (compare FOO BAR) 0) to (ne FOO BAR). */
3427 SUBST (XEXP (x, 0), op0);
3428 SUBST (XEXP (x, 1), op1);
3429 }
3430 break;
3431
3432 case IF_THEN_ELSE:
3433 /* Sometimes we can simplify the arm of an IF_THEN_ELSE if a register
3434 used in it is being compared against certain values. Get the
3435 true and false comparisons and see if that says anything about the
3436 value of each arm. */
3437
3438 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3439 && reversible_comparison_p (XEXP (x, 0))
3440 && GET_CODE (XEXP (XEXP (x, 0), 0)) == REG)
3441 {
3442 HOST_WIDE_INT sig;
3443 rtx from = XEXP (XEXP (x, 0), 0);
3444 enum rtx_code true_code = GET_CODE (XEXP (x, 0));
3445 enum rtx_code false_code = reverse_condition (true_code);
3446 rtx true_val = XEXP (XEXP (x, 0), 1);
3447 rtx false_val = true_val;
3448 rtx true_arm = XEXP (x, 1);
3449 rtx false_arm = XEXP (x, 2);
3450 int swapped = 0;
3451
3452 /* If FALSE_CODE is EQ, swap the codes and arms. */
3453
3454 if (false_code == EQ)
3455 {
3456 swapped = 1, true_code = EQ, false_code = NE;
3457 true_arm = XEXP (x, 2), false_arm = XEXP (x, 1);
3458 }
3459
3460 /* If we are comparing against zero and the expression being tested
3461 has only a single significant bit, that is its value when it is
3462 not equal to zero. Similarly if it is known to be -1 or 0. */
3463
3464 if (true_code == EQ && true_val == const0_rtx
3465 && exact_log2 (sig = significant_bits (from,
3466 GET_MODE (from))) >= 0)
3467 false_code = EQ, false_val = GEN_INT (sig);
3468 else if (true_code == EQ && true_val == const0_rtx
3469 && (num_sign_bit_copies (from, GET_MODE (from))
3470 == GET_MODE_BITSIZE (GET_MODE (from))))
3471 false_code = EQ, false_val = constm1_rtx;
3472
3473 /* Now simplify an arm if we know the value of the register
3474 in the branch and it is used in the arm. Be careful due to
3475 the potential of locally-shared RTL. */
3476
3477 if (reg_mentioned_p (from, true_arm))
3478 true_arm = subst (known_cond (copy_rtx (true_arm), true_code,
3479 from, true_val),
3480 pc_rtx, pc_rtx, 0, 0);
3481 if (reg_mentioned_p (from, false_arm))
3482 false_arm = subst (known_cond (copy_rtx (false_arm), false_code,
3483 from, false_val),
3484 pc_rtx, pc_rtx, 0, 0);
3485
3486 SUBST (XEXP (x, 1), swapped ? false_arm : true_arm);
3487 SUBST (XEXP (x, 2), swapped ? true_arm : false_arm);
3488 }
3489
3490 /* If we have (if_then_else FOO (pc) (label_ref BAR)) and FOO can be
3491 reversed, do so to avoid needing two sets of patterns for
3492 subtract-and-branch insns. Similarly if we have a constant in that
3493 position or if the third operand is the same as the first operand
3494 of the comparison. */
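/* E.g. (if_then_else (lt A B) (pc) (label_ref L)) becomes
   (if_then_else (ge A B) (label_ref L) (pc)), so only one branch
   pattern per comparison sense is needed.  */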
3495
3496 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3497 && reversible_comparison_p (XEXP (x, 0))
3498 && (XEXP (x, 1) == pc_rtx || GET_CODE (XEXP (x, 1)) == CONST_INT
3499 || rtx_equal_p (XEXP (x, 2), XEXP (XEXP (x, 0), 0))))
3500 {
3501 SUBST (XEXP (x, 0),
3502 gen_binary (reverse_condition (GET_CODE (XEXP (x, 0))),
3503 GET_MODE (XEXP (x, 0)),
3504 XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 0), 1)));
3505
3506 temp = XEXP (x, 1);
3507 SUBST (XEXP (x, 1), XEXP (x, 2));
3508 SUBST (XEXP (x, 2), temp);
3509 }
3510
3511 /* If the two arms are identical, we don't need the comparison. */
3512
3513 if (rtx_equal_p (XEXP (x, 1), XEXP (x, 2))
3514 && ! side_effects_p (XEXP (x, 0)))
3515 return XEXP (x, 1);
3516
3517 /* Look for cases where we have (abs x) or (neg (abs X)). */
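/* E.g. (if_then_else (ge X 0) X (neg X)) is (abs X), while
   (if_then_else (le X 0) X (neg X)) is (neg (abs X)).  */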
3518
3519 if (GET_MODE_CLASS (mode) == MODE_INT
3520 && GET_CODE (XEXP (x, 2)) == NEG
3521 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 2), 0))
3522 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3523 && rtx_equal_p (XEXP (x, 1), XEXP (XEXP (x, 0), 0))
3524 && ! side_effects_p (XEXP (x, 1)))
3525 switch (GET_CODE (XEXP (x, 0)))
3526 {
3527 case GT:
3528 case GE:
3529 x = gen_unary (ABS, mode, XEXP (x, 1));
3530 goto restart;
3531 case LT:
3532 case LE:
3533 x = gen_unary (NEG, mode, gen_unary (ABS, mode, XEXP (x, 1)));
3534 goto restart;
3535 }
3536
3537 /* Look for MIN or MAX. */
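/* E.g. (if_then_else (gt A B) A B) is (smax A B) and
   (if_then_else (ltu A B) A B) is (umin A B).  */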
3538
3539 if (GET_MODE_CLASS (mode) == MODE_INT
3540 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
3541 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3542 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 2))
3543 && ! side_effects_p (XEXP (x, 0)))
3544 switch (GET_CODE (XEXP (x, 0)))
3545 {
3546 case GE:
3547 case GT:
3548 x = gen_binary (SMAX, mode, XEXP (x, 1), XEXP (x, 2));
3549 goto restart;
3550 case LE:
3551 case LT:
3552 x = gen_binary (SMIN, mode, XEXP (x, 1), XEXP (x, 2));
3553 goto restart;
3554 case GEU:
3555 case GTU:
3556 x = gen_binary (UMAX, mode, XEXP (x, 1), XEXP (x, 2));
3557 goto restart;
3558 case LEU:
3559 case LTU:
3560 x = gen_binary (UMIN, mode, XEXP (x, 1), XEXP (x, 2));
3561 goto restart;
3562 }
3563
3564 /* If we have something like (if_then_else (ne A 0) (OP X C) X),
3565 A is known to be either 0 or 1, and OP is an identity when its
3566 second operand is zero, this can be done as (OP X (mult A C)).
3567 Similarly if A is known to be 0 or -1 and also similarly if we have
3568 a ZERO_EXTEND or SIGN_EXTEND as long as X is already extended (so
3569 we don't destroy it). */
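/* Sketch for A known to be 0 or 1: (if_then_else (ne A 0)
   (plus X C) X) computes X + A*C, since A*C is C when A is 1 and 0
   when A is 0, and PLUS with a zero second operand is an identity.  */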
3570
3571 if (mode != VOIDmode
3572 && (GET_CODE (XEXP (x, 0)) == EQ || GET_CODE (XEXP (x, 0)) == NE)
3573 && XEXP (XEXP (x, 0), 1) == const0_rtx
3574 && (significant_bits (XEXP (XEXP (x, 0), 0), mode) == 1
3575 || (num_sign_bit_copies (XEXP (XEXP (x, 0), 0), mode)
3576 == GET_MODE_BITSIZE (mode))))
3577 {
3578 rtx nz = make_compound_operation (GET_CODE (XEXP (x, 0)) == NE
3579 ? XEXP (x, 1) : XEXP (x, 2));
3580 rtx z = GET_CODE (XEXP (x, 0)) == NE ? XEXP (x, 2) : XEXP (x, 1);
3581 rtx dir = (significant_bits (XEXP (XEXP (x, 0), 0), mode) == 1
3582 ? const1_rtx : constm1_rtx);
3583 rtx c = 0;
3584 enum machine_mode m = mode;
3585 enum rtx_code op, extend_op = 0;
3586
3587 if ((GET_CODE (nz) == PLUS || GET_CODE (nz) == MINUS
3588 || GET_CODE (nz) == IOR || GET_CODE (nz) == XOR
3589 || GET_CODE (nz) == ASHIFT
3590 || GET_CODE (nz) == LSHIFTRT || GET_CODE (nz) == ASHIFTRT)
3591 && rtx_equal_p (XEXP (nz, 0), z))
3592 c = XEXP (nz, 1), op = GET_CODE (nz);
3593 else if (GET_CODE (nz) == SIGN_EXTEND
3594 && (GET_CODE (XEXP (nz, 0)) == PLUS
3595 || GET_CODE (XEXP (nz, 0)) == MINUS
3596 || GET_CODE (XEXP (nz, 0)) == IOR
3597 || GET_CODE (XEXP (nz, 0)) == XOR
3598 || GET_CODE (XEXP (nz, 0)) == ASHIFT
3599 || GET_CODE (XEXP (nz, 0)) == LSHIFTRT
3600 || GET_CODE (XEXP (nz, 0)) == ASHIFTRT)
3601 && GET_CODE (XEXP (XEXP (nz, 0), 0)) == SUBREG
3602 && subreg_lowpart_p (XEXP (XEXP (nz, 0), 0))
3603 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (nz, 0), 0)), z)
3604 && (num_sign_bit_copies (z, GET_MODE (z))
3605 >= (GET_MODE_BITSIZE (mode)
3606 - GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (nz, 0), 0))))))
3607 {
3608 c = XEXP (XEXP (nz, 0), 1);
3609 op = GET_CODE (XEXP (nz, 0));
3610 extend_op = SIGN_EXTEND;
3611 m = GET_MODE (XEXP (nz, 0));
3612 }
3613 else if (GET_CODE (nz) == ZERO_EXTEND
3614 && (GET_CODE (XEXP (nz, 0)) == PLUS
3615 || GET_CODE (XEXP (nz, 0)) == MINUS
3616 || GET_CODE (XEXP (nz, 0)) == IOR
3617 || GET_CODE (XEXP (nz, 0)) == XOR
3618 || GET_CODE (XEXP (nz, 0)) == ASHIFT
3619 || GET_CODE (XEXP (nz, 0)) == LSHIFTRT
3620 || GET_CODE (XEXP (nz, 0)) == ASHIFTRT)
3621 && GET_CODE (XEXP (XEXP (nz, 0), 0)) == SUBREG
3622 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
3623 && subreg_lowpart_p (XEXP (XEXP (nz, 0), 0))
3624 && rtx_equal_p (SUBREG_REG (XEXP (XEXP (nz, 0), 0)), z)
3625 && ((significant_bits (z, GET_MODE (z))
3626 & ~ GET_MODE_MASK (GET_MODE (XEXP (XEXP (nz, 0), 0))))
3627 == 0))
3628 {
3629 c = XEXP (XEXP (nz, 0), 1);
3630 op = GET_CODE (XEXP (nz, 0));
3631 extend_op = ZERO_EXTEND;
3632 m = GET_MODE (XEXP (nz, 0));
3633 }
3634
3635 if (c && ! side_effects_p (c) && ! side_effects_p (z))
3636 {
3637 temp
3638 = gen_binary (MULT, m,
3639 gen_lowpart_for_combine (m,
3640 XEXP (XEXP (x, 0), 0)),
3641 gen_binary (MULT, m, c, dir));
3642
3643 temp = gen_binary (op, m, gen_lowpart_for_combine (m, z), temp);
3644
3645 if (extend_op != 0)
3646 temp = gen_unary (extend_op, mode, temp);
3647
3648 return temp;
3649 }
3650 }
3651 break;
3652
3653 case ZERO_EXTRACT:
3654 case SIGN_EXTRACT:
3655 case ZERO_EXTEND:
3656 case SIGN_EXTEND:
3657 /* If we are processing SET_DEST, we are done. */
3658 if (in_dest)
3659 return x;
3660
3661 x = expand_compound_operation (x);
3662 if (GET_CODE (x) != code)
3663 goto restart;
3664 break;
3665
3666 case SET:
3667 /* (set (pc) (return)) gets written as (return). */
3668 if (GET_CODE (SET_DEST (x)) == PC && GET_CODE (SET_SRC (x)) == RETURN)
3669 return SET_SRC (x);
3670
3671 /* Convert this into a field assignment operation, if possible. */
3672 x = make_field_assignment (x);
3673
3674 /* If we are setting CC0 or if the source is a COMPARE, look for the
3675 use of the comparison result and try to simplify it unless we already
3676 have used undobuf.other_insn. */
3677 if ((GET_CODE (SET_SRC (x)) == COMPARE
3678 #ifdef HAVE_cc0
3679 || SET_DEST (x) == cc0_rtx
3680 #endif
3681 )
3682 && (cc_use = find_single_use (SET_DEST (x), subst_insn,
3683 &other_insn)) != 0
3684 && (undobuf.other_insn == 0 || other_insn == undobuf.other_insn)
3685 && GET_RTX_CLASS (GET_CODE (*cc_use)) == '<'
3686 && XEXP (*cc_use, 0) == SET_DEST (x))
3687 {
3688 enum rtx_code old_code = GET_CODE (*cc_use);
3689 enum rtx_code new_code;
3690 rtx op0, op1;
3691 int other_changed = 0;
3692 enum machine_mode compare_mode = GET_MODE (SET_DEST (x));
3693
3694 if (GET_CODE (SET_SRC (x)) == COMPARE)
3695 op0 = XEXP (SET_SRC (x), 0), op1 = XEXP (SET_SRC (x), 1);
3696 else
3697 op0 = SET_SRC (x), op1 = const0_rtx;
3698
3699 /* Simplify our comparison, if possible. */
3700 new_code = simplify_comparison (old_code, &op0, &op1);
3701
3702 #if !defined (HAVE_cc0) && defined (EXTRA_CC_MODES)
3703 /* If this machine has CC modes other than CCmode, check to see
3704 if we need to use a different CC mode here. */
3705 compare_mode = SELECT_CC_MODE (new_code, op0, op1);
3706
3707 /* If the mode changed, we have to change SET_DEST, the mode
3708 in the compare, and the mode in the place SET_DEST is used.
3709 If SET_DEST is a hard register, just build new versions with
3710 the proper mode. If it is a pseudo, we lose unless this is the
3711 only time we set the pseudo, in which case we can safely change
3712 its mode. */
3713 if (compare_mode != GET_MODE (SET_DEST (x)))
3714 {
3715 int regno = REGNO (SET_DEST (x));
3716 rtx new_dest = gen_rtx (REG, compare_mode, regno);
3717
3718 if (regno < FIRST_PSEUDO_REGISTER
3719 || (reg_n_sets[regno] == 1
3720 && ! REG_USERVAR_P (SET_DEST (x))))
3721 {
3722 if (regno >= FIRST_PSEUDO_REGISTER)
3723 SUBST (regno_reg_rtx[regno], new_dest);
3724
3725 SUBST (SET_DEST (x), new_dest);
3726 SUBST (XEXP (*cc_use, 0), new_dest);
3727 other_changed = 1;
3728 }
3729 }
3730 #endif
3731
3732 /* If the code changed, we have to build a new comparison
3733 in undobuf.other_insn. */
3734 if (new_code != old_code)
3735 {
3736 unsigned HOST_WIDE_INT mask;
3737
3738 SUBST (*cc_use, gen_rtx_combine (new_code, GET_MODE (*cc_use),
3739 SET_DEST (x), const0_rtx));
3740
3741 /* If the only change we made was to change an EQ into an
3742 NE or vice versa, OP0 has only one significant bit,
3743 and OP1 is zero, check if changing the user of the condition
3744 code will produce a valid insn. If it won't, we can keep
3745 the original code in that insn by surrounding our operation
3746 with an XOR. */
3747
3748 if (((old_code == NE && new_code == EQ)
3749 || (old_code == EQ && new_code == NE))
3750 && ! other_changed && op1 == const0_rtx
3751 && (GET_MODE_BITSIZE (GET_MODE (op0))
3752 <= HOST_BITS_PER_WIDE_INT)
3753 && (exact_log2 (mask = significant_bits (op0,
3754 GET_MODE (op0)))
3755 >= 0))
3756 {
3757 rtx pat = PATTERN (other_insn), note = 0;
3758
3759 if ((recog_for_combine (&pat, undobuf.other_insn, &note) < 0
3760 && ! check_asm_operands (pat)))
3761 {
3762 PUT_CODE (*cc_use, old_code);
3763 other_insn = 0;
3764
3765 op0 = gen_binary (XOR, GET_MODE (op0), op0,
3766 GEN_INT (mask));
3767 }
3768 }
3769
3770 other_changed = 1;
3771 }
3772
3773 if (other_changed)
3774 undobuf.other_insn = other_insn;
3775
3776 #ifdef HAVE_cc0
3777 /* If we are now comparing against zero, change our source if
3778 needed. If we do not use cc0, we always have a COMPARE. */
3779 if (op1 == const0_rtx && SET_DEST (x) == cc0_rtx)
3780 SUBST (SET_SRC (x), op0);
3781 else
3782 #endif
3783
3784 /* Otherwise, if we didn't previously have a COMPARE in the
3785 correct mode, we need one. */
3786 if (GET_CODE (SET_SRC (x)) != COMPARE
3787 || GET_MODE (SET_SRC (x)) != compare_mode)
3788 SUBST (SET_SRC (x), gen_rtx_combine (COMPARE, compare_mode,
3789 op0, op1));
3790 else
3791 {
3792 /* Otherwise, update the COMPARE if needed. */
3793 SUBST (XEXP (SET_SRC (x), 0), op0);
3794 SUBST (XEXP (SET_SRC (x), 1), op1);
3795 }
3796 }
3797 else
3798 {
3799 /* Get SET_SRC in a form where we have placed back any
3800 compound expressions. Then do the checks below. */
3801 temp = make_compound_operation (SET_SRC (x), SET);
3802 SUBST (SET_SRC (x), temp);
3803 }
3804
3805 /* If we have (set x (subreg:m1 (op:m2 ...) 0)) with OP being some
3806 operation, and X being a REG or (subreg (reg)), we may be able to
3807 convert this to (set (subreg:m2 x) (op)).
3808
3809 We can always do this if M1 is narrower than M2 because that
3810 means that we only care about the low bits of the result.
3811
3812 However, on most machines (those with BYTE_LOADS_ZERO_EXTEND
3813 and BYTE_LOADS_SIGN_EXTEND not defined), we cannot perform a
3814 narrower operation than requested since the high-order bits will
3815 be undefined. On machines where BYTE_LOADS_*_EXTEND is defined,
3816 however, this transformation is safe as long as M1 and M2 have
3817 the same number of words. */
3818
3819 if (GET_CODE (SET_SRC (x)) == SUBREG
3820 && subreg_lowpart_p (SET_SRC (x))
3821 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (SET_SRC (x)))) != 'o'
3822 && (((GET_MODE_SIZE (GET_MODE (SET_SRC (x))) + (UNITS_PER_WORD - 1))
3823 / UNITS_PER_WORD)
3824 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x))))
3825 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
3826 #if ! defined(BYTE_LOADS_ZERO_EXTEND) && ! defined (BYTE_LOADS_SIGN_EXTEND)
3827 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3828 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3829 #endif
3830 && (GET_CODE (SET_DEST (x)) == REG
3831 || (GET_CODE (SET_DEST (x)) == SUBREG
3832 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG)))
3833 {
3834 SUBST (SET_DEST (x),
3835 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_SRC (x))),
3836 SET_DEST (x)));
3837 SUBST (SET_SRC (x), SUBREG_REG (SET_SRC (x)));
3838 }
3839
3840 #ifdef BYTE_LOADS_ZERO_EXTEND
3841 /* If we have (set FOO (subreg:M (mem:N BAR) 0)) with
3842 M wider than N, this would require a paradoxical subreg.
3843 Replace the subreg with a zero_extend to avoid the reload that
3844 would otherwise be required. */
3845 if (GET_CODE (SET_SRC (x)) == SUBREG
3846 && subreg_lowpart_p (SET_SRC (x))
3847 && SUBREG_WORD (SET_SRC (x)) == 0
3848 && (GET_MODE_SIZE (GET_MODE (SET_SRC (x)))
3849 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_SRC (x)))))
3850 && GET_CODE (SUBREG_REG (SET_SRC (x))) == MEM)
3851 SUBST (SET_SRC (x), gen_rtx_combine (ZERO_EXTEND,
3852 GET_MODE (SET_SRC (x)),
3853 XEXP (SET_SRC (x), 0)));
3854 #endif
3855
3856 #ifndef HAVE_conditional_move
3857
3858 /* If we don't have a conditional move, SET_SRC is an IF_THEN_ELSE,
3859 and we are comparing an item known to be 0 or -1 against 0, use a
3860 logical operation instead. Check for one of the arms being an IOR
3861 of the other arm with some value. We compute three terms to be
3862 IOR'ed together. In practice, at most two will be nonzero. Then
3863 we do the IOR's. */
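/* With A known to be 0 or -1 and tested against 0, the selected
   value is (A & true_arm) | (~A & false_arm): A == -1 picks
   true_arm and A == 0 picks false_arm.  */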
3864
3865 if (GET_CODE (SET_DEST (x)) != PC
3866 && GET_CODE (SET_SRC (x)) == IF_THEN_ELSE
3867 && (GET_CODE (XEXP (SET_SRC (x), 0)) == EQ
3868 || GET_CODE (XEXP (SET_SRC (x), 0)) == NE)
3869 && XEXP (XEXP (SET_SRC (x), 0), 1) == const0_rtx
3870 && (num_sign_bit_copies (XEXP (XEXP (SET_SRC (x), 0), 0),
3871 GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0)))
3872 == GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (SET_SRC (x), 0), 0))))
3873 && ! side_effects_p (SET_SRC (x)))
3874 {
3875 rtx true = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
3876 ? XEXP (SET_SRC (x), 1) : XEXP (SET_SRC (x), 2));
3877 rtx false = (GET_CODE (XEXP (SET_SRC (x), 0)) == NE
3878 ? XEXP (SET_SRC (x), 2) : XEXP (SET_SRC (x), 1));
3879 rtx term1 = const0_rtx, term2, term3;
3880
3881 if (GET_CODE (true) == IOR && rtx_equal_p (XEXP (true, 0), false))
3882 term1 = false, true = XEXP (true, 1), false = const0_rtx;
3883 else if (GET_CODE (true) == IOR
3884 && rtx_equal_p (XEXP (true, 1), false))
3885 term1 = false, true = XEXP (true, 0), false = const0_rtx;
3886 else if (GET_CODE (false) == IOR
3887 && rtx_equal_p (XEXP (false, 0), true))
3888 term1 = true, false = XEXP (false, 1), true = const0_rtx;
3889 else if (GET_CODE (false) == IOR
3890 && rtx_equal_p (XEXP (false, 1), true))
3891 term1 = true, false = XEXP (false, 0), true = const0_rtx;
3892
3893 term2 = gen_binary (AND, GET_MODE (SET_SRC (x)),
3894 XEXP (XEXP (SET_SRC (x), 0), 0), true);
3895 term3 = gen_binary (AND, GET_MODE (SET_SRC (x)),
3896 gen_unary (NOT, GET_MODE (SET_SRC (x)),
3897 XEXP (XEXP (SET_SRC (x), 0), 0)),
3898 false);
3899
3900 SUBST (SET_SRC (x),
3901 gen_binary (IOR, GET_MODE (SET_SRC (x)),
3902 gen_binary (IOR, GET_MODE (SET_SRC (x)),
3903 term1, term2),
3904 term3));
3905 }
3906 #endif
3907 break;
3908
3909 case AND:
3910 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
3911 {
3912 x = simplify_and_const_int (x, mode, XEXP (x, 0),
3913 INTVAL (XEXP (x, 1)));
3914
3915 /* If we have (ior (and X C1) C2) and the next restart would be
3916 the last, simplify this by making C1 as small as possible
3917 and then exit. */
3918 if (n_restarts >= 3 && GET_CODE (x) == IOR
3919 && GET_CODE (XEXP (x, 0)) == AND
3920 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
3921 && GET_CODE (XEXP (x, 1)) == CONST_INT)
3922 {
3923 temp = gen_binary (AND, mode, XEXP (XEXP (x, 0), 0),
3924 GEN_INT (INTVAL (XEXP (XEXP (x, 0), 1))
3925 & ~ INTVAL (XEXP (x, 1))));
3926 return gen_binary (IOR, mode, temp, XEXP (x, 1));
3927 }
3928
3929 if (GET_CODE (x) != AND)
3930 goto restart;
3931 }
3932
3933 /* Convert (A | B) & A to A. */
3934 if (GET_CODE (XEXP (x, 0)) == IOR
3935 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3936 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
3937 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
3938 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
3939 return XEXP (x, 1);
3940
3941 /* Convert (A ^ B) & A to A & (~ B) since the latter is often a single
3942 insn (and may simplify more). */
3943 else if (GET_CODE (XEXP (x, 0)) == XOR
3944 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
3945 && ! side_effects_p (XEXP (x, 1)))
3946 {
3947 x = gen_binary (AND, mode,
3948 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
3949 XEXP (x, 1));
3950 goto restart;
3951 }
3952 else if (GET_CODE (XEXP (x, 0)) == XOR
3953 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
3954 && ! side_effects_p (XEXP (x, 1)))
3955 {
3956 x = gen_binary (AND, mode,
3957 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
3958 XEXP (x, 1));
3959 goto restart;
3960 }
3961
3962 /* Similarly for (~ (A ^ B)) & A. */
3963 else if (GET_CODE (XEXP (x, 0)) == NOT
3964 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
3965 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 0), XEXP (x, 1))
3966 && ! side_effects_p (XEXP (x, 1)))
3967 {
3968 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 1),
3969 XEXP (x, 1));
3970 goto restart;
3971 }
3972 else if (GET_CODE (XEXP (x, 0)) == NOT
3973 && GET_CODE (XEXP (XEXP (x, 0), 0)) == XOR
3974 && rtx_equal_p (XEXP (XEXP (XEXP (x, 0), 0), 1), XEXP (x, 1))
3975 && ! side_effects_p (XEXP (x, 1)))
3976 {
3977 x = gen_binary (AND, mode, XEXP (XEXP (XEXP (x, 0), 0), 0),
3978 XEXP (x, 1));
3979 goto restart;
3980 }
3981
3982 /* If we have (and A B) with A not an object but that is known to
3983 be -1 or 0, this is equivalent to the expression
3984 (if_then_else (ne A (const_int 0)) B (const_int 0))
3985 We make this conversion because it may allow further
3986 simplifications and then allow use of conditional move insns.
3987 If the machine doesn't have condition moves, code in case SET
3988 will convert the IF_THEN_ELSE back to the logical operation.
3989 We build the IF_THEN_ELSE here in case further simplification
3990 is possible (e.g., we can convert it to ABS). */
3991
3992 if (GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) != 'o'
3993 && ! (GET_CODE (XEXP (x, 0)) == SUBREG
3994 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (XEXP (x, 0)))) == 'o')
3995 && (num_sign_bit_copies (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
3996 == GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))))
3997 {
3998 rtx op0 = XEXP (x, 0);
3999 rtx op1 = const0_rtx;
4000 enum rtx_code comp_code
4001 = simplify_comparison (NE, &op0, &op1);
4002
4003 x = gen_rtx_combine (IF_THEN_ELSE, mode,
4004 gen_binary (comp_code, VOIDmode, op0, op1),
4005 XEXP (x, 1), const0_rtx);
4006 goto restart;
4007 }
4008
4009 /* In the following group of tests (and those in case IOR below),
4010 we start with some combination of logical operations and apply
4011 the distributive law followed by the inverse distributive law.
4012 Most of the time, this results in no change. However, if some of
4013 the operands are the same or inverses of each other, simplifications
4014 will result.
4015
4016 For example, (and (ior A B) (not B)) can occur as the result of
4017 expanding a bit field assignment. When we apply the distributive
4018 law to this, we get (ior (and A (not B)) (and B (not B))),
4019 which then simplifies to (and A (not B)). */
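/* A 4-bit spot check with A = 0110, B = 0011: (A | B) & ~B
   = 0111 & 1100 = 0100, and A & ~B = 0110 & 1100 = 0100.  */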
4020
4021 /* If we have (and (ior A B) C), apply the distributive law and then
4022 the inverse distributive law to see if things simplify. */
4023
4024 if (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == XOR)
4025 {
4026 x = apply_distributive_law
4027 (gen_binary (GET_CODE (XEXP (x, 0)), mode,
4028 gen_binary (AND, mode,
4029 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4030 gen_binary (AND, mode,
4031 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4032 if (GET_CODE (x) != AND)
4033 goto restart;
4034 }
4035
4036 if (GET_CODE (XEXP (x, 1)) == IOR || GET_CODE (XEXP (x, 1)) == XOR)
4037 {
4038 x = apply_distributive_law
4039 (gen_binary (GET_CODE (XEXP (x, 1)), mode,
4040 gen_binary (AND, mode,
4041 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4042 gen_binary (AND, mode,
4043 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4044 if (GET_CODE (x) != AND)
4045 goto restart;
4046 }
4047
4048 /* Similarly, taking advantage of the fact that
4049 (and (not A) (xor B C)) == (xor (ior A B) (ior A C)) */
4050
4051 if (GET_CODE (XEXP (x, 0)) == NOT && GET_CODE (XEXP (x, 1)) == XOR)
4052 {
4053 x = apply_distributive_law
4054 (gen_binary (XOR, mode,
4055 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4056 XEXP (XEXP (x, 1), 0)),
4057 gen_binary (IOR, mode, XEXP (XEXP (x, 0), 0),
4058 XEXP (XEXP (x, 1), 1))));
4059 if (GET_CODE (x) != AND)
4060 goto restart;
4061 }
4062
4063 else if (GET_CODE (XEXP (x, 1)) == NOT && GET_CODE (XEXP (x, 0)) == XOR)
4064 {
4065 x = apply_distributive_law
4066 (gen_binary (XOR, mode,
4067 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4068 XEXP (XEXP (x, 0), 0)),
4069 gen_binary (IOR, mode, XEXP (XEXP (x, 1), 0),
4070 XEXP (XEXP (x, 0), 1))));
4071 if (GET_CODE (x) != AND)
4072 goto restart;
4073 }
4074 break;
4075
4076 case IOR:
4077 /* (ior A C) is C if all significant bits of A are on in C. */
4078 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4079 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4080 && (significant_bits (XEXP (x, 0), mode)
4081 & ~ INTVAL (XEXP (x, 1))) == 0)
4082 return XEXP (x, 1);
4083
4084 /* Convert (A & B) | A to A. */
4085 if (GET_CODE (XEXP (x, 0)) == AND
4086 && (rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4087 || rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1)))
4088 && ! side_effects_p (XEXP (XEXP (x, 0), 0))
4089 && ! side_effects_p (XEXP (XEXP (x, 0), 1)))
4090 return XEXP (x, 1);
4091
4092 /* If we have (ior (and A B) C), apply the distributive law and then
4093 the inverse distributive law to see if things simplify. */
4094
4095 if (GET_CODE (XEXP (x, 0)) == AND)
4096 {
4097 x = apply_distributive_law
4098 (gen_binary (AND, mode,
4099 gen_binary (IOR, mode,
4100 XEXP (XEXP (x, 0), 0), XEXP (x, 1)),
4101 gen_binary (IOR, mode,
4102 XEXP (XEXP (x, 0), 1), XEXP (x, 1))));
4103
4104 if (GET_CODE (x) != IOR)
4105 goto restart;
4106 }
4107
4108 if (GET_CODE (XEXP (x, 1)) == AND)
4109 {
4110 x = apply_distributive_law
4111 (gen_binary (AND, mode,
4112 gen_binary (IOR, mode,
4113 XEXP (XEXP (x, 1), 0), XEXP (x, 0)),
4114 gen_binary (IOR, mode,
4115 XEXP (XEXP (x, 1), 1), XEXP (x, 0))));
4116
4117 if (GET_CODE (x) != IOR)
4118 goto restart;
4119 }
4120
4121 /* Convert (ior (ashift A CX) (lshiftrt A CY)) where CX+CY equals the
4122 mode size to (rotate A CX). */
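/* E.g. in SImode, (ior (ashift A 8) (lshiftrt A 24)) moves the low
   24 bits up and wraps the top 8 bits around to the bottom,
   i.e. (rotate A 8), since 8 + 24 equals the 32-bit mode size.  */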
4123
4124 if (((GET_CODE (XEXP (x, 0)) == ASHIFT
4125 && GET_CODE (XEXP (x, 1)) == LSHIFTRT)
4126 || (GET_CODE (XEXP (x, 1)) == ASHIFT
4127 && GET_CODE (XEXP (x, 0)) == LSHIFTRT))
4128 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (XEXP (x, 1), 0))
4129 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4130 && GET_CODE (XEXP (XEXP (x, 1), 1)) == CONST_INT
4131 && (INTVAL (XEXP (XEXP (x, 0), 1)) + INTVAL (XEXP (XEXP (x, 1), 1))
4132 == GET_MODE_BITSIZE (mode)))
4133 {
4134 rtx shift_count;
4135
4136 if (GET_CODE (XEXP (x, 0)) == ASHIFT)
4137 shift_count = XEXP (XEXP (x, 0), 1);
4138 else
4139 shift_count = XEXP (XEXP (x, 1), 1);
4140 x = gen_rtx (ROTATE, mode, XEXP (XEXP (x, 0), 0), shift_count);
4141 goto restart;
4142 }
4143 break;
4144
4145 case XOR:
4146 /* Convert (XOR (NOT x) (NOT y)) to (XOR x y).
4147 Also convert (XOR (NOT x) y) to (NOT (XOR x y)), similarly for
4148 (NOT y). */
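/* Both follow from ~v == v ^ -1: in (~x ^ ~y) the two implicit -1
   masks cancel, giving (x ^ y); with a single NOT, the -1 can be
   pulled outside, giving ~(x ^ y).  */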
4149 {
4150 int num_negated = 0;
4151 rtx in1 = XEXP (x, 0), in2 = XEXP (x, 1);
4152
4153 if (GET_CODE (in1) == NOT)
4154 num_negated++, in1 = XEXP (in1, 0);
4155 if (GET_CODE (in2) == NOT)
4156 num_negated++, in2 = XEXP (in2, 0);
4157
4158 if (num_negated == 2)
4159 {
4160 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4161 SUBST (XEXP (x, 1), XEXP (XEXP (x, 1), 0));
4162 }
4163 else if (num_negated == 1)
4164 {
4165 x = gen_unary (NOT, mode,
4166 gen_binary (XOR, mode, in1, in2));
4167 goto restart;
4168 }
4169 }
4170
4171 /* Convert (xor (and A B) B) to (and (not A) B). The latter may
4172 correspond to a machine insn or result in further simplifications
4173 if B is a constant. */
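/* Bitwise check of (A & B) ^ B == ~A & B: where a bit of B is 0
   both sides are 0; where it is 1, the XOR complements the AND
   result, giving the corresponding bit of ~A.  */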
4174
4175 if (GET_CODE (XEXP (x, 0)) == AND
4176 && rtx_equal_p (XEXP (XEXP (x, 0), 1), XEXP (x, 1))
4177 && ! side_effects_p (XEXP (x, 1)))
4178 {
4179 x = gen_binary (AND, mode,
4180 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 0)),
4181 XEXP (x, 1));
4182 goto restart;
4183 }
4184 else if (GET_CODE (XEXP (x, 0)) == AND
4185 && rtx_equal_p (XEXP (XEXP (x, 0), 0), XEXP (x, 1))
4186 && ! side_effects_p (XEXP (x, 1)))
4187 {
4188 x = gen_binary (AND, mode,
4189 gen_unary (NOT, mode, XEXP (XEXP (x, 0), 1)),
4190 XEXP (x, 1));
4191 goto restart;
4192 }
4193
4194
4195 #if STORE_FLAG_VALUE == 1
4196 /* (xor (comparison foo bar) (const_int 1)) can become the reversed
4197 comparison. */
4198 if (XEXP (x, 1) == const1_rtx
4199 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4200 && reversible_comparison_p (XEXP (x, 0)))
4201 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4202 mode, XEXP (XEXP (x, 0), 0),
4203 XEXP (XEXP (x, 0), 1));
4204 #endif
4205
4206 /* (xor (comparison foo bar) (const_int sign-bit))
4207 when STORE_FLAG_VALUE is the sign bit. */
4208 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4209 && (STORE_FLAG_VALUE
4210 == (HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (mode) - 1))
4211 && XEXP (x, 1) == const_true_rtx
4212 && GET_RTX_CLASS (GET_CODE (XEXP (x, 0))) == '<'
4213 && reversible_comparison_p (XEXP (x, 0)))
4214 return gen_rtx_combine (reverse_condition (GET_CODE (XEXP (x, 0))),
4215 mode, XEXP (XEXP (x, 0), 0),
4216 XEXP (XEXP (x, 0), 1));
4217 break;
4218
4219 case ABS:
4220 /* (abs (neg <foo>)) -> (abs <foo>) */
4221 if (GET_CODE (XEXP (x, 0)) == NEG)
4222 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4223
4224 /* If operand is something known to be positive, ignore the ABS. */
4225 if (GET_CODE (XEXP (x, 0)) == FFS || GET_CODE (XEXP (x, 0)) == ABS
4226 || ((GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4227 <= HOST_BITS_PER_WIDE_INT)
4228 && ((significant_bits (XEXP (x, 0), GET_MODE (XEXP (x, 0)))
4229 & ((HOST_WIDE_INT) 1
4230 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1)))
4231 == 0)))
4232 return XEXP (x, 0);
4233
4234
4235 /* If operand is known to be only -1 or 0, convert ABS to NEG. */
4236 if (num_sign_bit_copies (XEXP (x, 0), mode) == GET_MODE_BITSIZE (mode))
4237 {
4238 x = gen_rtx_combine (NEG, mode, XEXP (x, 0));
4239 goto restart;
4240 }
4241 break;
4242
4243 case FFS:
4244 /* (ffs (*_extend <X>)) = (ffs <X>) */
4245 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND
4246 || GET_CODE (XEXP (x, 0)) == ZERO_EXTEND)
4247 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4248 break;
4249
4250 case FLOAT:
4251 /* (float (sign_extend <X>)) = (float <X>). */
4252 if (GET_CODE (XEXP (x, 0)) == SIGN_EXTEND)
4253 SUBST (XEXP (x, 0), XEXP (XEXP (x, 0), 0));
4254 break;
4255
4256 case LSHIFT:
4257 case ASHIFT:
4258 case LSHIFTRT:
4259 case ASHIFTRT:
4260 case ROTATE:
4261 case ROTATERT:
4262 /* If this is a shift by a constant amount, simplify it. */
4263 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4264 {
4265 x = simplify_shift_const (x, code, mode, XEXP (x, 0),
4266 INTVAL (XEXP (x, 1)));
4267 if (GET_CODE (x) != code)
4268 goto restart;
4269 }
4270
4271 #ifdef SHIFT_COUNT_TRUNCATED
4272 else if (GET_CODE (XEXP (x, 1)) != REG)
4273 SUBST (XEXP (x, 1),
4274 force_to_mode (XEXP (x, 1), GET_MODE (x),
4275 exact_log2 (GET_MODE_BITSIZE (GET_MODE (x))),
4276 NULL_RTX));
4277 #endif
4278
4279 break;
4280 }
4281
4282 return x;
4283 }
4284 \f
4285 /* We consider ZERO_EXTRACT, SIGN_EXTRACT, and SIGN_EXTEND as "compound
4286 operations" because they can be replaced with two more basic operations.
4287 ZERO_EXTEND is also considered "compound" because it can be replaced with
4288 an AND operation, which is simpler, though only one operation.
4289
4290 The function expand_compound_operation is called with an rtx expression
4291 and will convert it to the appropriate shifts and AND operations,
4292 simplifying at each stage.
4293
4294 The function make_compound_operation is called to convert an expression
4295 consisting of shifts and ANDs into the equivalent compound expression.
4296 It is the inverse of this function, loosely speaking. */
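/* E.g. (sign_extract:SI X (const_int 8) (const_int 0)) expands to
   (ashiftrt:SI (ashift:SI X (const_int 24)) (const_int 24)), while
   (zero_extend:SI Y:QI) expands to (and:SI (subreg:SI Y 0)
   (const_int 255)).  */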
4297
4298 static rtx
4299 expand_compound_operation (x)
4300 rtx x;
4301 {
4302 int pos = 0, len;
4303 int unsignedp = 0;
4304 int modewidth;
4305 rtx tem;
4306
4307 switch (GET_CODE (x))
4308 {
4309 case ZERO_EXTEND:
4310 unsignedp = 1;
4311 case SIGN_EXTEND:
4312 /* We can't necessarily use a const_int for a multiword mode;
4313 it depends on implicitly extending the value.
4314 Since we don't know the right way to extend it,
4315 we can't tell whether the implicit way is right.
4316
4317 Even for a mode that is no wider than a const_int,
4318 we can't win, because we need to sign extend one of its bits through
4319 the rest of it, and we don't know which bit. */
4320 if (GET_CODE (XEXP (x, 0)) == CONST_INT)
4321 return x;
4322
4323 if (! FAKE_EXTEND_SAFE_P (GET_MODE (XEXP (x, 0)), XEXP (x, 0)))
4324 return x;
4325
4326 len = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)));
4327 /* If the inner object has VOIDmode (the only way this can happen
4328 is if it is an ASM_OPERANDS), we can't do anything since we don't
4329 know how much masking to do. */
4330 if (len == 0)
4331 return x;
4332
4333 break;
4334
4335 case ZERO_EXTRACT:
4336 unsignedp = 1;
4337 case SIGN_EXTRACT:
4338 /* If the operand is a CLOBBER, just return it. */
4339 if (GET_CODE (XEXP (x, 0)) == CLOBBER)
4340 return XEXP (x, 0);
4341
4342 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4343 || GET_CODE (XEXP (x, 2)) != CONST_INT
4344 || GET_MODE (XEXP (x, 0)) == VOIDmode)
4345 return x;
4346
4347 len = INTVAL (XEXP (x, 1));
4348 pos = INTVAL (XEXP (x, 2));
4349
4350 /* If this goes outside the object being extracted, replace the object
4351 with a (use (mem ...)) construct that only combine understands
4352 and is used only for this purpose. */
4353 if (len + pos > GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))))
4354 SUBST (XEXP (x, 0), gen_rtx (USE, GET_MODE (x), XEXP (x, 0)));
4355
4356 #if BITS_BIG_ENDIAN
4357 pos = GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - len - pos;
4358 #endif
4359 break;
4360
4361 default:
4362 return x;
4363 }
4364
4365 /* If we reach here, we want to return a pair of shifts. The inner
4366 shift is a left shift of BITSIZE - POS - LEN bits. The outer
4367 shift is a right shift of BITSIZE - LEN bits. It is arithmetic or
4368 logical depending on the value of UNSIGNEDP.
4369
4370 If this was a ZERO_EXTEND or ZERO_EXTRACT, this pair of shifts will be
4371 converted into an AND of a shift.
4372
4373 We must check for the case where the left shift would have a negative
4374 count. This can happen in a case like (x >> 31) & 255 on machines
4375 that can't shift by a constant. On those machines, we would first
4376 combine the shift with the AND to produce a variable-position
4377 extraction. Then the constant of 31 would be substituted in to produce
4378 such a position. */
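/* Concretely: len = 8 and pos = 31 in SImode would call for a left
   shift of 32 - 31 - 8 = -7 bits; the check below rejects that case
   and falls through to the AND form when it applies.  */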
4379
4380 modewidth = GET_MODE_BITSIZE (GET_MODE (x));
4381 if (modewidth >= pos + len)
4382 tem = simplify_shift_const (NULL_RTX, unsignedp ? LSHIFTRT : ASHIFTRT,
4383 GET_MODE (x),
4384 simplify_shift_const (NULL_RTX, ASHIFT,
4385 GET_MODE (x),
4386 XEXP (x, 0),
4387 modewidth - pos - len),
4388 modewidth - len);
4389
4390 else if (unsignedp && len < HOST_BITS_PER_WIDE_INT)
4391 tem = simplify_and_const_int (NULL_RTX, GET_MODE (x),
4392 simplify_shift_const (NULL_RTX, LSHIFTRT,
4393 GET_MODE (x),
4394 XEXP (x, 0), pos),
4395 ((HOST_WIDE_INT) 1 << len) - 1);
4396 else
4397 /* Any other cases we can't handle. */
4398 return x;
4399
4400
4401 /* If we couldn't do this for some reason, return the original
4402 expression. */
4403 if (GET_CODE (tem) == CLOBBER)
4404 return x;
4405
4406 return tem;
4407 }
4408 \f
4409 /* X is a SET which contains an assignment of one object into
4410 a part of another (such as a bit-field assignment, STRICT_LOW_PART,
4411 or certain SUBREGS). If possible, convert it into a series of
4412 logical operations.
4413
4414 We half-heartedly support variable positions, but do not at all
4415 support variable lengths. */
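/* The generated form is the usual read-modify-write masking
   sequence: dest = (dest & ~(MASK << POS)) | ((src & MASK) << POS),
   where MASK is LEN low-order one bits.  */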
4416
4417 static rtx
4418 expand_field_assignment (x)
4419 rtx x;
4420 {
4421 rtx inner;
4422 rtx pos; /* Always counts from low bit. */
4423 int len;
4424 rtx mask;
4425 enum machine_mode compute_mode;
4426
4427 /* Loop until we find something we can't simplify. */
4428 while (1)
4429 {
4430 if (GET_CODE (SET_DEST (x)) == STRICT_LOW_PART
4431 && GET_CODE (XEXP (SET_DEST (x), 0)) == SUBREG)
4432 {
4433 inner = SUBREG_REG (XEXP (SET_DEST (x), 0));
4434 len = GET_MODE_BITSIZE (GET_MODE (XEXP (SET_DEST (x), 0)));
4435 pos = const0_rtx;
4436 }
4437 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT
4438 && GET_CODE (XEXP (SET_DEST (x), 1)) == CONST_INT)
4439 {
4440 inner = XEXP (SET_DEST (x), 0);
4441 len = INTVAL (XEXP (SET_DEST (x), 1));
4442 pos = XEXP (SET_DEST (x), 2);
4443
4444 /* If the position is constant and spans the width of INNER,
4445 surround INNER with a USE to indicate this. */
4446 if (GET_CODE (pos) == CONST_INT
4447 && INTVAL (pos) + len > GET_MODE_BITSIZE (GET_MODE (inner)))
4448 inner = gen_rtx (USE, GET_MODE (SET_DEST (x)), inner);
4449
4450 #if BITS_BIG_ENDIAN
4451 if (GET_CODE (pos) == CONST_INT)
4452 pos = GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner)) - len
4453 - INTVAL (pos));
4454 else if (GET_CODE (pos) == MINUS
4455 && GET_CODE (XEXP (pos, 1)) == CONST_INT
4456 && (INTVAL (XEXP (pos, 1))
4457 == GET_MODE_BITSIZE (GET_MODE (inner)) - len))
4458 /* If position is ADJUST - X, new position is X. */
4459 pos = XEXP (pos, 0);
4460 else
4461 pos = gen_binary (MINUS, GET_MODE (pos),
4462 GEN_INT (GET_MODE_BITSIZE (GET_MODE (inner))
4463 - len),
4464 pos);
4465 #endif
4466 }
4467
4468 /* A SUBREG between two modes that occupy the same numbers of words
4469 can be done by moving the SUBREG to the source. */
4470 else if (GET_CODE (SET_DEST (x)) == SUBREG
4471 && (((GET_MODE_SIZE (GET_MODE (SET_DEST (x)))
4472 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
4473 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (x))))
4474 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
4475 {
4476 x = gen_rtx (SET, VOIDmode, SUBREG_REG (SET_DEST (x)),
4477 gen_lowpart_for_combine (GET_MODE (SUBREG_REG (SET_DEST (x))),
4478 SET_SRC (x)));
4479 continue;
4480 }
4481 else
4482 break;
4483
4484 while (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4485 inner = SUBREG_REG (inner);
4486
4487 compute_mode = GET_MODE (inner);
4488
4489 /* Compute a mask of LEN bits, if we can do this on the host machine. */
4490 if (len < HOST_BITS_PER_WIDE_INT)
4491 mask = GEN_INT (((HOST_WIDE_INT) 1 << len) - 1);
4492 else
4493 break;
4494
4495 /* Now compute the equivalent expression. Make a copy of INNER
4496 for the SET_DEST in case it is a MEM into which we will substitute;
4497 we don't want shared RTL in that case. */
4498 x = gen_rtx (SET, VOIDmode, copy_rtx (inner),
4499 gen_binary (IOR, compute_mode,
4500 gen_binary (AND, compute_mode,
4501 gen_unary (NOT, compute_mode,
4502 gen_binary (ASHIFT,
4503 compute_mode,
4504 mask, pos)),
4505 inner),
4506 gen_binary (ASHIFT, compute_mode,
4507 gen_binary (AND, compute_mode,
4508 gen_lowpart_for_combine
4509 (compute_mode,
4510 SET_SRC (x)),
4511 mask),
4512 pos)));
4513 }
4514
4515 return x;
4516 }
4517 \f
4518 /* Return an RTX for a reference to LEN bits of INNER. POS is the starting
4519 bit position (counted from the LSB) if >= 0; otherwise POS_RTX represents
4520 the starting bit position.
4521
4522 INNER may be a USE. This will occur when we started with a bitfield
4523 that went outside the boundary of the object in memory, which is
4524 allowed on most machines. To isolate this case, we produce a USE
4525 whose mode is wide enough and surround the MEM with it. The only
4526 code that understands the USE is this routine. If it is not removed,
4527 it will cause the resulting insn not to match.
4528
4529 UNSIGNEDP is non-zero for an unsigned reference and zero for a
4530 signed reference.
4531
4532 IN_DEST is non-zero if this is a reference in the destination of a
4533 SET. This is used when a ZERO_ or SIGN_EXTRACT isn't needed. If non-zero,
4534 a STRICT_LOW_PART will be used, if zero, ZERO_EXTEND or SIGN_EXTEND will
4535 be used.
4536
4537 IN_COMPARE is non-zero if we are in a COMPARE. This means that a
4538 ZERO_EXTRACT should be built even for bits starting at bit 0.
4539
4540 MODE is the desired mode of the result (if IN_DEST == 0). */
4541
4542 static rtx
4543 make_extraction (mode, inner, pos, pos_rtx, len,
4544 unsignedp, in_dest, in_compare)
4545 enum machine_mode mode;
4546 rtx inner;
4547 int pos;
4548 rtx pos_rtx;
4549 int len;
4550 int unsignedp;
4551 int in_dest, in_compare;
4552 {
4553 /* This mode describes the size of the storage area
4554 to fetch the overall value from. Within that, we
4555 ignore the POS lowest bits, etc. */
4556 enum machine_mode is_mode = GET_MODE (inner);
4557 enum machine_mode inner_mode;
4558 enum machine_mode wanted_mem_mode = byte_mode;
4559 enum machine_mode pos_mode = word_mode;
4560 enum machine_mode extraction_mode = word_mode;
4561 enum machine_mode tmode = mode_for_size (len, MODE_INT, 1);
4562 int spans_byte = 0;
4563 rtx new = 0;
4564
4565 /* Get some information about INNER and get the innermost object. */
4566 if (GET_CODE (inner) == USE)
4567 /* (use:SI (mem:QI foo)) stands for (mem:SI foo). */
4568 /* We don't need to adjust the position because we set up the USE
4569 to pretend that it was a full-word object. */
4570 spans_byte = 1, inner = XEXP (inner, 0);
4571 else if (GET_CODE (inner) == SUBREG && subreg_lowpart_p (inner))
4572 {
4573 /* If going from (subreg:SI (mem:QI ...)) to (mem:QI ...),
4574 consider just the QI as the memory to extract from.
4575 The subreg adds or removes high bits; its mode is
4576 irrelevant to the meaning of this extraction,
4577 since POS and LEN count from the lsb. */
4578 if (GET_CODE (SUBREG_REG (inner)) == MEM)
4579 is_mode = GET_MODE (SUBREG_REG (inner));
4580 inner = SUBREG_REG (inner);
4581 }
4582
4583 inner_mode = GET_MODE (inner);
4584
4585 if (pos_rtx && GET_CODE (pos_rtx) == CONST_INT)
4586 pos = INTVAL (pos_rtx);
4587
4588 /* See if this can be done without an extraction. We never can if the
4589 width of the field is not the same as that of some integer mode. For
4590 registers, we can only avoid the extraction if the position is at the
4591 low-order bit and this is either not in the destination or we have the
4592 appropriate STRICT_LOW_PART operation available.
4593
4594 For MEM, we can avoid an extract if the field starts on an appropriate
4595 boundary and we can change the mode of the memory reference. However,
4596 we cannot directly access the MEM if we have a USE and the underlying
4597 MEM is not TMODE. This combination means that MEM was being used in a
4598 context where bits outside its mode were being referenced; that is only
4599 valid in bit-field insns. */
4600
4601 if (tmode != BLKmode
4602 && ! (spans_byte && inner_mode != tmode)
4603 && ((pos == 0 && GET_CODE (inner) != MEM
4604 && (! in_dest
4605 || (GET_CODE (inner) == REG
4606 && (movstrict_optab->handlers[(int) tmode].insn_code
4607 != CODE_FOR_nothing))))
4608 || (GET_CODE (inner) == MEM && pos >= 0
4609 && (pos
4610 % (STRICT_ALIGNMENT ? GET_MODE_ALIGNMENT (tmode)
4611 : BITS_PER_UNIT)) == 0
4612 /* We can't do this if we are widening INNER_MODE (it
4613 may not be aligned, for one thing). */
4614 && GET_MODE_BITSIZE (inner_mode) >= GET_MODE_BITSIZE (tmode)
4615 && (inner_mode == tmode
4616 || (! mode_dependent_address_p (XEXP (inner, 0))
4617 && ! MEM_VOLATILE_P (inner))))))
4618 {
4619 /* If INNER is a MEM, make a new MEM that encompasses just the desired
4620 field. If the original and current mode are the same, we need not
4621 adjust the offset. Otherwise, we do if bytes are big-endian.
4622
4623 If INNER is not a MEM, get a piece consisting of just the field
4624 of interest (in this case POS must be 0). */
4625
4626 if (GET_CODE (inner) == MEM)
4627 {
4628 int offset;
4629 /* POS counts from lsb, but make OFFSET count in memory order. */
4630 if (BYTES_BIG_ENDIAN)
4631 offset = (GET_MODE_BITSIZE (is_mode) - len - pos) / BITS_PER_UNIT;
4632 else
4633 offset = pos / BITS_PER_UNIT;
4634
4635 new = gen_rtx (MEM, tmode, plus_constant (XEXP (inner, 0), offset));
4636 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (inner);
4637 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (inner);
4638 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (inner);
4639 }
4640 else if (GET_CODE (inner) == REG)
4641 /* We can't call gen_lowpart_for_combine here since we always want
4642 a SUBREG and it would sometimes return a new hard register. */
4643 new = gen_rtx (SUBREG, tmode, inner,
4644 (WORDS_BIG_ENDIAN
4645 && GET_MODE_SIZE (inner_mode) > UNITS_PER_WORD
4646 ? ((GET_MODE_SIZE (inner_mode) - GET_MODE_SIZE (tmode))
4647 / UNITS_PER_WORD)
4648 : 0));
4649 else
4650 new = force_to_mode (inner, tmode, len, NULL_RTX);
4651
4652 /* If this extraction is going into the destination of a SET,
4653 make a STRICT_LOW_PART unless we made a MEM. */
4654
4655 if (in_dest)
4656 return (GET_CODE (new) == MEM ? new
4657 : (GET_CODE (new) != SUBREG
4658 ? gen_rtx (CLOBBER, tmode, const0_rtx)
4659 : gen_rtx_combine (STRICT_LOW_PART, VOIDmode, new)));
4660
4661 /* Otherwise, sign- or zero-extend unless we already are in the
4662 proper mode. */
4663
4664 return (mode == tmode ? new
4665 : gen_rtx_combine (unsignedp ? ZERO_EXTEND : SIGN_EXTEND,
4666 mode, new));
4667 }
4668
4669 /* Unless this is a COMPARE or we have a funny memory reference,
4670 don't do anything with zero-extending field extracts starting at
4671 the low-order bit since they are simple AND operations. */
4672 if (pos == 0 && ! in_dest && ! in_compare && ! spans_byte && unsignedp)
4673 return 0;
4674
4675 /* Get the mode to use should INNER be a MEM, the mode for the position,
4676 and the mode for the result. */
4677 #ifdef HAVE_insv
4678 if (in_dest)
4679 {
4680 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_insv][0];
4681 pos_mode = insn_operand_mode[(int) CODE_FOR_insv][2];
4682 extraction_mode = insn_operand_mode[(int) CODE_FOR_insv][3];
4683 }
4684 #endif
4685
4686 #ifdef HAVE_extzv
4687 if (! in_dest && unsignedp)
4688 {
4689 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extzv][1];
4690 pos_mode = insn_operand_mode[(int) CODE_FOR_extzv][3];
4691 extraction_mode = insn_operand_mode[(int) CODE_FOR_extzv][0];
4692 }
4693 #endif
4694
4695 #ifdef HAVE_extv
4696 if (! in_dest && ! unsignedp)
4697 {
4698 wanted_mem_mode = insn_operand_mode[(int) CODE_FOR_extv][1];
4699 pos_mode = insn_operand_mode[(int) CODE_FOR_extv][3];
4700 extraction_mode = insn_operand_mode[(int) CODE_FOR_extv][0];
4701 }
4702 #endif
4703
4704 /* Never narrow an object, since that might not be safe. */
4705
4706 if (mode != VOIDmode
4707 && GET_MODE_SIZE (extraction_mode) < GET_MODE_SIZE (mode))
4708 extraction_mode = mode;
4709
4710 if (pos_rtx && GET_MODE (pos_rtx) != VOIDmode
4711 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4712 pos_mode = GET_MODE (pos_rtx);
4713
4714 /* If this is not from memory or we have to change the mode of memory and
4715 cannot, the desired mode is EXTRACTION_MODE. */
4716 if (GET_CODE (inner) != MEM
4717 || (inner_mode != wanted_mem_mode
4718 && (mode_dependent_address_p (XEXP (inner, 0))
4719 || MEM_VOLATILE_P (inner))))
4720 wanted_mem_mode = extraction_mode;
4721
4722 #if BITS_BIG_ENDIAN
4723 /* If position is constant, compute new position. Otherwise, build
4724 subtraction. */
4725 if (pos >= 0)
4726 pos = (MAX (GET_MODE_BITSIZE (is_mode), GET_MODE_BITSIZE (wanted_mem_mode))
4727 - len - pos);
4728 else
4729 pos_rtx
4730 = gen_rtx_combine (MINUS, GET_MODE (pos_rtx),
4731 GEN_INT (MAX (GET_MODE_BITSIZE (is_mode),
4732 GET_MODE_BITSIZE (wanted_mem_mode))
4733 - len),
4734 pos_rtx);
4735 #endif
4736
4737 /* If INNER has a wider mode, make it smaller. If this is a constant
4738 extract, try to adjust the byte to point to the byte containing
4739 the value. */
4740 if (wanted_mem_mode != VOIDmode
4741 && GET_MODE_SIZE (wanted_mem_mode) < GET_MODE_SIZE (is_mode)
4742 && ((GET_CODE (inner) == MEM
4743 && (inner_mode == wanted_mem_mode
4744 || (! mode_dependent_address_p (XEXP (inner, 0))
4745 && ! MEM_VOLATILE_P (inner))))))
4746 {
4747 int offset = 0;
4748
4749 /* The computations below will be correct if the machine is big
4750 endian in both bits and bytes or little endian in bits and bytes.
4751 If it is mixed, we must adjust. */
4752
4753 #if BYTES_BIG_ENDIAN != BITS_BIG_ENDIAN
4754 if (! spans_byte && is_mode != wanted_mem_mode)
4755 offset = (GET_MODE_SIZE (is_mode)
4756 - GET_MODE_SIZE (wanted_mem_mode) - offset);
4757 #endif
4758
4759 /* If bytes are big endian and we had a paradoxical SUBREG, we must
4760 adjust OFFSET to compensate. */
4761 #if BYTES_BIG_ENDIAN
4762 if (! spans_byte
4763 && GET_MODE_SIZE (inner_mode) < GET_MODE_SIZE (is_mode))
4764 offset -= GET_MODE_SIZE (is_mode) - GET_MODE_SIZE (inner_mode);
4765 #endif
4766
4767 /* If this is a constant position, we can move to the desired byte. */
4768 if (pos >= 0)
4769 {
4770 offset += pos / BITS_PER_UNIT;
4771 pos %= GET_MODE_BITSIZE (wanted_mem_mode);
4772 }
4773
4774 if (offset != 0 || inner_mode != wanted_mem_mode)
4775 {
4776 rtx newmem = gen_rtx (MEM, wanted_mem_mode,
4777 plus_constant (XEXP (inner, 0), offset));
4778 RTX_UNCHANGING_P (newmem) = RTX_UNCHANGING_P (inner);
4779 MEM_VOLATILE_P (newmem) = MEM_VOLATILE_P (inner);
4780 MEM_IN_STRUCT_P (newmem) = MEM_IN_STRUCT_P (inner);
4781 inner = newmem;
4782 }
4783 }
4784
4785 /* If INNER is not memory, we can always get it into the proper mode. */
4786 else if (GET_CODE (inner) != MEM)
4787 inner = force_to_mode (inner, extraction_mode,
4788 (pos < 0 ? GET_MODE_BITSIZE (extraction_mode)
4789 : len + pos),
4790 NULL_RTX);
4791
4792 /* Adjust mode of POS_RTX, if needed. If we want a wider mode, we
4793 have to zero extend. Otherwise, we can just use a SUBREG. */
4794 if (pos < 0
4795 && GET_MODE_SIZE (pos_mode) > GET_MODE_SIZE (GET_MODE (pos_rtx)))
4796 pos_rtx = gen_rtx_combine (ZERO_EXTEND, pos_mode, pos_rtx);
4797 else if (pos < 0
4798 && GET_MODE_SIZE (pos_mode) < GET_MODE_SIZE (GET_MODE (pos_rtx)))
4799 pos_rtx = gen_lowpart_for_combine (pos_mode, pos_rtx);
4800
4801 /* Make POS_RTX unless we already have it and it is correct. */
4802 if (pos_rtx == 0 || (pos >= 0 && INTVAL (pos_rtx) != pos))
4803 pos_rtx = GEN_INT (pos);
4804
4805 /* Make the required operation. See if we can use existing rtx. */
4806 new = gen_rtx_combine (unsignedp ? ZERO_EXTRACT : SIGN_EXTRACT,
4807 extraction_mode, inner, GEN_INT (len), pos_rtx);
4808 if (! in_dest)
4809 new = gen_lowpart_for_combine (mode, new);
4810
4811 return new;
4812 }
4813 \f
4814 /* Look at the expression rooted at X. Look for expressions
4815 equivalent to ZERO_EXTRACT, SIGN_EXTRACT, ZERO_EXTEND, SIGN_EXTEND.
4816 Form these expressions.
4817
4818 Return the new rtx, usually just X.
4819
4820 Also, for machines like the Vax that don't have logical shift insns,
4821 try to convert logical to arithmetic shift operations in cases where
4822 they are equivalent. This undoes the canonicalizations to logical
4823 shifts done elsewhere.
4824
4825 We try, as much as possible, to re-use rtl expressions to save memory.
4826
4827 IN_CODE says what kind of expression we are processing. Normally, it is
4828 SET. In a memory address (inside a MEM, PLUS or MINUS, the latter two
4829 being kludges), it is MEM. When processing the arguments of a comparison
4830 or a COMPARE against zero, it is COMPARE. */
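/* A sketch of a typical case (with the usual numbering in which bit 0 is
   least significant, BITS_BIG_ENDIAN adjustments aside):
   (and:SI (lshiftrt:SI X (const_int 24)) (const_int 255)) names an 8-bit
   field starting at bit 24, so it is rewritten as, roughly,
   (zero_extract:SI X (const_int 8) (const_int 24)).  */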
4831
4832 static rtx
4833 make_compound_operation (x, in_code)
4834 rtx x;
4835 enum rtx_code in_code;
4836 {
4837 enum rtx_code code = GET_CODE (x);
4838 enum machine_mode mode = GET_MODE (x);
4839 int mode_width = GET_MODE_BITSIZE (mode);
4840 enum rtx_code next_code;
4841 int i, count;
4842 rtx new = 0;
4843 char *fmt;
4844
4845 /* Select the code to be used in recursive calls. Once we are inside an
4846 address, we stay there. If we have a comparison, set to COMPARE,
4847 but once inside, go back to our default of SET. */
4848
4849 next_code = (code == MEM || code == PLUS || code == MINUS ? MEM
4850 : ((code == COMPARE || GET_RTX_CLASS (code) == '<')
4851 && XEXP (x, 1) == const0_rtx) ? COMPARE
4852 : in_code == COMPARE ? SET : in_code);
4853
4854 /* Process depending on the code of this operation. If NEW is set
4855 non-zero, it will be returned. */
4856
4857 switch (code)
4858 {
4859 case ASHIFT:
4860 case LSHIFT:
4861 /* Convert shifts by constants into multiplications if inside
4862 an address. */
4863 if (in_code == MEM && GET_CODE (XEXP (x, 1)) == CONST_INT
4864 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
4865 && INTVAL (XEXP (x, 1)) >= 0)
4866 new = gen_rtx_combine (MULT, mode, XEXP (x, 0),
4867 GEN_INT ((HOST_WIDE_INT) 1
4868 << INTVAL (XEXP (x, 1))));
4869 break;
4870
4871 case AND:
4872 /* If the second operand is not a constant, we can't do anything
4873 with it. */
4874 if (GET_CODE (XEXP (x, 1)) != CONST_INT)
4875 break;
4876
4877 /* If the constant is a power of two minus one and the first operand
4878 is a logical right shift, make an extraction. */
4879 if (GET_CODE (XEXP (x, 0)) == LSHIFTRT
4880 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4881 new = make_extraction (mode, XEXP (XEXP (x, 0), 0), -1,
4882 XEXP (XEXP (x, 0), 1), i, 1,
4883 0, in_code == COMPARE);
4884
4885 /* Same as previous, but for (subreg (lshiftrt ...)) in first op. */
4886 else if (GET_CODE (XEXP (x, 0)) == SUBREG
4887 && subreg_lowpart_p (XEXP (x, 0))
4888 && GET_CODE (SUBREG_REG (XEXP (x, 0))) == LSHIFTRT
4889 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4890 new = make_extraction (GET_MODE (SUBREG_REG (XEXP (x, 0))),
4891 XEXP (SUBREG_REG (XEXP (x, 0)), 0), -1,
4892 XEXP (SUBREG_REG (XEXP (x, 0)), 1), i, 1,
4893 0, in_code == COMPARE);
4894
4895
4896 /* If we have (and (rotate X C) M) and C is larger than the number
4897 of bits in M, this is an extraction. */
4898
4899 else if (GET_CODE (XEXP (x, 0)) == ROTATE
4900 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4901 && (i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0
4902 && i <= INTVAL (XEXP (XEXP (x, 0), 1)))
4903 new = make_extraction (mode, XEXP (XEXP (x, 0), 0),
4904 (GET_MODE_BITSIZE (mode)
4905 - INTVAL (XEXP (XEXP (x, 0), 1))),
4906 NULL_RTX, i, 1, 0, in_code == COMPARE);
4907
4908 /* On machines without logical shifts, if the operand of the AND is
4909 a logical shift and our mask turns off all the propagated sign
4910 bits, we can replace the logical shift with an arithmetic shift. */
4911 else if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
4912 && (lshr_optab->handlers[(int) mode].insn_code
4913 == CODE_FOR_nothing)
4914 && GET_CODE (XEXP (x, 0)) == LSHIFTRT
4915 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4916 && INTVAL (XEXP (XEXP (x, 0), 1)) >= 0
4917 && INTVAL (XEXP (XEXP (x, 0), 1)) < HOST_BITS_PER_WIDE_INT
4918 && mode_width <= HOST_BITS_PER_WIDE_INT)
4919 {
4920 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
4921
4922 mask >>= INTVAL (XEXP (XEXP (x, 0), 1));
4923 if ((INTVAL (XEXP (x, 1)) & ~mask) == 0)
4924 SUBST (XEXP (x, 0),
4925 gen_rtx_combine (ASHIFTRT, mode, XEXP (XEXP (x, 0), 0),
4926 XEXP (XEXP (x, 0), 1)));
4927 }
4928
4929 /* If the constant is one less than a power of two, this might be
4930 representable by an extraction even if no shift is present.
4931 If it doesn't end up being a ZERO_EXTEND, we will ignore it unless
4932 we are in a COMPARE. */
4933 else if ((i = exact_log2 (INTVAL (XEXP (x, 1)) + 1)) >= 0)
4934 new = make_extraction (mode, XEXP (x, 0), 0, NULL_RTX, i, 1,
4935 0, in_code == COMPARE);
4936
4937 /* If we are in a comparison and this is an AND with a power of two,
4938 convert this into the appropriate bit extract. */
4939 else if (in_code == COMPARE
4940 && (i = exact_log2 (INTVAL (XEXP (x, 1)))) >= 0)
4941 new = make_extraction (mode, XEXP (x, 0), i, NULL_RTX, 1, 1, 0, 1);
4942
4943 break;
4944
4945 case LSHIFTRT:
4946 /* If the sign bit is known to be zero, replace this with an
4947 arithmetic shift. */
4948 if (ashr_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
4949 && lshr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing
4950 && mode_width <= HOST_BITS_PER_WIDE_INT
4951 && (significant_bits (XEXP (x, 0), mode)
4952 & (1 << (mode_width - 1))) == 0)
4953 {
4954 new = gen_rtx_combine (ASHIFTRT, mode, XEXP (x, 0), XEXP (x, 1));
4955 break;
4956 }
4957
4958 /* ... fall through ... */
4959
4960 case ASHIFTRT:
4961 /* If we have (ashiftrt (ashift foo C1) C2) with C2 >= C1,
4962 this is a SIGN_EXTRACT. */
4963 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4964 && GET_CODE (XEXP (x, 0)) == ASHIFT
4965 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4966 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (x, 0), 1)))
4967 new = make_extraction (mode, XEXP (XEXP (x, 0), 0),
4968 (INTVAL (XEXP (x, 1))
4969 - INTVAL (XEXP (XEXP (x, 0), 1))),
4970 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
4971 code == LSHIFTRT, 0, in_code == COMPARE);
4972
4973 /* Similarly if we have (ashiftrt (OP (ashift foo C1) C3) C2). In these
4974 cases, we are better off returning a SIGN_EXTEND of the operation. */
4975
4976 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4977 && (GET_CODE (XEXP (x, 0)) == IOR || GET_CODE (XEXP (x, 0)) == AND
4978 || GET_CODE (XEXP (x, 0)) == XOR
4979 || GET_CODE (XEXP (x, 0)) == PLUS)
4980 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
4981 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
4982 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))
4983 && INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)) < HOST_BITS_PER_WIDE_INT
4984 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
4985 && (INTVAL (XEXP (XEXP (x, 0), 1))
4986 & (((HOST_WIDE_INT) 1
4987 << INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))) - 1)) == 0)
4988 {
4989 HOST_WIDE_INT newop1
4990 = (INTVAL (XEXP (XEXP (x, 0), 1))
4991 >> INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)));
4992
4993 new = make_extraction (mode,
4994 gen_binary (GET_CODE (XEXP (x, 0)), mode,
4995 XEXP (XEXP (XEXP (x, 0), 0), 0),
4996 GEN_INT (newop1)),
4997 (INTVAL (XEXP (x, 1))
4998 - INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))),
4999 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
5000 code == LSHIFTRT, 0, in_code == COMPARE);
5001 }
5002
5003 /* Similarly for (ashiftrt (neg (ashift FOO C1)) C2). */
5004 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5005 && GET_CODE (XEXP (x, 0)) == NEG
5006 && GET_CODE (XEXP (XEXP (x, 0), 0)) == ASHIFT
5007 && GET_CODE (XEXP (XEXP (XEXP (x, 0), 0), 1)) == CONST_INT
5008 && INTVAL (XEXP (x, 1)) >= INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1)))
5009 new = make_extraction (mode,
5010 gen_unary (GET_CODE (XEXP (x, 0)), mode,
5011 XEXP (XEXP (XEXP (x, 0), 0), 0)),
5012 (INTVAL (XEXP (x, 1))
5013 - INTVAL (XEXP (XEXP (XEXP (x, 0), 0), 1))),
5014 NULL_RTX, mode_width - INTVAL (XEXP (x, 1)),
5015 code == LSHIFTRT, 0, in_code == COMPARE);
5016 break;
5017 }
5018
5019 if (new)
5020 {
5021 x = gen_lowpart_for_combine (mode, new);
5022 code = GET_CODE (x);
5023 }
5024
5025 /* Now recursively process each operand of this operation. */
5026 fmt = GET_RTX_FORMAT (code);
5027 for (i = 0; i < GET_RTX_LENGTH (code); i++)
5028 if (fmt[i] == 'e')
5029 {
5030 new = make_compound_operation (XEXP (x, i), next_code);
5031 SUBST (XEXP (x, i), new);
5032 }
5033
5034 return x;
5035 }
5036 \f
5037 /* Given M see if it is a value that would select a field of bits
5038 within an item, but not the entire word. Return -1 if not.
5039 Otherwise, return the starting position of the field, where 0 is the
5040 low-order bit.
5041
5042 *PLEN is set to the length of the field. */
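/* For example, M == 0x70 selects a three-bit field: we return 4 and set
   *PLEN to 3.  M == 0x50 returns -1, since its one bits are not
   contiguous.  */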
5043
5044 static int
5045 get_pos_from_mask (m, plen)
5046 unsigned HOST_WIDE_INT m;
5047 int *plen;
5048 {
5049 /* Get the bit number of the first 1 bit from the right, -1 if none. */
5050 int pos = exact_log2 (m & - m);
5051
5052 if (pos < 0)
5053 return -1;
5054
5055 /* Now shift off the low-order zero bits and see if we have a power of
5056 two minus 1. */
5057 *plen = exact_log2 ((m >> pos) + 1);
5058
5059 if (*plen <= 0)
5060 return -1;
5061
5062 return pos;
5063 }
5064 \f
5065 /* Rewrite X so that it is an expression in MODE. We only care about the
5066 low-order BITS bits so we can ignore AND operations that just clear
5067 higher-order bits.
5068
5069 Also, if REG is non-zero and X is a register equal in value to REG,
5070 replace X with REG. */
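/* As a sketch of the effect (assuming the target has an AND in MODE):
   forcing (and:SI X (const_int 255)) to QImode with BITS == 8 reduces to
   just the low part of X, since the AND only clears bits we have said we
   do not care about.  */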
5071
5072 static rtx
5073 force_to_mode (x, mode, bits, reg)
5074 rtx x;
5075 enum machine_mode mode;
5076 int bits;
5077 rtx reg;
5078 {
5079 enum rtx_code code = GET_CODE (x);
5080 enum machine_mode op_mode = mode;
5081
5082 /* If X is narrower than MODE or if BITS is larger than the size of MODE,
5083 just get X in the proper mode. */
5084
5085 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode)
5086 || bits > GET_MODE_BITSIZE (mode))
5087 return gen_lowpart_for_combine (mode, x);
5088
5089 switch (code)
5090 {
5091 case SIGN_EXTEND:
5092 case ZERO_EXTEND:
5093 case ZERO_EXTRACT:
5094 case SIGN_EXTRACT:
5095 x = expand_compound_operation (x);
5096 if (GET_CODE (x) != code)
5097 return force_to_mode (x, mode, bits, reg);
5098 break;
5099
5100 case REG:
5101 if (reg != 0 && (rtx_equal_p (get_last_value (reg), x)
5102 || rtx_equal_p (reg, get_last_value (x))))
5103 x = reg;
5104 break;
5105
5106 case CONST_INT:
5107 if (bits < HOST_BITS_PER_WIDE_INT)
5108 x = GEN_INT (INTVAL (x) & (((HOST_WIDE_INT) 1 << bits) - 1));
5109 return x;
5110
5111 case SUBREG:
5112 /* Ignore low-order SUBREGs. */
5113 if (subreg_lowpart_p (x))
5114 return force_to_mode (SUBREG_REG (x), mode, bits, reg);
5115 break;
5116
5117 case AND:
5118 /* If this is an AND with a constant, handle it specially here.
5119 Otherwise, we fall through to do the general binary case. */
5120
5121 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
5122 {
5123 HOST_WIDE_INT mask = INTVAL (XEXP (x, 1));
5124 int len = exact_log2 (mask + 1);
5125 rtx op = XEXP (x, 0);
5126
5127 /* If this is masking some low-order bits, we may be able to
5128 impose a stricter constraint on what bits of the operand are
5129 required. */
5130
5131 op = force_to_mode (op, mode, len > 0 ? MIN (len, bits) : bits,
5132 reg);
5133
5134 if (bits < HOST_BITS_PER_WIDE_INT)
5135 mask &= ((HOST_WIDE_INT) 1 << bits) - 1;
5136
5137 /* If we have no AND in MODE, use the original mode for the
5138 operation. */
5139
5140 if (and_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5141 op_mode = GET_MODE (x);
5142
5143 x = simplify_and_const_int (x, op_mode, op, mask);
5144
5145 /* If X is still an AND, see if it is an AND with a mask that
5146 is just some low-order bits. If so, and it is BITS wide (it
5147 can't be wider), we don't need it. */
5148
5149 if (GET_CODE (x) == AND && GET_CODE (XEXP (x, 1)) == CONST_INT
5150 && bits < HOST_BITS_PER_WIDE_INT
5151 && INTVAL (XEXP (x, 1)) == ((HOST_WIDE_INT) 1 << bits) - 1)
5152 x = XEXP (x, 0);
5153
5154 break;
5155 }
5156
5157 /* ... fall through ... */
5158
5159 case PLUS:
5160 case MINUS:
5161 case MULT:
5162 case IOR:
5163 case XOR:
5164 /* For most binary operations, just propagate into the operation and
5165 change the mode if we have an operation of that mode. */
5166
5167 if ((code == PLUS
5168 && add_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5169 || (code == MINUS
5170 && sub_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5171 || (code == MULT && (smul_optab->handlers[(int) mode].insn_code
5172 == CODE_FOR_nothing))
5173 || (code == AND
5174 && and_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5175 || (code == IOR
5176 && ior_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5177 || (code == XOR && (xor_optab->handlers[(int) mode].insn_code
5178 == CODE_FOR_nothing)))
5179 op_mode = GET_MODE (x);
5180
5181 x = gen_binary (code, op_mode,
5182 gen_lowpart_for_combine (op_mode,
5183 force_to_mode (XEXP (x, 0),
5184 mode, bits,
5185 reg)),
5186 gen_lowpart_for_combine (op_mode,
5187 force_to_mode (XEXP (x, 1),
5188 mode, bits,
5189 reg)));
5190 break;
5191
5192 case ASHIFT:
5193 case LSHIFT:
5194 /* For left shifts, do the same, but just for the first operand.
5195 If the shift count is a constant, we need even fewer bits of the
5196 first operand. */
5197
5198 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) < bits)
5199 bits -= INTVAL (XEXP (x, 1));
5200
5201 if ((code == ASHIFT
5202 && ashl_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5203 || (code == LSHIFT && (lshl_optab->handlers[(int) mode].insn_code
5204 == CODE_FOR_nothing)))
5205 op_mode = GET_MODE (x);
5206
5207 x = gen_binary (code, op_mode,
5208 gen_lowpart_for_combine (op_mode,
5209 force_to_mode (XEXP (x, 0),
5210 mode, bits,
5211 reg)),
5212 XEXP (x, 1));
5213 break;
5214
5215 case LSHIFTRT:
5216 /* We can only do something here if the shift count is a constant and
5217 the count plus BITS is no larger than the width of MODE; in that
5218 case, we can do the shift in MODE. */
5219
5220 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5221 && INTVAL (XEXP (x, 1)) + bits <= GET_MODE_BITSIZE (mode))
5222 {
5223 rtx inner = force_to_mode (XEXP (x, 0), mode,
5224 bits + INTVAL (XEXP (x, 1)), reg);
5225
5226 if (lshr_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5227 op_mode = GET_MODE (x);
5228
5229 x = gen_binary (LSHIFTRT, op_mode,
5230 gen_lowpart_for_combine (op_mode, inner),
5231 XEXP (x, 1));
5232 }
5233 break;
5234
5235 case ASHIFTRT:
5236 /* If this is a sign-extension operation that just affects bits
5237 we don't care about, remove it. */
5238
5239 if (GET_CODE (XEXP (x, 1)) == CONST_INT
5240 && INTVAL (XEXP (x, 1)) >= 0
5241 && INTVAL (XEXP (x, 1)) <= GET_MODE_BITSIZE (GET_MODE (x)) - bits
5242 && GET_CODE (XEXP (x, 0)) == ASHIFT
5243 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT
5244 && INTVAL (XEXP (XEXP (x, 0), 1)) == INTVAL (XEXP (x, 1)))
5245 return force_to_mode (XEXP (XEXP (x, 0), 0), mode, bits, reg);
5246 break;
5247
5248 case NEG:
5249 case NOT:
5250 if ((code == NEG
5251 && neg_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
5252 || (code == NOT && (one_cmpl_optab->handlers[(int) mode].insn_code
5253 == CODE_FOR_nothing)))
5254 op_mode = GET_MODE (x);
5255
5256 /* Handle these similarly to the way we handle most binary operations. */
5257 x = gen_unary (code, op_mode,
5258 gen_lowpart_for_combine (op_mode,
5259 force_to_mode (XEXP (x, 0), mode,
5260 bits, reg)));
5261 break;
5262
5263 case IF_THEN_ELSE:
5264 /* We have no way of knowing if the IF_THEN_ELSE can itself be
5265 written in a narrower mode. We play it safe and do not do so. */
5266
5267 SUBST (XEXP (x, 1),
5268 gen_lowpart_for_combine (GET_MODE (x),
5269 force_to_mode (XEXP (x, 1), mode,
5270 bits, reg)));
5271 SUBST (XEXP (x, 2),
5272 gen_lowpart_for_combine (GET_MODE (x),
5273 force_to_mode (XEXP (x, 2), mode,
5274 bits, reg)));
5275 break;
5276 }
5277
5278 /* Ensure we return a value of the proper mode. */
5279 return gen_lowpart_for_combine (mode, x);
5280 }
5281 \f
5282 /* Return the value of expression X given the fact that condition COND
5283 is known to be true when applied to REG as its first operand and VAL
5284 as its second. X is known to not be shared and so can be modified in
5285 place.
5286
5287 We only handle the simplest cases, and specifically those cases that
5288 arise with IF_THEN_ELSE expressions. */
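/* For instance, if COND is GE and VAL is const0_rtx, then (abs REG)
   simplifies to REG, and a comparison such as (lt REG (const_int 0))
   simplifies to const0_rtx.  */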
5289
5290 static rtx
5291 known_cond (x, cond, reg, val)
5292 rtx x;
5293 enum rtx_code cond;
5294 rtx reg, val;
5295 {
5296 enum rtx_code code = GET_CODE (x);
5297 rtx new, temp;
5298 char *fmt;
5299 int i, j;
5300
5301 if (side_effects_p (x))
5302 return x;
5303
5304 if (cond == EQ && rtx_equal_p (x, reg))
5305 return val;
5306
5307 /* If X is (abs REG) and we know something about REG's relationship
5308 with zero, we may be able to simplify this. */
5309
5310 if (code == ABS && rtx_equal_p (XEXP (x, 0), reg) && val == const0_rtx)
5311 switch (cond)
5312 {
5313 case GE: case GT: case EQ:
5314 return XEXP (x, 0);
5315 case LT: case LE:
5316 return gen_unary (NEG, GET_MODE (XEXP (x, 0)), XEXP (x, 0));
5317 }
5318
5319 /* The only other cases we handle are MIN, MAX, and comparisons if the
5320 operands are the same as REG and VAL. */
5321
5322 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == 'c')
5323 {
5324 if (rtx_equal_p (XEXP (x, 0), val))
5325 cond = swap_condition (cond), temp = val, val = reg, reg = temp;
5326
5327 if (rtx_equal_p (XEXP (x, 0), reg) && rtx_equal_p (XEXP (x, 1), val))
5328 {
5329 if (GET_RTX_CLASS (code) == '<')
5330 return (comparison_dominates_p (cond, code) ? const_true_rtx
5331 : (comparison_dominates_p (cond,
5332 reverse_condition (code))
5333 ? const0_rtx : x));
5334
5335 else if (code == SMAX || code == SMIN
5336 || code == UMIN || code == UMAX)
5337 {
5338 int unsignedp = (code == UMIN || code == UMAX);
5339
5340 if (code == SMAX || code == UMAX)
5341 cond = reverse_condition (cond);
5342
5343 switch (cond)
5344 {
5345 case GE: case GT:
5346 return unsignedp ? x : XEXP (x, 1);
5347 case LE: case LT:
5348 return unsignedp ? x : XEXP (x, 0);
5349 case GEU: case GTU:
5350 return unsignedp ? XEXP (x, 1) : x;
5351 case LEU: case LTU:
5352 return unsignedp ? XEXP (x, 0) : x;
5353 }
5354 }
5355 }
5356 }
5357
5358 fmt = GET_RTX_FORMAT (code);
5359 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5360 {
5361 if (fmt[i] == 'e')
5362 SUBST (XEXP (x, i), known_cond (XEXP (x, i), cond, reg, val));
5363 else if (fmt[i] == 'E')
5364 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
5365 SUBST (XVECEXP (x, i, j), known_cond (XVECEXP (x, i, j),
5366 cond, reg, val));
5367 }
5368
5369 return x;
5370 }
5371 \f
5372 /* See if X, a SET operation, can be rewritten as a bit-field assignment.
5373 Return that assignment if so.
5374
5375 We only handle the most common cases. */
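/* For example, the one-bit clear
   (set DEST (and (rotate (const_int -2) POS) DEST))
   is rewritten below as, in essence,
   (set (zero_extract DEST (const_int 1) POS) (const_int 0)).  */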
5376
5377 static rtx
5378 make_field_assignment (x)
5379 rtx x;
5380 {
5381 rtx dest = SET_DEST (x);
5382 rtx src = SET_SRC (x);
5383 rtx ourdest;
5384 rtx assign;
5385 HOST_WIDE_INT c1;
5386 int pos, len;
5387 rtx other;
5388 enum machine_mode mode;
5389
5390 /* If SRC was (and (not (ashift (const_int 1) POS)) DEST), this is
5391 a clear of a one-bit field. We will have changed it to
5392 (and (rotate (const_int -2) POS) DEST), so check for that. Also check
5393 for a SUBREG. */
5394
5395 if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == ROTATE
5396 && GET_CODE (XEXP (XEXP (src, 0), 0)) == CONST_INT
5397 && INTVAL (XEXP (XEXP (src, 0), 0)) == -2
5398 && (rtx_equal_p (dest, XEXP (src, 1))
5399 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5400 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
5401 {
5402 assign = make_extraction (VOIDmode, dest, -1, XEXP (XEXP (src, 0), 1),
5403 1, 1, 1, 0);
5404 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
5405 }
5406
5407 else if (GET_CODE (src) == AND && GET_CODE (XEXP (src, 0)) == SUBREG
5408 && subreg_lowpart_p (XEXP (src, 0))
5409 && (GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
5410 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (src, 0)))))
5411 && GET_CODE (SUBREG_REG (XEXP (src, 0))) == ROTATE
5412 && INTVAL (XEXP (SUBREG_REG (XEXP (src, 0)), 0)) == -2
5413 && (rtx_equal_p (dest, XEXP (src, 1))
5414 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5415 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
5416 {
5417 assign = make_extraction (VOIDmode, dest, -1,
5418 XEXP (SUBREG_REG (XEXP (src, 0)), 1),
5419 1, 1, 1, 0);
5420 return gen_rtx (SET, VOIDmode, assign, const0_rtx);
5421 }
5422
5423 /* If SRC is (ior (ashift (const_int 1) POS) DEST), this is a set of a
5424 one-bit field. */
5425 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == ASHIFT
5426 && XEXP (XEXP (src, 0), 0) == const1_rtx
5427 && (rtx_equal_p (dest, XEXP (src, 1))
5428 || rtx_equal_p (dest, get_last_value (XEXP (src, 1)))
5429 || rtx_equal_p (get_last_value (dest), XEXP (src, 1))))
5430 {
5431 assign = make_extraction (VOIDmode, dest, -1, XEXP (XEXP (src, 0), 1),
5432 1, 1, 1, 0);
5433 return gen_rtx (SET, VOIDmode, assign, const1_rtx);
5434 }
5435
5436 /* The other case we handle is assignments into a constant-position
5437 field. They look like (ior (and DEST C1) OTHER). If C1 represents
5438 a mask that has all one bits except for a group of zero bits and
5439 OTHER is known to have zeros where C1 has ones, this is such an
5440 assignment. Compute the position and length from C1. Shift OTHER
5441 to the appropriate position, force it to the required mode, and
5442 make the extraction. Check for the AND in both operands. */
5443
5444 if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 0)) == AND
5445 && GET_CODE (XEXP (XEXP (src, 0), 1)) == CONST_INT
5446 && (rtx_equal_p (XEXP (XEXP (src, 0), 0), dest)
5447 || rtx_equal_p (XEXP (XEXP (src, 0), 0), get_last_value (dest))
5448 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 0), 0)), dest)))
5449 c1 = INTVAL (XEXP (XEXP (src, 0), 1)), other = XEXP (src, 1);
5450 else if (GET_CODE (src) == IOR && GET_CODE (XEXP (src, 1)) == AND
5451 && GET_CODE (XEXP (XEXP (src, 1), 1)) == CONST_INT
5452 && (rtx_equal_p (XEXP (XEXP (src, 1), 0), dest)
5453 || rtx_equal_p (XEXP (XEXP (src, 1), 0), get_last_value (dest))
5454 || rtx_equal_p (get_last_value (XEXP (XEXP (src, 1), 0)),
5455 dest)))
5456 c1 = INTVAL (XEXP (XEXP (src, 1), 1)), other = XEXP (src, 0);
5457 else
5458 return x;
5459
5460 pos = get_pos_from_mask (~c1, &len);
5461 if (pos < 0 || pos + len > GET_MODE_BITSIZE (GET_MODE (dest))
5462 || (GET_MODE_BITSIZE (GET_MODE (other)) <= HOST_BITS_PER_WIDE_INT
5463 && (c1 & significant_bits (other, GET_MODE (other))) != 0))
5464 return x;
5465
5466 assign = make_extraction (VOIDmode, dest, pos, NULL_RTX, len, 1, 1, 0);
5467
5468 /* The mode to use for the source is the mode of the assignment, or of
5469 what is inside a possible STRICT_LOW_PART. */
5470 mode = (GET_CODE (assign) == STRICT_LOW_PART
5471 ? GET_MODE (XEXP (assign, 0)) : GET_MODE (assign));
5472
5473 /* Shift OTHER right POS places and make it the source, restricting it
5474 to the proper length and mode. */
5475
5476 src = force_to_mode (simplify_shift_const (NULL_RTX, LSHIFTRT,
5477 GET_MODE (src), other, pos),
5478 mode, len, dest);
5479
5480 return gen_rtx_combine (SET, VOIDmode, assign, src);
5481 }
5482 \f
5483 /* See if X is of the form (+ (* a c) (* b c)) and convert to (* (+ a b) c)
5484 if so. */
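/* E.g., (ior (and A C) (and B C)) becomes (and (ior A B) C), which may
   then simplify further.  */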
5485
5486 static rtx
5487 apply_distributive_law (x)
5488 rtx x;
5489 {
5490 enum rtx_code code = GET_CODE (x);
5491 rtx lhs, rhs, other;
5492 rtx tem;
5493 enum rtx_code inner_code;
5494
5495 /* The outer operation can only be one of the following: */
5496 if (code != IOR && code != AND && code != XOR
5497 && code != PLUS && code != MINUS)
5498 return x;
5499
5500 lhs = XEXP (x, 0), rhs = XEXP (x, 1);
5501
5502 /* If either operand is a primitive we can't do anything, so get out fast. */
5503 if (GET_RTX_CLASS (GET_CODE (lhs)) == 'o'
5504 || GET_RTX_CLASS (GET_CODE (rhs)) == 'o')
5505 return x;
5506
5507 lhs = expand_compound_operation (lhs);
5508 rhs = expand_compound_operation (rhs);
5509 inner_code = GET_CODE (lhs);
5510 if (inner_code != GET_CODE (rhs))
5511 return x;
5512
5513 /* See if the inner and outer operations distribute. */
5514 switch (inner_code)
5515 {
5516 case LSHIFTRT:
5517 case ASHIFTRT:
5518 case AND:
5519 case IOR:
5520 /* These all distribute except over PLUS and MINUS. */
5521 if (code == PLUS || code == MINUS)
5522 return x;
5523 break;
5524
5525 case MULT:
5526 if (code != PLUS && code != MINUS)
5527 return x;
5528 break;
5529
5530 case ASHIFT:
5531 case LSHIFT:
5532 /* These are also multiplies, so they distribute over everything. */
5533 break;
5534
5535 case SUBREG:
5536 /* A non-paradoxical SUBREG distributes over all operations, provided
5537 the inner modes and word numbers are the same, this is an extraction
5538 of a low-order part, we don't convert an fp operation to int or
5539 vice versa, and we would not be converting a single-word
5540 operation into a multi-word operation. The latter test is not
5541 required, but it prevents generating unneeded multi-word operations.
5542 Some of the previous tests are redundant given the latter test, but
5543 are retained because they are required for correctness.
5544
5545 We produce the result slightly differently in this case. */
5546
5547 if (GET_MODE (SUBREG_REG (lhs)) != GET_MODE (SUBREG_REG (rhs))
5548 || SUBREG_WORD (lhs) != SUBREG_WORD (rhs)
5549 || ! subreg_lowpart_p (lhs)
5550 || (GET_MODE_CLASS (GET_MODE (lhs))
5551 != GET_MODE_CLASS (GET_MODE (SUBREG_REG (lhs))))
5552 || (GET_MODE_SIZE (GET_MODE (lhs))
5553 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))))
5554 || GET_MODE_SIZE (GET_MODE (SUBREG_REG (lhs))) > UNITS_PER_WORD)
5555 return x;
5556
5557 tem = gen_binary (code, GET_MODE (SUBREG_REG (lhs)),
5558 SUBREG_REG (lhs), SUBREG_REG (rhs));
5559 return gen_lowpart_for_combine (GET_MODE (x), tem);
5560
5561 default:
5562 return x;
5563 }
5564
5565 /* Set LHS and RHS to the inner operands (A and B in the example
5566 above) and set OTHER to the common operand (C in the example).
5567 There is only one way to do this unless the inner operation is
5568 commutative. */
5569 if (GET_RTX_CLASS (inner_code) == 'c'
5570 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 0)))
5571 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 1);
5572 else if (GET_RTX_CLASS (inner_code) == 'c'
5573 && rtx_equal_p (XEXP (lhs, 0), XEXP (rhs, 1)))
5574 other = XEXP (lhs, 0), lhs = XEXP (lhs, 1), rhs = XEXP (rhs, 0);
5575 else if (GET_RTX_CLASS (inner_code) == 'c'
5576 && rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 0)))
5577 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 1);
5578 else if (rtx_equal_p (XEXP (lhs, 1), XEXP (rhs, 1)))
5579 other = XEXP (lhs, 1), lhs = XEXP (lhs, 0), rhs = XEXP (rhs, 0);
5580 else
5581 return x;
5582
5583 /* Form the new inner operation, seeing if it simplifies first. */
5584 tem = gen_binary (code, GET_MODE (x), lhs, rhs);
5585
5586 /* There is one exception to the general way of distributing:
5587 (a | b) ^ (a | c) -> (~a) & (b ^ c) */
5588 if (code == XOR && inner_code == IOR)
5589 {
5590 inner_code = AND;
5591 other = gen_unary (NOT, GET_MODE (x), other);
5592 }
5593
5594 /* We may be able to continue distributing the result, so call
5595 ourselves recursively on the inner operation before forming the
5596 outer operation, which we return. */
5597 return gen_binary (inner_code, GET_MODE (x),
5598 apply_distributive_law (tem), other);
5599 }
5600 \f
5601 /* We have X, a logical `and' of VAROP with the constant CONSTOP, to be done
5602 in MODE.
5603
5604 Return an equivalent form, if different from X. Otherwise, return X. If
5605 X is zero, we are to always construct the equivalent form. */
5606
5607 static rtx
5608 simplify_and_const_int (x, mode, varop, constop)
5609 rtx x;
5610 enum machine_mode mode;
5611 rtx varop;
5612 unsigned HOST_WIDE_INT constop;
5613 {
5614 register enum machine_mode tmode;
5615 register rtx temp;
5616 unsigned HOST_WIDE_INT significant;
5617
5618 /* There is a large class of optimizations based on the principle that
5619 some operations produce results where certain bits are known to be zero,
5620 and hence are not significant to the AND. For example, if we have just
5621 done a left shift of one bit, the low-order bit is known to be zero and
5622 hence an AND with a mask of ~1 would not do anything.
5623
5624 At the end of the following loop, we set:
5625
5626 VAROP to be the item to be AND'ed with;
5627 CONSTOP to the constant value to AND it with. */
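  /* An illustration: if VAROP is (ashift X (const_int 1)) and CONSTOP
     is 1, the low-order bit of VAROP is known to be zero, so CONSTOP
     loses its only bit and we return const0_rtx below.  */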
5628
5629 while (1)
5630 {
5631 /* If we ever encounter a mode wider than the host machine's widest
5632 integer size, we can't compute the masks accurately, so give up. */
5633 if (GET_MODE_BITSIZE (GET_MODE (varop)) > HOST_BITS_PER_WIDE_INT)
5634 break;
5635
5636 /* Unless one of the cases below does a `continue',
5637 a `break' will be executed to exit the loop. */
5638
5639 switch (GET_CODE (varop))
5640 {
5641 case CLOBBER:
5642 /* If VAROP is a (clobber (const_int)), return it since we know
5643 we are generating something that won't match. */
5644 return varop;
5645
5646 #if ! BITS_BIG_ENDIAN
5647 case USE:
5648 /* VAROP is a (use (mem ..)) that was made from a bit-field
5649 extraction that spanned the boundary of the MEM. If we are
5650 now masking so it is within that boundary, we don't need the
5651 USE any more. */
5652 if ((constop & ~ GET_MODE_MASK (GET_MODE (XEXP (varop, 0)))) == 0)
5653 {
5654 varop = XEXP (varop, 0);
5655 continue;
5656 }
5657 break;
5658 #endif
5659
5660 case SUBREG:
5661 if (subreg_lowpart_p (varop)
5662 /* We can ignore the effect of this SUBREG if it narrows the mode
5663 or, on machines where byte operations extend, if the
5664 constant masks to zero all the bits the mode doesn't have. */
5665 && ((GET_MODE_SIZE (GET_MODE (varop))
5666 < GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop))))
5667 #if defined(BYTE_LOADS_ZERO_EXTEND) || defined(BYTE_LOADS_SIGN_EXTEND)
5668 || (0 == (constop
5669 & GET_MODE_MASK (GET_MODE (varop))
5670 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (varop)))))
5671 #endif
5672 ))
5673 {
5674 varop = SUBREG_REG (varop);
5675 continue;
5676 }
5677 break;
5678
5679 case ZERO_EXTRACT:
5680 case SIGN_EXTRACT:
5681 case ZERO_EXTEND:
5682 case SIGN_EXTEND:
5683 /* Try to expand these into a series of shifts and then work
5684 with that result. If we can't, for example, if the extract
5685 isn't at a fixed position, give up. */
5686 temp = expand_compound_operation (varop);
5687 if (temp != varop)
5688 {
5689 varop = temp;
5690 continue;
5691 }
5692 break;
5693
5694 case AND:
5695 if (GET_CODE (XEXP (varop, 1)) == CONST_INT)
5696 {
5697 constop &= INTVAL (XEXP (varop, 1));
5698 varop = XEXP (varop, 0);
5699 continue;
5700 }
5701 break;
5702
5703 case IOR:
5704 case XOR:
5705 /* If VAROP is (ior (lshiftrt FOO C1) C2), try to commute the IOR and
5706 LSHIFTRT so we end up with an (and (lshiftrt (ior ...) ...) ...)
5707 operation which may be a bitfield extraction. */
5708
5709 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
5710 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5711 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
5712 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT
5713 && GET_CODE (XEXP (varop, 1)) == CONST_INT
5714 && ((INTVAL (XEXP (varop, 1))
5715 & ~ significant_bits (XEXP (varop, 0),
5716 GET_MODE (varop))) == 0))
5717 {
5718 temp = GEN_INT ((INTVAL (XEXP (varop, 1)) & constop)
5719 << INTVAL (XEXP (XEXP (varop, 0), 1)));
5720 temp = gen_binary (GET_CODE (varop), GET_MODE (varop),
5721 XEXP (XEXP (varop, 0), 0), temp);
5722 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5723 temp, XEXP (varop, 1));
5724 continue;
5725 }
5726
5727 /* Apply the AND to both branches of the IOR or XOR, then try to
5728 apply the distributive law. This may eliminate operations
5729 if either branch can be simplified because of the AND.
5730 It may also make some cases more complex, but those cases
5731 probably won't match a pattern either with or without this. */
5732 return
5733 gen_lowpart_for_combine
5734 (mode, apply_distributive_law
5735 (gen_rtx_combine
5736 (GET_CODE (varop), GET_MODE (varop),
5737 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
5738 XEXP (varop, 0), constop),
5739 simplify_and_const_int (NULL_RTX, GET_MODE (varop),
5740 XEXP (varop, 1), constop))));
5741
5742 case NOT:
5743 /* (and (not FOO) CONSTOP) is (and (xor FOO CONSTOP) CONSTOP), so if
5744 FOO is an LSHIFTRT we can do the same as above. */
5745
5746 if (GET_CODE (XEXP (varop, 0)) == LSHIFTRT
5747 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5748 && INTVAL (XEXP (XEXP (varop, 0), 1)) >= 0
5749 && INTVAL (XEXP (XEXP (varop, 0), 1)) < HOST_BITS_PER_WIDE_INT)
5750 {
5751 temp = GEN_INT (constop << INTVAL (XEXP (XEXP (varop, 0), 1)));
5752 temp = gen_binary (XOR, GET_MODE (varop),
5753 XEXP (XEXP (varop, 0), 0), temp);
5754 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5755 temp, XEXP (XEXP (varop, 0), 1));
5756 continue;
5757 }
5758 break;
5759
5760 case ASHIFTRT:
5761 /* If we are just looking for the sign bit, we don't need this
5762 shift at all, even if it has a variable count. */
5763 if (constop == ((HOST_WIDE_INT) 1
5764 << (GET_MODE_BITSIZE (GET_MODE (varop)) - 1)))
5765 {
5766 varop = XEXP (varop, 0);
5767 continue;
5768 }
5769
5770 /* If this is a shift by a constant, get a mask that contains
5771 those bits that are not copies of the sign bit. We then have
5772 two cases: If CONSTOP only includes those bits, this can be
5773 a logical shift, which may allow simplifications. If CONSTOP
5774 is a single-bit field not within those bits, we are requesting
5775 a copy of the sign bit and hence can shift the sign bit to
5776 the appropriate location. */
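	  /* For example, (and:SI (ashiftrt:SI X (const_int 31)) (const_int 1))
	     requests only the sign bit, so the arithmetic shift can become
	     the logical (lshiftrt:SI X (const_int 31)).  */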
5777 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5778 && INTVAL (XEXP (varop, 1)) >= 0
5779 && INTVAL (XEXP (varop, 1)) < HOST_BITS_PER_WIDE_INT)
5780 {
5781 int i = -1;
5782
5783 significant = GET_MODE_MASK (GET_MODE (varop));
5784 significant >>= INTVAL (XEXP (varop, 1));
5785
5786 if ((constop & ~significant) == 0
5787 || (i = exact_log2 (constop)) >= 0)
5788 {
5789 varop = simplify_shift_const
5790 (varop, LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
5791 i < 0 ? INTVAL (XEXP (varop, 1))
5792 : GET_MODE_BITSIZE (GET_MODE (varop)) - 1 - i);
5793 if (GET_CODE (varop) != ASHIFTRT)
5794 continue;
5795 }
5796 }
5797
5798 /* If our mask is 1, convert this to a LSHIFTRT. This can be done
5799 even if the shift count isn't a constant. */
5800 if (constop == 1)
5801 varop = gen_rtx_combine (LSHIFTRT, GET_MODE (varop),
5802 XEXP (varop, 0), XEXP (varop, 1));
5803 break;
5804
5805 case NE:
5806 /* (and (ne FOO 0) CONST) can be (and FOO CONST) if CONST is
5807 included in STORE_FLAG_VALUE and FOO has no significant bits
5808 not in CONST. */
5809 if ((constop & ~ STORE_FLAG_VALUE) == 0
5810 && XEXP (varop, 1) == const0_rtx
5811 && (significant_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
5812 {
5813 varop = XEXP (varop, 0);
5814 continue;
5815 }
5816 break;
5817
5818 case PLUS:
5819 /* In (and (plus FOO C1) M), if M is a mask that just turns off
5820 low-order bits (as in an alignment operation) and FOO is already
5821 aligned to that boundary, we can remove this AND
5822 and possibly the PLUS if it is now adding zero. */
5823 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
5824 && exact_log2 (-constop) >= 0
5825 && (significant_bits (XEXP (varop, 0), mode) & ~ constop) == 0)
5826 {
5827 varop = plus_constant (XEXP (varop, 0),
5828 INTVAL (XEXP (varop, 1)) & constop);
5829 constop = ~0;
5830 break;
5831 }
5832
5833 /* ... fall through ... */
5834
5835 case MINUS:
5836 /* In (and (plus (and FOO M1) BAR) M2), if M1 and M2 are one
5837 less than powers of two and M2 is narrower than M1, we can
5838 eliminate the inner AND. This occurs when incrementing
5839 bit fields. */
5840
5841 if (GET_CODE (XEXP (varop, 0)) == ZERO_EXTRACT
5842 || GET_CODE (XEXP (varop, 0)) == ZERO_EXTEND)
5843 SUBST (XEXP (varop, 0),
5844 expand_compound_operation (XEXP (varop, 0)));
5845
5846 if (GET_CODE (XEXP (varop, 0)) == AND
5847 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
5848 && exact_log2 (constop + 1) >= 0
5849 && exact_log2 (INTVAL (XEXP (XEXP (varop, 0), 1)) + 1) >= 0
5850 && (~ INTVAL (XEXP (XEXP (varop, 0), 1)) & constop) == 0)
5851 SUBST (XEXP (varop, 0), XEXP (XEXP (varop, 0), 0));
5852 break;
5853 }
5854
5855 break;
5856 }
5857
5858 /* If we have reached a constant, this whole thing is constant. */
5859 if (GET_CODE (varop) == CONST_INT)
5860 return GEN_INT (constop & INTVAL (varop));
5861
5862 /* See what bits are significant in VAROP. */
5863 significant = significant_bits (varop, mode);
5864
5865 /* Turn off all bits in the constant that are known to already be zero.
5866 Thus, if the AND isn't needed at all, we will have CONSTOP == SIGNIFICANT
5867 which is tested below. */
5868
5869 constop &= significant;
5870
5871 /* If we don't have any bits left, return zero. */
5872 if (constop == 0)
5873 return const0_rtx;
5874
5875 /* Get VAROP in MODE. Try to get a SUBREG if not. Don't make a new SUBREG
5876 if we already had one (just check for the simplest cases). */
5877 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
5878 && GET_MODE (XEXP (x, 0)) == mode
5879 && SUBREG_REG (XEXP (x, 0)) == varop)
5880 varop = XEXP (x, 0);
5881 else
5882 varop = gen_lowpart_for_combine (mode, varop);
5883
5884 /* If we can't make the SUBREG, try to return what we were given. */
5885 if (GET_CODE (varop) == CLOBBER)
5886 return x ? x : varop;
5887
5888 /* If we are only masking insignificant bits, return VAROP. */
5889 if (constop == significant)
5890 x = varop;
5891
5892 /* Otherwise, return an AND. See how much, if any, of X we can use. */
5893 else if (x == 0 || GET_CODE (x) != AND || GET_MODE (x) != mode)
5894 x = gen_rtx_combine (AND, mode, varop, GEN_INT (constop));
5895
5896 else
5897 {
5898 if (GET_CODE (XEXP (x, 1)) != CONST_INT
5899 || INTVAL (XEXP (x, 1)) != constop)
5900 SUBST (XEXP (x, 1), GEN_INT (constop));
5901
5902 SUBST (XEXP (x, 0), varop);
5903 }
5904
5905 return x;
5906 }
5907 \f
5908 /* Given an expression, X, compute which bits in X can be non-zero.
5909 We don't care about bits outside of those defined in MODE.
5910
5911 For most X this is simply GET_MODE_MASK (MODE), but if X is
5912 a shift, AND, or zero_extract, we can do better. */
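/* For instance, (and X (const_int 15)) can have non-zero bits only
   within 15, and (lshiftrt:SI X (const_int 28)) only within 15 as well,
   since the shift clears the 28 high-order bits.  */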
5913
5914 static unsigned HOST_WIDE_INT
5915 significant_bits (x, mode)
5916 rtx x;
5917 enum machine_mode mode;
5918 {
5919 unsigned HOST_WIDE_INT significant = GET_MODE_MASK (mode);
5920 unsigned HOST_WIDE_INT inner_sig;
5921 enum rtx_code code;
5922 int mode_width = GET_MODE_BITSIZE (mode);
5923 rtx tem;
5924
5925 /* If X is wider than MODE, use its mode instead. */
5926 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
5927 {
5928 mode = GET_MODE (x);
5929 significant = GET_MODE_MASK (mode);
5930 mode_width = GET_MODE_BITSIZE (mode);
5931 }
5932
5933 if (mode_width > HOST_BITS_PER_WIDE_INT)
5934 /* Our only callers in this case look for single bit values. So
5935 just return the mode mask. Those tests will then be false. */
5936 return significant;
5937
5938 code = GET_CODE (x);
5939 switch (code)
5940 {
5941 case REG:
5942 #ifdef STACK_BOUNDARY
5943 /* If this is the stack pointer, we may know something about its
5944 alignment. If PUSH_ROUNDING is defined, it is possible for the
5945 stack to be momentarily aligned only to that amount, so we pick
5946 the least alignment. */
5947
5948 if (x == stack_pointer_rtx)
5949 {
5950 int sp_alignment = STACK_BOUNDARY / BITS_PER_UNIT;
5951
5952 #ifdef PUSH_ROUNDING
5953 sp_alignment = MIN (PUSH_ROUNDING (1), sp_alignment);
5954 #endif
5955
5956 return significant & ~ (sp_alignment - 1);
5957 }
5958 #endif
5959
5960 /* If X is a register whose value we can find, use that value.
5961 Otherwise, use the previously-computed significant bits for this
5962 register. */
5963
5964 tem = get_last_value (x);
5965 if (tem)
5966 return significant_bits (tem, mode);
5967 else if (significant_valid && reg_significant[REGNO (x)])
5968 return reg_significant[REGNO (x)] & significant;
5969 else
5970 return significant;
5971
5972 case CONST_INT:
5973 return INTVAL (x);
5974
5975 #ifdef BYTE_LOADS_ZERO_EXTEND
5976 case MEM:
5977 /* In many, if not most, RISC machines, reading a byte from memory
5978 zeros the rest of the register. Noticing that fact saves a lot
5979 of extra zero-extends. */
5980 significant &= GET_MODE_MASK (GET_MODE (x));
5981 break;
5982 #endif
5983
5984 #if STORE_FLAG_VALUE == 1
5985 case EQ: case NE:
5986 case GT: case GTU:
5987 case LT: case LTU:
5988 case GE: case GEU:
5989 case LE: case LEU:
5990
5991 if (GET_MODE_CLASS (mode) == MODE_INT)
5992 significant = 1;
5993
5994 /* A comparison operation only sets the bits given by its mode. The
5995 rest are set undefined. */
5996 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
5997 significant |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
5998 break;
5999 #endif
6000
6001 case NEG:
6002 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6003 == GET_MODE_BITSIZE (GET_MODE (x)))
6004 significant = 1;
6005
6006 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
6007 significant |= (GET_MODE_MASK (mode) & ~ GET_MODE_MASK (GET_MODE (x)));
6008 break;
6009
6010 case ABS:
6011 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
6012 == GET_MODE_BITSIZE (GET_MODE (x)))
6013 significant = 1;
6014 break;
6015
6016 case TRUNCATE:
6017 significant &= (significant_bits (XEXP (x, 0), mode)
6018 & GET_MODE_MASK (mode));
6019 break;
6020
6021 case ZERO_EXTEND:
6022 significant &= significant_bits (XEXP (x, 0), mode);
6023 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6024 significant &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6025 break;
6026
6027 case SIGN_EXTEND:
6028 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
6029 Otherwise, show that all the bits in the outer mode but not in
6030 the inner mode may be non-zero. */
6031 inner_sig = significant_bits (XEXP (x, 0), mode);
6032 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
6033 {
6034 inner_sig &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
6035 if (inner_sig &
6036 (((HOST_WIDE_INT) 1
6037 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
6038 inner_sig |= (GET_MODE_MASK (mode)
6039 & ~ GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
6040 }
6041
6042 significant &= inner_sig;
6043 break;
6044
6045 case AND:
6046 significant &= (significant_bits (XEXP (x, 0), mode)
6047 & significant_bits (XEXP (x, 1), mode));
6048 break;
6049
6050 case XOR: case IOR:
6051 case UMIN: case UMAX: case SMIN: case SMAX:
6052 significant &= (significant_bits (XEXP (x, 0), mode)
6053 | significant_bits (XEXP (x, 1), mode));
6054 break;
6055
6056 case PLUS: case MINUS:
6057 case MULT:
6058 case DIV: case UDIV:
6059 case MOD: case UMOD:
6060 /* We can apply the rules of arithmetic to compute the number of
6061 high- and low-order zero bits of these operations. We start by
6062 computing the width (position of the highest-order non-zero bit)
6063 and the number of low-order zero bits for each value. */
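      /* A worked example: if the operands of a PLUS have significant
	 bits 0x0c (width 4, two low-order zeros) and 0x30 (width 6,
	 four low-order zeros), then RESULT_WIDTH is 7 and RESULT_LOW
	 is 2, so the sum's significant bits lie within 0x7c.  */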
6064 {
6065 unsigned HOST_WIDE_INT sig0 = significant_bits (XEXP (x, 0), mode);
6066 unsigned HOST_WIDE_INT sig1 = significant_bits (XEXP (x, 1), mode);
6067 int width0 = floor_log2 (sig0) + 1;
6068 int width1 = floor_log2 (sig1) + 1;
6069 int low0 = floor_log2 (sig0 & -sig0);
6070 int low1 = floor_log2 (sig1 & -sig1);
6071 int op0_maybe_minusp = ((sig0 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) != 0);
6072 int op1_maybe_minusp = ((sig1 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) != 0);
6073 int result_width = mode_width;
6074 int result_low = 0;
6075
6076 switch (code)
6077 {
6078 case PLUS:
6079 result_width = MAX (width0, width1) + 1;
6080 result_low = MIN (low0, low1);
6081 break;
6082 case MINUS:
6083 result_low = MIN (low0, low1);
6084 break;
6085 case MULT:
6086 result_width = width0 + width1;
6087 result_low = low0 + low1;
6088 break;
6089 case DIV:
6090 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6091 result_width = width0;
6092 break;
6093 case UDIV:
6094 result_width = width0;
6095 break;
6096 case MOD:
6097 if (! op0_maybe_minusp && ! op1_maybe_minusp)
6098 result_width = MIN (width0, width1);
6099 result_low = MIN (low0, low1);
6100 break;
6101 case UMOD:
6102 result_width = MIN (width0, width1);
6103 result_low = MIN (low0, low1);
6104 break;
6105 }
6106
6107 if (result_width < mode_width)
6108 significant &= ((HOST_WIDE_INT) 1 << result_width) - 1;
6109
6110 if (result_low > 0)
6111 significant &= ~ (((HOST_WIDE_INT) 1 << result_low) - 1);
6112 }
6113 break;
6114
6115 case ZERO_EXTRACT:
6116 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6117 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6118 significant &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
6119 break;
6120
6121 case SUBREG:
6122 /* If this is a SUBREG formed for a promoted variable that has
6123 been zero-extended, we know that at least the high-order bits
6124 are zero, though others might be too. */
6125
6126 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
6127 significant = (GET_MODE_MASK (GET_MODE (x))
6128 & significant_bits (SUBREG_REG (x), GET_MODE (x)));
6129
6130 /* If the inner mode is a single word for both the host and target
6131 machines, we can compute this from which bits of the inner
6132 object are known significant. */
6133 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
6134 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6135 <= HOST_BITS_PER_WIDE_INT))
6136 {
6137 significant &= significant_bits (SUBREG_REG (x), mode);
6138 #if ! defined(BYTE_LOADS_ZERO_EXTEND) && ! defined(BYTE_LOADS_SIGN_EXTEND)
6139 /* On many CISC machines, accessing an object in a wider mode
6140 causes the high-order bits to become undefined. So they are
6141 not known to be zero. */
6142 if (GET_MODE_SIZE (GET_MODE (x))
6143 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6144 significant |= (GET_MODE_MASK (GET_MODE (x))
6145 & ~ GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
6146 #endif
6147 }
6148 break;
6149
6150 case ASHIFTRT:
6151 case LSHIFTRT:
6152 case ASHIFT:
6153 case LSHIFT:
6154 case ROTATE:
6155 /* The significant bits are in two classes: any bits within MODE
6156 that aren't in GET_MODE (x) are always significant. The rest of the
6157 significant bits are those that are significant in the operand of
6158 the shift when shifted the appropriate number of bits. This
6159 shows that high-order bits are cleared by the right shift and
6160 low-order bits by left shifts. */
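      /* For example, in SImode (lshiftrt X (const_int 24)) can have
	 non-zero bits only within 0xff, whereas (ashiftrt X (const_int 24))
	 may leave all 32 bits significant if the sign bit of X can be
	 set.  */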
6161 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6162 && INTVAL (XEXP (x, 1)) >= 0
6163 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
6164 {
6165 enum machine_mode inner_mode = GET_MODE (x);
6166 int width = GET_MODE_BITSIZE (inner_mode);
6167 int count = INTVAL (XEXP (x, 1));
6168 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
6169 unsigned HOST_WIDE_INT op_significant
6170 = significant_bits (XEXP (x, 0), mode);
6171 unsigned HOST_WIDE_INT inner = op_significant & mode_mask;
6172 unsigned HOST_WIDE_INT outer = 0;
6173
6174 if (mode_width > width)
6175 outer = (op_significant & significant & ~ mode_mask);
6176
6177 if (code == LSHIFTRT)
6178 inner >>= count;
6179 else if (code == ASHIFTRT)
6180 {
6181 inner >>= count;
6182
6183 /* If the sign bit was significant before the shift, we
6184 need to mark as significant all the places it could
6185 have been copied to by the shift. */
6186 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
6187 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
6188 }
6189 else if (code == LSHIFT || code == ASHIFT)
6190 inner <<= count;
6191 else
6192 inner = ((inner << (count % width)
6193 | (inner >> (width - (count % width)))) & mode_mask);
6194
6195 significant &= (outer | inner);
6196 }
6197 break;
6198
6199 case FFS:
6200 /* This is at most the number of bits in the mode. */
6201 significant = ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width) + 1)) - 1;
6202 break;
6203
6204 case IF_THEN_ELSE:
6205 significant &= (significant_bits (XEXP (x, 1), mode)
6206 | significant_bits (XEXP (x, 2), mode));
6207 break;
6208 }
6209
6210 return significant;
6211 }
6212 \f
6213 /* Return the number of bits at the high-order end of X that are known to
6214 be equal to the sign bit. This number will always be between 1 and
6215 the number of bits in the mode of X. MODE is the mode to be used
6216 if X is VOIDmode. */
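/* For example, in SImode (const_int -1) gives 32, since every bit copies
   the sign, while (const_int 3) gives 30, since only the two low-order
   bits can differ from the sign bit.  */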
6217
6218 static int
6219 num_sign_bit_copies (x, mode)
6220 rtx x;
6221 enum machine_mode mode;
6222 {
6223 enum rtx_code code = GET_CODE (x);
6224 int bitwidth;
6225 int num0, num1, result;
6226 unsigned HOST_WIDE_INT sig;
6227 rtx tem;
6228
6229 /* If we weren't given a mode, use the mode of X. If the mode is still
6230 VOIDmode, we don't know anything. */
6231
6232 if (mode == VOIDmode)
6233 mode = GET_MODE (x);
6234
6235 if (mode == VOIDmode)
6236 return 1;
6237
6238 bitwidth = GET_MODE_BITSIZE (mode);
6239
6240 switch (code)
6241 {
6242 case REG:
6243 if (significant_valid && reg_sign_bit_copies[REGNO (x)] != 0)
6244 return reg_sign_bit_copies[REGNO (x)];
6245
6246 tem = get_last_value (x);
6247 if (tem != 0)
6248 return num_sign_bit_copies (tem, mode);
6249 break;
6250
6251 #ifdef BYTE_LOADS_SIGN_EXTEND
6252 case MEM:
6253 /* Some RISC machines sign-extend all loads of smaller than a word. */
6254 return MAX (1, bitwidth - GET_MODE_BITSIZE (GET_MODE (x)) + 1);
6255 #endif
6256
6257 case CONST_INT:
6258 /* If the constant is negative, take its 1's complement and remask.
6259 Then see how many zero bits we have. */
6260 sig = INTVAL (x) & GET_MODE_MASK (mode);
6261 if (bitwidth <= HOST_BITS_PER_WIDE_INT
6262 && (sig & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6263 sig = (~ sig) & GET_MODE_MASK (mode);
6264
6265 return (sig == 0 ? bitwidth : bitwidth - floor_log2 (sig) - 1);
6266
6267 case SUBREG:
6268 /* If this is a SUBREG for a promoted object that is sign-extended
6269 and we are looking at it in a wider mode, we know that at least the
6270 high-order bits are known to be sign bit copies. */
6271
6272 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
6273 return (GET_MODE_BITSIZE (mode) - GET_MODE_BITSIZE (GET_MODE (x))
6274 + num_sign_bit_copies (SUBREG_REG (x), GET_MODE (x)));
6275
6276 /* For a smaller object, just ignore the high bits. */
6277 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
6278 {
6279 num0 = num_sign_bit_copies (SUBREG_REG (x), VOIDmode);
6280 return MAX (1, (num0
6281 - (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
6282 - bitwidth)));
6283 }
6284
6285 #if defined(BYTE_LOADS_ZERO_EXTEND) || defined(BYTE_LOADS_SIGN_EXTEND)
6286 /* For paradoxical SUBREGs, just look inside since, on machines with
6287 one of these defined, we assume that operations are actually
6288 performed on the full register. Note that we are passing MODE
6289 to the recursive call, so the number of sign bit copies will
6290 remain relative to that mode, not the inner mode. */
6291
6292 if (GET_MODE_SIZE (GET_MODE (x))
6293 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
6294 return num_sign_bit_copies (SUBREG_REG (x), mode);
6295 #endif
6296
6297 break;
6298
6299 case SIGN_EXTRACT:
6300 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
6301 return MAX (1, bitwidth - INTVAL (XEXP (x, 1)));
6302 break;
6303
6304 case SIGN_EXTEND:
6305 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6306 + num_sign_bit_copies (XEXP (x, 0), VOIDmode));
6307
6308 case TRUNCATE:
6309 /* For a smaller object, just ignore the high bits. */
6310 num0 = num_sign_bit_copies (XEXP (x, 0), VOIDmode);
6311 return MAX (1, (num0 - (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
6312 - bitwidth)));
6313
6314 case NOT:
6315 return num_sign_bit_copies (XEXP (x, 0), mode);
6316
6317 case ROTATE: case ROTATERT:
6318 /* If we are rotating left by a number of bits less than the number
6319 of sign bit copies, we can just subtract that amount from the
6320 number. */
6321 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6322 && INTVAL (XEXP (x, 1)) >= 0 && INTVAL (XEXP (x, 1)) < bitwidth)
6323 {
6324 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6325 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
6326 : bitwidth - INTVAL (XEXP (x, 1))));
6327 }
6328 break;
6329
6330 case NEG:
6331 /* In general, this subtracts one sign bit copy. But if the value
6332 is known to be positive, the number of sign bit copies is the
6333 same as that of the input. Finally, if the input has just one
6334 significant bit, all the bits are copies of the sign bit. */
6335 sig = significant_bits (XEXP (x, 0), mode);
6336 if (sig == 1)
6337 return bitwidth;
6338
6339 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6340 if (num0 > 1
6341 && bitwidth <= HOST_BITS_PER_WIDE_INT
6342 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & sig))
6343 num0--;
6344
6345 return num0;
6346
6347 case IOR: case AND: case XOR:
6348 case SMIN: case SMAX: case UMIN: case UMAX:
6349 /* Logical operations will preserve the number of sign-bit copies.
6350 MIN and MAX operations always return one of the operands. */
6351 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6352 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6353 return MIN (num0, num1);
6354
6355 case PLUS: case MINUS:
6356 /* For addition and subtraction, we can have a 1-bit carry. However,
6357 if we are subtracting 1 from a positive number, there will not
6358 be such a carry. Furthermore, if the positive number is known to
6359 be 0 or 1, we know the result is either -1 or 0. */
6360
6361 if (code == PLUS && XEXP (x, 1) == constm1_rtx
6362 /* Don't do this if XEXP (x, 0) is a paradoxical subreg
6363 because in principle we don't know what the high bits are. */
6364 && !(GET_CODE (XEXP (x, 0)) == SUBREG
6365 && (GET_MODE_SIZE (GET_MODE (XEXP (XEXP (x, 0), 0)))
6366 < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))))
6367 {
6368 sig = significant_bits (XEXP (x, 0), mode);
6369 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & sig) == 0)
6370 return (sig == 1 || sig == 0 ? bitwidth
6371 : bitwidth - floor_log2 (sig) - 1);
6372 }
6373
6374 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6375 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6376 return MAX (1, MIN (num0, num1) - 1);
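/* For example (illustrative): adding two SImode values that each have
at least 8 sign bit copies, i.e. each fits in 25 bits as a signed
quantity, yields MIN (8, 8) - 1 = 7 copies, since the sum fits in 26
bits and the carry can consume at most one copy. */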
6377
6378 case MULT:
6379 /* The number of bits of the product is the sum of the number of
6380 bits of both terms. However, unless one of the terms is known
6381 to be positive, we must allow for an additional bit since negating
6382 a negative number can remove one sign bit copy. */
6383
6384 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6385 num1 = num_sign_bit_copies (XEXP (x, 1), mode);
6386
6387 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
6388 if (result > 0
6389 && bitwidth <= HOST_BITS_PER_WIDE_INT
6390 && ((significant_bits (XEXP (x, 0), mode)
6391 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6392 && ((significant_bits (XEXP (x, 1), mode)
6393 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
6394 result--;
6395
6396 return MAX (1, result);
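/* Numeric illustration (not from the original commentary): two SImode
operands with 20 sign bit copies each are 13-bit signed quantities, so
RESULT is 32 - 12 - 12 = 8, a 25-bit product. When both operands might
be negative we subtract one more copy, since e.g. -4096 * -4096 = 2^24
needs 26 bits, leaving only 7 copies. */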
6397
6398 case UDIV:
6399 /* The result must be <= the first operand. */
6400 return num_sign_bit_copies (XEXP (x, 0), mode);
6401
6402 case UMOD:
6403 /* The result must be <= the second operand. */
6404 return num_sign_bit_copies (XEXP (x, 1), mode);
6405
6406 case DIV:
6407 /* Similar to unsigned division, except that we have to worry about
6408 the case where the divisor is negative, in which case we have
6409 to add 1. */
6410 result = num_sign_bit_copies (XEXP (x, 0), mode);
6411 if (result > 1
6412 && bitwidth <= HOST_BITS_PER_WIDE_INT
6413 && (significant_bits (XEXP (x, 1), mode)
6414 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6415 result--;
6416
6417 return result;
6418
6419 case MOD:
6420 result = num_sign_bit_copies (XEXP (x, 1), mode);
6421 if (result > 1
6422 && bitwidth <= HOST_BITS_PER_WIDE_INT
6423 && (significant_bits (XEXP (x, 1), mode)
6424 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
6425 result--;
6426
6427 return result;
6428
6429 case ASHIFTRT:
6430 /* Shifts by a constant add to the number of bits equal to the
6431 sign bit. */
6432 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6433 if (GET_CODE (XEXP (x, 1)) == CONST_INT
6434 && INTVAL (XEXP (x, 1)) > 0)
6435 num0 = MIN (bitwidth, num0 + INTVAL (XEXP (x, 1)));
6436
6437 return num0;
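/* E.g. (ashiftrt:SI X 4), with X having 3 sign bit copies, yields
MIN (32, 3 + 4) = 7 copies: the shift duplicates the sign bit into the
vacated high-order positions. (Illustration only.) */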
6438
6439 case ASHIFT:
6440 case LSHIFT:
6441 /* Left shifts destroy copies. */
6442 if (GET_CODE (XEXP (x, 1)) != CONST_INT
6443 || INTVAL (XEXP (x, 1)) < 0
6444 || INTVAL (XEXP (x, 1)) >= bitwidth)
6445 return 1;
6446
6447 num0 = num_sign_bit_copies (XEXP (x, 0), mode);
6448 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
6449
6450 case IF_THEN_ELSE:
6451 num0 = num_sign_bit_copies (XEXP (x, 1), mode);
6452 num1 = num_sign_bit_copies (XEXP (x, 2), mode);
6453 return MIN (num0, num1);
6454
6455 #if STORE_FLAG_VALUE == -1
6456 case EQ: case NE: case GE: case GT: case LE: case LT:
6457 case GEU: case GTU: case LEU: case LTU:
6458 return bitwidth;
6459 #endif
6460 }
6461
6462 /* If we haven't been able to figure it out by one of the above rules,
6463 see if some of the high-order bits are known to be zero. If so,
6464 count those bits and return one less than that amount. If we can't
6465 safely compute the mask for this mode, always return 1. */
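/* For instance, if significant_bits reports that only the low-order 8
bits of an SImode value can be nonzero (SIG == 0xff), bits 31..8 are
known zero and we return 32 - 7 - 1 = 24 copies of the (zero) sign
bit. (Worked example, illustrative.) */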
6466
6467 if (bitwidth > HOST_BITS_PER_WIDE_INT)
6468 return 1;
6469
6470 sig = significant_bits (x, mode);
6471 return sig == GET_MODE_MASK (mode) ? 1 : bitwidth - floor_log2 (sig) - 1;
6472 }
6473 \f
6474 /* Return the number of "extended" bits there are in X, when interpreted
6475 as a quantity in MODE whose signedness is indicated by UNSIGNEDP. For
6476 unsigned quantities, this is the number of high-order zero bits.
6477 For signed quantities, this is the number of copies of the sign bit
6478 minus 1. In both cases, this function returns the number of "spare"
6479 bits. For example, if two quantities for which this function returns
6480 at least 1 are added, the addition is known not to overflow.
6481
6482 This function will always return 0 unless called during combine, which
6483 implies that it must be called from a define_split. */
6484
6485 int
6486 extended_count (x, mode, unsignedp)
6487 rtx x;
6488 enum machine_mode mode;
6489 int unsignedp;
6490 {
6491 if (significant_valid == 0)
6492 return 0;
6493
6494 return (unsignedp
6495 ? (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6496 ? (GET_MODE_BITSIZE (mode) - 1
6497 - floor_log2 (significant_bits (x, mode))) : 0)
6498 : num_sign_bit_copies (x, mode) - 1);
6499 }
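/* As a hypothetical usage sketch (the operand and bound below are made
up, not from any real machine description): a define_split condition
might test

"extended_count (operands[1], SImode, 0) >= 16"

to establish that operand 1 has at least 17 sign bit copies and hence
fits in a signed 16-bit field. */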
6500 \f
6501 /* This function is called from `simplify_shift_const' to merge two
6502 outer operations. Specifically, we have already found that we need
6503 to perform operation *POP0 with constant *PCONST0 at the outermost
6504 position. We would now like to also perform OP1 with constant CONST1
6505 (with *POP0 being done last).
6506
6507 Return 1 if we can do the operation and update *POP0 and *PCONST0 with
6508 the resulting operation. *PCOMP_P is set to 1 if we would need to
6509 complement the innermost operand, otherwise it is unchanged.
6510
6511 MODE is the mode in which the operation will be done. No bits outside
6512 the width of this mode matter. It is assumed that the width of this mode
6513 is smaller than or equal to HOST_BITS_PER_WIDE_INT.
6514
6515 If *POP0 or OP1 is NIL, it means no operation is required. Only NEG, PLUS,
6516 IOR, XOR, and AND are supported. We may set *POP0 to SET if the proper
6517 result is simply *PCONST0.
6518
6519 If the resulting operation cannot be expressed as one operation, we
6520 return 0 and do not change *POP0, *PCONST0, and *PCOMP_P. */
6521
6522 static int
6523 merge_outer_ops (pop0, pconst0, op1, const1, mode, pcomp_p)
6524 enum rtx_code *pop0;
6525 HOST_WIDE_INT *pconst0;
6526 enum rtx_code op1;
6527 HOST_WIDE_INT const1;
6528 enum machine_mode mode;
6529 int *pcomp_p;
6530 {
6531 enum rtx_code op0 = *pop0;
6532 HOST_WIDE_INT const0 = *pconst0;
6533
6534 const0 &= GET_MODE_MASK (mode);
6535 const1 &= GET_MODE_MASK (mode);
6536
6537 /* If OP0 is an AND, clear unimportant bits in CONST1. */
6538 if (op0 == AND)
6539 const1 &= const0;
6540
6541 /* If OP0 or OP1 is NIL, this is easy. Similarly if they are the same or
6542 if OP0 is SET. */
6543
6544 if (op1 == NIL || op0 == SET)
6545 return 1;
6546
6547 else if (op0 == NIL)
6548 op0 = op1, const0 = const1;
6549
6550 else if (op0 == op1)
6551 {
6552 switch (op0)
6553 {
6554 case AND:
6555 const0 &= const1;
6556 break;
6557 case IOR:
6558 const0 |= const1;
6559 break;
6560 case XOR:
6561 const0 ^= const1;
6562 break;
6563 case PLUS:
6564 const0 += const1;
6565 break;
6566 case NEG:
6567 op0 = NIL;
6568 break;
6569 }
6570 }
6571
6572 /* Otherwise, if either is a PLUS or NEG, we can't do anything. */
6573 else if (op0 == PLUS || op1 == PLUS || op0 == NEG || op1 == NEG)
6574 return 0;
6575
6576 /* If the two constants aren't the same, we can't do anything. The
6577 remaining six cases can all be done. */
6578 else if (const0 != const1)
6579 return 0;
6580
6581 else
6582 switch (op0)
6583 {
6584 case IOR:
6585 if (op1 == AND)
6586 /* (a & b) | b == b */
6587 op0 = SET;
6588 else /* op1 == XOR */
6589 /* (a ^ b) | b == a | b */
6590 ;
6591 break;
6592
6593 case XOR:
6594 if (op1 == AND)
6595 /* (a & b) ^ b == (~a) & b */
6596 op0 = AND, *pcomp_p = 1;
6597 else /* op1 == IOR */
6598 /* (a | b) ^ b == a & ~b */
6599 op0 = AND, *pconst0 = ~ const0;
6600 break;
6601
6602 case AND:
6603 if (op1 == IOR)
6604 /* (a | b) & b == b */
6605 op0 = SET;
6606 else /* op1 == XOR */
6607 /* (a ^ b) & b) == (~a) & b */
6608 *pcomp_p = 1;
6609 break;
6610 }
6611
6612 /* Check for NO-OP cases. */
6613 const0 &= GET_MODE_MASK (mode);
6614 if (const0 == 0
6615 && (op0 == IOR || op0 == XOR || op0 == PLUS))
6616 op0 = NIL;
6617 else if (const0 == 0 && op0 == AND)
6618 op0 = SET;
6619 else if (const0 == GET_MODE_MASK (mode) && op0 == AND)
6620 op0 = NIL;
6621
6622 *pop0 = op0;
6623 *pconst0 = const0;
6624
6625 return 1;
6626 }
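#if 0
/* A minimal sketch of the calling convention, for illustration only
(not compiled; the constants are made up). Folding an outer IOR 0xf0
over an inner AND 0xf0 uses the identity (a & b) | b == b, so *POP0
collapses to SET, meaning the combined result is simply *PCONST0. */
static void
example_merge_outer_ops ()
{
enum rtx_code op0 = IOR;
HOST_WIDE_INT const0 = 0xf0;
int comp_p = 0;

if (merge_outer_ops (&op0, &const0, AND, (HOST_WIDE_INT) 0xf0,
SImode, &comp_p))
{
/* Here op0 == SET and const0 == 0xf0; comp_p is unchanged. */
}
}
#endif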
6627 \f
6628 /* Simplify a shift of VAROP by COUNT bits. CODE says what kind of shift.
6629 The result of the shift is RESULT_MODE. X, if non-zero, is an expression
6630 that we started with.
6631
6632 The shift is normally computed in the widest mode we find in VAROP, as
6633 long as it isn't a different number of words than RESULT_MODE. Exceptions
6634 are ASHIFTRT and ROTATE, which are always done in their original mode. */
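/* Worked example (illustrative): simplifying
(lshiftrt:SI (and:SI X 0xff) 4) moves the AND to the outside, because
0xff >> 4 == 0x0f can be merged as an outer operation; the result is
(and:SI (lshiftrt:SI X 4) 0x0f), where the inner shift may then merge
with neighboring shifts. */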
6635
6636 static rtx
6637 simplify_shift_const (x, code, result_mode, varop, count)
6638 rtx x;
6639 enum rtx_code code;
6640 enum machine_mode result_mode;
6641 rtx varop;
6642 int count;
6643 {
6644 enum rtx_code orig_code = code;
6645 int orig_count = count;
6646 enum machine_mode mode = result_mode;
6647 enum machine_mode shift_mode, tmode;
6648 int mode_words
6649 = (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
6650 /* We form (outer_op (code varop count) (outer_const)). */
6651 enum rtx_code outer_op = NIL;
6652 HOST_WIDE_INT outer_const;
6653 rtx const_rtx;
6654 int complement_p = 0;
6655 rtx new;
6656
6657 /* If we were given an invalid count, don't do anything except exactly
6658 what was requested. */
6659
6660 if (count < 0 || count > GET_MODE_BITSIZE (mode))
6661 {
6662 if (x)
6663 return x;
6664
6665 return gen_rtx (code, mode, varop, GEN_INT (count));
6666 }
6667
6668 /* Unless one of the arms of the `switch' in this loop does a `continue',
6669 we will `break' the loop after the `switch'. */
6670
6671 while (count != 0)
6672 {
6673 /* If we have an operand of (clobber (const_int 0)), just return that
6674 value. */
6675 if (GET_CODE (varop) == CLOBBER)
6676 return varop;
6677
6678 /* If we discovered we had to complement VAROP, leave. Making a NOT
6679 here would cause an infinite loop. */
6680 if (complement_p)
6681 break;
6682
6683 /* Convert ROTATERT to ROTATE. */
6684 if (code == ROTATERT)
6685 code = ROTATE, count = GET_MODE_BITSIZE (result_mode) - count;
6686
6687 /* Canonicalize LSHIFT to ASHIFT. */
6688 if (code == LSHIFT)
6689 code = ASHIFT;
6690
6691 /* We need to determine what mode we will do the shift in. If the
6692 shift is an ASHIFTRT or ROTATE, we must always do it in the mode it
6693 was originally done in. Otherwise, we can do it in MODE, the widest
6694 mode encountered. */
6695 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
6696
6697 /* Handle cases where the count is greater than the size of the mode
6698 minus 1. For ASHIFT, use the size minus one as the count (this can
6699 occur when simplifying (lshiftrt (ashiftrt ..))). For rotates,
6700 take the count modulo the size. For other shifts, the result is
6701 zero.
6702
6703 Since these shifts are being produced by the compiler by combining
6704 multiple operations, each of which are defined, we know what the
6705 result is supposed to be. */
6706
6707 if (count > GET_MODE_BITSIZE (shift_mode) - 1)
6708 {
6709 if (code == ASHIFTRT)
6710 count = GET_MODE_BITSIZE (shift_mode) - 1;
6711 else if (code == ROTATE || code == ROTATERT)
6712 count %= GET_MODE_BITSIZE (shift_mode);
6713 else
6714 {
6715 /* We can't simply return zero because there may be an
6716 outer op. */
6717 varop = const0_rtx;
6718 count = 0;
6719 break;
6720 }
6721 }
6722
6723 /* Negative counts are invalid and should not have been made (a
6724 programmer-specified negative count should have been handled
6725 above). */
6726 else if (count < 0)
6727 abort ();
6728
6729 /* An arithmetic right shift of a quantity known to be -1 or 0
6730 is a no-op. */
6731 if (code == ASHIFTRT
6732 && (num_sign_bit_copies (varop, shift_mode)
6733 == GET_MODE_BITSIZE (shift_mode)))
6734 {
6735 count = 0;
6736 break;
6737 }
6738
6739 /* We simplify the tests below and elsewhere by converting
6740 ASHIFTRT to LSHIFTRT if we know the sign bit is clear.
6741 `make_compound_operation' will convert it to an ASHIFTRT for
6742 those machines (such as the VAX) that don't have an LSHIFTRT. */
6743 if (GET_MODE_BITSIZE (shift_mode) <= HOST_BITS_PER_WIDE_INT
6744 && code == ASHIFTRT
6745 && ((significant_bits (varop, shift_mode)
6746 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (shift_mode) - 1)))
6747 == 0))
6748 code = LSHIFTRT;
6749
6750 switch (GET_CODE (varop))
6751 {
6752 case SIGN_EXTEND:
6753 case ZERO_EXTEND:
6754 case SIGN_EXTRACT:
6755 case ZERO_EXTRACT:
6756 new = expand_compound_operation (varop);
6757 if (new != varop)
6758 {
6759 varop = new;
6760 continue;
6761 }
6762 break;
6763
6764 case MEM:
6765 /* If we have (xshiftrt (mem ...) C) and C is MODE_WIDTH
6766 minus the width of a smaller mode, we can do this with a
6767 SIGN_EXTEND or ZERO_EXTEND from the narrower memory location. */
6768 if ((code == ASHIFTRT || code == LSHIFTRT)
6769 && ! mode_dependent_address_p (XEXP (varop, 0))
6770 && ! MEM_VOLATILE_P (varop)
6771 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
6772 MODE_INT, 1)) != BLKmode)
6773 {
6774 #if BYTES_BIG_ENDIAN
6775 new = gen_rtx (MEM, tmode, XEXP (varop, 0));
6776 #else
6777 new = gen_rtx (MEM, tmode,
6778 plus_constant (XEXP (varop, 0),
6779 count / BITS_PER_UNIT));
6780 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (varop);
6781 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (varop);
6782 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (varop);
6783 #endif
6784 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
6785 : ZERO_EXTEND, mode, new);
6786 count = 0;
6787 continue;
6788 }
6789 break;
6790
6791 case USE:
6792 /* Similar to the case above, except that we can only do this if
6793 the resulting mode is the same as that of the underlying
6794 MEM and adjust the address depending on the *bits* endianness
6795 because of the way that bit-field extract insns are defined. */
6796 if ((code == ASHIFTRT || code == LSHIFTRT)
6797 && (tmode = mode_for_size (GET_MODE_BITSIZE (mode) - count,
6798 MODE_INT, 1)) != BLKmode
6799 && tmode == GET_MODE (XEXP (varop, 0)))
6800 {
6801 #if BITS_BIG_ENDIAN
6802 new = XEXP (varop, 0);
6803 #else
6804 new = copy_rtx (XEXP (varop, 0));
6805 SUBST (XEXP (new, 0),
6806 plus_constant (XEXP (new, 0),
6807 count / BITS_PER_UNIT));
6808 #endif
6809
6810 varop = gen_rtx_combine (code == ASHIFTRT ? SIGN_EXTEND
6811 : ZERO_EXTEND, mode, new);
6812 count = 0;
6813 continue;
6814 }
6815 break;
6816
6817 case SUBREG:
6818 /* If VAROP is a SUBREG, strip it as long as the inner operand has
6819 the same number of words as what we've seen so far. Then store
6820 the widest mode in MODE. */
6821 if (subreg_lowpart_p (varop)
6822 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
6823 > GET_MODE_SIZE (GET_MODE (varop)))
6824 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (varop)))
6825 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
6826 == mode_words))
6827 {
6828 varop = SUBREG_REG (varop);
6829 if (GET_MODE_SIZE (GET_MODE (varop)) > GET_MODE_SIZE (mode))
6830 mode = GET_MODE (varop);
6831 continue;
6832 }
6833 break;
6834
6835 case MULT:
6836 /* Some machines use MULT instead of ASHIFT because MULT
6837 is cheaper. But it is still better on those machines to
6838 merge two shifts into one. */
6839 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6840 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
6841 {
6842 varop = gen_binary (ASHIFT, GET_MODE (varop), XEXP (varop, 0),
6843 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
6844 continue;
6845 }
6846 break;
6847
6848 case UDIV:
6849 /* Similar, for when divides are cheaper. */
6850 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6851 && exact_log2 (INTVAL (XEXP (varop, 1))) >= 0)
6852 {
6853 varop = gen_binary (LSHIFTRT, GET_MODE (varop), XEXP (varop, 0),
6854 GEN_INT (exact_log2 (INTVAL (XEXP (varop, 1)))));
6855 continue;
6856 }
6857 break;
6858
6859 case ASHIFTRT:
6860 /* If we are extracting just the sign bit of an arithmetic right
6861 shift, that shift is not needed. */
6862 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1)
6863 {
6864 varop = XEXP (varop, 0);
6865 continue;
6866 }
6867
6868 /* ... fall through ... */
6869
6870 case LSHIFTRT:
6871 case ASHIFT:
6872 case LSHIFT:
6873 case ROTATE:
6874 /* Here we have two nested shifts. The result is usually the
6875 AND of a new shift with a mask. We compute the result below. */
6876 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
6877 && INTVAL (XEXP (varop, 1)) >= 0
6878 && INTVAL (XEXP (varop, 1)) < GET_MODE_BITSIZE (GET_MODE (varop))
6879 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
6880 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
6881 {
6882 enum rtx_code first_code = GET_CODE (varop);
6883 int first_count = INTVAL (XEXP (varop, 1));
6884 unsigned HOST_WIDE_INT mask;
6885 rtx mask_rtx;
6886 rtx inner;
6887
6888 if (first_code == LSHIFT)
6889 first_code = ASHIFT;
6890
6891 /* We have one common special case. We can't do any merging if
6892 the inner code is an ASHIFTRT of a smaller mode. However, if
6893 we have (ashift:M1 (subreg:M1 (ashiftrt:M2 FOO C1) 0) C2)
6894 with C2 == GET_MODE_BITSIZE (M1) - GET_MODE_BITSIZE (M2),
6895 we can convert it to
6896 (ashiftrt:M1 (ashift:M1 (and:M1 (subreg:M1 FOO 0) C3) C2) C1).
6897 This simplifies certain SIGN_EXTEND operations. */
6898 if (code == ASHIFT && first_code == ASHIFTRT
6899 && (GET_MODE_BITSIZE (result_mode)
6900 - GET_MODE_BITSIZE (GET_MODE (varop))) == count)
6901 {
6902 /* C3 has the low-order C1 bits zero. */
6903
6904 mask = (GET_MODE_MASK (mode)
6905 & ~ (((HOST_WIDE_INT) 1 << first_count) - 1));
6906
6907 varop = simplify_and_const_int (NULL_RTX, result_mode,
6908 XEXP (varop, 0), mask);
6909 varop = simplify_shift_const (NULL_RTX, ASHIFT, result_mode,
6910 varop, count);
6911 count = first_count;
6912 code = ASHIFTRT;
6913 continue;
6914 }
6915
6916 /* If this was (ashiftrt (ashift foo C1) C2) and FOO has more
6917 than C1 high-order bits equal to the sign bit, we can convert
6918 this to either an ASHIFT or an ASHIFTRT depending on the
6919 two counts.
6920
6921 We cannot do this if VAROP's mode is not SHIFT_MODE. */
6922
6923 if (code == ASHIFTRT && first_code == ASHIFT
6924 && GET_MODE (varop) == shift_mode
6925 && (num_sign_bit_copies (XEXP (varop, 0), shift_mode)
6926 > first_count))
6927 {
6928 count -= first_count;
6929 if (count < 0)
6930 count = - count, code = ASHIFT;
6931 varop = XEXP (varop, 0);
6932 continue;
6933 }
6934
6935 /* There are some cases we can't do. If CODE is ASHIFTRT,
6936 we can only do this if FIRST_CODE is also ASHIFTRT.
6937
6938 We can't do the case when CODE is ROTATE and FIRST_CODE is
6939 ASHIFTRT.
6940
6941 If the mode of this shift is not the mode of the outer shift,
6942 we can't do this if either shift is ASHIFTRT or ROTATE.
6943
6944 Finally, we can't do any of these if the mode is too wide
6945 unless the codes are the same.
6946
6947 Handle the case where the shift codes are the same
6948 first. */
6949
6950 if (code == first_code)
6951 {
6952 if (GET_MODE (varop) != result_mode
6953 && (code == ASHIFTRT || code == ROTATE))
6954 break;
6955
6956 count += first_count;
6957 varop = XEXP (varop, 0);
6958 continue;
6959 }
6960
6961 if (code == ASHIFTRT
6962 || (code == ROTATE && first_code == ASHIFTRT)
6963 || GET_MODE_BITSIZE (mode) > HOST_BITS_PER_WIDE_INT
6964 || (GET_MODE (varop) != result_mode
6965 && (first_code == ASHIFTRT || first_code == ROTATE
6966 || code == ROTATE)))
6967 break;
6968
6969 /* To compute the mask to apply after the shift, shift the
6970 significant bits of the inner shift the same way the
6971 outer shift will. */
6972
6973 mask_rtx = GEN_INT (significant_bits (varop, GET_MODE (varop)));
6974
6975 mask_rtx
6976 = simplify_binary_operation (code, result_mode, mask_rtx,
6977 GEN_INT (count));
6978
6979 /* Give up if we can't compute an outer operation to use. */
6980 if (mask_rtx == 0
6981 || GET_CODE (mask_rtx) != CONST_INT
6982 || ! merge_outer_ops (&outer_op, &outer_const, AND,
6983 INTVAL (mask_rtx),
6984 result_mode, &complement_p))
6985 break;
6986
6987 /* If the shifts are in the same direction, we add the
6988 counts. Otherwise, we subtract them. */
6989 if ((code == ASHIFTRT || code == LSHIFTRT)
6990 == (first_code == ASHIFTRT || first_code == LSHIFTRT))
6991 count += first_count;
6992 else
6993 count -= first_count;
6994
6995 /* If COUNT is positive, the new shift is usually CODE,
6996 except for the two exceptions below, in which case it is
6997 FIRST_CODE. If the count is negative, FIRST_CODE should
6998 always be used. */
6999 if (count > 0
7000 && ((first_code == ROTATE && code == ASHIFT)
7001 || (first_code == ASHIFTRT && code == LSHIFTRT)))
7002 code = first_code;
7003 else if (count < 0)
7004 code = first_code, count = - count;
7005
7006 varop = XEXP (varop, 0);
7007 continue;
7008 }
7009
7010 /* If we have (A << B << C) for any shift, we can convert this to
7011 (A << C << B). This wins if A is a constant. Only try this if
7012 B is not a constant. */
7013
7014 else if (GET_CODE (varop) == code
7015 && GET_CODE (XEXP (varop, 1)) != CONST_INT
7016 && 0 != (new
7017 = simplify_binary_operation (code, mode,
7018 XEXP (varop, 0),
7019 GEN_INT (count))))
7020 {
7021 varop = gen_rtx_combine (code, mode, new, XEXP (varop, 1));
7022 count = 0;
7023 continue;
7024 }
7025 break;
7026
7027 case NOT:
7028 /* Make this fit the case below. */
7029 varop = gen_rtx_combine (XOR, mode, XEXP (varop, 0),
7030 GEN_INT (GET_MODE_MASK (mode)));
7031 continue;
7032
7033 case IOR:
7034 case AND:
7035 case XOR:
7036 /* If we have (xshiftrt (ior (plus X (const_int -1)) X) C)
7037 with C the size of VAROP - 1 and the shift is logical if
7038 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7039 we have an (le X 0) operation. If we have an arithmetic shift
7040 and STORE_FLAG_VALUE is 1 or we have a logical shift with
7041 STORE_FLAG_VALUE of -1, we have a (neg (le X 0)) operation. */
7042
7043 if (GET_CODE (varop) == IOR && GET_CODE (XEXP (varop, 0)) == PLUS
7044 && XEXP (XEXP (varop, 0), 1) == constm1_rtx
7045 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7046 && (code == LSHIFTRT || code == ASHIFTRT)
7047 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7048 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7049 {
7050 count = 0;
7051 varop = gen_rtx_combine (LE, GET_MODE (varop), XEXP (varop, 1),
7052 const0_rtx);
7053
7054 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7055 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7056
7057 continue;
7058 }
7059
7060 /* If we have (shift (logical)), move the logical to the outside
7061 to allow it to possibly combine with another logical and the
7062 shift to combine with another shift. This also canonicalizes to
7063 what a ZERO_EXTRACT looks like. Also, some machines have
7064 (and (shift)) insns. */
7065
7066 if (GET_CODE (XEXP (varop, 1)) == CONST_INT
7067 && (new = simplify_binary_operation (code, result_mode,
7068 XEXP (varop, 1),
7069 GEN_INT (count))) != 0
7070 && merge_outer_ops (&outer_op, &outer_const, GET_CODE (varop),
7071 INTVAL (new), result_mode, &complement_p))
7072 {
7073 varop = XEXP (varop, 0);
7074 continue;
7075 }
7076
7077 /* If we can't do that, try to simplify the shift in each arm of the
7078 logical expression, make a new logical expression, and apply
7079 the inverse distributive law. */
7080 {
7081 rtx lhs = simplify_shift_const (NULL_RTX, code, result_mode,
7082 XEXP (varop, 0), count);
7083 rtx rhs = simplify_shift_const (NULL_RTX, code, result_mode,
7084 XEXP (varop, 1), count);
7085
7086 varop = gen_binary (GET_CODE (varop), result_mode, lhs, rhs);
7087 varop = apply_distributive_law (varop);
7088
7089 count = 0;
7090 }
7091 break;
7092
7093 case EQ:
7094 /* Convert (lshift (eq FOO 0) C) to (xor FOO 1) if STORE_FLAG_VALUE
7095 says that the sign bit can be tested, FOO has mode MODE, C is
7096 GET_MODE_BITSIZE (MODE) - 1, and FOO has only the low-order bit
7097 significant. */
7098 if (code == LSHIFT
7099 && XEXP (varop, 1) == const0_rtx
7100 && GET_MODE (XEXP (varop, 0)) == result_mode
7101 && count == GET_MODE_BITSIZE (result_mode) - 1
7102 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7103 && ((STORE_FLAG_VALUE
7104 & ((HOST_WIDE_INT) 1 << (GET_MODE_BITSIZE (result_mode) - 1))))
7105 && significant_bits (XEXP (varop, 0), result_mode) == 1
7106 && merge_outer_ops (&outer_op, &outer_const, XOR,
7107 (HOST_WIDE_INT) 1, result_mode,
7108 &complement_p))
7109 {
7110 varop = XEXP (varop, 0);
7111 count = 0;
7112 continue;
7113 }
7114 break;
7115
7116 case NEG:
7117 /* (lshiftrt (neg A) C) where A is either 0 or 1 and C is one less
7118 than the number of bits in the mode, is equivalent to A. */
7119 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7120 && significant_bits (XEXP (varop, 0), result_mode) == 1)
7121 {
7122 varop = XEXP (varop, 0);
7123 count = 0;
7124 continue;
7125 }
7126
7127 /* NEG commutes with ASHIFT since it is multiplication. Move the
7128 NEG outside to allow shifts to combine. */
7129 if (code == ASHIFT
7130 && merge_outer_ops (&outer_op, &outer_const, NEG,
7131 (HOST_WIDE_INT) 0, result_mode,
7132 &complement_p))
7133 {
7134 varop = XEXP (varop, 0);
7135 continue;
7136 }
7137 break;
7138
7139 case PLUS:
7140 /* (lshiftrt (plus A -1) C) where A is either 0 or 1 and C
7141 is one less than the number of bits in the mode, is
7142 equivalent to (xor A 1). */
7143 if (code == LSHIFTRT && count == GET_MODE_BITSIZE (result_mode) - 1
7144 && XEXP (varop, 1) == constm1_rtx
7145 && significant_bits (XEXP (varop, 0), result_mode) == 1
7146 && merge_outer_ops (&outer_op, &outer_const, XOR,
7147 (HOST_WIDE_INT) 1, result_mode,
7148 &complement_p))
7149 {
7150 count = 0;
7151 varop = XEXP (varop, 0);
7152 continue;
7153 }
7154
7155 /* If we have (xshiftrt (plus FOO BAR) C), and the only bits
7156 significant in BAR are those being shifted out and those
7157 bits are known zero in FOO, we can replace the PLUS with FOO.
7158 Similarly in the other operand order. This code occurs when
7159 we are computing the size of a variable-size array. */
7160
7161 if ((code == ASHIFTRT || code == LSHIFTRT)
7162 && count < HOST_BITS_PER_WIDE_INT
7163 && significant_bits (XEXP (varop, 1), result_mode) >> count == 0
7164 && (significant_bits (XEXP (varop, 1), result_mode)
7165 & significant_bits (XEXP (varop, 0), result_mode)) == 0)
7166 {
7167 varop = XEXP (varop, 0);
7168 continue;
7169 }
7170 else if ((code == ASHIFTRT || code == LSHIFTRT)
7171 && count < HOST_BITS_PER_WIDE_INT
7172 && GET_MODE_BITSIZE (result_mode) <= HOST_BITS_PER_WIDE_INT
7173 && 0 == (significant_bits (XEXP (varop, 0), result_mode)
7174 >> count)
7175 && 0 == (significant_bits (XEXP (varop, 0), result_mode)
7176 & significant_bits (XEXP (varop, 1),
7177 result_mode)))
7178 {
7179 varop = XEXP (varop, 1);
7180 continue;
7181 }
7182
7183 /* (ashift (plus foo C) N) is (plus (ashift foo N) C'). */
7184 if (code == ASHIFT
7185 && GET_CODE (XEXP (varop, 1)) == CONST_INT
7186 && (new = simplify_binary_operation (ASHIFT, result_mode,
7187 XEXP (varop, 1),
7188 GEN_INT (count))) != 0
7189 && merge_outer_ops (&outer_op, &outer_const, PLUS,
7190 INTVAL (new), result_mode, &complement_p))
7191 {
7192 varop = XEXP (varop, 0);
7193 continue;
7194 }
7195 break;
7196
7197 case MINUS:
7198 /* If we have (xshiftrt (minus (ashiftrt X C) X) C)
7199 with C the size of VAROP - 1 and the shift is logical if
7200 STORE_FLAG_VALUE is 1 and arithmetic if STORE_FLAG_VALUE is -1,
7201 we have a (gt X 0) operation. If the shift is arithmetic with
7202 STORE_FLAG_VALUE of 1 or logical with STORE_FLAG_VALUE == -1,
7203 we have a (neg (gt X 0)) operation. */
7204
7205 if (GET_CODE (XEXP (varop, 0)) == ASHIFTRT
7206 && count == GET_MODE_BITSIZE (GET_MODE (varop)) - 1
7207 && (STORE_FLAG_VALUE == 1 || STORE_FLAG_VALUE == -1)
7208 && (code == LSHIFTRT || code == ASHIFTRT)
7209 && GET_CODE (XEXP (XEXP (varop, 0), 1)) == CONST_INT
7210 && INTVAL (XEXP (XEXP (varop, 0), 1)) == count
7211 && rtx_equal_p (XEXP (XEXP (varop, 0), 0), XEXP (varop, 1)))
7212 {
7213 count = 0;
7214 varop = gen_rtx_combine (GT, GET_MODE (varop), XEXP (varop, 1),
7215 const0_rtx);
7216
7217 if (STORE_FLAG_VALUE == 1 ? code == ASHIFTRT : code == LSHIFTRT)
7218 varop = gen_rtx_combine (NEG, GET_MODE (varop), varop);
7219
7220 continue;
7221 }
7222 break;
7223 }
7224
7225 break;
7226 }
7227
7228 /* We need to determine what mode to do the shift in. If the shift is
7229 an ASHIFTRT or ROTATE, we must always do it in the mode it was originally
7230 done in. Otherwise, we can do it in MODE, the widest mode encountered.
7231 The code we care about is that of the shift that will actually be done,
7232 not the shift that was originally requested. */
7233 shift_mode = (code == ASHIFTRT || code == ROTATE ? result_mode : mode);
7234
7235 /* We have now finished analyzing the shift. The result should be
7236 a shift of type CODE with SHIFT_MODE shifting VAROP COUNT places. If
7237 OUTER_OP is non-NIL, it is an operation that needs to be applied
7238 to the result of the shift. OUTER_CONST is the relevant constant,
7239 but we must turn off all bits turned off in the shift.
7240
7241 If we were passed a value for X, see if we can use any pieces of
7242 it. If not, make new rtx. */
7243
7244 if (x && GET_RTX_CLASS (GET_CODE (x)) == '2'
7245 && GET_CODE (XEXP (x, 1)) == CONST_INT
7246 && INTVAL (XEXP (x, 1)) == count)
7247 const_rtx = XEXP (x, 1);
7248 else
7249 const_rtx = GEN_INT (count);
7250
7251 if (x && GET_CODE (XEXP (x, 0)) == SUBREG
7252 && GET_MODE (XEXP (x, 0)) == shift_mode
7253 && SUBREG_REG (XEXP (x, 0)) == varop)
7254 varop = XEXP (x, 0);
7255 else if (GET_MODE (varop) != shift_mode)
7256 varop = gen_lowpart_for_combine (shift_mode, varop);
7257
7258 /* If we can't make the SUBREG, try to return what we were given. */
7259 if (GET_CODE (varop) == CLOBBER)
7260 return x ? x : varop;
7261
7262 new = simplify_binary_operation (code, shift_mode, varop, const_rtx);
7263 if (new != 0)
7264 x = new;
7265 else
7266 {
7267 if (x == 0 || GET_CODE (x) != code || GET_MODE (x) != shift_mode)
7268 x = gen_rtx_combine (code, shift_mode, varop, const_rtx);
7269
7270 SUBST (XEXP (x, 0), varop);
7271 SUBST (XEXP (x, 1), const_rtx);
7272 }
7273
7274 /* If we were doing an LSHIFTRT in a wider mode than it was originally,
7275 turn off all the bits that the shift would have turned off. */
7276 if (orig_code == LSHIFTRT && result_mode != shift_mode)
7277 x = simplify_and_const_int (NULL_RTX, shift_mode, x,
7278 GET_MODE_MASK (result_mode) >> orig_count);
7279
7280 /* Do the remainder of the processing in RESULT_MODE. */
7281 x = gen_lowpart_for_combine (result_mode, x);
7282
7283 /* If COMPLEMENT_P is set, we have to complement X before doing the outer
7284 operation. */
7285 if (complement_p)
7286 x = gen_unary (NOT, result_mode, x);
7287
7288 if (outer_op != NIL)
7289 {
7290 if (GET_MODE_BITSIZE (result_mode) < HOST_BITS_PER_WIDE_INT)
7291 outer_const &= GET_MODE_MASK (result_mode);
7292
7293 if (outer_op == AND)
7294 x = simplify_and_const_int (NULL_RTX, result_mode, x, outer_const);
7295 else if (outer_op == SET)
7296 /* This means that we have determined that the result is
7297 equivalent to a constant. This should be rare. */
7298 x = GEN_INT (outer_const);
7299 else if (GET_RTX_CLASS (outer_op) == '1')
7300 x = gen_unary (outer_op, result_mode, x);
7301 else
7302 x = gen_binary (outer_op, result_mode, x, GEN_INT (outer_const));
7303 }
7304
7305 return x;
7306 }
7307 \f
7308 /* Like recog, but we receive the address of a pointer to a new pattern.
7309 We try to match the rtx that the pointer points to.
7310 If that fails, we may try to modify or replace the pattern,
7311 storing the replacement into the same pointer object.
7312
7313 Modifications include deletion or addition of CLOBBERs.
7314
7315 PNOTES is a pointer to a location where any REG_UNUSED notes added for
7316 the CLOBBERs are placed.
7317
7318 The value is the final insn code from the pattern ultimately matched,
7319 or -1. */
7320
7321 static int
7322 recog_for_combine (pnewpat, insn, pnotes)
7323 rtx *pnewpat;
7324 rtx insn;
7325 rtx *pnotes;
7326 {
7327 register rtx pat = *pnewpat;
7328 int insn_code_number;
7329 int num_clobbers_to_add = 0;
7330 int i;
7331 rtx notes = 0;
7332
7333 /* Is the result of combination a valid instruction? */
7334 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
7335
7336 /* If it isn't, there is the possibility that we previously had an insn
7337 that clobbered some register as a side effect, but the combined
7338 insn doesn't need to do that. So try once more without the clobbers
7339 unless this represents an ASM insn. */
7340
7341 if (insn_code_number < 0 && ! check_asm_operands (pat)
7342 && GET_CODE (pat) == PARALLEL)
7343 {
7344 int pos;
7345
7346 for (pos = 0, i = 0; i < XVECLEN (pat, 0); i++)
7347 if (GET_CODE (XVECEXP (pat, 0, i)) != CLOBBER)
7348 {
7349 if (i != pos)
7350 SUBST (XVECEXP (pat, 0, pos), XVECEXP (pat, 0, i));
7351 pos++;
7352 }
7353
7354 SUBST_INT (XVECLEN (pat, 0), pos);
7355
7356 if (pos == 1)
7357 pat = XVECEXP (pat, 0, 0);
7358
7359 insn_code_number = recog (pat, insn, &num_clobbers_to_add);
7360 }
7361
7362 /* If we had any clobbers to add, make a new pattern that contains
7363 them. Then check to make sure that all of them are dead. */
7364 if (num_clobbers_to_add)
7365 {
7366 rtx newpat = gen_rtx (PARALLEL, VOIDmode,
7367 gen_rtvec (GET_CODE (pat) == PARALLEL
7368 ? XVECLEN (pat, 0) + num_clobbers_to_add
7369 : num_clobbers_to_add + 1));
7370
7371 if (GET_CODE (pat) == PARALLEL)
7372 for (i = 0; i < XVECLEN (pat, 0); i++)
7373 XVECEXP (newpat, 0, i) = XVECEXP (pat, 0, i);
7374 else
7375 XVECEXP (newpat, 0, 0) = pat;
7376
7377 add_clobbers (newpat, insn_code_number);
7378
7379 for (i = XVECLEN (newpat, 0) - num_clobbers_to_add;
7380 i < XVECLEN (newpat, 0); i++)
7381 {
7382 if (GET_CODE (XEXP (XVECEXP (newpat, 0, i), 0)) == REG
7383 && ! reg_dead_at_p (XEXP (XVECEXP (newpat, 0, i), 0), insn))
7384 return -1;
7385 notes = gen_rtx (EXPR_LIST, REG_UNUSED,
7386 XEXP (XVECEXP (newpat, 0, i), 0), notes);
7387 }
7388 pat = newpat;
7389 }
7390
7391 *pnewpat = pat;
7392 *pnotes = notes;
7393
7394 return insn_code_number;
7395 }
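/* For example (hypothetical pattern, for illustration): if combining
yields (parallel [(set A B) (clobber R)]) and nothing matches, we
retry with just (set A B). Conversely, if recog asks for an extra
clobber, we rebuild the PARALLEL with it, accept the match only if
the clobbered register is dead at INSN, and record a REG_UNUSED note
for it via PNOTES. */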
7396 \f
7397 /* Like gen_lowpart but for use by combine. In combine it is not possible
7398 to create any new pseudoregs. However, it is safe to create
7399 invalid memory addresses, because combine will try to recognize
7400 them and all they will do is make the combine attempt fail.
7401
7402 If for some reason this cannot do its job, an rtx
7403 (clobber (const_int 0)) is returned.
7404 An insn containing that will not be recognized. */
7405
7406 #undef gen_lowpart
7407
7408 static rtx
7409 gen_lowpart_for_combine (mode, x)
7410 enum machine_mode mode;
7411 register rtx x;
7412 {
7413 rtx result;
7414
7415 if (GET_MODE (x) == mode)
7416 return x;
7417
7418 if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
7419 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
7420
7421 /* X might be a paradoxical (subreg (mem)). In that case, gen_lowpart
7422 won't know what to do. So we will strip off the SUBREG here and
7423 process normally. */
7424 if (GET_CODE (x) == SUBREG && GET_CODE (SUBREG_REG (x)) == MEM)
7425 {
7426 x = SUBREG_REG (x);
7427 if (GET_MODE (x) == mode)
7428 return x;
7429 }
7430
7431 result = gen_lowpart_common (mode, x);
7432 if (result)
7433 return result;
7434
7435 if (GET_CODE (x) == MEM)
7436 {
7437 register int offset = 0;
7438 rtx new;
7439
7440 /* Refuse to work on a volatile memory ref or one with a mode-dependent
7441 address. */
7442 if (MEM_VOLATILE_P (x) || mode_dependent_address_p (XEXP (x, 0)))
7443 return gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
7444
7445 /* If we want to refer to something bigger than the original memref,
7446 generate a perverse subreg instead. That will force a reload
7447 of the original memref X. */
7448 if (GET_MODE_SIZE (GET_MODE (x)) < GET_MODE_SIZE (mode))
7449 return gen_rtx (SUBREG, mode, x, 0);
7450
7451 #if WORDS_BIG_ENDIAN
7452 offset = (MAX (GET_MODE_SIZE (GET_MODE (x)), UNITS_PER_WORD)
7453 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD));
7454 #endif
7455 #if BYTES_BIG_ENDIAN
7456 /* Adjust the address so that the address-after-the-data
7457 is unchanged. */
7458 offset -= (MIN (UNITS_PER_WORD, GET_MODE_SIZE (mode))
7459 - MIN (UNITS_PER_WORD, GET_MODE_SIZE (GET_MODE (x))));
7460 #endif
7461 new = gen_rtx (MEM, mode, plus_constant (XEXP (x, 0), offset));
7462 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
7463 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
7464 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
7465 return new;
7466 }
7467
7468 /* If X is a comparison operator, rewrite it in a new mode. This
7469 probably won't match, but may allow further simplifications. */
7470 else if (GET_RTX_CLASS (GET_CODE (x)) == '<')
7471 return gen_rtx_combine (GET_CODE (x), mode, XEXP (x, 0), XEXP (x, 1));
7472
7473 /* If we couldn't simplify X any other way, just enclose it in a
7474 SUBREG. Normally, this SUBREG won't match, but some patterns may
7475 include an explicit SUBREG or we may simplify it further in combine. */
7476 else
7477 {
7478 int word = 0;
7479
7480 if (WORDS_BIG_ENDIAN && GET_MODE_SIZE (GET_MODE (x)) > UNITS_PER_WORD)
7481 word = ((GET_MODE_SIZE (GET_MODE (x))
7482 - MAX (GET_MODE_SIZE (mode), UNITS_PER_WORD))
7483 / UNITS_PER_WORD);
7484 return gen_rtx (SUBREG, mode, x, word);
7485 }
7486 }
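/* Concrete illustration (assuming 4-byte words, i.e. UNITS_PER_WORD
== 4): asking for the QImode lowpart of (mem:SI ADDR) yields
(mem:QI ADDR) when bytes are little-endian, but (mem:QI (plus ADDR 3))
when BYTES_BIG_ENDIAN, so that in both cases the low-order byte of the
SImode datum is the one addressed. */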
7487 \f
7488 /* Make an rtx expression. This is a subset of gen_rtx and only supports
7489 expressions of 1, 2, or 3 operands, each of which are rtx expressions.
7490
7491 If the identical expression was previously in the insn (in the undobuf),
7492 it will be returned. Only if it is not found will a new expression
7493 be made. */
7494
7495 /*VARARGS2*/
7496 static rtx
7497 gen_rtx_combine (va_alist)
7498 va_dcl
7499 {
7500 va_list p;
7501 enum rtx_code code;
7502 enum machine_mode mode;
7503 int n_args;
7504 rtx args[3];
7505 int i, j;
7506 char *fmt;
7507 rtx rt;
7508
7509 va_start (p);
7510 code = va_arg (p, enum rtx_code);
7511 mode = va_arg (p, enum machine_mode);
7512 n_args = GET_RTX_LENGTH (code);
7513 fmt = GET_RTX_FORMAT (code);
7514
7515 if (n_args == 0 || n_args > 3)
7516 abort ();
7517
7518 /* Get each arg and verify that it is supposed to be an expression. */
7519 for (j = 0; j < n_args; j++)
7520 {
7521 if (*fmt++ != 'e')
7522 abort ();
7523
7524 args[j] = va_arg (p, rtx);
7525 }
7526
7527 /* See if this is in undobuf. Be sure we don't use objects that came
7528 from another insn; this could produce circular rtl structures. */
7529
7530 for (i = previous_num_undos; i < undobuf.num_undo; i++)
7531 if (!undobuf.undo[i].is_int
7532 && GET_CODE (undobuf.undo[i].old_contents.rtx) == code
7533 && GET_MODE (undobuf.undo[i].old_contents.rtx) == mode)
7534 {
7535 for (j = 0; j < n_args; j++)
7536 if (XEXP (undobuf.undo[i].old_contents.rtx, j) != args[j])
7537 break;
7538
7539 if (j == n_args)
7540 return undobuf.undo[i].old_contents.rtx;
7541 }
7542
7543 /* Otherwise make a new rtx. We know we have 1, 2, or 3 args.
7544 Use rtx_alloc instead of gen_rtx because it's faster on RISC. */
7545 rt = rtx_alloc (code);
7546 PUT_MODE (rt, mode);
7547 XEXP (rt, 0) = args[0];
7548 if (n_args > 1)
7549 {
7550 XEXP (rt, 1) = args[1];
7551 if (n_args > 2)
7552 XEXP (rt, 2) = args[2];
7553 }
7554 return rt;
7555 }
7556
7557 /* These routines make binary and unary operations by first seeing if they
7558 fold; if not, a new expression is allocated. */
7559
7560 static rtx
7561 gen_binary (code, mode, op0, op1)
7562 enum rtx_code code;
7563 enum machine_mode mode;
7564 rtx op0, op1;
7565 {
7566 rtx result;
7567 rtx tem;
7568
7569 if (GET_RTX_CLASS (code) == 'c'
7570 && (GET_CODE (op0) == CONST_INT
7571 || (CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)))
7572 tem = op0, op0 = op1, op1 = tem;
7573
7574 if (GET_RTX_CLASS (code) == '<')
7575 {
7576 enum machine_mode op_mode = GET_MODE (op0);
7577 if (op_mode == VOIDmode)
7578 op_mode = GET_MODE (op1);
7579 result = simplify_relational_operation (code, op_mode, op0, op1);
7580 }
7581 else
7582 result = simplify_binary_operation (code, mode, op0, op1);
7583
7584 if (result)
7585 return result;
7586
7587 /* Put complex operands first and constants second. */
7588 if (GET_RTX_CLASS (code) == 'c'
7589 && ((CONSTANT_P (op0) && GET_CODE (op1) != CONST_INT)
7590 || (GET_RTX_CLASS (GET_CODE (op0)) == 'o'
7591 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')
7592 || (GET_CODE (op0) == SUBREG
7593 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op0))) == 'o'
7594 && GET_RTX_CLASS (GET_CODE (op1)) != 'o')))
7595 return gen_rtx_combine (code, mode, op1, op0);
7596
7597 return gen_rtx_combine (code, mode, op0, op1);
7598 }
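#if 0
/* Sketch only (not compiled): how gen_binary folds and canonicalizes.
REG below stands for any register rtx and is hypothetical. */
static void
example_gen_binary (reg)
rtx reg;
{
/* Both operands constant: folds outright to (const_int 5). */
rtx t1 = gen_binary (PLUS, SImode, GEN_INT (2), GEN_INT (3));

/* No fold; the constant commutes into the second operand position,
yielding (plus:SI reg (const_int 2)). */
rtx t2 = gen_binary (PLUS, SImode, GEN_INT (2), reg);
}
#endif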
7599
7600 static rtx
7601 gen_unary (code, mode, op0)
7602 enum rtx_code code;
7603 enum machine_mode mode;
7604 rtx op0;
7605 {
7606 rtx result = simplify_unary_operation (code, mode, op0, mode);
7607
7608 if (result)
7609 return result;
7610
7611 return gen_rtx_combine (code, mode, op0);
7612 }
7613 \f
7614 /* Simplify a comparison between *POP0 and *POP1 where CODE is the
7615 comparison code that will be tested.
7616
7617 The result is a possibly different comparison code to use. *POP0 and
7618 *POP1 may be updated.
7619
7620 It is possible that we might detect that a comparison is either always
7621 true or always false. However, we do not perform general constant
7622 folding in combine, so this knowledge isn't useful. Such tautologies
7623 should have been detected earlier. Hence we ignore all such cases. */
7624
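/* Two examples of the canonicalizations performed below (illustration
only): in SImode, (ltu X (const_int 0x80000000)), an unsigned test of
the sign bit, becomes (ge X (const_int 0)); and (gtu X (const_int 0))
becomes (ne X (const_int 0)). */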
7625 static enum rtx_code
7626 simplify_comparison (code, pop0, pop1)
7627 enum rtx_code code;
7628 rtx *pop0;
7629 rtx *pop1;
7630 {
7631 rtx op0 = *pop0;
7632 rtx op1 = *pop1;
7633 rtx tem, tem1;
7634 int i;
7635 enum machine_mode mode, tmode;
7636
7637 /* Try a few ways of applying the same transformation to both operands. */
7638 while (1)
7639 {
7640 /* If both operands are the same constant shift, see if we can ignore the
7641 shift. We can if the shift is a rotate or if the bits shifted out of
7642 this shift are not significant for either input and if the type of
7643 comparison is compatible with the shift. */
7644 if (GET_CODE (op0) == GET_CODE (op1)
7645 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
7646 && ((GET_CODE (op0) == ROTATE && (code == NE || code == EQ))
7647 || ((GET_CODE (op0) == LSHIFTRT
7648 || GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
7649 && (code != GT && code != LT && code != GE && code != LE))
7650 || (GET_CODE (op0) == ASHIFTRT
7651 && (code != GTU && code != LTU
7652 && code != GEU && code != LEU)))
7653 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7654 && INTVAL (XEXP (op0, 1)) >= 0
7655 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
7656 && XEXP (op0, 1) == XEXP (op1, 1))
7657 {
7658 enum machine_mode mode = GET_MODE (op0);
7659 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
7660 int shift_count = INTVAL (XEXP (op0, 1));
7661
7662 if (GET_CODE (op0) == LSHIFTRT || GET_CODE (op0) == ASHIFTRT)
7663 mask &= (mask >> shift_count) << shift_count;
7664 else if (GET_CODE (op0) == ASHIFT || GET_CODE (op0) == LSHIFT)
7665 mask = (mask & (mask << shift_count)) >> shift_count;
7666
7667 if ((significant_bits (XEXP (op0, 0), mode) & ~ mask) == 0
7668 && (significant_bits (XEXP (op1, 0), mode) & ~ mask) == 0)
7669 op0 = XEXP (op0, 0), op1 = XEXP (op1, 0);
7670 else
7671 break;
7672 }
7673
7674 /* If both operands are AND's of a paradoxical SUBREG by constant, the
7675 SUBREGs are of the same mode, and, in both cases, the AND would
7676 be redundant if the comparison was done in the narrower mode,
7677 do the comparison in the narrower mode (e.g., we are AND'ing with 1
7678 and the operand's significant bits are 0xffffff01; in that case if
7679 we only care about QImode, we don't need the AND). This case occurs
7680 if the output mode of an scc insn is not SImode and
7681 STORE_FLAG_VALUE == 1 (e.g., the 386). */
7682
7683 else if (GET_CODE (op0) == AND && GET_CODE (op1) == AND
7684 && GET_CODE (XEXP (op0, 1)) == CONST_INT
7685 && GET_CODE (XEXP (op1, 1)) == CONST_INT
7686 && GET_CODE (XEXP (op0, 0)) == SUBREG
7687 && GET_CODE (XEXP (op1, 0)) == SUBREG
7688 && (GET_MODE_SIZE (GET_MODE (XEXP (op0, 0)))
7689 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0)))))
7690 && (GET_MODE (SUBREG_REG (XEXP (op0, 0)))
7691 == GET_MODE (SUBREG_REG (XEXP (op1, 0))))
7692 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (XEXP (op0, 0))))
7693 <= HOST_BITS_PER_WIDE_INT)
7694 && (significant_bits (SUBREG_REG (XEXP (op0, 0)),
7695 GET_MODE (SUBREG_REG (XEXP (op0, 0))))
7696 & ~ INTVAL (XEXP (op0, 1))) == 0
7697 && (significant_bits (SUBREG_REG (XEXP (op1, 0)),
7698 GET_MODE (SUBREG_REG (XEXP (op1, 0))))
7699 & ~ INTVAL (XEXP (op1, 1))) == 0)
7700 {
7701 op0 = SUBREG_REG (XEXP (op0, 0));
7702 op1 = SUBREG_REG (XEXP (op1, 0));
7703
7704 /* The resulting comparison is always unsigned since we masked off
7705 the original sign bit. */
7706 code = unsigned_condition (code);
7707 }
7708 else
7709 break;
7710 }
7711
7712 /* If the first operand is a constant, swap the operands and adjust the
7713 comparison code appropriately. */
7714 if (CONSTANT_P (op0))
7715 {
7716 tem = op0, op0 = op1, op1 = tem;
7717 code = swap_condition (code);
7718 }
7719
7720 /* We now enter a loop during which we will try to simplify the comparison.
7721 For the most part, we only are concerned with comparisons with zero,
7722 but some things may really be comparisons with zero but not start
7723 out looking that way. */
7724
7725 while (GET_CODE (op1) == CONST_INT)
7726 {
7727 enum machine_mode mode = GET_MODE (op0);
7728 int mode_width = GET_MODE_BITSIZE (mode);
7729 unsigned HOST_WIDE_INT mask = GET_MODE_MASK (mode);
7730 int equality_comparison_p;
7731 int sign_bit_comparison_p;
7732 int unsigned_comparison_p;
7733 HOST_WIDE_INT const_op;
7734
7735 /* We only want to handle integral modes. This catches VOIDmode,
7736 CCmode, and the floating-point modes. An exception is that we
7737 can handle VOIDmode if OP0 is a COMPARE or a comparison
7738 operation. */
7739
7740 if (GET_MODE_CLASS (mode) != MODE_INT
7741 && ! (mode == VOIDmode
7742 && (GET_CODE (op0) == COMPARE
7743 || GET_RTX_CLASS (GET_CODE (op0)) == '<')))
7744 break;
7745
7746 /* Get the constant we are comparing against and turn off all bits
7747 not on in our mode. */
7748 const_op = INTVAL (op1);
7749 if (mode_width <= HOST_BITS_PER_WIDE_INT)
7750 const_op &= mask;
7751
7752 /* If we are comparing against a constant power of two and the value
7753 being compared has only that single significant bit (e.g., it was
7754 `and'ed with that bit), we can replace this with a comparison
7755 with zero. */
7756 if (const_op
7757 && (code == EQ || code == NE || code == GE || code == GEU
7758 || code == LT || code == LTU)
7759 && mode_width <= HOST_BITS_PER_WIDE_INT
7760 && exact_log2 (const_op) >= 0
7761 && significant_bits (op0, mode) == const_op)
7762 {
7763 code = (code == EQ || code == GE || code == GEU ? NE : EQ);
7764 op1 = const0_rtx, const_op = 0;
7765 }
7766
7767 /* Similarly, if we are comparing a value known to be either -1 or
7768 0 with -1, change it to the opposite comparison against zero. */
7769
7770 if (const_op == -1
7771 && (code == EQ || code == NE || code == GT || code == LE
7772 || code == GEU || code == LTU)
7773 && num_sign_bit_copies (op0, mode) == mode_width)
7774 {
7775 code = (code == EQ || code == LE || code == GEU ? NE : EQ);
7776 op1 = const0_rtx, const_op = 0;
7777 }
7778
7779 /* Do some canonicalizations based on the comparison code. We prefer
7780 comparisons against zero and then prefer equality comparisons.
7781 If we can reduce the size of a constant, we will do that too. */
7782
7783 switch (code)
7784 {
7785 case LT:
7786 /* < C is equivalent to <= (C - 1) */
7787 if (const_op > 0)
7788 {
7789 const_op -= 1;
7790 op1 = GEN_INT (const_op);
7791 code = LE;
7792 /* ... fall through to LE case below. */
7793 }
7794 else
7795 break;
7796
7797 case LE:
7798 /* <= C is equivalent to < (C + 1); we do this for C < 0. */
7799 if (const_op < 0)
7800 {
7801 const_op += 1;
7802 op1 = GEN_INT (const_op);
7803 code = LT;
7804 }
7805
7806 /* If we are doing a <= 0 comparison on a value known to have
7807 a zero sign bit, we can replace this with == 0. */
7808 else if (const_op == 0
7809 && mode_width <= HOST_BITS_PER_WIDE_INT
7810 && (significant_bits (op0, mode)
7811 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
7812 code = EQ;
7813 break;
7814
7815 case GE:
7816 /* >= C is equivalent to > (C - 1). */
7817 if (const_op > 0)
7818 {
7819 const_op -= 1;
7820 op1 = GEN_INT (const_op);
7821 code = GT;
7822 /* ... fall through to GT below. */
7823 }
7824 else
7825 break;
7826
7827 case GT:
7828 /* > C is equivalent to >= (C + 1); we do this for C < 0. */
7829 if (const_op < 0)
7830 {
7831 const_op += 1;
7832 op1 = GEN_INT (const_op);
7833 code = GE;
7834 }
7835
7836 /* If we are doing a > 0 comparison on a value known to have
7837 a zero sign bit, we can replace this with != 0. */
7838 else if (const_op == 0
7839 && mode_width <= HOST_BITS_PER_WIDE_INT
7840 && (significant_bits (op0, mode)
7841 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)
7842 code = NE;
7843 break;
7844
7845 case LTU:
7846 /* < C is equivalent to <= (C - 1). */
7847 if (const_op > 0)
7848 {
7849 const_op -= 1;
7850 op1 = GEN_INT (const_op);
7851 code = LEU;
7852 /* ... fall through ... */
7853 }
7854
7855 /* (unsigned) < 0x80000000 is equivalent to >= 0. */
7856 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
7857 {
7858 const_op = 0, op1 = const0_rtx;
7859 code = GE;
7860 break;
7861 }
7862 else
7863 break;
7864
7865 case LEU:
7866 /* unsigned <= 0 is equivalent to == 0 */
7867 if (const_op == 0)
7868 code = EQ;
7869
7870 /* (unsigned) <= 0x7fffffff is equivalent to >= 0. */
7871 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
7872 {
7873 const_op = 0, op1 = const0_rtx;
7874 code = GE;
7875 }
7876 break;
7877
7878 case GEU:
7879 /* unsigned >= C is equivalent to > (C - 1). */
7880 if (const_op > 1)
7881 {
7882 const_op -= 1;
7883 op1 = GEN_INT (const_op);
7884 code = GTU;
7885 /* ... fall through ... */
7886 }
7887
7888 /* (unsigned) >= 0x80000000 is equivalent to < 0. */
7889 else if (const_op == (HOST_WIDE_INT) 1 << (mode_width - 1))
7890 {
7891 const_op = 0, op1 = const0_rtx;
7892 code = LT;
7893 }
7894 else
7895 break;
7896
7897 case GTU:
7898 /* unsigned > 0 is equivalent to != 0 */
7899 if (const_op == 0)
7900 code = NE;
7901
7902 /* (unsigned) > 0x7fffffff is equivalent to < 0. */
7903 else if (const_op == ((HOST_WIDE_INT) 1 << (mode_width - 1)) - 1)
7904 {
7905 const_op = 0, op1 = const0_rtx;
7906 code = LT;
7907 }
7908 break;
7909 }
7910
7911 /* Compute some predicates to simplify code below. */
7912
7913 equality_comparison_p = (code == EQ || code == NE);
7914 sign_bit_comparison_p = ((code == LT || code == GE) && const_op == 0);
7915 unsigned_comparison_p = (code == LTU || code == LEU || code == GTU
7916 || code == LEU);
7917
7918 /* Now try cases based on the opcode of OP0. If none of the cases
7919 does a "continue", we exit this loop immediately after the
7920 switch. */
7921
7922 switch (GET_CODE (op0))
7923 {
7924 case ZERO_EXTRACT:
7925 /* If we are extracting a single bit from a variable position in
7926 a constant that has only a single bit set and are comparing it
7927 with zero, we can convert this into an equality comparison
7928 between the position and the location of the single bit. We can't
7929 do this if bits are big-endian and we don't have an extzv, since we
7930 then can't know what mode to use for the endianness adjustment. */
7931
7932 #if ! BITS_BIG_ENDIAN || defined (HAVE_extzv)
7933 if (GET_CODE (XEXP (op0, 0)) == CONST_INT
7934 && XEXP (op0, 1) == const1_rtx
7935 && equality_comparison_p && const_op == 0
7936 && (i = exact_log2 (INTVAL (XEXP (op0, 0)))) >= 0)
7937 {
7938 #if BITS_BIG_ENDIAN
7939 i = (GET_MODE_BITSIZE
7940 (insn_operand_mode[(int) CODE_FOR_extzv][1]) - 1 - i);
7941 #endif
7942
7943 op0 = XEXP (op0, 2);
7944 op1 = GEN_INT (i);
7945 const_op = i;
7946
7947 /* Result is nonzero iff shift count is equal to I. */
7948 code = reverse_condition (code);
7949 continue;
7950 }
7951 #endif
7952
7953 /* ... fall through ... */
7954
7955 case SIGN_EXTRACT:
7956 tem = expand_compound_operation (op0);
7957 if (tem != op0)
7958 {
7959 op0 = tem;
7960 continue;
7961 }
7962 break;
7963
7964 case NOT:
7965 /* If testing for equality, we can take the NOT of the constant. */
7966 if (equality_comparison_p
7967 && (tem = simplify_unary_operation (NOT, mode, op1, mode)) != 0)
7968 {
7969 op0 = XEXP (op0, 0);
7970 op1 = tem;
7971 continue;
7972 }
7973
7974 /* If just looking at the sign bit, reverse the sense of the
7975 comparison. */
7976 if (sign_bit_comparison_p)
7977 {
7978 op0 = XEXP (op0, 0);
7979 code = (code == GE ? LT : GE);
7980 continue;
7981 }
7982 break;
7983
7984 case NEG:
7985 /* If testing for equality, we can take the NEG of the constant. */
7986 if (equality_comparison_p
7987 && (tem = simplify_unary_operation (NEG, mode, op1, mode)) != 0)
7988 {
7989 op0 = XEXP (op0, 0);
7990 op1 = tem;
7991 continue;
7992 }
7993
7994 /* The remaining cases only apply to comparisons with zero. */
7995 if (const_op != 0)
7996 break;
7997
7998 /* When X is ABS or is known positive,
7999 (neg X) is < 0 if and only if X != 0. */
8000
8001 if (sign_bit_comparison_p
8002 && (GET_CODE (XEXP (op0, 0)) == ABS
8003 || (mode_width <= HOST_BITS_PER_WIDE_INT
8004 && (significant_bits (XEXP (op0, 0), mode)
8005 & ((HOST_WIDE_INT) 1 << (mode_width - 1))) == 0)))
8006 {
8007 op0 = XEXP (op0, 0);
8008 code = (code == LT ? NE : EQ);
8009 continue;
8010 }
8011
8012 /* If we have NEG of something that is the result of a
8013 SIGN_EXTEND, SIGN_EXTRACT, or ASHIFTRT, we know that the
8014 two high-order bits must be the same and hence that
8015 "(-a) < 0" is equivalent to "a > 0". Otherwise, we can't
8016 do this. */
8017 if (GET_CODE (XEXP (op0, 0)) == SIGN_EXTEND
8018 || (GET_CODE (XEXP (op0, 0)) == SIGN_EXTRACT
8019 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8020 && (INTVAL (XEXP (XEXP (op0, 0), 1))
8021 < GET_MODE_BITSIZE (GET_MODE (XEXP (XEXP (op0, 0), 0)))))
8022 || (GET_CODE (XEXP (op0, 0)) == ASHIFTRT
8023 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8024 && XEXP (XEXP (op0, 0), 1) != const0_rtx)
8025 || ((tem = get_last_value (XEXP (op0, 0))) != 0
8026 && (GET_CODE (tem) == SIGN_EXTEND
8027 || (GET_CODE (tem) == SIGN_EXTRACT
8028 && GET_CODE (XEXP (tem, 1)) == CONST_INT
8029 && (INTVAL (XEXP (tem, 1))
8030 < GET_MODE_BITSIZE (GET_MODE (XEXP (tem, 0)))))
8031 || (GET_CODE (tem) == ASHIFTRT
8032 && GET_CODE (XEXP (tem, 1)) == CONST_INT
8033 && XEXP (tem, 1) != const0_rtx))))
8034 {
8035 op0 = XEXP (op0, 0);
8036 code = swap_condition (code);
8037 continue;
8038 }
8039 break;
8040
8041 case ROTATE:
8042 /* If we are testing equality and our count is a constant, we
8043 can perform the inverse operation on our RHS. */
8044 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8045 && (tem = simplify_binary_operation (ROTATERT, mode,
8046 op1, XEXP (op0, 1))) != 0)
8047 {
8048 op0 = XEXP (op0, 0);
8049 op1 = tem;
8050 continue;
8051 }
8052
8053 /* If we are doing a < 0 or >= 0 comparison, it means we are testing
8054 a particular bit. Convert it to an AND of a constant of that
8055 bit. This will be converted into a ZERO_EXTRACT. */
8056 if (const_op == 0 && sign_bit_comparison_p
8057 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8058 && mode_width <= HOST_BITS_PER_WIDE_INT)
8059 {
8060 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8061 ((HOST_WIDE_INT) 1
8062 << (mode_width - 1
8063 - INTVAL (XEXP (op0, 1)))));
8064 code = (code == LT ? NE : EQ);
8065 continue;
8066 }
8067
8068 /* ... fall through ... */
8069
8070 case ABS:
8071 /* ABS is ignorable inside an equality comparison with zero. */
8072 if (const_op == 0 && equality_comparison_p)
8073 {
8074 op0 = XEXP (op0, 0);
8075 continue;
8076 }
8077 break;
8078
8079
8080 case SIGN_EXTEND:
8081 /* Can simplify (compare (zero/sign_extend FOO) CONST)
8082 to (compare FOO CONST) if CONST fits in FOO's mode and we
8083 are either testing inequality or have an unsigned comparison
8084 with ZERO_EXTEND or a signed comparison with SIGN_EXTEND. */
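/* For example, (lt (sign_extend:SI x:QI) (const_int 100)) becomes
(lt x (const_int 100)), since 100 fits in QImode. */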
8085 if (! unsigned_comparison_p
8086 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8087 <= HOST_BITS_PER_WIDE_INT)
8088 && ((unsigned HOST_WIDE_INT) const_op
8089 < (((HOST_WIDE_INT) 1
8090 << (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0))) - 1)))))
8091 {
8092 op0 = XEXP (op0, 0);
8093 continue;
8094 }
8095 break;
8096
8097 case SUBREG:
8098 /* Check for the case where we are comparing A - C1 with C2,
8099 both constants are smaller than 1/2 the maximum positive
8100 value in MODE, and the comparison is equality or unsigned.
8101 In that case, if A is either zero-extended to MODE or has
8102 sufficient sign bits so that the high-order bit in MODE
8103 is a copy of the sign in the inner mode, we can prove that it is
8104 safe to do the operation in the wider mode. This simplifies
8105 many range checks. */
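/* For example, if a is known to be zero-extended into SImode, then
(leu (subreg:QI (plus:SI a (const_int -2)) 0) (const_int 5)),
the range check 2 <= a && a <= 7, can be done directly on the
SImode PLUS. */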
8106
8107 if (mode_width <= HOST_BITS_PER_WIDE_INT
8108 && subreg_lowpart_p (op0)
8109 && GET_CODE (SUBREG_REG (op0)) == PLUS
8110 && GET_CODE (XEXP (SUBREG_REG (op0), 1)) == CONST_INT
8111 && INTVAL (XEXP (SUBREG_REG (op0), 1)) < 0
8112 && (- INTVAL (XEXP (SUBREG_REG (op0), 1))
8113 < GET_MODE_MASK (mode) / 2)
8114 && (unsigned) const_op < GET_MODE_MASK (mode) / 2
8115 && (0 == (significant_bits (XEXP (SUBREG_REG (op0), 0),
8116 GET_MODE (SUBREG_REG (op0)))
8117 & ~ GET_MODE_MASK (mode))
8118 || (num_sign_bit_copies (XEXP (SUBREG_REG (op0), 0),
8119 GET_MODE (SUBREG_REG (op0)))
8120 > (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
8121 - GET_MODE_BITSIZE (mode)))))
8122 {
8123 op0 = SUBREG_REG (op0);
8124 continue;
8125 }
8126
8127 /* If the inner mode is narrower and we are extracting the low part,
8128 we can treat the SUBREG as if it were a ZERO_EXTEND. */
8129 if (subreg_lowpart_p (op0)
8130 && GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0))) < mode_width)
8131 /* Fall through */ ;
8132 else
8133 break;
8134
8135 /* ... fall through ... */
8136
8137 case ZERO_EXTEND:
8138 if ((unsigned_comparison_p || equality_comparison_p)
8139 && (GET_MODE_BITSIZE (GET_MODE (XEXP (op0, 0)))
8140 <= HOST_BITS_PER_WIDE_INT)
8141 && ((unsigned HOST_WIDE_INT) const_op
8142 < GET_MODE_MASK (GET_MODE (XEXP (op0, 0)))))
8143 {
8144 op0 = XEXP (op0, 0);
8145 continue;
8146 }
8147 break;
8148
8149 case PLUS:
8150 /* (eq (plus X C1) C2) -> (eq X (minus C2 C1)). We can only do
8151 this for equality comparisons due to pathological cases involving
8152 overflows. */
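/* For example, (eq (plus x (const_int 3)) (const_int 7)) becomes
(eq x (const_int 4)). */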
8153 if (equality_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8154 && (tem = simplify_binary_operation (MINUS, mode, op1,
8155 XEXP (op0, 1))) != 0)
8156 {
8157 op0 = XEXP (op0, 0);
8158 op1 = tem;
8159 continue;
8160 }
8161
8162 /* (plus (abs X) (const_int -1)) is < 0 if and only if X == 0. */
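/* For example, (lt (plus (abs x) (const_int -1)) (const_int 0))
becomes (eq x (const_int 0)): |x| - 1 is negative exactly when
|x| is zero. */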
8163 if (const_op == 0 && XEXP (op0, 1) == constm1_rtx
8164 && GET_CODE (XEXP (op0, 0)) == ABS && sign_bit_comparison_p)
8165 {
8166 op0 = XEXP (XEXP (op0, 0), 0);
8167 code = (code == LT ? EQ : NE);
8168 continue;
8169 }
8170 break;
8171
8172 case MINUS:
8173 /* The sign bit of (minus (ashiftrt X C) X), where C is the number
8174 of bits in X minus 1, is one iff X > 0. */
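/* For example, in SImode, (ashiftrt x (const_int 31)) is 0 when
x >= 0 and -1 when x < 0, so
(lt (minus (ashiftrt x (const_int 31)) x) (const_int 0))
becomes (gt x (const_int 0)). */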
8175 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == ASHIFTRT
8176 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8177 && INTVAL (XEXP (XEXP (op0, 0), 1)) == mode_width - 1
8178 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
8179 {
8180 op0 = XEXP (op0, 1);
8181 code = (code == GE ? LE : GT);
8182 continue;
8183 }
8184 break;
8185
8186 case XOR:
8187 /* (eq (xor A B) C) -> (eq A (xor B C)). This is a simplification
8188 if C is zero or B is a constant. */
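/* For example, (eq (xor x (const_int 5)) (const_int 3)) becomes
(eq x (const_int 6)), since 5 ^ 3 == 6. */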
8189 if (equality_comparison_p
8190 && 0 != (tem = simplify_binary_operation (XOR, mode,
8191 XEXP (op0, 1), op1)))
8192 {
8193 op0 = XEXP (op0, 0);
8194 op1 = tem;
8195 continue;
8196 }
8197 break;
8198
8199 case EQ: case NE:
8200 case LT: case LTU: case LE: case LEU:
8201 case GT: case GTU: case GE: case GEU:
8202 /* We can't do anything if OP0 is a condition code value, rather
8203 than an actual data value. */
8204 if (const_op != 0
8205 #ifdef HAVE_cc0
8206 || XEXP (op0, 0) == cc0_rtx
8207 #endif
8208 || GET_MODE_CLASS (GET_MODE (XEXP (op0, 0))) == MODE_CC)
8209 break;
8210
8211 /* Get the two operands being compared. */
8212 if (GET_CODE (XEXP (op0, 0)) == COMPARE)
8213 tem = XEXP (XEXP (op0, 0), 0), tem1 = XEXP (XEXP (op0, 0), 1);
8214 else
8215 tem = XEXP (op0, 0), tem1 = XEXP (op0, 1);
8216
8217 /* Check for the cases where we simply want the result of the
8218 earlier test or the opposite of that result. */
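/* For example, (ne (lt a b) (const_int 0)) is just (lt a b), and
(eq (lt a b) (const_int 0)) is (ge a b) when the LT can safely
be reversed. */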
8219 if (code == NE
8220 || (code == EQ && reversible_comparison_p (op0))
8221 || (GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
8222 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8223 && (STORE_FLAG_VALUE
8224 & (((HOST_WIDE_INT) 1
8225 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
8226 && (code == LT
8227 || (code == GE && reversible_comparison_p (op0)))))
8228 {
8229 code = (code == LT || code == NE
8230 ? GET_CODE (op0) : reverse_condition (GET_CODE (op0)));
8231 op0 = tem, op1 = tem1;
8232 continue;
8233 }
8234 break;
8235
8236 case IOR:
8237 /* The sign bit of (ior (plus X (const_int -1)) X) is non-zero
8238 iff X <= 0. */
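/* For example, (lt (ior (plus x (const_int -1)) x) (const_int 0))
becomes (le x (const_int 0)): the IOR is negative when x is
negative or when x - 1 is, i.e. when x == 0. */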
8239 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 0)) == PLUS
8240 && XEXP (XEXP (op0, 0), 1) == constm1_rtx
8241 && rtx_equal_p (XEXP (XEXP (op0, 0), 0), XEXP (op0, 1)))
8242 {
8243 op0 = XEXP (op0, 1);
8244 code = (code == GE ? GT : LE);
8245 continue;
8246 }
8247 break;
8248
8249 case AND:
8250 /* Convert (and (xshift 1 X) Y) to (and (lshiftrt Y X) 1). This
8251 will be converted to a ZERO_EXTRACT later. */
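/* For example, (eq (and (ashift (const_int 1) x) y) (const_int 0))
becomes (eq (and (lshiftrt y x) (const_int 1)) (const_int 0));
both test bit x of y. */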
8252 if (const_op == 0 && equality_comparison_p
8253 && (GET_CODE (XEXP (op0, 0)) == ASHIFT
8254 || GET_CODE (XEXP (op0, 0)) == LSHIFT)
8255 && XEXP (XEXP (op0, 0), 0) == const1_rtx)
8256 {
8257 op0 = simplify_and_const_int
8258 (op0, mode, gen_rtx_combine (LSHIFTRT, mode,
8259 XEXP (op0, 1),
8260 XEXP (XEXP (op0, 0), 1)),
8261 (HOST_WIDE_INT) 1);
8262 continue;
8263 }
8264
8265 /* If we are comparing (and (lshiftrt X C1) C2) for equality with
8266 zero and X is a comparison and C1 and C2 describe only bits set
8267 in STORE_FLAG_VALUE, we can compare with X. */
8268 if (const_op == 0 && equality_comparison_p
8269 && mode_width <= HOST_BITS_PER_WIDE_INT
8270 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8271 && GET_CODE (XEXP (op0, 0)) == LSHIFTRT
8272 && GET_CODE (XEXP (XEXP (op0, 0), 1)) == CONST_INT
8273 && INTVAL (XEXP (XEXP (op0, 0), 1)) >= 0
8274 && INTVAL (XEXP (XEXP (op0, 0), 1)) < HOST_BITS_PER_WIDE_INT)
8275 {
8276 mask = ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
8277 << INTVAL (XEXP (XEXP (op0, 0), 1)));
8278 if ((~ STORE_FLAG_VALUE & mask) == 0
8279 && (GET_RTX_CLASS (GET_CODE (XEXP (XEXP (op0, 0), 0))) == '<'
8280 || ((tem = get_last_value (XEXP (XEXP (op0, 0), 0))) != 0
8281 && GET_RTX_CLASS (GET_CODE (tem)) == '<')))
8282 {
8283 op0 = XEXP (XEXP (op0, 0), 0);
8284 continue;
8285 }
8286 }
8287
8288 /* If we are doing an equality comparison of an AND of a bit equal
8289 to the sign bit, replace this with a LT or GE comparison of
8290 the underlying value. */
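/* For example, in SImode, an EQ test of (and x (const_int
-2147483648)), the sign-bit mask, becomes (ge x (const_int 0)),
and an NE test becomes (lt x (const_int 0)). */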
8291 if (equality_comparison_p
8292 && const_op == 0
8293 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8294 && mode_width <= HOST_BITS_PER_WIDE_INT
8295 && ((INTVAL (XEXP (op0, 1)) & GET_MODE_MASK (mode))
8296 == (HOST_WIDE_INT) 1 << (mode_width - 1)))
8297 {
8298 op0 = XEXP (op0, 0);
8299 code = (code == EQ ? GE : LT);
8300 continue;
8301 }
8302
8303 /* If this AND operation is really a ZERO_EXTEND from a narrower
8304 mode, the constant fits within that mode, and this is either an
8305 equality or unsigned comparison, try to do this comparison in
8306 the narrower mode. */
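/* For example, (ltu (and:SI x (const_int 255)) (const_int 10)) can
be done in QImode as (ltu (subreg:QI x 0) (const_int 10)), since
the AND acts as a zero-extension from QImode. */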
8307 if ((equality_comparison_p || unsigned_comparison_p)
8308 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8309 && (i = exact_log2 ((INTVAL (XEXP (op0, 1))
8310 & GET_MODE_MASK (mode))
8311 + 1)) >= 0
8312 && const_op >> i == 0
8313 && (tmode = mode_for_size (i, MODE_INT, 1)) != BLKmode)
8314 {
8315 op0 = gen_lowpart_for_combine (tmode, XEXP (op0, 0));
8316 continue;
8317 }
8318 break;
8319
8320 case ASHIFT:
8321 case LSHIFT:
8322 /* If we have (compare (xshift FOO N) (const_int C)) and
8323 the high order N bits of FOO (N+1 if an inequality comparison)
8324 are not significant, we can do this by comparing FOO with C
8325 shifted right N bits so long as the low-order N bits of C are
8326 zero. */
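/* For example, if the top two bits of x are known to be zero,
(eq (ashift x (const_int 2)) (const_int 20)) becomes
(eq x (const_int 5)), since the low two bits of 20 are zero. */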
8327 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
8328 && INTVAL (XEXP (op0, 1)) >= 0
8329 && ((INTVAL (XEXP (op0, 1)) + ! equality_comparison_p)
8330 < HOST_BITS_PER_WIDE_INT)
8331 && ((const_op
8332 & ((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1) == 0)
8333 && mode_width <= HOST_BITS_PER_WIDE_INT
8334 && (significant_bits (XEXP (op0, 0), mode)
8335 & ~ (mask >> (INTVAL (XEXP (op0, 1))
8336 + ! equality_comparison_p))) == 0)
8337 {
8338 const_op >>= INTVAL (XEXP (op0, 1));
8339 op1 = GEN_INT (const_op);
8340 op0 = XEXP (op0, 0);
8341 continue;
8342 }
8343
8344 /* If we are doing a sign bit comparison, it means we are testing
8345 a particular bit. Convert it to the appropriate AND. */
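/* For example, in SImode,
(lt (ashift x (const_int 8)) (const_int 0)) tests bit 23 of x,
so it becomes an NE test of (and x (const_int 8388608)). */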
8346 if (sign_bit_comparison_p && GET_CODE (XEXP (op0, 1)) == CONST_INT
8347 && mode_width <= HOST_BITS_PER_WIDE_INT)
8348 {
8349 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8350 ((HOST_WIDE_INT) 1
8351 << (mode_width - 1
8352 - INTVAL (XEXP (op0, 1)))));
8353 code = (code == LT ? NE : EQ);
8354 continue;
8355 }
8356
8357 /* If this is an equality comparison with zero and we are shifting
8358 the low bit to the sign bit, we can convert this to an AND of the
8359 low-order bit. */
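/* For example, in SImode,
(eq (ashift x (const_int 31)) (const_int 0)) becomes
(eq (and x (const_int 1)) (const_int 0)). */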
8360 if (const_op == 0 && equality_comparison_p
8361 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8362 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
8363 {
8364 op0 = simplify_and_const_int (NULL_RTX, mode, XEXP (op0, 0),
8365 (HOST_WIDE_INT) 1);
8366 continue;
8367 }
8368 break;
8369
8370 case ASHIFTRT:
8371 /* If this is an equality comparison with zero, we can do this
8372 as a logical shift, which might be much simpler. */
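/* For example, (eq (ashiftrt x (const_int 5)) (const_int 0)) holds
exactly when bits 5 and up of x are all zero, just as
(eq (lshiftrt x (const_int 5)) (const_int 0)) does. */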
8373 if (equality_comparison_p && const_op == 0
8374 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
8375 {
8376 op0 = simplify_shift_const (NULL_RTX, LSHIFTRT, mode,
8377 XEXP (op0, 0),
8378 INTVAL (XEXP (op0, 1)));
8379 continue;
8380 }
8381
8382 /* If OP0 is a sign extension and CODE is not an unsigned comparison,
8383 do the comparison in a narrower mode. */
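/* For example, in SImode,
(ashiftrt (ashift x (const_int 24)) (const_int 24)) is the
sign-extension of the low byte of x, so a signed comparison with
a constant that fits in QImode can be done on (subreg:QI x 0)
instead. */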
8384 if (! unsigned_comparison_p
8385 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8386 && GET_CODE (XEXP (op0, 0)) == ASHIFT
8387 && XEXP (op0, 1) == XEXP (XEXP (op0, 0), 1)
8388 && (tmode = mode_for_size (mode_width - INTVAL (XEXP (op0, 1)),
8389 MODE_INT, 1)) != BLKmode
8390 && ((unsigned HOST_WIDE_INT) const_op <= GET_MODE_MASK (tmode)
8391 || ((unsigned HOST_WIDE_INT) - const_op
8392 <= GET_MODE_MASK (tmode))))
8393 {
8394 op0 = gen_lowpart_for_combine (tmode, XEXP (XEXP (op0, 0), 0));
8395 continue;
8396 }
8397
8398 /* ... fall through ... */
8399 case LSHIFTRT:
8400 /* If we have (compare (xshiftrt FOO N) (const_int C)) and
8401 the low order N bits of FOO are not significant, we can do this
8402 by comparing FOO with C shifted left N bits so long as no
8403 overflow occurs. */
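/* For example, if the low two bits of x are known to be zero,
(eq (lshiftrt x (const_int 2)) (const_int 5)) becomes
(eq x (const_int 20)). */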
8404 if (GET_CODE (XEXP (op0, 1)) == CONST_INT
8405 && INTVAL (XEXP (op0, 1)) >= 0
8406 && INTVAL (XEXP (op0, 1)) < HOST_BITS_PER_WIDE_INT
8407 && mode_width <= HOST_BITS_PER_WIDE_INT
8408 && (significant_bits (XEXP (op0, 0), mode)
8409 & (((HOST_WIDE_INT) 1 << INTVAL (XEXP (op0, 1))) - 1)) == 0
8410 && (const_op == 0
8411 || (floor_log2 (const_op) + INTVAL (XEXP (op0, 1))
8412 < mode_width)))
8413 {
8414 const_op <<= INTVAL (XEXP (op0, 1));
8415 op1 = GEN_INT (const_op);
8416 op0 = XEXP (op0, 0);
8417 continue;
8418 }
8419
8420 /* If we are using this shift to extract just the sign bit, we
8421 can replace this with an LT or GE comparison. */
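/* For example, in SImode,
(ne (lshiftrt x (const_int 31)) (const_int 0)) becomes
(lt x (const_int 0)), and an EQ test becomes
(ge x (const_int 0)). */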
8422 if (const_op == 0
8423 && (equality_comparison_p || sign_bit_comparison_p)
8424 && GET_CODE (XEXP (op0, 1)) == CONST_INT
8425 && INTVAL (XEXP (op0, 1)) == mode_width - 1)
8426 {
8427 op0 = XEXP (op0, 0);
8428 code = (code == NE || code == GT ? LT : GE);
8429 continue;
8430 }
8431 break;
8432 }
8433
8434 break;
8435 }
8436
8437 /* Now make any compound operations involved in this comparison. Then,
8438 check for an outermost SUBREG on OP0 that isn't doing anything or is
8439 paradoxical. The latter case can only occur when it is known that the
8440 "extra" bits will be zero. Therefore, it is safe to remove the SUBREG.
8441 We can never remove a SUBREG for a non-equality comparison because the
8442 sign bit is in a different place in the underlying object. */
8443
8444 op0 = make_compound_operation (op0, op1 == const0_rtx ? COMPARE : SET);
8445 op1 = make_compound_operation (op1, SET);
8446
8447 if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
8448 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8449 && (code == NE || code == EQ)
8450 && ((GET_MODE_SIZE (GET_MODE (op0))
8451 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (op0))))))
8452 {
8453 op0 = SUBREG_REG (op0);
8454 op1 = gen_lowpart_for_combine (GET_MODE (op0), op1);
8455 }
8456
8457 else if (GET_CODE (op0) == SUBREG && subreg_lowpart_p (op0)
8458 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8459 && (code == NE || code == EQ)
8460 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (op0)))
8461 <= HOST_BITS_PER_WIDE_INT)
8462 && (significant_bits (SUBREG_REG (op0), GET_MODE (SUBREG_REG (op0)))
8463 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0
8464 && (tem = gen_lowpart_for_combine (GET_MODE (SUBREG_REG (op0)),
8465 op1),
8466 (significant_bits (tem, GET_MODE (SUBREG_REG (op0)))
8467 & ~ GET_MODE_MASK (GET_MODE (op0))) == 0))
8468 op0 = SUBREG_REG (op0), op1 = tem;
8469
8470 /* We now do the opposite procedure: Some machines don't have compare
8471 insns in all modes. If OP0's mode is an integer mode smaller than a
8472 word and we can't do a compare in that mode, see if there is a larger
8473 mode for which we can do the compare. There are a number of cases in
8474 which we can use the wider mode. */
8475
8476 mode = GET_MODE (op0);
8477 if (mode != VOIDmode && GET_MODE_CLASS (mode) == MODE_INT
8478 && GET_MODE_SIZE (mode) < UNITS_PER_WORD
8479 && cmp_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
8480 for (tmode = GET_MODE_WIDER_MODE (mode);
8481 (tmode != VOIDmode
8482 && GET_MODE_BITSIZE (tmode) <= HOST_BITS_PER_WIDE_INT);
8483 tmode = GET_MODE_WIDER_MODE (tmode))
8484 if (cmp_optab->handlers[(int) tmode].insn_code != CODE_FOR_nothing)
8485 {
8486 /* If the only significant bits in OP0 and OP1 are those in the
8487 narrower mode and this is an equality or unsigned comparison,
8488 we can use the wider mode. Similarly for sign-extended
8489 values and equality or signed comparisons. */
8490 if (((code == EQ || code == NE
8491 || code == GEU || code == GTU || code == LEU || code == LTU)
8492 && ((significant_bits (op0, tmode) & ~ GET_MODE_MASK (mode))
8493 == 0)
8494 && ((significant_bits (op1, tmode) & ~ GET_MODE_MASK (mode))
8495 == 0))
8496 || ((code == EQ || code == NE
8497 || code == GE || code == GT || code == LE || code == LT)
8498 && (num_sign_bit_copies (op0, tmode)
8499 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))
8500 && (num_sign_bit_copies (op1, tmode)
8501 > GET_MODE_BITSIZE (tmode) - GET_MODE_BITSIZE (mode))))
8502 {
8503 op0 = gen_lowpart_for_combine (tmode, op0);
8504 op1 = gen_lowpart_for_combine (tmode, op1);
8505 break;
8506 }
8507
8508 /* If this is a test for negative, we can make an explicit
8509 test of the sign bit. */
8510
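/* For example, on a machine with no QImode compare,
(lt x:QI (const_int 0)) can be tested in SImode as an NE test of
(and:SI x' (const_int 128)), where x' is x viewed in SImode. */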
8511 if (op1 == const0_rtx && (code == LT || code == GE)
8512 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
8513 {
8514 op0 = gen_binary (AND, tmode,
8515 gen_lowpart_for_combine (tmode, op0),
8516 GEN_INT ((HOST_WIDE_INT) 1
8517 << (GET_MODE_BITSIZE (mode) - 1)));
8518 code = (code == LT) ? NE : EQ;
8519 break;
8520 }
8521 }
8522
8523 *pop0 = op0;
8524 *pop1 = op1;
8525
8526 return code;
8527 }
8528 \f
8529 /* Return 1 if we know that X, a comparison operation, is not operating
8530 on a floating-point value or is EQ or NE, meaning that we can safely
8531 reverse it. */
8532
8533 static int
8534 reversible_comparison_p (x)
8535 rtx x;
8536 {
8537 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
8538 || GET_CODE (x) == NE || GET_CODE (x) == EQ)
8539 return 1;
8540
8541 switch (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))))
8542 {
8543 case MODE_INT:
8544 return 1;
8545
8546 case MODE_CC:
8547 x = get_last_value (XEXP (x, 0));
8548 return (x && GET_CODE (x) == COMPARE
8549 && GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_INT);
8550 }
8551
8552 return 0;
8553 }
8554 \f
8555 /* Utility function for the following routine. Called when X is part of a
8556 value being stored into reg_last_set_value. Sets reg_last_set_table_tick
8557 for each register mentioned. Similar to mention_regs in cse.c. */
8558
8559 static void
8560 update_table_tick (x)
8561 rtx x;
8562 {
8563 register enum rtx_code code = GET_CODE (x);
8564 register char *fmt = GET_RTX_FORMAT (code);
8565 register int i;
8566
8567 if (code == REG)
8568 {
8569 int regno = REGNO (x);
8570 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8571 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
8572
8573 for (i = regno; i < endregno; i++)
8574 reg_last_set_table_tick[i] = label_tick;
8575
8576 return;
8577 }
8578
8579 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8580 /* Note that we can't have an "E" in values stored; see
8581 get_last_value_validate. */
8582 if (fmt[i] == 'e')
8583 update_table_tick (XEXP (x, i));
8584 }
8585
8586 /* Record that REG is set to VALUE in insn INSN. If VALUE is zero, we
8587 are saying that the register is clobbered and we no longer know its
8588 value. If INSN is zero, don't update reg_last_set; this call is normally
8589 done with VALUE also zero to invalidate the register. */
8590
8591 static void
8592 record_value_for_reg (reg, insn, value)
8593 rtx reg;
8594 rtx insn;
8595 rtx value;
8596 {
8597 int regno = REGNO (reg);
8598 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8599 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1);
8600 int i;
8601
8602 /* If VALUE contains REG and we have a previous value for REG, substitute
8603 the previous value. */
8604 if (value && insn && reg_overlap_mentioned_p (reg, value))
8605 {
8606 rtx tem;
8607
8608 /* Set things up so get_last_value is allowed to see anything set up to
8609 our insn. */
8610 subst_low_cuid = INSN_CUID (insn);
8611 tem = get_last_value (reg);
8612
8613 if (tem)
8614 value = replace_rtx (copy_rtx (value), reg, tem);
8615 }
8616
8617 /* For each register modified, show we don't know its value, that
8618 its value has been updated, and that we don't know the location of
8619 the death of the register. */
8620 for (i = regno; i < endregno; i ++)
8621 {
8622 if (insn)
8623 reg_last_set[i] = insn;
8624 reg_last_set_value[i] = 0;
8625 reg_last_death[i] = 0;
8626 }
8627
8628 /* Mark registers that are being referenced in this value. */
8629 if (value)
8630 update_table_tick (value);
8631
8632 /* Now update the status of each register being set.
8633 If this register is already used elsewhere in this basic block, mark
8634 its recorded value invalid, since we would otherwise confuse the two
8635 lives of the register within the block; this makes any later use of
8636 the recorded value fail. In cse, we scan the table to invalidate all
8637 entries using this register, but that is too much work for us. */
8638
8639 for (i = regno; i < endregno; i++)
8640 {
8641 reg_last_set_label[i] = label_tick;
8642 if (value && reg_last_set_table_tick[i] == label_tick)
8643 reg_last_set_invalid[i] = 1;
8644 else
8645 reg_last_set_invalid[i] = 0;
8646 }
8647
8648 /* The value being assigned might refer to X (like in "x++;"). In that
8649 case, we must replace it with (clobber (const_int 0)) to prevent
8650 infinite loops. */
8651 if (value && ! get_last_value_validate (&value,
8652 reg_last_set_label[regno], 0))
8653 {
8654 value = copy_rtx (value);
8655 if (! get_last_value_validate (&value, reg_last_set_label[regno], 1))
8656 value = 0;
8657 }
8658
8659 /* For the main register being modified, update the value. */
8660 reg_last_set_value[regno] = value;
8661
8662 }
8663
8664 /* Used for communication between the following two routines. */
8665 static rtx record_dead_insn;
8666
8667 /* Called via note_stores from record_dead_and_set_regs to handle one
8668 SET or CLOBBER in an insn. */
8669
8670 static void
8671 record_dead_and_set_regs_1 (dest, setter)
8672 rtx dest, setter;
8673 {
8674 if (GET_CODE (dest) == REG)
8675 {
8676 /* If we are setting the whole register, we know its value. Otherwise
8677 show that we don't know the value. We can handle SUBREG in
8678 some cases. */
8679 if (GET_CODE (setter) == SET && dest == SET_DEST (setter))
8680 record_value_for_reg (dest, record_dead_insn, SET_SRC (setter));
8681 else if (GET_CODE (setter) == SET
8682 && GET_CODE (SET_DEST (setter)) == SUBREG
8683 && SUBREG_REG (SET_DEST (setter)) == dest
8684 && subreg_lowpart_p (SET_DEST (setter)))
8685 record_value_for_reg (dest, record_dead_insn,
8686 gen_lowpart_for_combine (GET_MODE (dest),
8687 SET_SRC (setter)));
8688 else
8689 record_value_for_reg (dest, record_dead_insn, NULL_RTX);
8690 }
8691 else if (GET_CODE (dest) == MEM
8692 /* Ignore pushes, they clobber nothing. */
8693 && ! push_operand (dest, GET_MODE (dest)))
8694 mem_last_set = INSN_CUID (record_dead_insn);
8695 }
8696
8697 /* Update the records of when each REG was most recently set or killed
8698 for the things done by INSN. This is the last thing done in processing
8699 INSN in the combiner loop.
8700
8701 We update reg_last_set, reg_last_set_value, reg_last_death, and also the
8702 similar information mem_last_set (which insn most recently modified memory)
8703 and last_call_cuid (which insn was the most recent subroutine call). */
8704
8705 static void
8706 record_dead_and_set_regs (insn)
8707 rtx insn;
8708 {
8709 register rtx link;
8710 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
8711 {
8712 if (REG_NOTE_KIND (link) == REG_DEAD)
8713 reg_last_death[REGNO (XEXP (link, 0))] = insn;
8714 else if (REG_NOTE_KIND (link) == REG_INC)
8715 record_value_for_reg (XEXP (link, 0), insn, NULL_RTX);
8716 }
8717
8718 if (GET_CODE (insn) == CALL_INSN)
8719 last_call_cuid = mem_last_set = INSN_CUID (insn);
8720
8721 record_dead_insn = insn;
8722 note_stores (PATTERN (insn), record_dead_and_set_regs_1);
8723 }
8724 \f
8725 /* Utility routine for the following function. Verify that all the registers
8726 mentioned in *LOC are valid when *LOC was part of a value set when
8727 label_tick == TICK. Return 0 if some are not.
8728
8729 If REPLACE is non-zero, replace the invalid reference with
8730 (clobber (const_int 0)) and return 1. This replacement is useful because
8731 we often can get useful information about the form of a value (e.g., if
8732 it was produced by a shift that always produces -1 or 0) even though
8733 we don't know exactly what registers it was produced from. */
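/* For example, if the recorded value was (ashiftrt:SI y (const_int 31))
and Y is no longer valid, the replacement
(ashiftrt:SI (clobber (const_int 0)) (const_int 31)) still shows that
the value must be either 0 or -1. */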
8734
8735 static int
8736 get_last_value_validate (loc, tick, replace)
8737 rtx *loc;
8738 int tick;
8739 int replace;
8740 {
8741 rtx x = *loc;
8742 char *fmt = GET_RTX_FORMAT (GET_CODE (x));
8743 int len = GET_RTX_LENGTH (GET_CODE (x));
8744 int i;
8745
8746 if (GET_CODE (x) == REG)
8747 {
8748 int regno = REGNO (x);
8749 int endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8750 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
8751 int j;
8752
8753 for (j = regno; j < endregno; j++)
8754 if (reg_last_set_invalid[j]
8755 /* If this is a pseudo-register that was only set once, it is
8756 always valid. */
8757 || (! (regno >= FIRST_PSEUDO_REGISTER && reg_n_sets[regno] == 1)
8758 && reg_last_set_label[j] > tick))
8759 {
8760 if (replace)
8761 *loc = gen_rtx (CLOBBER, GET_MODE (x), const0_rtx);
8762 return replace;
8763 }
8764
8765 return 1;
8766 }
8767
8768 for (i = 0; i < len; i++)
8769 if ((fmt[i] == 'e'
8770 && get_last_value_validate (&XEXP (x, i), tick, replace) == 0)
8771 /* Don't bother with these. They shouldn't occur anyway. */
8772 || fmt[i] == 'E')
8773 return 0;
8774
8775 /* If we haven't found a reason for it to be invalid, it is valid. */
8776 return 1;
8777 }
8778
8779 /* Get the last value assigned to X, if known. Some registers
8780 in the value may be replaced with (clobber (const_int 0)) if their value
8781 is no longer known reliably. */
8782
8783 static rtx
8784 get_last_value (x)
8785 rtx x;
8786 {
8787 int regno;
8788 rtx value;
8789
8790 /* If this is a non-paradoxical SUBREG, get the value of its operand and
8791 then convert it to the desired mode. If this is a paradoxical SUBREG,
8792 we cannot predict what values the "extra" bits might have. */
8793 if (GET_CODE (x) == SUBREG
8794 && subreg_lowpart_p (x)
8795 && (GET_MODE_SIZE (GET_MODE (x))
8796 <= GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
8797 && (value = get_last_value (SUBREG_REG (x))) != 0)
8798 return gen_lowpart_for_combine (GET_MODE (x), value);
8799
8800 if (GET_CODE (x) != REG)
8801 return 0;
8802
8803 regno = REGNO (x);
8804 value = reg_last_set_value[regno];
8805
8806 /* If we don't have a value or if it isn't for this basic block, return 0. */
8807
8808 if (value == 0
8809 || (reg_n_sets[regno] != 1
8810 && (reg_last_set_label[regno] != label_tick)))
8811 return 0;
8812
8813 /* If the value was set in a later insn than the ones we are processing,
8814 we can't use it even if the register was only set once, but make a quick
8815 check to see if the previous insn set it to something. This is commonly
8816 the case when the same pseudo is used by repeated insns. */
8817
8818 if (INSN_CUID (reg_last_set[regno]) >= subst_low_cuid)
8819 {
8820 rtx insn, set;
8821
8822 for (insn = prev_nonnote_insn (subst_insn);
8823 insn && INSN_CUID (insn) >= subst_low_cuid;
8824 insn = prev_nonnote_insn (insn))
8825 ;
8826
8827 if (insn
8828 && (set = single_set (insn)) != 0
8829 && rtx_equal_p (SET_DEST (set), x))
8830 {
8831 value = SET_SRC (set);
8832
8833 /* Make sure that VALUE doesn't reference X. Replace any
8834 explicit references with a CLOBBER. If there are any remaining
8835 references (rare), don't use the value. */
8836
8837 if (reg_mentioned_p (x, value))
8838 value = replace_rtx (copy_rtx (value), x,
8839 gen_rtx (CLOBBER, GET_MODE (x), const0_rtx));
8840
8841 if (reg_overlap_mentioned_p (x, value))
8842 return 0;
8843 }
8844 else
8845 return 0;
8846 }
8847
8848 /* If the value has all its registers valid, return it. */
8849 if (get_last_value_validate (&value, reg_last_set_label[regno], 0))
8850 return value;
8851
8852 /* Otherwise, make a copy and replace any invalid register with
8853 (clobber (const_int 0)). If that fails for some reason, return 0. */
8854
8855 value = copy_rtx (value);
8856 if (get_last_value_validate (&value, reg_last_set_label[regno], 1))
8857 return value;
8858
8859 return 0;
8860 }
8861 \f
8862 /* Return nonzero if expression X refers to a REG or to memory
8863 that is set in an instruction more recent than FROM_CUID. */
8864
8865 static int
8866 use_crosses_set_p (x, from_cuid)
8867 register rtx x;
8868 int from_cuid;
8869 {
8870 register char *fmt;
8871 register int i;
8872 register enum rtx_code code = GET_CODE (x);
8873
8874 if (code == REG)
8875 {
8876 register int regno = REGNO (x);
8877 #ifdef PUSH_ROUNDING
8878 /* Don't allow uses of the stack pointer to be moved,
8879 because we don't know whether the move crosses a push insn. */
8880 if (regno == STACK_POINTER_REGNUM)
8881 return 1;
8882 #endif
8883 return (reg_last_set[regno]
8884 && INSN_CUID (reg_last_set[regno]) > from_cuid);
8885 }
8886
8887 if (code == MEM && mem_last_set > from_cuid)
8888 return 1;
8889
8890 fmt = GET_RTX_FORMAT (code);
8891
8892 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
8893 {
8894 if (fmt[i] == 'E')
8895 {
8896 register int j;
8897 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
8898 if (use_crosses_set_p (XVECEXP (x, i, j), from_cuid))
8899 return 1;
8900 }
8901 else if (fmt[i] == 'e'
8902 && use_crosses_set_p (XEXP (x, i), from_cuid))
8903 return 1;
8904 }
8905 return 0;
8906 }
8907 \f
8908 /* Define three variables used for communication between the following
8909 routines. */
8910
8911 static int reg_dead_regno, reg_dead_endregno;
8912 static int reg_dead_flag;
8913
8914 /* Function called via note_stores from reg_dead_at_p.
8915
8916 If DEST is within [reg_dead_regno, reg_dead_endregno), set
8917 reg_dead_flag to 1 if X is a CLOBBER and to -1 if it is a SET. */
8918
8919 static void
8920 reg_dead_at_p_1 (dest, x)
8921 rtx dest;
8922 rtx x;
8923 {
8924 int regno, endregno;
8925
8926 if (GET_CODE (dest) != REG)
8927 return;
8928
8929 regno = REGNO (dest);
8930 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
8931 ? HARD_REGNO_NREGS (regno, GET_MODE (dest)) : 1);
8932
8933 if (reg_dead_endregno > regno && reg_dead_regno < endregno)
8934 reg_dead_flag = (GET_CODE (x) == CLOBBER) ? 1 : -1;
8935 }
8936
8937 /* Return non-zero if REG is known to be dead at INSN.
8938
8939 We scan backwards from INSN. If we hit a REG_DEAD note or a CLOBBER
8940 referencing REG, it is dead. If we hit a SET referencing REG, it is
8941 live. Otherwise, see if it is live or dead at the start of the basic
8942 block we are in. */
8943
8944 static int
8945 reg_dead_at_p (reg, insn)
8946 rtx reg;
8947 rtx insn;
8948 {
8949 int block, i;
8950
8951 /* Set variables for reg_dead_at_p_1. */
8952 reg_dead_regno = REGNO (reg);
8953 reg_dead_endregno = reg_dead_regno + (reg_dead_regno < FIRST_PSEUDO_REGISTER
8954 ? HARD_REGNO_NREGS (reg_dead_regno,
8955 GET_MODE (reg))
8956 : 1);
8957
8958 reg_dead_flag = 0;
8959
8960 /* Scan backwards until we find a REG_DEAD note, SET, CLOBBER, label, or
8961 beginning of function. */
8962 for (; insn && GET_CODE (insn) != CODE_LABEL;
8963 insn = prev_nonnote_insn (insn))
8964 {
8965 note_stores (PATTERN (insn), reg_dead_at_p_1);
8966 if (reg_dead_flag)
8967 return reg_dead_flag == 1 ? 1 : 0;
8968
8969 if (find_regno_note (insn, REG_DEAD, reg_dead_regno))
8970 return 1;
8971 }
8972
8973 /* Get the basic block number that we were in. */
8974 if (insn == 0)
8975 block = 0;
8976 else
8977 {
8978 for (block = 0; block < n_basic_blocks; block++)
8979 if (insn == basic_block_head[block])
8980 break;
8981
8982 if (block == n_basic_blocks)
8983 return 0;
8984 }
8985
8986 for (i = reg_dead_regno; i < reg_dead_endregno; i++)
8987 if (basic_block_live_at_start[block][i / REGSET_ELT_BITS]
8988 & ((REGSET_ELT_TYPE) 1 << (i % REGSET_ELT_BITS)))
8989 return 0;
8990
8991 return 1;
8992 }
8993 \f
8994 /* Remove register number REGNO from the dead registers list of INSN.
8995
8996 Return the note used to record the death, if there was one. */
8997
8998 rtx
8999 remove_death (regno, insn)
9000 int regno;
9001 rtx insn;
9002 {
9003 register rtx note = find_regno_note (insn, REG_DEAD, regno);
9004
9005 if (note)
9006 {
9007 reg_n_deaths[regno]--;
9008 remove_note (insn, note);
9009 }
9010
9011 return note;
9012 }
9013
9014 /* For each register (hardware or pseudo) used within expression X, if its
9015 death is in an instruction with cuid between FROM_CUID (inclusive) and
9016 TO_INSN (exclusive), put a REG_DEAD note for that register in the
9017 list headed by PNOTES.
9018
9019 This is done when X is being merged by combination into TO_INSN. These
9020 notes will then be distributed as needed. */
9021
9022 static void
9023 move_deaths (x, from_cuid, to_insn, pnotes)
9024 rtx x;
9025 int from_cuid;
9026 rtx to_insn;
9027 rtx *pnotes;
9028 {
9029 register char *fmt;
9030 register int len, i;
9031 register enum rtx_code code = GET_CODE (x);
9032
9033 if (code == REG)
9034 {
9035 register int regno = REGNO (x);
9036 register rtx where_dead = reg_last_death[regno];
9037
9038 if (where_dead && INSN_CUID (where_dead) >= from_cuid
9039 && INSN_CUID (where_dead) < INSN_CUID (to_insn))
9040 {
9041 rtx note = remove_death (regno, reg_last_death[regno]);
9042
9043 /* It is possible for the call above to return 0. This can occur
9044 when reg_last_death points to I2 or I1 that we combined with.
9045 In that case make a new note. */
9046
9047 if (note)
9048 {
9049 XEXP (note, 1) = *pnotes;
9050 *pnotes = note;
9051 }
9052 else
9053 *pnotes = gen_rtx (EXPR_LIST, REG_DEAD, x, *pnotes);
9054
9055 reg_n_deaths[regno]++;
9056 }
9057
9058 return;
9059 }
9060
9061 else if (GET_CODE (x) == SET)
9062 {
9063 rtx dest = SET_DEST (x);
9064
9065 move_deaths (SET_SRC (x), from_cuid, to_insn, pnotes);
9066
9067 /* In the case of a ZERO_EXTRACT, a STRICT_LOW_PART, or a SUBREG
9068 that accesses one word of a multi-word item, some
9069 piece of every register in the expression is used by
9070 this insn, so remove any old death. */
9071
9072 if (GET_CODE (dest) == ZERO_EXTRACT
9073 || GET_CODE (dest) == STRICT_LOW_PART
9074 || (GET_CODE (dest) == SUBREG
9075 && (((GET_MODE_SIZE (GET_MODE (dest))
9076 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
9077 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
9078 + UNITS_PER_WORD - 1) / UNITS_PER_WORD))))
9079 {
9080 move_deaths (dest, from_cuid, to_insn, pnotes);
9081 return;
9082 }
9083
9084 /* If this is some other SUBREG, we know it replaces the entire
9085 value, so use that as the destination. */
9086 if (GET_CODE (dest) == SUBREG)
9087 dest = SUBREG_REG (dest);
9088
9089 /* If this is a MEM, adjust deaths of anything used in the address.
9090 For a REG (the only other possibility), the entire value is
9091 being replaced so the old value is not used in this insn. */
9092
9093 if (GET_CODE (dest) == MEM)
9094 move_deaths (XEXP (dest, 0), from_cuid, to_insn, pnotes);
9095 return;
9096 }
9097
9098 else if (GET_CODE (x) == CLOBBER)
9099 return;
9100
9101 len = GET_RTX_LENGTH (code);
9102 fmt = GET_RTX_FORMAT (code);
9103
9104 for (i = 0; i < len; i++)
9105 {
9106 if (fmt[i] == 'E')
9107 {
9108 register int j;
9109 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
9110 move_deaths (XVECEXP (x, i, j), from_cuid, to_insn, pnotes);
9111 }
9112 else if (fmt[i] == 'e')
9113 move_deaths (XEXP (x, i), from_cuid, to_insn, pnotes);
9114 }
9115 }
9116 \f
9117 /* Return 1 if X is the target of a bit-field assignment in BODY, the
9118 pattern of an insn. X must be a REG. */
9119
9120 static int
9121 reg_bitfield_target_p (x, body)
9122 rtx x;
9123 rtx body;
9124 {
9125 int i;
9126
9127 if (GET_CODE (body) == SET)
9128 {
9129 rtx dest = SET_DEST (body);
9130 rtx target;
9131 int regno, tregno, endregno, endtregno;
9132
9133 if (GET_CODE (dest) == ZERO_EXTRACT)
9134 target = XEXP (dest, 0);
9135 else if (GET_CODE (dest) == STRICT_LOW_PART)
9136 target = SUBREG_REG (XEXP (dest, 0));
9137 else
9138 return 0;
9139
9140 if (GET_CODE (target) == SUBREG)
9141 target = SUBREG_REG (target);
9142
9143 if (GET_CODE (target) != REG)
9144 return 0;
9145
9146 tregno = REGNO (target), regno = REGNO (x);
9147 if (tregno >= FIRST_PSEUDO_REGISTER || regno >= FIRST_PSEUDO_REGISTER)
9148 return target == x;
9149
9150 endtregno = tregno + HARD_REGNO_NREGS (tregno, GET_MODE (target));
9151 endregno = regno + HARD_REGNO_NREGS (regno, GET_MODE (x));
9152
9153 return endregno > tregno && regno < endtregno;
9154 }
9155
9156 else if (GET_CODE (body) == PARALLEL)
9157 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
9158 if (reg_bitfield_target_p (x, XVECEXP (body, 0, i)))
9159 return 1;
9160
9161 return 0;
9162 }
9163 \f
9164 /* Given a chain of REG_NOTES originally from FROM_INSN, try to place them
9165 as appropriate. I3 and I2 are the insns resulting from the combination
9166 of the insns including FROM_INSN (I2 may be zero).
9167
9168 ELIM_I2 and ELIM_I1 are either zero or registers that we know will
9169 not need REG_DEAD notes because they are being substituted for. This
9170 saves searching in the most common cases.
9171
9172 Each note in the list is either ignored or placed on some insns, depending
9173 on the type of note. */
9174
9175 static void
9176 distribute_notes (notes, from_insn, i3, i2, elim_i2, elim_i1)
9177 rtx notes;
9178 rtx from_insn;
9179 rtx i3, i2;
9180 rtx elim_i2, elim_i1;
9181 {
9182 rtx note, next_note;
9183 rtx tem;
9184
9185 for (note = notes; note; note = next_note)
9186 {
9187 rtx place = 0, place2 = 0;
9188
9189 /* If this NOTE references a pseudo register, ensure it references
9190 the latest copy of that register. */
9191 if (XEXP (note, 0) && GET_CODE (XEXP (note, 0)) == REG
9192 && REGNO (XEXP (note, 0)) >= FIRST_PSEUDO_REGISTER)
9193 XEXP (note, 0) = regno_reg_rtx[REGNO (XEXP (note, 0))];
9194
9195 next_note = XEXP (note, 1);
9196 switch (REG_NOTE_KIND (note))
9197 {
9198 case REG_UNUSED:
9199 /* If this register is set or clobbered in I3, put the note there
9200 unless there is one already. */
9201 if (reg_set_p (XEXP (note, 0), PATTERN (i3)))
9202 {
9203 if (! (GET_CODE (XEXP (note, 0)) == REG
9204 ? find_regno_note (i3, REG_UNUSED, REGNO (XEXP (note, 0)))
9205 : find_reg_note (i3, REG_UNUSED, XEXP (note, 0))))
9206 place = i3;
9207 }
9208 /* Otherwise, if this register is used by I3, then this register
9209 now dies here, so we must put a REG_DEAD note here unless there
9210 is one already. */
9211 else if (reg_referenced_p (XEXP (note, 0), PATTERN (i3))
9212 && ! (GET_CODE (XEXP (note, 0)) == REG
9213 ? find_regno_note (i3, REG_DEAD, REGNO (XEXP (note, 0)))
9214 : find_reg_note (i3, REG_DEAD, XEXP (note, 0))))
9215 {
9216 PUT_REG_NOTE_KIND (note, REG_DEAD);
9217 place = i3;
9218 }
9219 break;
9220
9221 case REG_EQUAL:
9222 case REG_EQUIV:
9223 case REG_NONNEG:
9224 /* These notes say something about results of an insn. We can
9225 only support them if they used to be on I3 in which case they
9226 remain on I3. Otherwise they are ignored.
9227
9228 If the note refers to an expression that is not a constant, we
9229 must also ignore the note since we cannot tell whether the
9230 equivalence is still true. It might be possible to do
9231 slightly better than this (we only have a problem if I2DEST
9232 or I1DEST is present in the expression), but it doesn't
9233 seem worth the trouble. */
9234
9235 if (from_insn == i3
9236 && (XEXP (note, 0) == 0 || CONSTANT_P (XEXP (note, 0))))
9237 place = i3;
9238 break;
9239
9240 case REG_INC:
9241 case REG_NO_CONFLICT:
9242 case REG_LABEL:
9243 /* These notes say something about how a register is used. They must
9244 be present on any use of the register in I2 or I3. */
9245 if (reg_mentioned_p (XEXP (note, 0), PATTERN (i3)))
9246 place = i3;
9247
9248 if (i2 && reg_mentioned_p (XEXP (note, 0), PATTERN (i2)))
9249 {
9250 if (place)
9251 place2 = i2;
9252 else
9253 place = i2;
9254 }
9255 break;
9256
9257 case REG_WAS_0:
9258 /* It is too much trouble to try to see if this note is still
9259 correct in all situations. It is better to simply delete it. */
9260 break;
9261
9262 case REG_RETVAL:
9263 /* If the insn previously containing this note still exists,
9264 put it back where it was. Otherwise move it to the previous
9265 insn. Adjust the corresponding REG_LIBCALL note. */
9266 if (GET_CODE (from_insn) != NOTE)
9267 place = from_insn;
9268 else
9269 {
9270 tem = find_reg_note (XEXP (note, 0), REG_LIBCALL, NULL_RTX);
9271 place = prev_real_insn (from_insn);
9272 if (tem && place)
9273 XEXP (tem, 0) = place;
9274 }
9275 break;
9276
9277 case REG_LIBCALL:
9278 /* This is handled similarly to REG_RETVAL. */
9279 if (GET_CODE (from_insn) != NOTE)
9280 place = from_insn;
9281 else
9282 {
9283 tem = find_reg_note (XEXP (note, 0), REG_RETVAL, NULL_RTX);
9284 place = next_real_insn (from_insn);
9285 if (tem && place)
9286 XEXP (tem, 0) = place;
9287 }
9288 break;
9289
9290 case REG_DEAD:
9291 /* If the register is used as an input in I3, it dies there.
9292 Similarly for I2, if it is non-zero and adjacent to I3.
9293
9294 If the register is not used as an input in either I3 or I2
9295 and it is not one of the registers we were supposed to eliminate,
9296 there are two possibilities. We might have a non-adjacent I2
9297 or we might have somehow eliminated an additional register
9298 from a computation. For example, we might have had A & B where
9299 we discover that B will always be zero. In this case we will
9300 eliminate the reference to A.
9301
9302 In both cases, we must search to see if we can find a previous
9303 use of A and put the death note there. */
9304
9305 if (reg_referenced_p (XEXP (note, 0), PATTERN (i3)))
9306 place = i3;
9307 else if (i2 != 0 && next_nonnote_insn (i2) == i3
9308 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
9309 place = i2;
9310
9311 if (XEXP (note, 0) == elim_i2 || XEXP (note, 0) == elim_i1)
9312 break;
9313
9314 /* If the register is used in both I2 and I3 and it dies in I3,
9315 we might have added another reference to it. If reg_n_refs
9316 was 2, bump it to 3. This has to be correct since the
9317 register must have been set somewhere. This is done
9318 because local-alloc.c treats 2 references as a
9319 special case. */
9320
9321 if (place == i3 && i2 != 0 && GET_CODE (XEXP (note, 0)) == REG
9322 && reg_n_refs[REGNO (XEXP (note, 0))]== 2
9323 && reg_referenced_p (XEXP (note, 0), PATTERN (i2)))
9324 reg_n_refs[REGNO (XEXP (note, 0))] = 3;
9325
9326 if (place == 0)
9327 for (tem = prev_nonnote_insn (i3);
9328 tem && (GET_CODE (tem) == INSN
9329 || GET_CODE (tem) == CALL_INSN);
9330 tem = prev_nonnote_insn (tem))
9331 {
9332 /* If the register is being set at TEM, see if that is all
9333 TEM is doing. If so, delete TEM. Otherwise, make this
9334 into a REG_UNUSED note instead. */
9335 if (reg_set_p (XEXP (note, 0), PATTERN (tem)))
9336 {
9337 rtx set = single_set (tem);
9338
9339 /* Verify that it was the set, and not a clobber that
9340 modified the register. */
9341
9342 if (set != 0 && ! side_effects_p (SET_SRC (set))
9343 && rtx_equal_p (XEXP (note, 0), SET_DEST (set)))
9344 {
9345 /* Move the notes and links of TEM elsewhere.
9346 This might delete other dead insns recursively.
9347 First set the pattern to something that won't use
9348 any register. */
9349
9350 PATTERN (tem) = pc_rtx;
9351
9352 distribute_notes (REG_NOTES (tem), tem, tem,
9353 NULL_RTX, NULL_RTX, NULL_RTX);
9354 distribute_links (LOG_LINKS (tem));
9355
9356 PUT_CODE (tem, NOTE);
9357 NOTE_LINE_NUMBER (tem) = NOTE_INSN_DELETED;
9358 NOTE_SOURCE_FILE (tem) = 0;
9359 }
9360 else
9361 {
9362 PUT_REG_NOTE_KIND (note, REG_UNUSED);
9363
9364 /* If there isn't already a REG_UNUSED note, put one
9365 here. */
9366 if (! find_regno_note (tem, REG_UNUSED,
9367 REGNO (XEXP (note, 0))))
9368 place = tem;
9369 break;
9370 }
9371 }
9372 else if (reg_referenced_p (XEXP (note, 0), PATTERN (tem)))
9373 {
9374 place = tem;
9375 break;
9376 }
9377 }
9378
9379 /* If the register is set or already dead at PLACE, we needn't do
9380 anything with this note if it is still a REG_DEAD note.
9381
9382 Note that we cannot use just `dead_or_set_p' here since we can
9383 convert an assignment to a register into a bit-field assignment.
9384 Therefore, we must also omit the note if the register is the
9385 target of a bitfield assignment. */
9386
9387 if (place && REG_NOTE_KIND (note) == REG_DEAD)
9388 {
9389 int regno = REGNO (XEXP (note, 0));
9390
9391 if (dead_or_set_p (place, XEXP (note, 0))
9392 || reg_bitfield_target_p (XEXP (note, 0), PATTERN (place)))
9393 {
9394 /* Unless the register previously died in PLACE, clear
9395 reg_last_death. [I no longer understand why this is
9396 being done.] */
9397 if (reg_last_death[regno] != place)
9398 reg_last_death[regno] = 0;
9399 place = 0;
9400 }
9401 else
9402 reg_last_death[regno] = place;
9403
9404 /* If this is a death note for a hard reg that is occupying
9405 multiple registers, ensure that we are still using all
9406 parts of the object. If we find a piece of the object
9407 that is unused, we must add a USE for that piece before
9408 PLACE and put the appropriate REG_DEAD note on it.
9409
9410 An alternative would be to put a REG_UNUSED for the pieces
9411 on the insn that set the register, but that can't be done if
9412 it is not in the same block. It is simpler, though less
9413 efficient, to add the USE insns. */
9414
9415 if (place && regno < FIRST_PSEUDO_REGISTER
9416 && HARD_REGNO_NREGS (regno, GET_MODE (XEXP (note, 0))) > 1)
9417 {
9418 int endregno
9419 = regno + HARD_REGNO_NREGS (regno,
9420 GET_MODE (XEXP (note, 0)));
9421 int all_used = 1;
9422 int i;
9423
9424 for (i = regno; i < endregno; i++)
9425 if (! refers_to_regno_p (i, i + 1, PATTERN (place), 0))
9426 {
9427 rtx piece = gen_rtx (REG, word_mode, i);
9428 rtx p;
9429
9430 /* See if we already placed a USE note for this
9431 register in front of PLACE. */
9432 for (p = place;
9433 GET_CODE (PREV_INSN (p)) == INSN
9434 && GET_CODE (PATTERN (PREV_INSN (p))) == USE;
9435 p = PREV_INSN (p))
9436 if (rtx_equal_p (piece,
9437 XEXP (PATTERN (PREV_INSN (p)), 0)))
9438 {
9439 p = 0;
9440 break;
9441 }
9442
9443 if (p)
9444 {
9445 rtx use_insn
9446 = emit_insn_before (gen_rtx (USE, VOIDmode,
9447 piece),
9448 p);
9449 REG_NOTES (use_insn)
9450 = gen_rtx (EXPR_LIST, REG_DEAD, piece,
9451 REG_NOTES (use_insn));
9452 }
9453
9454 all_used = 0;
9455 }
9456
9457 if (! all_used)
9458 {
9459 /* Put only REG_DEAD notes for pieces that are
9460 still used and that are not already dead or set. */
9461
9462 for (i = regno; i < endregno; i++)
9463 {
9464 rtx piece = gen_rtx (REG, word_mode, i);
9465
9466 if (reg_referenced_p (piece, PATTERN (place))
9467 && ! dead_or_set_p (place, piece)
9468 && ! reg_bitfield_target_p (piece,
9469 PATTERN (place)))
9470 REG_NOTES (place) = gen_rtx (EXPR_LIST, REG_DEAD,
9471 piece,
9472 REG_NOTES (place));
9473 }
9474
9475 place = 0;
9476 }
9477 }
9478 }
9479 break;
9480
9481 default:
9482 /* Any other notes should not be present at this point in the
9483 compilation. */
9484 abort ();
9485 }
9486
9487 if (place)
9488 {
9489 XEXP (note, 1) = REG_NOTES (place);
9490 REG_NOTES (place) = note;
9491 }
9492 else if ((REG_NOTE_KIND (note) == REG_DEAD
9493 || REG_NOTE_KIND (note) == REG_UNUSED)
9494 && GET_CODE (XEXP (note, 0)) == REG)
9495 reg_n_deaths[REGNO (XEXP (note, 0))]--;
9496
9497 if (place2)
9498 {
9499 if ((REG_NOTE_KIND (note) == REG_DEAD
9500 || REG_NOTE_KIND (note) == REG_UNUSED)
9501 && GET_CODE (XEXP (note, 0)) == REG)
9502 reg_n_deaths[REGNO (XEXP (note, 0))]++;
9503
9504 REG_NOTES (place2) = gen_rtx (GET_CODE (note), REG_NOTE_KIND (note),
9505 XEXP (note, 0), REG_NOTES (place2));
9506 }
9507 }
9508 }
9509 \f
9510 /* Similarly to above, distribute the LOG_LINKS that used to be present on
9511 I3, I2, and I1 to new locations. This is also called in one case to
9512 add a link pointing at I3 when I3's destination is changed. */
9513
9514 static void
9515 distribute_links (links)
9516 rtx links;
9517 {
9518 rtx link, next_link;
9519
9520 for (link = links; link; link = next_link)
9521 {
9522 rtx place = 0;
9523 rtx insn;
9524 rtx set, reg;
9525
9526 next_link = XEXP (link, 1);
9527
9528 /* If the insn that this link points to is a NOTE or isn't a single
9529 set, ignore it. In the latter case, it isn't clear what we
9530 can do other than ignore the link, since we can't tell which
9531 register it was for. Such links wouldn't be used by combine
9532 anyway.
9533
9534 It is not possible for the destination of the target of the link to
9535 have been changed by combine. The only way that could happen is if
9536 we were to replace I3, I2, and I1 by I3 and I2; but in that case the
9537 destination of I2 also remains unchanged.
9538
9539 if (GET_CODE (XEXP (link, 0)) == NOTE
9540 || (set = single_set (XEXP (link, 0))) == 0)
9541 continue;
9542
9543 reg = SET_DEST (set);
9544 while (GET_CODE (reg) == SUBREG || GET_CODE (reg) == ZERO_EXTRACT
9545 || GET_CODE (reg) == SIGN_EXTRACT
9546 || GET_CODE (reg) == STRICT_LOW_PART)
9547 reg = XEXP (reg, 0);
9548
9549 /* A LOG_LINK is defined as being placed on the first insn that uses
9550 a register and points to the insn that sets the register. Start
9551 searching at the next insn after the target of the link and stop
9552 when we reach a set of the register or the end of the basic block.
9553
9554 Note that this correctly handles the link that used to point from
9555 I3 to I2. Also note that not much searching is typically done here
9556 since most links don't point very far away. */
9557
9558 for (insn = NEXT_INSN (XEXP (link, 0));
9559 (insn && GET_CODE (insn) != CODE_LABEL
9560 && GET_CODE (PREV_INSN (insn)) != JUMP_INSN);
9561 insn = NEXT_INSN (insn))
9562 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i'
9563 && reg_overlap_mentioned_p (reg, PATTERN (insn)))
9564 {
9565 if (reg_referenced_p (reg, PATTERN (insn)))
9566 place = insn;
9567 break;
9568 }
9569
9570 /* If we found a place to put the link, place it there unless there
9571 is already a link to the same insn as LINK at that point. */
9572
9573 if (place)
9574 {
9575 rtx link2;
9576
9577 for (link2 = LOG_LINKS (place); link2; link2 = XEXP (link2, 1))
9578 if (XEXP (link2, 0) == XEXP (link, 0))
9579 break;
9580
9581 if (link2 == 0)
9582 {
9583 XEXP (link, 1) = LOG_LINKS (place);
9584 LOG_LINKS (place) = link;
9585 }
9586 }
9587 }
9588 }
9589 \f
9590 void
9591 dump_combine_stats (file)
9592 FILE *file;
9593 {
9594 fprintf
9595 (file,
9596 ";; Combiner statistics: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n\n",
9597 combine_attempts, combine_merges, combine_extras, combine_successes);
9598 }
9599
9600 void
9601 dump_combine_total_stats (file)
9602 FILE *file;
9603 {
9604 fprintf
9605 (file,
9606 "\n;; Combiner totals: %d attempts, %d substitutions (%d requiring new space),\n;; %d successes.\n",
9607 total_attempts, total_merges, total_extras, total_successes);
9608 }